bookmarks_movement: refactor bookmark movement for force-pushrebase

Summary:
Refactor control of movement of non-scratch bookmarks through force-pushrebase
or bookmark-only pushrebase.  These are equivalent to ordinary pushes, and so
can use the same code path for moving the bookmarks.

This has the side-effect of enabling some patterns that were previously not
possible, like populating git mappings with a force-pushrebase.
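In essence, every non-scratch bookmark move now reduces to the same three-way dispatch on the bookmark's (old, new) targets, no matter which push variant requested it. A minimal, self-contained sketch of that pattern (ChangesetId here is a toy stand-in for Mononoke's type, and BookmarkOp stands in for bookmarks_movement's CreateBookmarkOp, UpdateBookmarkOp and DeleteBookmarkOp; this is an illustration, not the real API):

#[derive(Clone, Copy, Debug)]
struct ChangesetId(u64);

#[derive(Debug)]
enum BookmarkOp {
    // Stand-ins for bookmarks_movement::{CreateBookmarkOp, UpdateBookmarkOp, DeleteBookmarkOp}.
    Create { new: ChangesetId },
    Update { old: ChangesetId, new: ChangesetId },
    Delete { old: ChangesetId },
    Noop,
}

// A non-scratch bookmark move is classified purely by whether the bookmark
// has an old and/or a new target; force-pushrebase and bookmark-only
// pushrebase go through the same classification as a plain push.
fn classify(old: Option<ChangesetId>, new: Option<ChangesetId>) -> BookmarkOp {
    match (old, new) {
        (None, Some(new)) => BookmarkOp::Create { new },
        (Some(old), Some(new)) => BookmarkOp::Update { old, new },
        (Some(old), None) => BookmarkOp::Delete { old },
        (None, None) => BookmarkOp::Noop,
    }
}

fn main() {
    // A force-pushrebase that moves an existing bookmark classifies the
    // same way an ordinary push would.
    let op = classify(Some(ChangesetId(1)), Some(ChangesetId(2)));
    println!("{:?}", op); // Update { old: ChangesetId(1), new: ChangesetId(2) }
}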

Reviewed By: ikostia

Differential Revision: D22844828

fbshipit-source-id: 4ef71fa4cef69cc2f1d124837631e8304644ca06
Mark Thomas 2020-08-14 02:26:47 -07:00 committed by Facebook GitHub Bot
parent 279c3dcd8f
commit c59c2979d2
4 changed files with 178 additions and 436 deletions

View File

@@ -13,25 +13,17 @@ use crate::{
use anyhow::{anyhow, format_err, Context, Error, Result};
use blobrepo::BlobRepo;
use blobrepo_hg::BlobRepoHg;
use blobstore::Loadable;
use bonsai_git_mapping::{
extract_git_sha1_from_bonsai_extra, BonsaiGitMapping, BonsaiGitMappingEntry,
};
use bookmarks::{
BookmarkName, BookmarkTransaction, BookmarkTransactionHook, BookmarkUpdateReason, BundleReplay,
};
use bookmarks::{BookmarkName, BookmarkUpdateReason, BundleReplay};
use bookmarks_movement::{BookmarkUpdatePolicy, BookmarkUpdateTargets};
use context::CoreContext;
use futures::{
compat::Future01CompatExt,
future::try_join,
stream::{FuturesOrdered, FuturesUnordered, TryStreamExt},
FutureExt, StreamExt, TryFutureExt,
stream::{FuturesUnordered, TryStreamExt},
};
use futures_stats::TimedFutureExt;
use git_mapping_pushrebase_hook::GitMappingPushrebaseHook;
use globalrev_pushrebase_hook::GlobalrevPushrebaseHook;
use maplit::hashset;
use mercurial_bundle_replay_data::BundleReplayData;
use metaconfig_types::{BookmarkAttrs, InfinitepushParams, PushParams, PushrebaseParams};
use mononoke_types::{BonsaiChangeset, ChangesetId, RawBundle2Id};
@@ -43,7 +35,6 @@ use scuba_ext::ScubaSampleBuilderExt;
use slog::{debug, warn};
use stats::prelude::*;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use tunables::tunables;
use crate::rate_limits::enforce_commit_rate_limits;
@@ -52,10 +43,6 @@ use crate::response::{
UnbundlePushRebaseResponse, UnbundlePushResponse, UnbundleResponse,
};
enum BookmarkPush<T: Copy> {
PlainPush(PlainBookmarkPush<T>),
}
define_stats! {
prefix = "mononoke.unbundle.processed";
push: dynamic_timeseries("{}.push", (reponame: String); Rate, Sum),
@@ -319,118 +306,6 @@ async fn save_to_reverse_filler_queue(
Ok(())
}
/// Return ancestors of `start` which have git mapping extras but do not
/// have a git mapping entry set in the db.
async fn find_ancestors_without_git_mapping(
ctx: &CoreContext,
repo: &BlobRepo,
start: HashSet<ChangesetId>,
) -> Result<HashMap<ChangesetId, BonsaiChangeset>, Error> {
let mut res = HashMap::new();
let mut visited = HashSet::new();
let mut queue = FuturesOrdered::new();
let mut get_new_queue_entry = |cs_id: ChangesetId| {
if visited.insert(cs_id) {
Some(async move {
let bcs_fut = cs_id
.load(ctx.clone(), &repo.get_blobstore())
.map_err(Error::from);
let mapping_fut = repo.bonsai_git_mapping().get(ctx, cs_id.into());
let (bcs, git_mapping) = try_join(bcs_fut, mapping_fut).await?;
Result::<_, Error>::Ok((cs_id, bcs, git_mapping))
})
} else {
None
}
};
for cs_id in start {
if let Some(entry) = get_new_queue_entry(cs_id) {
queue.push(entry)
}
}
while let Some(entry) = queue.next().await {
let (cs_id, bcs, git_mapping) = entry?;
if !git_mapping.is_empty() {
continue;
}
// Don't traverse past commits that do not have git sha1 set.
// This is done deliberately to avoid retraversing these commits over
// and over.
if extract_git_sha1_from_bonsai_extra(bcs.extra())?.is_none() {
continue;
}
for p in bcs.parents() {
if let Some(entry) = get_new_queue_entry(p) {
queue.push(entry)
}
}
res.insert(cs_id, bcs);
}
Ok(res)
}
fn upload_git_mapping_bookmark_txn_hook(
bonsai_git_mapping: Arc<dyn BonsaiGitMapping>,
uploaded_bonsais: HashMap<ChangesetId, BonsaiChangeset>,
ancestors_no_git_mapping: HashMap<ChangesetId, BonsaiChangeset>,
) -> BookmarkTransactionHook {
Arc::new(move |ctx, sql_txn| {
let uploaded_bonsais_len = uploaded_bonsais.len();
let ancestors_no_git_mapping_len = ancestors_no_git_mapping.len();
let mut mapping_entries = vec![];
for (bcs_id, bonsai) in uploaded_bonsais
.iter()
.chain(ancestors_no_git_mapping.iter())
{
let maybe_git_sha1 = match extract_git_sha1_from_bonsai_extra(bonsai.extra()) {
Ok(r) => r,
Err(e) => return async move { Err(e.into()) }.boxed(),
};
if let Some(git_sha1) = maybe_git_sha1 {
let entry = BonsaiGitMappingEntry {
git_sha1,
bcs_id: *bcs_id,
};
mapping_entries.push(entry);
}
}
// Normally we expect git_mapping_new_changesets == git_mapping_inserting
// and git_mapping_ancestors_no_mapping == 0.
ctx.scuba()
.clone()
.add("git_mapping_new_changesets", uploaded_bonsais_len)
.add(
"git_mapping_ancestors_no_mapping",
ancestors_no_git_mapping_len,
)
.add("git_mapping_inserting", mapping_entries.len())
.log_with_msg("Inserting git mapping", None);
let bonsai_git_mapping = bonsai_git_mapping.clone();
async move {
let sql_txn = bonsai_git_mapping
.bulk_add_git_mapping_in_transaction(&ctx, &mapping_entries, sql_txn)
.map_err(Error::from)
.await?;
ctx.scuba()
.clone()
.log_with_msg("Inserted git mapping", None);
Ok(sql_txn)
}
.boxed()
})
}
async fn run_infinitepush(
ctx: &CoreContext,
repo: &BlobRepo,
@@ -600,6 +475,7 @@ async fn run_pushrebase(
repo,
&pushrebase_params,
lca_hint,
uploaded_bonsais,
plain_push,
&maybe_hg_replay_data,
bookmark_attrs,
@@ -655,44 +531,81 @@ async fn run_bookmark_only_pushrebase(
} = action;
let part_id = bookmark_push.part_id;
let reason = BookmarkUpdateReason::Pushrebase;
// Since this is a bookmark-only pushrebase, there are no changeset timestamps
let bundle_replay_data = maybe_raw_bundle2_id.map(BundleReplayData::new);
let bundle_replay_data = bundle_replay_data
.as_ref()
.map(|data| data as &dyn BundleReplay);
let bookmark_push = check_plain_bookmark_push_allowed(
ctx,
repo,
bookmark_attrs,
non_fast_forward_policy,
infinitepush_params,
bookmark_push,
lca_hint,
)
.await?;
let mut txn_hook = None;
if pushrebase_params.populate_git_mapping {
if let Some(new) = bookmark_push.new {
let ancestors_no_git_mapping =
find_ancestors_without_git_mapping(&ctx, &repo, hashset! {new}).await?;
txn_hook = Some(upload_git_mapping_bookmark_txn_hook(
repo.bonsai_git_mapping().clone(),
HashMap::new(),
ancestors_no_git_mapping,
));
match (bookmark_push.old, bookmark_push.new) {
(None, Some(new_target)) => {
bookmarks_movement::CreateBookmarkOp::new(
&bookmark_push.name,
new_target,
BookmarkUpdateReason::Pushrebase,
)
.only_if_public()
.with_bundle_replay_data(bundle_replay_data)
.run(
ctx,
repo,
infinitepush_params,
pushrebase_params,
bookmark_attrs,
)
.await
.context("Failed to create bookmark")?;
}
(Some(old_target), Some(new_target)) => {
bookmarks_movement::UpdateBookmarkOp::new(
&bookmark_push.name,
BookmarkUpdateTargets {
old: old_target,
new: new_target,
},
if non_fast_forward_policy == NonFastForwardPolicy::Allowed {
BookmarkUpdatePolicy::AnyPermittedByConfig
} else {
BookmarkUpdatePolicy::FastForwardOnly
},
BookmarkUpdateReason::Pushrebase,
)
.only_if_public()
.with_bundle_replay_data(bundle_replay_data)
.run(
ctx,
repo,
lca_hint,
infinitepush_params,
pushrebase_params,
bookmark_attrs,
)
.await
.context(
if non_fast_forward_policy == NonFastForwardPolicy::Allowed {
"Failed to move bookmark"
} else {
"Failed to fast-forward bookmark (try --force?)"
},
)?;
}
(Some(old_target), None) => {
bookmarks_movement::DeleteBookmarkOp::new(
&bookmark_push.name,
old_target,
BookmarkUpdateReason::Pushrebase,
)
.only_if_public()
.with_bundle_replay_data(bundle_replay_data)
.run(ctx, repo, infinitepush_params, bookmark_attrs)
.await
.context("Failed to delete bookmark")?;
}
(None, None) => {}
}
let maybe_bookmark_push = Some(BookmarkPush::PlainPush(bookmark_push));
save_bookmark_pushes_to_db(
ctx,
repo,
reason,
&bundle_replay_data,
vec![maybe_bookmark_push],
txn_hook,
)
.await?;
Ok(UnbundleBookmarkOnlyPushRebaseResponse {
bookmark_push_part_id: part_id,
})
@@ -779,130 +692,92 @@ async fn force_pushrebase(
repo: &BlobRepo,
pushrebase_params: &PushrebaseParams,
lca_hint: &dyn LeastCommonAncestorsHint,
uploaded_bonsais: HashSet<BonsaiChangeset>,
bookmark_push: PlainBookmarkPush<ChangesetId>,
maybe_hg_replay_data: &Option<pushrebase::HgReplayData>,
bookmark_attrs: &BookmarkAttrs,
infinitepush_params: &InfinitepushParams,
) -> Result<(ChangesetId, Vec<pushrebase::PushrebaseChangesetPair>), Error> {
if pushrebase_params.assign_globalrevs {
return Err(Error::msg(
"force_pushrebase is not allowed when assigning Globalrevs",
));
}
if pushrebase_params.populate_git_mapping {
return Err(Error::msg(
"force_pushrebase is not allowed as it would skip populating Git mappings",
));
let new_target = bookmark_push
.new
.ok_or_else(|| anyhow!("new changeset is required for force pushrebase"))?;
let mut new_changeset_ids = Vec::new();
let mut new_changesets = HashMap::new();
for bcs in uploaded_bonsais {
let cs_id = bcs.get_changeset_id();
new_changeset_ids.push(cs_id);
new_changesets.insert(cs_id, bcs);
}
let maybe_target_bcs = bookmark_push.new.clone();
let target_bcs = maybe_target_bcs
.ok_or_else(|| Error::msg("new changeset is required for force pushrebase"))?;
let reason = BookmarkUpdateReason::Pushrebase;
let bundle_replay_data = if let Some(hg_replay_data) = &maybe_hg_replay_data {
Some(hg_replay_data.to_bundle_replay_data(None).await?)
} else {
None
};
let bundle_replay_data = bundle_replay_data
.as_ref()
.map(|data| data as &dyn BundleReplay);
let maybe_bookmark_push = check_plain_bookmark_push_allowed(
match bookmark_push.old {
None => {
bookmarks_movement::CreateBookmarkOp::new(
&bookmark_push.name,
new_target,
BookmarkUpdateReason::Pushrebase,
)
.only_if_public()
.with_new_changesets(new_changesets)
.with_bundle_replay_data(bundle_replay_data)
.run(
ctx,
repo,
infinitepush_params,
pushrebase_params,
bookmark_attrs,
)
.await
.context("Failed to create bookmark")?;
}
Some(old_target) => {
bookmarks_movement::UpdateBookmarkOp::new(
&bookmark_push.name,
BookmarkUpdateTargets {
old: old_target,
new: new_target,
},
BookmarkUpdatePolicy::AnyPermittedByConfig,
BookmarkUpdateReason::Pushrebase,
)
.only_if_public()
.with_new_changesets(new_changesets)
.with_bundle_replay_data(bundle_replay_data)
.run(
ctx,
repo,
lca_hint,
infinitepush_params,
pushrebase_params,
bookmark_attrs,
)
.await
.context("Failed to move bookmark")?;
}
}
log_commits_to_scribe(
ctx,
repo,
bookmark_attrs,
NonFastForwardPolicy::Allowed,
infinitepush_params,
bookmark_push,
lca_hint,
Some(&bookmark_push.name),
new_changeset_ids,
pushrebase_params.commit_scribe_category.clone(),
)
.await
.map(|bp| Some(BookmarkPush::PlainPush(bp)))?;
save_bookmark_pushes_to_db(
ctx,
repo,
reason,
&bundle_replay_data,
vec![maybe_bookmark_push],
None,
)
.await?;
.await;
// Note that this push did not do any actual rebases, so we do not
// need to provide any actual mapping; an empty Vec will do
Ok((target_bcs, Vec::new()))
}
/// Save several bookmark pushes to the database
async fn save_bookmark_pushes_to_db<'a>(
ctx: &'a CoreContext,
repo: &'a BlobRepo,
reason: BookmarkUpdateReason,
bundle_replay_data: &'a Option<BundleReplayData>,
bonsai_bookmark_pushes: Vec<Option<BookmarkPush<ChangesetId>>>,
txn_hook: Option<BookmarkTransactionHook>,
) -> Result<(), Error> {
if bonsai_bookmark_pushes.is_empty() {
// If we have no bookmarks, then don't create an empty transaction. This is a
// temporary workaround for the fact that committing an empty transaction
// evicts the cache.
return Ok(());
}
let mut txn = repo.update_bookmark_transaction(ctx.clone());
for bp in bonsai_bookmark_pushes.into_iter().flatten() {
add_bookmark_to_transaction(&mut txn, bp, reason, bundle_replay_data)?;
}
let ok = if let Some(txn_hook) = txn_hook {
txn.commit_with_hook(txn_hook).await?
} else {
txn.commit().await?
};
if ok {
Ok(())
} else {
Err(format_err!("Bookmark transaction failed"))
}
}
/// Run sanity checks for plain (non-infinitepush) bookmark pushes
async fn check_plain_bookmark_push_allowed(
ctx: &CoreContext,
repo: &BlobRepo,
bookmark_attrs: &BookmarkAttrs,
non_fast_forward_policy: NonFastForwardPolicy,
infinitepush_params: &InfinitepushParams,
bp: PlainBookmarkPush<ChangesetId>,
lca_hint: &dyn LeastCommonAncestorsHint,
) -> Result<PlainBookmarkPush<ChangesetId>, Error> {
check_plain_bookmark_move_preconditions(
&ctx,
&bp.name,
"push",
&bookmark_attrs,
&infinitepush_params,
)?;
let fastforward_only_bookmark = bookmark_attrs.is_fast_forward_only(&bp.name);
// Only allow non-fast-forward moves if the pushvar is set and the bookmark does not
// explicitly block them.
let block_non_fast_forward =
fastforward_only_bookmark || non_fast_forward_policy == NonFastForwardPolicy::Disallowed;
match (bp.old, bp.new) {
(old, Some(new)) if block_non_fast_forward => {
check_is_ancestor_opt(ctx, repo, lca_hint, old, new)
.await
.map(|_| bp)
}
(Some(_old), None) if fastforward_only_bookmark => Err(format_err!(
"Deletion of bookmark {} is forbidden.",
bp.name
)),
_ => Ok(bp),
}
Ok((new_target, Vec::new()))
}
fn check_plain_bookmark_move_preconditions(
@@ -935,51 +810,6 @@ fn check_plain_bookmark_move_preconditions(
Ok(())
}
fn add_bookmark_to_transaction(
txn: &mut Box<dyn BookmarkTransaction>,
bookmark_push: BookmarkPush<ChangesetId>,
reason: BookmarkUpdateReason,
bundle_replay_data: &Option<BundleReplayData>,
) -> Result<()> {
match bookmark_push {
BookmarkPush::PlainPush(PlainBookmarkPush { new, old, name, .. }) => {
let bundle_replay = bundle_replay_data
.as_ref()
.map(|data| data as &dyn BundleReplay);
match (new, old) {
(Some(new), Some(old)) => txn.update(&name, new, old, reason, bundle_replay),
(Some(new), None) => txn.create(&name, new, reason, bundle_replay),
(None, Some(old)) => txn.delete(&name, old, reason, bundle_replay),
_ => Ok(()),
}
}
}
}
async fn check_is_ancestor_opt(
ctx: &CoreContext,
repo: &BlobRepo,
lca_hint: &dyn LeastCommonAncestorsHint,
old: Option<ChangesetId>,
new: ChangesetId,
) -> Result<(), Error> {
if let Some(old) = old {
if old != new {
let is_ancestor = lca_hint
.is_ancestor(ctx, &repo.get_changeset_fetcher(), old, new)
.await?;
if !is_ancestor {
return Err(format_err!(
"Non fastforward bookmark move from {} to {}",
old,
new
));
}
}
}
Ok(())
}
async fn log_commits_to_scribe(
ctx: &CoreContext,
repo: &BlobRepo,
@@ -1058,97 +888,3 @@ pub fn get_pushrebase_hooks(
hooks
}
#[cfg(test)]
mod tests {
use super::*;
use bonsai_git_mapping::{CONVERT_REVISION_EXTRA, HGGIT_SOURCE_EXTRA};
use fbinit::FacebookInit;
use fixtures::linear;
use maplit::hashset;
use mononoke_types::hash::GitSha1;
use mononoke_types_mocks::hash::{ONES_GIT_SHA1, TWOS_GIT_SHA1};
use tests_utils::CreateCommitContext;
#[fbinit::compat_test]
async fn test_find_ancestors_without_git_mapping_simple(fb: FacebookInit) -> Result<(), Error> {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
fn add_git_extras(context: CreateCommitContext, hash: GitSha1) -> CreateCommitContext {
context
.add_extra(
CONVERT_REVISION_EXTRA.to_string(),
format!("{}", hash).as_bytes().to_vec(),
)
.add_extra(HGGIT_SOURCE_EXTRA.to_string(), b"git".to_vec())
};
let parent = add_git_extras(CreateCommitContext::new_root(&ctx, &repo), ONES_GIT_SHA1)
.commit()
.await?;
let res = find_ancestors_without_git_mapping(&ctx, &repo, hashset! {parent}).await?;
assert_eq!(res.keys().collect::<HashSet<_>>(), hashset![&parent]);
let child = add_git_extras(
CreateCommitContext::new(&ctx, &repo, vec![parent]),
TWOS_GIT_SHA1,
)
.commit()
.await?;
let res = find_ancestors_without_git_mapping(&ctx, &repo, hashset! {child}).await?;
assert_eq!(
res.keys().collect::<HashSet<_>>(),
hashset![&parent, &child]
);
repo.bonsai_git_mapping()
.bulk_add(
&ctx,
&[BonsaiGitMappingEntry {
git_sha1: ONES_GIT_SHA1,
bcs_id: parent,
}],
)
.await?;
let res = find_ancestors_without_git_mapping(&ctx, &repo, hashset! {child}).await?;
assert_eq!(res.keys().collect::<HashSet<_>>(), hashset![&child]);
repo.bonsai_git_mapping()
.bulk_add(
&ctx,
&[BonsaiGitMappingEntry {
git_sha1: TWOS_GIT_SHA1,
bcs_id: child,
}],
)
.await?;
let res = find_ancestors_without_git_mapping(&ctx, &repo, hashset! {child}).await?;
assert_eq!(res.keys().collect::<HashSet<_>>(), hashset![]);
Ok(())
}
#[fbinit::compat_test]
async fn test_find_ancestors_without_git_mapping_no_extras(
fb: FacebookInit,
) -> Result<(), Error> {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let parent = CreateCommitContext::new_root(&ctx, &repo).commit().await?;
let res = find_ancestors_without_git_mapping(&ctx, &repo, hashset! {}).await?;
assert_eq!(res.keys().collect::<HashSet<_>>(), hashset![]);
let child = CreateCommitContext::new(&ctx, &repo, vec![parent])
.commit()
.await?;
let res = find_ancestors_without_git_mapping(&ctx, &repo, hashset! {child}).await?;
assert_eq!(res.keys().collect::<HashSet<_>>(), hashset![]);
Ok(())
}
}

View File

@@ -187,15 +187,25 @@ Invalid push, with pushrebase enabled
remote: While doing a bookmark-only pushrebase
remote:
remote: Root cause:
remote: [push] Only Infinitepush bookmarks are allowed to match pattern ^(infinitepush1|infinitepush2)/.+$
remote: Invalid public bookmark: infinitepush2/456 (only scratch bookmarks may match pattern ^(infinitepush1|infinitepush2)/.+$)
remote:
remote: Caused by:
remote: [push] Only Infinitepush bookmarks are allowed to match pattern ^(infinitepush1|infinitepush2)/.+$
remote: Failed to create bookmark
remote: Caused by:
remote: Invalid public bookmark: infinitepush2/456 (only scratch bookmarks may match pattern ^(infinitepush1|infinitepush2)/.+$)
remote:
remote: Debug context:
remote: Error {
remote: context: "While doing a bookmark-only pushrebase",
remote: source: "[push] Only Infinitepush bookmarks are allowed to match pattern ^(infinitepush1|infinitepush2)/.+$",
remote: source: Error {
remote: context: "Failed to create bookmark",
remote: source: InvalidPublicBookmark {
remote: bookmark: BookmarkName {
remote: bookmark: "infinitepush2/456",
remote: },
remote: pattern: "^(infinitepush1|infinitepush2)/.+$",
remote: },
remote: },
remote: }
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
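The restructured output above comes from anyhow-style error layering: the new code path returns a typed bookmarks_movement error, the caller wraps it with .context("Failed to create bookmark"), and the unbundle driver adds the outer "While doing a bookmark-only pushrebase" context. A minimal sketch of that layering, assuming only the anyhow crate (create_bookmark is a hypothetical stand-in; the error text is copied from the transcript):

use anyhow::{anyhow, Context, Result};

// Hypothetical stand-in for the failing bookmarks_movement call site.
fn create_bookmark() -> Result<()> {
    Err(anyhow!(
        "Invalid public bookmark: infinitepush2/456 (only scratch bookmarks \
         may match pattern ^(infinitepush1|infinitepush2)/.+$)"
    ))
}

fn main() {
    let res = create_bookmark()
        .context("Failed to create bookmark")
        .context("While doing a bookmark-only pushrebase");
    if let Err(e) = res {
        // Debug-printing an anyhow::Error shows the outermost context first,
        // then each "Caused by:" layer down to the root cause, which is the
        // nesting the expected test output checks for.
        println!("{:?}", e);
    }
}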

View File

@@ -29,32 +29,7 @@ Push another commit
$ hg ci -Aqm commit2 --extra hg-git-rename-source=git --extra convert_revision=2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b
$ hgmn push -q -r . --to master_bookmark
Check that a force pushrebase is not allowed
$ touch file3
$ hg ci -Aqm commit3
$ hgmn push -r . --to master_bookmark --force
pushing rev * to destination ssh://user@dummy/repo bookmark master_bookmark (glob)
searching for changes
remote: Command failed
remote: Error:
remote: While doing a force pushrebase
remote:
remote: Root cause:
remote: force_pushrebase is not allowed as it would skip populating Git mappings
remote:
remote: Caused by:
remote: force_pushrebase is not allowed as it would skip populating Git mappings
remote:
remote: Debug context:
remote: Error {
remote: context: "While doing a force pushrebase",
remote: source: "force_pushrebase is not allowed as it would skip populating Git mappings",
remote: }
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
$ hg update -qC .^
Push another commit
Push another commit that conflicts
$ touch file3
$ hg ci -Aqm commit3 --extra hg-git-rename-source=git --extra convert_revision=2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b
$ hgmn push -r . --to master_bookmark
@@ -72,7 +47,23 @@ Push another commit
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]
Force-push a commit
$ hg prev 2
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
[2388bc] commit1
$ touch file4
$ hg ci -Aqm commit4 --extra hg-git-rename-source=git --extra convert_revision=4d4d4d4d4d4d4d4d4d4d4d4d4d4d4d4d4d4d4d4d
$ hgmn push -r . --to master_bookmark --force
pushing rev 1b5b68e81ae5 to destination ssh://user@dummy/repo bookmark master_bookmark
searching for changes
adding changesets
adding manifests
adding file changes
added 0 changesets with 0 changes to 0 files
updating bookmark master_bookmark
Check that mappings are populated
$ get_bonsai_git_mapping
3CEE0520D115C5973E538AFDEB6985C1DF2CFC2C8E58CE465B855D73993EFBA1|1A1A1A1A1A1A1A1A1A1A1A1A1A1A1A1A1A1A1A1A
E37E13B17B5C2B37965B2A9591A64CB2C44A68FD10F1362A595DA8C6E4EEFA41|2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B2B
32C125F232EF84EAD04050D1B0245B26EFFD4A8FF40292A54401A0AE40B1A63F|4D4D4D4D4D4D4D4D4D4D4D4D4D4D4D4D4D4D4D4D

View File

@@ -46,15 +46,20 @@ Check that a force pushrebase is not allowed
remote: While doing a force pushrebase
remote:
remote: Root cause:
remote: force_pushrebase is not allowed when assigning Globalrevs
remote: Pushrebase required when assigning globalrevs
remote:
remote: Caused by:
remote: force_pushrebase is not allowed when assigning Globalrevs
remote: Failed to move bookmark
remote: Caused by:
remote: Pushrebase required when assigning globalrevs
remote:
remote: Debug context:
remote: Error {
remote: context: "While doing a force pushrebase",
remote: source: "force_pushrebase is not allowed when assigning Globalrevs",
remote: source: Error {
remote: context: "Failed to move bookmark",
remote: source: PushrebaseRequiredGlobalrevs,
remote: },
remote: }
abort: stream ended unexpectedly (got 0 bytes, expected 4)
[255]