getbundle: include mutations in getbundle response for draft commits

Summary:
When the client pulls draft commits, include mutation information in the bundle
response.

Reviewed By: farnz

Differential Revision: D20871339

fbshipit-source-id: a89a50426fbd8f9ec08bbe43f16fd0e4e3424e0b
This commit is contained in:
Mark Thomas 2020-05-13 10:58:18 -07:00 committed by Facebook GitHub Bot
parent 5774dbde9d
commit 14dfeecda8
7 changed files with 184 additions and 9 deletions

View File

@ -13,6 +13,7 @@ mercurial_types = { path = "../types" }
mononoke_types = { path = "../../mononoke_types" }
revisionstore_types = { path = "../../../scm/lib/revisionstore/types" }
scuba_ext = { path = "../../common/scuba_ext" }
types = { path = "../../../scm/lib/types" }
vlqencoding = { path = "../../../scm/lib/vlqencoding" }
async_compression = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
bytes_ext = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }

View File

@ -10,11 +10,12 @@
use std::io::Cursor;
use anyhow::{bail, Error, Result};
use byteorder::ReadBytesExt;
use byteorder::{ReadBytesExt, WriteBytesExt};
use bytes_old::{Bytes, BytesMut};
use mercurial_mutation::HgMutationEntry;
use tokio_io::codec::Decoder;
use vlqencoding::VLQDecode;
use types::mutation::MutationEntry;
use vlqencoding::{VLQDecode, VLQEncode};
use crate::utils::BytesExt;
@ -69,6 +70,8 @@ impl InfinitepushMutationUnpacker {
}
}
const MUTATION_PART_VERSION: u8 = 1;
/// Decoder for infinitepush mutation entries
///
/// This decoder decodes all entries in one operation, so needs to wait for eof.
@ -84,7 +87,7 @@ impl Decoder for InfinitepushMutationUnpacker {
let mut entries = Vec::new();
let mut cursor = Cursor::new(buf);
let version = cursor.read_u8()?;
if version != 1 {
if version != MUTATION_PART_VERSION {
bail!("Unsupported infinitepush mutation part format: {}", version);
}
let count = cursor.read_vlq()?;
@ -98,3 +101,14 @@ impl Decoder for InfinitepushMutationUnpacker {
Ok(Some(entries))
}
}
/// Pack mutation entries into the payload of an infinitepush mutation part.
///
/// Wire format: one version byte (`MUTATION_PART_VERSION`), the entry count
/// as a VLQ-encoded integer, then each entry serialized in sequence using the
/// client `MutationEntry` serializer.
pub fn infinitepush_mutation_packer(entries: Vec<HgMutationEntry>) -> Result<Bytes> {
    // Pre-size the buffer using the client library's estimate of a typical
    // serialized entry, to avoid repeated reallocations.
    let estimated_size = entries.len() * types::mutation::DEFAULT_ENTRY_SIZE;
    let mut payload = Vec::with_capacity(estimated_size);
    payload.write_u8(MUTATION_PART_VERSION)?;
    payload.write_vlq(entries.len())?;
    for hg_entry in entries {
        // Convert the server-side entry to the client wire type before
        // serializing it.
        let client_entry: MutationEntry = hg_entry.into();
        client_entry.serialize(&mut payload)?;
    }
    Ok(payload.into())
}

View File

@ -8,6 +8,7 @@
use super::changegroup::{packer::CgPacker, unpacker::CgVersion};
use super::changegroup::{CgDeltaChunk, Part, Section};
use super::chunk::Chunk;
use super::infinitepush::infinitepush_mutation_packer;
use super::obsmarkers::packer::obsmarkers_packer_stream;
use super::obsmarkers::MetadataEntry;
use super::wirepack;
@ -24,6 +25,7 @@ use futures::stream::{iter_ok, once};
use futures::{Future, Stream};
use futures_ext::{BoxFuture, BoxStream, StreamExt};
use futures_stats::Timed;
use mercurial_mutation::HgMutationEntry;
use mercurial_types::{
Delta, HgBlobNode, HgChangesetId, HgFileNodeId, HgNodeHash, HgPhase, MPath, RepoPath, RevFlags,
NULL_HASH,
@ -434,3 +436,13 @@ where
builder.set_data_generated(stream);
Ok(builder)
}
/// Build an advisory `B2xInfinitepushMutation` bundle2 part whose data is
/// produced by packing the mutation entries that `entries` resolves to.
///
/// The part is advisory, so clients that do not understand mutation data can
/// safely ignore it.
pub fn infinitepush_mutation_part<F>(entries: F) -> Result<PartEncodeBuilder>
where
    F: Future<Item = Vec<HgMutationEntry>, Error = Error> + Send + 'static,
{
    // Defer packing until the entries future resolves; nothing runs here yet.
    let packed = entries.and_then(infinitepush_mutation_packer);
    let mut part = PartEncodeBuilder::advisory(PartHeaderType::B2xInfinitepushMutation)?;
    part.set_data_future(packed);
    Ok(part)
}

View File

@ -14,6 +14,7 @@ use mercurial_types::{HgChangesetId, HgNodeHash};
use mononoke_types::DateTime;
use smallvec::SmallVec;
use types::mutation::MutationEntry;
use types::HgId;
/// Record of a Mercurial mutation operation (e.g. amend or rebase).
#[derive(Clone, Debug, PartialEq)]
@ -180,6 +181,41 @@ impl TryFrom<MutationEntry> for HgMutationEntry {
}
}
// Conversion to client mutation entry
//
// Implemented as `From` rather than a hand-written `Into`: the standard
// library's blanket `impl<T, U: From<T>> Into<U> for T` then provides the
// `Into` impl for free, so existing `entry.into()` call sites keep working.
// This also mirrors the `TryFrom<MutationEntry>` impl for the reverse
// direction above.
impl From<HgMutationEntry> for MutationEntry {
    fn from(entry: HgMutationEntry) -> Self {
        MutationEntry {
            // Server-side entries store changeset ids; the client type uses
            // raw node hashes (HgId).
            succ: entry.successor.into_nodehash().into(),
            preds: entry
                .predecessors
                .into_iter()
                .map(HgChangesetId::into_nodehash)
                .map(HgId::from)
                .collect(),
            split: entry
                .split
                .into_iter()
                .map(HgChangesetId::into_nodehash)
                .map(HgId::from)
                .collect(),
            op: entry.op,
            user: entry.user,
            // The client entry carries the timestamp as separate seconds and
            // timezone-offset fields.
            time: entry.time.timestamp_secs(),
            tz: entry.time.tz_offset_secs(),
            // Extras travel as raw byte buffers on the wire.
            extra: entry
                .extra
                .into_iter()
                .map(|(key, value)| {
                    (
                        key.into_bytes().into_boxed_slice(),
                        value.into_bytes().into_boxed_slice(),
                    )
                })
                .collect(),
        }
    }
}
pub(crate) struct HgMutationEntrySet {
// The loaded entries, indexed by successor.
pub(crate) entries: HashMap<HgChangesetId, HgMutationEntry>,

View File

@ -23,6 +23,7 @@ phases = { path = "../../phases" }
reachabilityindex = { path = "../../reachabilityindex" }
repo_blobstore = { path = "../../blobrepo/repo_blobstore" }
revset = { path = "../../revset" }
tunables = { path = "../../tunables" }
cloned = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
futures_ext = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
stats = { git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }

View File

@ -56,6 +56,7 @@ use std::{
iter::FromIterator,
sync::Arc,
};
use tunables::tunables;
mod errors;
@ -125,9 +126,13 @@ pub async fn create_getbundle_response(
drafts_in_bundles_policy == DraftsInBundlesPolicy::WithTreesAndFiles;
let (maybe_manifests, maybe_filenodes): (Option<_>, Option<_>) =
if should_include_trees_and_files {
let (manifests, filenodes) =
get_manifests_and_filenodes(&ctx, &blobrepo, draft_commits, &lfs_params)
.await?;
let (manifests, filenodes) = get_manifests_and_filenodes(
&ctx,
&blobrepo,
draft_commits.clone(),
&lfs_params,
)
.await?;
report_manifests_and_filenodes(&ctx, reponame, manifests.len(), filenodes.iter());
(Some(manifests), Some(filenodes))
} else {
@ -151,6 +156,22 @@ pub async fn create_getbundle_response(
parts.push(tp_part);
}
if !draft_commits.is_empty() && tunables().get_mutation_generate_for_draft() {
let mutations_fut = {
cloned!(ctx);
let hg_mutation_store = blobrepo.hg_mutation_store().clone();
async move {
hg_mutation_store
.all_predecessors(&ctx, draft_commits)
.await
}
.boxed()
.compat()
};
let mut_part = parts::infinitepush_mutation_part(mutations_fut)?;
parts.push(mut_part);
}
}
// Phases part has to be after the changegroup part.

View File

@ -123,9 +123,99 @@ Pull the amended stack to the other repo
o 0: df4f53cec30a public 'base' master_bookmark
Check mutation metadata. NOTE: Mutation metadata hasn't been provided by the server.
Check mutation metadata.
$ hg debugmutation -r "draft()"
* a8543df036f16781d7f37d40d4f177056fc816a5
* a8543df036f16781d7f37d40d4f177056fc816a5 amend by test at 1970-01-01T00:00:00 from:
9b5a540873ab29fbced488597365cf798918a356
* a24671c3bce21e759d256fe69dedeb04d51c9895
* a24671c3bce21e759d256fe69dedeb04d51c9895 rebase by test at 1970-01-01T00:00:00 from:
f99c737e05b52a0c08f95a8736581813ff58d8de
Amend the stack again.
$ cd $TESTTMP/repo-push
$ hg prev
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
[a8543d] A2
$ echo 3 > A
$ hg amend -qm A3 --rebase
$ hg next
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
[647398] B1
$ hgmn push ssh://user@dummy/repo -r . --bundle-store --allow-anon
pushing to ssh://user@dummy/repo
searching for changes
Pull the amended stack to the other repo.
$ cd $TESTTMP/repo-pull
$ hgmn pull -r 647398
pulling from ssh://user@dummy/repo
searching for changes
adding changesets
adding manifests
adding file changes
added 2 changesets with 0 changes to 0 files
$ tglogm
o 4: 6473983c899c 'B1'
|
o 3: 5326b832c149 'A3'
|
| x 2: a24671c3bce2 'B1' (Rewritten using rebase into 6473983c899c)
| |
| x 1: a8543df036f1 'A2' (Rewritten using amend into 5326b832c149)
|/
o 0: df4f53cec30a 'base' master_bookmark
Do some more complicated mutations
$ cd $TESTTMP/repo-push
$ hg prev
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
[5326b8] A3
$ echo 1 > C
$ hg commit -Aqm C1
$ echo 2 > C
$ hg amend -qm C2
$ echo 3 > C
$ hg amend -qm C3
$ hg fold --from ".^"
2 changesets folded
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
rebasing 6473983c899c "B1"
$ hg next
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
[853e5b] B1
$ tglogm
@ 11: 853e5ba9bd35 'B1'
|
o 10: cdf849fe4126 'A3'
|
o 0: df4f53cec30a 'base' master_bookmark
$ hgmn push ssh://user@dummy/repo -r . --bundle-store --allow-anon
pushing to ssh://user@dummy/repo
searching for changes
Pull the modified stack to the other repo.
$ cd $TESTTMP/repo-pull
$ hgmn pull -r 853e5ba9bd35
pulling from ssh://user@dummy/repo
searching for changes
adding changesets
adding manifests
adding file changes
added 2 changesets with 0 changes to 0 files
$ tglogm
o 6: 853e5ba9bd35 'B1'
|
o 5: cdf849fe4126 'A3'
|
| x 4: 6473983c899c 'B1' (Rewritten using rebase into 853e5ba9bd35)
| |
| x 3: 5326b832c149 'A3' (Rewritten using fold into cdf849fe4126)
|/
| x 2: a24671c3bce2 'B1' (Rewritten using rebase into 6473983c899c)
| |
| x 1: a8543df036f1 'A2' (Rewritten using amend into 5326b832c149)
|/
o 0: df4f53cec30a 'base' master_bookmark