testtool: add mononoke-testtool drawdag

Summary:
Add a command that lets us inject a DAG of commits into a test repo.

This is based on the `drawdag` implementation used for unit tests. We
extend the specification format so that `CreateCommitContext` methods
can be called from the integration test code.
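
For example, a test can now inject a small graph and customize it with
comment directives (a sketch; the syntax matches the integration tests
added in this diff):

  $ mononoke_testtool drawdag -R repo <<'EOF'
  > A-B-C
  > # modify: C path/to/file "new content"
  > # delete: C A
  > # bookmark: C main
  > EOF

The modify and delete directives are applied through `CreateCommitContext`;
bookmarks are set after the commits have been created.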

Reviewed By: yancouto

Differential Revision: D33854929

fbshipit-source-id: 8c903adfc2caa87d24ad89744a4415619aa5ebad
Mark Juggurnauth-Thomas 2022-02-02 04:24:00 -08:00 committed by Facebook GitHub Bot
parent 2f338593de
commit e3b24a3725
12 changed files with 727 additions and 94 deletions

View File

@@ -383,6 +383,7 @@ members = [
"time_window_counter",
"tools/admin",
"tools/example",
"tools/testtool",
"tunables",
"tunables/tunables-derive",
"unbundle_replay",

View File

@@ -22,6 +22,7 @@ derived_data_remote = { version = "0.1.0", path = "../../derived_data/remote" }
environment = { version = "0.1.0", path = "../environment" }
facet = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
heck = "0.3.1"
megarepo_config = { version = "0.1.0", path = "../../megarepo_api/megarepo_config" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
metaconfig_parser = { version = "0.1.0", path = "../../metaconfig/parser" }

View File

@@ -349,6 +349,12 @@ function mononoke_newadmin {
--mononoke-config-path "$TESTTMP"/mononoke-config "$@"
}
function mononoke_testtool {
GLOG_minloglevel=5 "$MONONOKE_TESTTOOL" \
"${COMMON_ARGS[@]}" \
--mononoke-config-path "$TESTTMP"/mononoke-config "$@"
}
function mononoke_admin_source_target {
local source_repo_id=$1
shift

View File

@@ -8,15 +8,19 @@
$ . "${TEST_FIXTURES}/library.sh"
setup configuration
$ default_setup_blobimport "blob_sqlite"
hg repo
o C [draft;rev=2;26805aba1e60]
o B [draft;rev=1;112478962961]
o A [draft;rev=0;426bada5c675]
$
blobimporting
$ setup_common_config "blob_sqlite"
$ mononoke_testtool drawdag -R repo <<'EOF'
> Z-A
> \ \
> B-C
> # modify: C file "test content \xaa end"
> # delete: C Z
> EOF
*] Reloading redacted config from configerator (glob)
A=e26d4ad219658cadec76d086a28621bc612762d0499ae79ba093c5ec15efe5fc
B=ecf6ed0f7b5c6d1871a3b7b0bc78b04e2cc036a67f96890f2834b728355e5fc5
C=f9d662054cf779809fd1a55314f760dc7577eac63f1057162c1b8e56aa0f02a1
Z=e5c07a6110ea10bbcc576b969f936f91fc0a69df0b9bcf1fdfacbf3add06f07a
Check we can upload and fetch an arbitrary blob.
$ echo value > "$TESTTMP/value"
@@ -37,28 +41,35 @@ key.
No blob exists for somekey
Examine some of the data
$ mononoke_newadmin blobstore -R repo fetch changeset.blake2.9feb8ddd3e8eddcfa3a4913b57df7842bedf84b8ea3b7b3fcb14c6424aa81fec
Key: changeset.blake2.9feb8ddd3e8eddcfa3a4913b57df7842bedf84b8ea3b7b3fcb14c6424aa81fec
$ mononoke_newadmin blobstore -R repo fetch changeset.blake2.f9d662054cf779809fd1a55314f760dc7577eac63f1057162c1b8e56aa0f02a1
Key: changeset.blake2.f9d662054cf779809fd1a55314f760dc7577eac63f1057162c1b8e56aa0f02a1
Ctime: * (glob)
Size: 69
Size: 194
BonsaiChangeset {
inner: BonsaiChangesetMut {
parents: [],
author: "test",
parents: [
ChangesetId(
Blake2(e26d4ad219658cadec76d086a28621bc612762d0499ae79ba093c5ec15efe5fc),
),
ChangesetId(
Blake2(ecf6ed0f7b5c6d1871a3b7b0bc78b04e2cc036a67f96890f2834b728355e5fc5),
),
],
author: "author",
author_date: DateTime(
1970-01-01T00:00:00+00:00,
),
committer: None,
committer_date: None,
message: "A",
message: "C",
extra: {},
file_changes: {
MPath("A"): Change(
MPath("C"): Change(
TrackedFileChange {
inner: BasicFileChange {
content_id: ContentId(
Blake2(eb56488e97bb4cf5eb17f05357b80108a4a71f6c3bab52dfcaec07161d105ec9),
Blake2(896ad5879a5df0403bfc93fc96507ad9c93b31b11f3d0fa05445da7918241e5d),
),
file_type: Regular,
size: 1,
@@ -66,17 +77,32 @@ Examine some of the data
copy_from: None,
},
),
MPath("Z"): Deletion,
MPath("file"): Change(
TrackedFileChange {
inner: BasicFileChange {
content_id: ContentId(
Blake2(6e07d9ecc025ae219c0ed4dead08757d8962ca7532daf5d89484cadc5aae99d8),
),
file_type: Regular,
size: 18,
},
copy_from: None,
},
),
},
is_snapshot: false,
},
id: ChangesetId(
Blake2(9feb8ddd3e8eddcfa3a4913b57df7842bedf84b8ea3b7b3fcb14c6424aa81fec),
Blake2(f9d662054cf779809fd1a55314f760dc7577eac63f1057162c1b8e56aa0f02a1),
),
}
$ mononoke_newadmin blobstore --storage-name blobstore fetch repo0000.content.blake2.eb56488e97bb4cf5eb17f05357b80108a4a71f6c3bab52dfcaec07161d105ec9
Key: repo0000.content.blake2.eb56488e97bb4cf5eb17f05357b80108a4a71f6c3bab52dfcaec07161d105ec9
$ mononoke_newadmin blobstore --storage-name blobstore fetch repo0000.content.blake2.6e07d9ecc025ae219c0ed4dead08757d8962ca7532daf5d89484cadc5aae99d8
Key: repo0000.content.blake2.6e07d9ecc025ae219c0ed4dead08757d8962ca7532daf5d89484cadc5aae99d8
Ctime: * (glob)
Size: 4
Size: 21
00000000: 41 A
00000000: 7465737420636f6e74656e7420aa2065 test content . e
00000010: 6e64 nd

View File

@@ -8,30 +8,30 @@
$ . "${TEST_FIXTURES}/library.sh"
setup configuration
$ default_setup_blobimport "blob_sqlite"
hg repo
o C [draft;rev=2;26805aba1e60]
o B [draft;rev=1;112478962961]
o A [draft;rev=0;426bada5c675]
$
blobimporting
$ mononoke_newadmin fetch -R repo -i 9feb8ddd3e8eddcfa3a4913b57df7842bedf84b8ea3b7b3fcb14c6424aa81fec
$ setup_common_config "blob_sqlite"
$ mononoke_testtool drawdag -R repo --derive-all <<'EOF'
> A-B-C
> # bookmark: C main
> EOF
*] Reloading redacted config from configerator (glob)
BonsaiChangesetId: 9feb8ddd3e8eddcfa3a4913b57df7842bedf84b8ea3b7b3fcb14c6424aa81fec
Author: test
Message: A
A=aa53d24251ff3f54b1b2c29ae02826701b2abeb0079f1bb13b8434b54cd87675
B=f8c75e41a0c4d29281df765f39de47bca1dcadfdc55ada4ccc2f6df567201658
C=e32a1e342cdb1e38e88466b4c1a01ae9f410024017aa21dc0a1c5da6b3963bf2
$ mononoke_newadmin fetch -R repo -i e32a1e342cdb1e38e88466b4c1a01ae9f410024017aa21dc0a1c5da6b3963bf2
*] Reloading redacted config from configerator (glob)
BonsaiChangesetId: e32a1e342cdb1e38e88466b4c1a01ae9f410024017aa21dc0a1c5da6b3963bf2
Author: author
Message: C
FileChanges:
ADDED/MODIFIED: A eb56488e97bb4cf5eb17f05357b80108a4a71f6c3bab52dfcaec07161d105ec9
ADDED/MODIFIED: C 896ad5879a5df0403bfc93fc96507ad9c93b31b11f3d0fa05445da7918241e5d
$ mononoke_newadmin fetch -R repo -i 9feb8ddd3e8eddcfa3a4913b57df7842bedf84b8ea3b7b3fcb14c6424aa81fec --json | jq -S .
$ mononoke_newadmin fetch -R repo -i aa53d24251ff3f54b1b2c29ae02826701b2abeb0079f1bb13b8434b54cd87675 --json | jq -S .
*] Reloading redacted config from configerator (glob)
{
"author": "test",
"author": "author",
"author_date": "1970-01-01T00:00:00+00:00",
"changeset_id": "9feb8ddd3e8eddcfa3a4913b57df7842bedf84b8ea3b7b3fcb14c6424aa81fec",
"changeset_id": "aa53d24251ff3f54b1b2c29ae02826701b2abeb0079f1bb13b8434b54cd87675",
"committer": null,
"committer_date": null,
"extra": {},
@@ -51,9 +51,8 @@ setup configuration
"parents": []
}
$ mononoke_newadmin fetch -R repo -i 9feb8ddd3e8eddcfa3a4913b57df7842bedf84b8ea3b7b3fcb14c6424aa81fec -p ""
$ mononoke_newadmin fetch -R repo -B main -p ""
*] Reloading redacted config from configerator (glob)
A 005d992c5dcf32993668f7cede29d296c494a5d9 regular
B 35e7525ce3a48913275d7061dd9a867ffef1e34d regular
C a2e456504a5e61f763f1a0b36a6c247c7541b2b3 regular

View File

@@ -0,0 +1,81 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License found in the LICENSE file in the root
# directory of this source tree.
$ . "${TEST_FIXTURES}/library.sh"
$ setup_common_config "blob_files"
Create commits using an ASCII-art DAG
The DAG can be horizontal (using - to connect, going left to right), or
vertical (using | to connect, going bottom to top).
By default, each commit will add a single file whose name and content
match the node name in the graph. You can disable this with
--no-default-files.
You can also customize details about commits with special comments.
The tool will print the commit hash of each commit.
$ mononoke_testtool drawdag -R repo <<'EOF'
> I-J-K P-Q-R U-V-W
> / \ \ /
> A-B-C-D---H---L-M-N--O----S-T-X-Y-Z
> \ / /
> E-F-G----------/
> # modify: A path/to/file "add additional content"
> # delete: Z path/to/file
> # bookmark: M main
> # bookmark: Z zzzz
> EOF
*] Reloading redacted config from configerator (glob)
A=1c68a7b9eb5c92651e6e08a2d15a811f539f4051a23bc16d8f3bec7b69cc2fb2
B=f2133d7c1ed18900e4c2ce57aff0d32af25ba0d0de9128fa79056aea366cd46f
C=0750cdede19930eeaaf9a5653156112e7b1ae382010a00936c6319bf92351ebb
D=971c152b1c5d22763033d2fc23ac40c1e785a3c50df28e37b69bf188f2d949b5
E=b2ee1306f4b80638b11ee9893952807e4298f9c686c32eef55c09ef9753e39ef
F=4e33d245d2e3fe6456fd4e9cb0c14f87f2a77385b6008c47b88d136b3105685d
G=08d06276b2c3b348c308468818a389519d19201d5e0da2f5ba28743e3b79cc5a
H=4a561d26ebda75c180adf8f41ff2776b1952b272b93baf7d2e6f4d323f02d295
I=ddf42dcda9d9d37ad7e1f02a48a6887b85c6632d7df4e915c67a1ebf340b4a06
J=87369e5f4de3a683e1f06ba55b46a126984be570238977edf5e9253dce536206
K=247baaa2f5cbd31861f6005be1a3e33ef9fb4bfd2a8b3c5221152267c663b396
L=4fd4c4ba074425ed1bc68c1058e71098fe8a9a4379d4c4010a0637fb48e76655
M=9b1f47161fffde19746e86064907d9c9ce67e8460df6354ed2a249e2988d2547
N=e740c4212c22b02e2e0537285aed6e3e18b6cfbf64930a6f9d08b6575268f453
O=f8fb2925cf0ed23caf28b596547a39dfc66e39fea9c74f506d5323f5c6c00189
P=93fb02ab5bc2a5ce4fb47586a818e589bff242d4f38acf171132a936e3b483b7
Q=457604f9b00fe70d661363457439230ed7388ac75c8888dc71b95d5374d87553
R=fded11d5c4c4f8bd7285e7c7a8c84feb8a6a46b264cd44c0ef4a7121d45bc813
S=df61a91ffdfa241438f1f65ca7303258b8a04f66bf1ab4088015bfc85a463b6e
T=6bc1e9e3124f50afeab93167816e1c062a85e965739c3a67e02526555b0ca357
U=191f1f26c7a5a974a1f453e4126046930a253305eaf24aed0b5d07ef0afc3d2a
V=4720f0b263a03f20d2559b4fb29a518b311f57401faf86eaf8658a565efd534c
W=72706786c3e15ccc27917c68e0d5be053295585213f11748bec9ac1440d421e5
X=8d0a4b7a721c73fc712685487738bb7f1f3d0ccfaecbefbe3f0fdb2b7be78f94
Y=fe4d606906cf246e9dba2875cf11e7a0c20402c85f9a1c5064fbabf51048be85
Z=6b84580e28920fc0350d124319034b4f56a60c75625258601673c3011ada959f
The graph can be extended with more commits. The node names don't
need to match the previous graph (although it's probably a good idea).
$ mononoke_testtool drawdag -R repo <<'EOF'
> XX # modify: XX path/to/file "more additional content"
> / \ # bookmark: XX xxxx
> D1 W2
> | |
> | W1
> D0 | # exists: D0 971c152b1c5d22763033d2fc23ac40c1e785a3c50df28e37b69bf188f2d949b5
> W0 # exists: W0 72706786c3e15ccc27917c68e0d5be053295585213f11748bec9ac1440d421e5
> EOF
*] Reloading redacted config from configerator (glob)
D0=971c152b1c5d22763033d2fc23ac40c1e785a3c50df28e37b69bf188f2d949b5
D1=5d656cad8da7be07bf90202f578bd9855688da10bed44fd2b3d7f62e241abe75
W0=72706786c3e15ccc27917c68e0d5be053295585213f11748bec9ac1440d421e5
W1=59132264e74b167e7429fc2dab693113fb14fb2a07a94a1cd2243697b06f8aee
W2=0dadf010323f9f62459caba47668b50d92e1a0c05856c4c3adb6c042558c91d0
XX=c680ff4353433b1e77b110541fdf492f9847d95b6805dc8880072709caefd461

View File

@@ -5,15 +5,76 @@
* GNU General Public License version 2.
*/
use std::collections::BTreeMap;
use std::collections::{BTreeMap, BTreeSet};
use anyhow::Result;
use anyhow::{anyhow, Result};
use blobrepo::BlobRepo;
use context::CoreContext;
use mononoke_types::ChangesetId;
use crate::CreateCommitContext;
pub type ChangeFn = dyn FnOnce(CreateCommitContext) -> CreateCommitContext + Send + Sync;
pub async fn extend_from_dag_with_changes<'a>(
ctx: &'a CoreContext,
repo: &'a BlobRepo,
dag: &'a str,
mut changes: BTreeMap<String, Box<ChangeFn>>,
existing: BTreeMap<String, ChangesetId>,
default_files: bool,
) -> Result<(
BTreeMap<String, ChangesetId>,
BTreeMap<String, BTreeSet<String>>,
)> {
let mut committed: BTreeMap<String, ChangesetId> = BTreeMap::new();
let dag = drawdag::parse(dag);
for (name, id) in existing {
if !dag.contains_key(&name) {
return Err(anyhow!("graph does not contain {}", name));
}
committed.insert(name, id);
}
while committed.len() < dag.len() {
let mut made_progress = false;
for (name, parents) in dag.iter() {
if committed.contains_key(name) {
// This node was already committed.
continue;
}
if parents.iter().any(|parent| !committed.contains_key(parent)) {
// This node still has uncommitted parents.
continue;
}
let parent_ids = parents
.iter()
.map(|parent| committed[parent].clone())
.collect();
let mut create_commit =
CreateCommitContext::new(ctx, repo, parent_ids).set_message(name);
if default_files {
create_commit = create_commit.add_file(name.as_str(), name.as_str());
}
if let Some(change) = changes.remove(name.as_str()) {
create_commit = change(create_commit);
}
let new_id = create_commit.commit().await?;
committed.insert(name.to_string(), new_id);
made_progress = true;
}
if !made_progress {
return Err(anyhow!("graph contains cycles"));
}
}
Ok((committed, dag))
}
/// Create commits from an ASCII DAG.
///
/// Creates a set of commits that correspond to an ASCII DAG, with
@@ -58,43 +119,11 @@ pub async fn create_from_dag_with_changes<'a>(
ctx: &'a CoreContext,
repo: &'a BlobRepo,
dag: &'a str,
mut changes: BTreeMap<&'a str, Box<dyn FnMut(CreateCommitContext) -> CreateCommitContext>>,
changes: BTreeMap<String, Box<ChangeFn>>,
) -> Result<BTreeMap<String, ChangesetId>> {
let mut committed: BTreeMap<String, ChangesetId> = BTreeMap::new();
let dag = drawdag::parse(dag);
while committed.len() < dag.len() {
let mut made_progress = false;
for (name, parents) in dag.iter() {
if committed.contains_key(name) {
// This node was already committed.
continue;
}
if parents.iter().any(|parent| !committed.contains_key(parent)) {
// This node still has uncommitted parents.
continue;
}
let parent_ids = parents
.iter()
.map(|parent| committed[parent].clone())
.collect();
let mut create_commit = CreateCommitContext::new(ctx, repo, parent_ids)
.set_message(name)
.add_file(name.as_str(), name);
if let Some(change) = changes.get_mut(name.as_str()) {
create_commit = change(create_commit);
}
let new_id = create_commit.commit().await?;
committed.insert(name.to_string(), new_id);
made_progress = true;
}
assert!(made_progress, "graph contains cycles");
}
Ok(committed)
let (commits, _dag) =
extend_from_dag_with_changes(ctx, repo, dag, changes, BTreeMap::new(), true).await?;
Ok(commits)
}
/// Create commits from an ASCII DAG.
@@ -148,12 +177,10 @@ pub async fn create_from_dag(
macro_rules! __drawdag_changes {
( $( $key:expr => | $c:ident | $body:expr ),* $( , )? ) => {
{
type ChangeFn =
dyn FnMut($crate::CreateCommitContext) -> $crate::CreateCommitContext;
let mut changes: std::collections::BTreeMap<&str, Box<ChangeFn>> =
let mut changes: std::collections::BTreeMap<String, Box<$crate::drawdag::ChangeFn>> =
std::collections::BTreeMap::new();
$(
changes.insert($key, Box::new(|$c: $crate::CreateCommitContext| $body));
changes.insert(String::from($key), Box::new(|$c: $crate::CreateCommitContext| $body));
)*
changes
}
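// Illustrative only: with the owned-key form above, a hypothetical call site
//
//     __drawdag_changes! {
//         "B" => |c| c.add_file("extra", "extra content"),
//     }
//
// produces a `BTreeMap<String, Box<drawdag::ChangeFn>>`, the same type that
// `extend_from_dag_with_changes` accepts for its `changes` argument.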

View File

@@ -171,7 +171,7 @@ impl<'a> CreateCommitContext<'a> {
self
}
pub fn add_file(mut self, path: impl TryInto<MPath>, content: impl Into<String>) -> Self {
pub fn add_file(mut self, path: impl TryInto<MPath>, content: impl Into<Vec<u8>>) -> Self {
self.files.insert(
path.try_into().ok().expect("Invalid path"),
CreateFileContext::FromHelper(content.into(), FileType::Regular, None),
@@ -179,7 +179,7 @@ impl<'a> CreateCommitContext<'a> {
self
}
pub fn add_files<P: TryInto<MPath>, C: Into<String>, I: IntoIterator<Item = (P, C)>>(
pub fn add_files<P: TryInto<MPath>, C: Into<Vec<u8>>, I: IntoIterator<Item = (P, C)>>(
mut self,
path_contents: I,
) -> Self {
@@ -206,7 +206,7 @@ impl<'a> CreateCommitContext<'a> {
pub fn add_file_with_type(
mut self,
path: impl TryInto<MPath>,
content: impl Into<String>,
content: impl Into<Vec<u8>>,
t: FileType,
) -> Self {
self.files.insert(
@@ -219,7 +219,7 @@ impl<'a> CreateCommitContext<'a> {
pub fn add_file_with_copy_info(
mut self,
path: impl TryInto<MPath>,
content: impl Into<String>,
content: impl Into<Vec<u8>>,
(parent, parent_path): (impl Into<CommitIdentifier>, impl TryInto<MPath>),
) -> Self {
let copy_info = (
@@ -315,7 +315,7 @@ impl<'a> CreateCommitContext<'a> {
}
enum CreateFileContext {
FromHelper(String, FileType, Option<(MPath, CommitIdentifier)>),
FromHelper(Vec<u8>, FileType, Option<(MPath, CommitIdentifier)>),
FromFileChange(FileChange),
Deleted,
}
@@ -329,7 +329,7 @@ impl CreateFileContext {
) -> Result<FileChange, Error> {
let file_change = match self {
Self::FromHelper(content, file_type, copy_info) => {
let content = Bytes::copy_from_slice(content.as_bytes());
let content = Bytes::copy_from_slice(content.as_ref());
let meta = filestore::store(
repo.blobstore(),

View File

@@ -26,7 +26,6 @@ facet = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
filestore = { version = "0.1.0", path = "../../filestore" }
git_types = { version = "0.1.0", path = "../../git/git_types" }
heck = "0.3.1"
manifest = { version = "0.1.0", path = "../../manifest" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../../metaconfig/types" }

View File

@@ -0,0 +1,60 @@
# @generated by autocargo
[package]
name = "testtool"
version = "0.1.0"
authors = ["Facebook"]
edition = "2021"
license = "GPLv2+"
[dependencies]
anyhow = "1.0.51"
blame = { version = "0.1.0", path = "../../derived_data/blame" }
blobrepo = { version = "0.1.0", path = "../../blobrepo" }
blobstore = { version = "0.1.0", path = "../../blobstore" }
blobstore_factory = { version = "0.1.0", path = "../../blobstore/factory" }
bonsai_hg_mapping = { version = "0.1.0", path = "../../bonsai_hg_mapping" }
bookmarks = { version = "0.1.0", path = "../../bookmarks" }
cached_config = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
changeset_info = { version = "0.1.0", path = "../../derived_data/changeset_info" }
clap = { version = "3.0.9", features = ["derive", "regex", "unicode", "wrap_help"] }
cmdlib_displaying = { version = "0.1.0", path = "../../cmdlib/displaying" }
cmdlib_scrubbing = { version = "0.1.0", path = "../../cmdlib/scrubbing" }
context = { version = "0.1.0", path = "../../server/context" }
deleted_files_manifest = { version = "0.1.0", path = "../../derived_data/deleted_files_manifest" }
derived_data_filenodes = { version = "0.1.0", path = "../../derived_data/filenodes" }
derived_data_manager = { version = "0.1.0", path = "../../derived_data/manager" }
ephemeral_blobstore = { version = "0.1.0", path = "../../blobstore/ephemeral_blobstore" }
facet = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
fastlog = { version = "0.1.0", path = "../../derived_data/fastlog" }
fbinit = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "main" }
filestore = { version = "0.1.0", path = "../../filestore" }
fsnodes = { version = "0.1.0", path = "../../derived_data/fsnodes" }
futures = { version = "0.3.13", features = ["async-await", "compat"] }
git_types = { version = "0.1.0", path = "../../git/git_types" }
manifest = { version = "0.1.0", path = "../../manifest" }
mercurial_derived_data = { version = "0.1.0", path = "../../derived_data/mercurial_derived_data" }
mercurial_types = { version = "0.1.0", path = "../../mercurial/types" }
metaconfig_types = { version = "0.1.0", path = "../../metaconfig/types" }
mononoke_app = { version = "0.1.0", path = "../../cmdlib/mononoke_app" }
mononoke_types = { version = "0.1.0", path = "../../mononoke_types" }
repo_blobstore = { version = "0.1.0", path = "../../blobrepo/repo_blobstore" }
repo_derived_data = { version = "0.1.0", path = "../../repo_attributes/repo_derived_data" }
repo_identity = { version = "0.1.0", path = "../../repo_attributes/repo_identity" }
skeleton_manifest = { version = "0.1.0", path = "../../derived_data/skeleton_manifest" }
tests_utils = { version = "0.1.0", path = "../../tests/utils" }
tokio = { version = "1.15", features = ["full", "test-util", "tracing"] }
topo_sort = { version = "0.1.0", path = "../../common/topo_sort" }
unodes = { version = "0.1.0", path = "../../derived_data/unodes" }
[patch.crates-io]
daemonize = { git = "https://github.com/krallin/daemonize", rev = "f7be28efa1b4a70e43bb37b5f4ff4d664992edca" }
lru-disk-cache = { git = "https://github.com/mozilla/sccache", rev = "033ebaae69beeb0ac04e8c35d6ff1103487bd9a3" }
prost = { git = "https://github.com/gabrielrussoc/prost", branch = "protoc-runtime" }
prost-derive = { git = "https://github.com/gabrielrussoc/prost", branch = "protoc-runtime" }
prost-types = { git = "https://github.com/gabrielrussoc/prost", branch = "protoc-runtime" }
quickcheck = { git = "https://github.com/jakoschiko/quickcheck", rev = "6ecdf5bb4b0132ce66670b4d46453aa022ea892c" }
reqwest = { git = "https://github.com/vmagro/reqwest", rev = "b08239c2b6c837321a697c5bb2be394d276b59ca" }
rustfilt = { git = "https://github.com/jsgf/rustfilt.git", rev = "8141fa7f1caee562ee8daffb2ddeca3d1f0d36e5" }
shellexpand = { git = "https://github.com/fanzeyi/shellexpand.git", rev = "179447a3f8fccd765acfd2eed15a54c716c49cfe" }
toml = { git = "https://github.com/fbsource/toml", branch = "dotted-table-0.5.8" }

View File

@@ -5,4 +5,6 @@
* GNU General Public License version 2.
*/
mononoke_app::subcommands! {}
mononoke_app::subcommands! {
mod drawdag;
}

View File

@@ -0,0 +1,431 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
//! DrawDAG for Integration Tests
//!
//! A DrawDAG specification consists of an ASCII graph (either left-to-right
//! or bottom-to-top), and a series of comments that define additional
//! properties for each commit.
//!
//! Valid properties are:
//!
//! * Set a known changeset id for an already-existing commit
//! # exists: COMMIT id
//!
//! * Set a bookmark on a commit
//! # bookmark: COMMIT name
//!
//! * Set the content of a file.
//! # modify: COMMIT path/to/file "content"
//!
//! * Mark a file as deleted.
//! # delete: COMMIT path/to/file
//!
//! Paths can be surrounded by quotes if they contain special characters.
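//!
//! For example (illustrative only; the syntax mirrors the integration tests
//! in this diff), a spec combining these properties might look like:
//!
//!     A-B-C
//!     # bookmark: C main
//!     # modify: C "path/to/file" "new \x41 content"
//!     # delete: C B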
use std::collections::BTreeMap;
use std::io::Write;
use anyhow::{anyhow, Context, Error, Result};
use blame::RootBlameV2;
use blobrepo::BlobRepo;
use bookmarks::{BookmarkName, BookmarkUpdateReason};
use changeset_info::ChangesetInfo;
use clap::Parser;
use context::CoreContext;
use deleted_files_manifest::RootDeletedManifestId;
use derived_data_filenodes::FilenodesOnlyPublic;
use derived_data_manager::BatchDeriveOptions;
use derived_data_manager::BonsaiDerivable;
use fastlog::RootFastlog;
use fsnodes::RootFsnodeId;
use futures::try_join;
use mercurial_derived_data::MappedHgChangesetId;
use mononoke_app::args::RepoArgs;
use mononoke_app::MononokeApp;
use mononoke_types::ChangesetId;
use repo_derived_data::RepoDerivedDataRef;
use skeleton_manifest::RootSkeletonManifestId;
use tests_utils::drawdag::{extend_from_dag_with_changes, ChangeFn};
use tests_utils::CreateCommitContext;
use tokio::io::AsyncReadExt;
use topo_sort::sort_topological;
use unodes::RootUnodeManifestId;
/// Create commits from a drawn DAG.
#[derive(Parser)]
pub struct CommandArgs {
#[clap(flatten)]
repo_args: RepoArgs,
/// Disable creation of default files in each commit
#[clap(long)]
no_default_files: bool,
/// Derive all derived data types for all commits
#[clap(long)]
derive_all: bool,
}
#[derive(Clone, Debug, Eq, PartialEq)]
enum Action {
Exists {
name: String,
id: ChangesetId,
},
Bookmark {
name: String,
bookmark: BookmarkName,
},
Change {
name: String,
change: ChangeAction,
},
}
#[derive(Clone, Debug, Eq, PartialEq)]
enum ChangeAction {
Modify { path: Vec<u8>, content: Vec<u8> },
Delete { path: Vec<u8> },
}
impl Action {
fn new(spec: &str) -> Result<Self> {
if let Some((key, args)) = spec.trim().split_once(':') {
let args = ActionArg::parse_args(args)
.with_context(|| format!("Failed to parse args for '{}'", key))?;
match (key, args.as_slice()) {
("exists", [name, id]) => {
let name = name.to_string()?;
let id = id.to_string()?.parse()?;
Ok(Action::Exists { name, id })
}
("bookmark", [name, bookmark]) => {
let name = name.to_string()?;
let bookmark = bookmark.to_string()?.parse()?;
Ok(Action::Bookmark { name, bookmark })
}
("modify", [name, path, content]) => {
let name = name.to_string()?;
let path = path.to_bytes();
let content = content.to_bytes();
Ok(Action::Change {
name,
change: ChangeAction::Modify { path, content },
})
}
("delete", [name, path]) => {
let name = name.to_string()?;
let path = path.to_bytes();
Ok(Action::Change {
name,
change: ChangeAction::Delete { path },
})
}
_ => Err(anyhow!("Invalid spec for key: {}", key)),
}
} else {
Err(anyhow!("Invalid spec: {}", spec))
}
}
}
struct ActionArg(Vec<u8>);
impl ActionArg {
fn new() -> Self {
ActionArg(Vec::new())
}
fn to_bytes(&self) -> Vec<u8> {
self.0.clone()
}
fn to_string(&self) -> Result<String> {
let s = std::str::from_utf8(&self.0)
.context("Expected UTF-8 string for drawdag action argument")?;
Ok(s.to_string())
}
fn is_empty(&self) -> bool {
self.0.is_empty()
}
fn push(&mut self, ch: char) {
let mut buf = [0; 4];
self.0
.extend_from_slice(ch.encode_utf8(&mut buf).as_bytes());
}
fn push_byte(&mut self, byte: u8) {
self.0.push(byte)
}
fn push_hex(&mut self, mut iter: impl Iterator<Item = char>) -> Result<()> {
if let (Some(top_hex), Some(bottom_hex)) = (iter.next(), iter.next()) {
if let (Some(top_digit), Some(bottom_digit)) =
(top_hex.to_digit(16), bottom_hex.to_digit(16))
{
self.push_byte((top_digit * 0x10 + bottom_digit) as u8);
return Ok(());
}
}
Err(anyhow!("Expected two hex digits"))
}
fn parse_args(args: &str) -> Result<Vec<Self>> {
let mut iter = args.trim().chars();
let mut args = Vec::new();
let mut arg = ActionArg::new();
let mut in_quotes = false;
while let Some(ch) = iter.next() {
if in_quotes {
match ch {
'"' => in_quotes = false,
'\\' => match iter
.next()
.ok_or_else(|| anyhow!("Unexpected end-of-line after '\\'"))?
{
'\\' => arg.push('\\'),
'r' => arg.push('\r'),
'n' => arg.push('\n'),
't' => arg.push('\t'),
'f' => arg.push('\u{0C}'),
'b' => arg.push('\u{08}'),
'"' => arg.push('"'),
'x' => arg.push_hex(&mut iter)?,
esc => return Err(anyhow!("Unexpected escape sequence: '\\{}'", esc)),
},
ch => arg.push(ch),
}
} else {
match ch {
'"' => in_quotes = true,
ch if ch.is_whitespace() => {
if !arg.is_empty() {
args.push(arg);
arg = ActionArg::new();
}
}
ch if ch.is_alphanumeric() || "_./".contains(ch) => {
arg.push(ch);
}
ch => return Err(anyhow!("Unexpected character: '{}'", ch)),
}
}
}
if in_quotes {
return Err(anyhow!("Unterminated string literal"));
}
if !arg.is_empty() {
args.push(arg);
}
Ok(args)
}
}
pub async fn run(app: MononokeApp, args: CommandArgs) -> Result<()> {
let ctx = app.new_context();
let repo: BlobRepo = app
.open_repo(&args.repo_args)
.await
.context("Failed to open repo")?;
// Read DAG from stdin
let mut input = String::new();
tokio::io::stdin().read_to_string(&mut input).await?;
let mut dag_buffer = String::new();
let mut actions = Vec::new();
for line in input.lines() {
if let Some((dag_line, comment)) = line.split_once('#') {
dag_buffer.push_str(dag_line);
dag_buffer.push('\n');
actions.push(Action::new(comment)?);
} else {
dag_buffer.push_str(line);
dag_buffer.push('\n');
}
}
let mut existing: BTreeMap<String, ChangesetId> = BTreeMap::new();
let mut commit_changes: BTreeMap<String, Vec<ChangeAction>> = BTreeMap::new();
let mut bookmarks: BTreeMap<BookmarkName, String> = BTreeMap::new();
for action in actions {
match action {
Action::Exists { name, id } => {
existing.insert(name, id);
}
Action::Bookmark { name, bookmark } => {
bookmarks.insert(bookmark, name);
}
Action::Change { name, change } => {
commit_changes
.entry(name)
.or_insert_with(Vec::new)
.push(change);
}
}
}
let mut change_fns = BTreeMap::new();
for (name, changes) in commit_changes {
let apply: Box<ChangeFn> =
Box::new(move |c: CreateCommitContext| apply_changes(c, changes));
change_fns.insert(name, apply);
}
let (commits, dag) = extend_from_dag_with_changes(
&ctx,
&repo,
&dag_buffer,
change_fns,
existing,
!args.no_default_files,
)
.await?;
for (name, id) in commits.iter() {
writeln!(std::io::stdout(), "{}={}", name, id)?;
}
if !bookmarks.is_empty() {
let mut txn = repo.bookmarks().create_transaction(ctx.clone());
for (bookmark, name) in bookmarks {
let target = commits
.get(&name)
.ok_or_else(|| anyhow!("No commit {} for bookmark {}", name, bookmark))?;
txn.force_set(&bookmark, *target, BookmarkUpdateReason::TestMove, None)?;
}
txn.commit().await?;
}
if args.derive_all {
let dag = dag
.into_iter()
.map(|(k, v)| (k, v.into_iter().collect()))
.collect();
let sorted = sort_topological(&dag).ok_or_else(|| anyhow!("Graph has a cycle"))?;
let csids = sorted
.into_iter()
.map(|name| {
commits
.get(&name)
.cloned()
.ok_or_else(|| anyhow!("No commit found for {}", name))
})
.collect::<Result<Vec<_>>>()?;
derive_all(&ctx, &repo, &csids).await?;
}
Ok(())
}
fn apply_changes<'a>(
mut c: CreateCommitContext<'a>,
changes: Vec<ChangeAction>,
) -> CreateCommitContext<'a> {
for change in changes {
match change {
ChangeAction::Modify { path, content, .. } => c = c.add_file(path.as_slice(), content),
ChangeAction::Delete { path, .. } => c = c.delete_file(path.as_slice()),
}
}
c
}
async fn derive<D: BonsaiDerivable>(
ctx: &CoreContext,
repo: &BlobRepo,
csids: &[ChangesetId],
) -> Result<()> {
let mgr = repo.repo_derived_data().manager();
mgr.backfill_batch::<D>(
ctx,
csids.to_vec(),
BatchDeriveOptions::Parallel { gap_size: None },
None,
)
.await
.with_context(|| format!("Failed to derive {}", D::NAME))?;
Ok(())
}
async fn derive_all(ctx: &CoreContext, repo: &BlobRepo, csids: &[ChangesetId]) -> Result<()> {
let mercurial = async {
derive::<MappedHgChangesetId>(ctx, repo, csids).await?;
derive::<FilenodesOnlyPublic>(ctx, repo, csids).await?;
Ok::<_, Error>(())
};
let unodes = async {
derive::<RootUnodeManifestId>(ctx, repo, csids).await?;
try_join!(
derive::<RootBlameV2>(ctx, repo, csids),
derive::<RootDeletedManifestId>(ctx, repo, csids),
derive::<RootFastlog>(ctx, repo, csids),
)?;
Ok::<_, Error>(())
};
try_join!(
mercurial,
unodes,
derive::<RootFsnodeId>(ctx, repo, csids),
derive::<RootSkeletonManifestId>(ctx, repo, csids),
derive::<ChangesetInfo>(ctx, repo, csids),
)?;
Ok(())
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_action_specs() -> Result<()> {
assert_eq!(
Action::new(
"exists: A aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
)?,
Action::Exists {
name: "A".to_string(),
id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".parse()?,
}
);
assert_eq!(
Action::new("bookmark: \"A-bookmark\" \"main\"/\"bookmark\"")?,
Action::Bookmark {
name: "A-bookmark".to_string(),
bookmark: "main/bookmark".parse()?,
}
);
assert_eq!(
Action::new(
"modify: _1 path/to/file \"this has \\xaa content\\n\\ton \\x02 lines with \\\"quotes\\\"\""
)?,
Action::Change {
name: "_1".to_string(),
change: ChangeAction::Modify {
path: b"path/to/file".to_vec(),
content: b"this has \xaa content\n\ton \x02 lines with \"quotes\"".to_vec(),
}
}
);
assert_eq!(
Action::new("delete: x path/\"to a deleted file\"")?,
Action::Change {
name: "x".to_string(),
change: ChangeAction::Delete {
path: b"path/to a deleted file".to_vec(),
}
}
);
Ok(())
}
}