mononoke: separate blobstore naming from repo naming in integration tests

Summary: Allow repos to share a blobstore in tests by naming the blobstore storage_config separately from the repo, using storage.toml

Reviewed By: krallin

Differential Revision: D18820758

fbshipit-source-id: b37159b1407481dc69f548fad08cc543d1bbc34f
This commit is contained in:
Alex Hornby 2019-12-10 03:59:21 -08:00 committed by Facebook Github Bot
parent fcc72578ee
commit 521388fdd5
11 changed files with 170 additions and 65 deletions

View File

@ -33,7 +33,7 @@ use slog_logview::LogViewDrain;
use crate::helpers::{
init_cachelib_from_settings, open_sql_with_config_and_myrouter_port, setup_repo_dir,
CachelibSettings,
CachelibSettings, CreateStorage,
};
use crate::log;
@ -767,7 +767,13 @@ fn open_repo_internal_with_repo_id<'a>(
info!(logger, "using repo \"{}\" repoid {:?}", reponame, repo_id);
match &config.storage_config.blobstore {
BlobConfig::Files { path } | BlobConfig::Rocks { path } | BlobConfig::Sqlite { path } => {
setup_repo_dir(path, create).expect("Setting up file blobrepo failed");
let create = if create {
// Many path-based repos can share one blobstore, so allow the store to already exist or create it.
CreateStorage::ExistingOrCreate
} else {
CreateStorage::ExistingOnly
};
try_boxfuture!(setup_repo_dir(path, create));
}
_ => {}
};

View File

@ -53,13 +53,20 @@ pub fn upload_and_show_trace(ctx: CoreContext) -> impl Future<Item = (), Error =
.right_future()
}
pub fn setup_repo_dir<P: AsRef<Path>>(data_dir: P, create: bool) -> Result<()> {
/// Policy for `setup_repo_dir`: whether missing blobstore storage
/// directories may be created, or must already exist (useful when
/// several repos share one blobstore and it was set up earlier).
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum CreateStorage {
/// Fail if the expected storage directory layout is not already present.
ExistingOnly,
/// Reuse an existing layout, or create any missing pieces.
ExistingOrCreate,
}
pub fn setup_repo_dir<P: AsRef<Path>>(data_dir: P, create: CreateStorage) -> Result<()> {
let data_dir = data_dir.as_ref();
if !data_dir.is_dir() {
bail!("{:?} does not exist or is not a directory", data_dir);
}
// Validate directory layout
for subdir in &["blobs"] {
let subdir = data_dir.join(subdir);
@ -67,22 +74,14 @@ pub fn setup_repo_dir<P: AsRef<Path>>(data_dir: P, create: bool) -> Result<()> {
bail!("{:?} already exists and is not a directory", subdir);
}
if create {
if subdir.exists() {
let content: Vec<_> = subdir.read_dir()?.collect();
if !content.is_empty() {
bail!(
"{:?} already exists and is not empty: {:?}",
subdir,
content
);
if !subdir.exists() {
if CreateStorage::ExistingOnly == create {
bail!("{:?} not found in ExistingOnly mode", subdir,);
}
} else {
fs::create_dir(&subdir)
.with_context(|| format!("failed to create subdirectory {:?}", subdir))?;
}
}
}
Ok(())
}

View File

@ -13,6 +13,7 @@ use cmdlib::{args, helpers::create_runtime};
use context::CoreContext;
use fbinit::FacebookInit;
use futures::{stream, Future, IntoFuture, Stream};
use futures_ext::FutureExt;
use lfs_import_lib::lfs_upload;
use mercurial_types::blobs::File;
@ -21,6 +22,7 @@ const NAME: &str = "lfs_import";
const ARG_LFS_HELPER: &str = "lfs-helper";
const ARG_CONCURRENCY: &str = "concurrency";
const ARG_POINTERS: &str = "pointers";
const ARG_NO_CREATE: &str = "no-create";
const DEFAULT_CONCURRENCY: usize = 16;
@ -37,6 +39,13 @@ fn main(fb: FacebookInit) -> Result<()> {
.takes_value(true)
.help("The number of OIDs to process in parallel"),
)
.arg(
Arg::with_name(ARG_NO_CREATE)
.long(ARG_NO_CREATE)
.takes_value(false)
.required(false)
.help("If provided won't create a new repo"),
)
.arg(
Arg::with_name(ARG_LFS_HELPER)
.required(true)
@ -56,7 +65,11 @@ fn main(fb: FacebookInit) -> Result<()> {
let logger = args::init_logging(fb, &matches);
let ctx = CoreContext::new_with_logger(fb, logger.clone());
let blobrepo = args::open_repo(fb, &logger, &matches);
let blobrepo = if matches.is_present(ARG_NO_CREATE) {
args::open_repo(fb, &logger, &matches).left_future()
} else {
args::create_repo(fb, &logger, &matches).right_future()
};
let lfs_helper = matches.value_of(ARG_LFS_HELPER).unwrap().to_string();
let concurrency: usize = matches

View File

@ -8,16 +8,8 @@
# Library routines and initial setup for Mononoke-related tests.
if [[ -n "$DB_SHARD_NAME" ]]; then
function db_config() {
echo "[storage.blobstore.db.remote]"
echo "db_address=\"$DB_SHARD_NAME\""
}
MONONOKE_DEFAULT_START_TIMEOUT=60
else
function db_config() {
echo "[storage.blobstore.db.local]"
echo "local_db_path=\"$TESTTMP/monsql\""
}
MONONOKE_DEFAULT_START_TIMEOUT=15
fi
@ -401,6 +393,12 @@ function setup_mononoke_config {
REPOTYPE="blob_rocks"
if [[ $# -gt 0 ]]; then
REPOTYPE="$1"
shift
fi
local blobstorename=blobstore
if [[ $# -gt 0 ]]; then
blobstorename="$1"
shift
fi
if [[ ! -e "$TESTTMP/mononoke_hgcli" ]]; then
@ -423,7 +421,49 @@ identity_type = "USER"
identity_data = "$ALLOWED_USERNAME"
CONFIG
setup_mononoke_repo_config "$REPONAME"
echo "# Start new config" > common/storage.toml
setup_mononoke_storage_config "$REPOTYPE" "$blobstorename"
setup_mononoke_repo_config "$REPONAME" "$blobstorename"
}
# Emit the TOML "[<blobstorename>.db.*]" section for a blobstore's metadata DB.
# $1 - name of the storage config the section belongs to.
# Uses a remote DB shard when DB_SHARD_NAME is set; otherwise points at a
# local DB path under $TESTTMP/monsql.
function db_config() {
local blobstorename="$1"
if [[ -n "$DB_SHARD_NAME" ]]; then
# Remote shard configured by the test environment.
echo "[$blobstorename.db.remote]"
echo "db_address=\"$DB_SHARD_NAME\""
else
# Local on-disk DB shared across repos in this test.
echo "[$blobstorename.db.local]"
echo "local_db_path=\"$TESTTMP/monsql\""
fi
}
# Append a named storage-config stanza to common/storage.toml.
# $1 - underlying blobstore type (e.g. blob_files, blob_rocks)
# $2 - storage-config name; also the on-disk directory under $TESTTMP
# If MULTIPLEXED is set, writes a two-component multiplexed blobstore;
# otherwise a single blobstore of the given type. Appends (does not
# truncate), so multiple named configs can coexist in one file.
function setup_mononoke_storage_config {
local underlyingstorage="$1"
local blobstorename="$2"
local blobstorepath="$TESTTMP/$blobstorename"
if [[ -v MULTIPLEXED ]]; then
# One directory per multiplex component.
mkdir -p "$blobstorepath/0" "$blobstorepath/1"
cat >> common/storage.toml <<CONFIG
$(db_config "$blobstorename")
[$blobstorename.blobstore.multiplexed]
components = [
{ blobstore_id = 0, blobstore = { $underlyingstorage = { path = "$blobstorepath/0" } } },
{ blobstore_id = 1, blobstore = { $underlyingstorage = { path = "$blobstorepath/1" } } },
]
CONFIG
else
# Single-component store in one directory.
mkdir -p "$blobstorepath"
cat >> common/storage.toml <<CONFIG
$(db_config "$blobstorename")
[$blobstorename.blobstore.$underlyingstorage]
path = "$blobstorepath"
CONFIG
fi
}
function setup_commitsyncmap {
@ -469,16 +509,15 @@ EOF
function setup_mononoke_repo_config {
cd "$TESTTMP/mononoke-config" || exit
local reponame="$1"
local storageconfig="$2"
mkdir -p "repos/$reponame"
mkdir -p "$TESTTMP/monsql"
mkdir -p "$TESTTMP/$reponame"
mkdir -p "$TESTTMP/traffic-replay-blobstore"
mkdir -p "$TESTTMP/$reponame/blobs"
cat > "repos/$reponame/server.toml" <<CONFIG
repoid=$REPOID
enabled=${ENABLED:-true}
hash_validation_percentage=100
storage_config = "blobstore"
CONFIG
if [[ ! -v NO_BOOKMARKS_CACHE ]]; then
@ -493,6 +532,16 @@ readonly=true
CONFIG
fi
# Normally point at the common storage config; if none was passed, create a per-repo one
if [[ -z "$storageconfig" ]]; then
storageconfig="blobstore_$reponame"
setup_mononoke_storage_config "$REPOTYPE" "$storageconfig"
fi
cat >> "repos/$reponame/server.toml" <<CONFIG
storage_config = "$storageconfig"
CONFIG
if [[ -v FILESTORE ]]; then
cat >> "repos/$reponame/server.toml" <<CONFIG
[filestore]
@ -510,26 +559,6 @@ fi
if [[ -v LIST_KEYS_PATTERNS_MAX ]]; then
cat >> "repos/$reponame/server.toml" <<CONFIG
list_keys_patterns_max=$LIST_KEYS_PATTERNS_MAX
CONFIG
fi
if [[ -v MULTIPLEXED ]]; then
cat >> "repos/$reponame/server.toml" <<CONFIG
$(db_config "$reponame")
[storage.blobstore.blobstore.multiplexed]
components = [
{ blobstore_id = 0, blobstore = { blob_files = { path = "$TESTTMP/$reponame/0" } } },
{ blobstore_id = 1, blobstore = { blob_files = {path = "$TESTTMP/$reponame/1" } } },
]
CONFIG
else
cat >> "repos/$reponame/server.toml" <<CONFIG
$(db_config "$reponame")
[storage.blobstore.blobstore.$REPOTYPE]
path = "$TESTTMP/$reponame"
CONFIG
fi
@ -664,18 +693,25 @@ CONFIG
}
function blobimport {
local always_log=
if [[ "$1" == "--log" ]]; then
always_log=1
shift
fi
input="$1"
output="$2"
shift 2
mkdir -p "$output"
$MONONOKE_BLOBIMPORT --repo-id $REPOID \
--mononoke-config-path "$TESTTMP/mononoke-config" \
"$input" "${CACHING_ARGS[@]}" "$@" >> "$TESTTMP/blobimport.out" 2>&1
"$input" "${CACHING_ARGS[@]}" "$@" > "$TESTTMP/blobimport.out" 2>&1
BLOBIMPORT_RC="$?"
if [[ $BLOBIMPORT_RC -ne 0 ]]; then
cat "$TESTTMP/blobimport.out"
# set exit code, otherwise previous cat sets it to 0
return "$BLOBIMPORT_RC"
elif [[ -n "$always_log" ]]; then
cat "$TESTTMP/blobimport.out"
fi
}

View File

@ -25,7 +25,7 @@
$ blobimport repo-hg-nolfs/.hg repo
$ ls $TESTTMP/repo/blobs | grep "alias"
$ ls $TESTTMP/blobstore/blobs | grep "alias"
blob-repo0000.alias.gitsha1.45d9e0e9fc8859787c33081dffdf12f41b54fcf3
blob-repo0000.alias.gitsha1.8e1e71d5ce34c01b6fe83bc5051545f2918c8c2b
blob-repo0000.alias.gitsha1.9de77c18733ab8009a956c25e28c85fe203a17d7
@ -41,8 +41,8 @@
* Alias Verification finished: 0 errors found (glob)
$ rm -rf $TESTTMP/repo/blobs/blob-repo0000.alias.*
$ ls $TESTTMP/repo/blobs | grep "alias" | wc -l
$ rm -rf $TESTTMP/blobstore/blobs/blob-repo0000.alias.*
$ ls $TESTTMP/blobstore/blobs | grep "alias" | wc -l
0
$ aliasverify verify 2>&1 | grep "Alias Verification"
@ -54,7 +54,7 @@
* Missing alias blob: alias Sha256(d690916cdea320e620748799a2051a0f4e07d6d0c3e2bc199ea3c69e0c0b5e4f), content_id ContentId(Blake2(7ee06cac57ab4267c097ebc8ec36e903fb3c25867934fe360e069ea1ab2ed7fd)) (glob)
* Missing alias blob: alias Sha256(2ba85baaa7922ff4c0dfdbc00fd07bd69dcb1dce745c6a8c676fe8b5642a0d66), content_id ContentId(Blake2(1a3f1094cdae123ec6999b7baf4211ffd94f47970bedd71e13ec07f24a9aba6a)) (glob)
$ ls $TESTTMP/repo/blobs | grep "alias" | wc -l
$ ls $TESTTMP/blobstore/blobs | grep "alias" | wc -l
0
$ aliasverify generate --debug 2>&1 | grep "Missing alias blob"
@ -62,7 +62,7 @@
* Missing alias blob: alias Sha256(d690916cdea320e620748799a2051a0f4e07d6d0c3e2bc199ea3c69e0c0b5e4f), content_id ContentId(Blake2(7ee06cac57ab4267c097ebc8ec36e903fb3c25867934fe360e069ea1ab2ed7fd)) (glob)
* Missing alias blob: alias Sha256(2ba85baaa7922ff4c0dfdbc00fd07bd69dcb1dce745c6a8c676fe8b5642a0d66), content_id ContentId(Blake2(1a3f1094cdae123ec6999b7baf4211ffd94f47970bedd71e13ec07f24a9aba6a)) (glob)
$ ls $TESTTMP/repo/blobs | grep "alias"
$ ls $TESTTMP/blobstore/blobs | grep "alias"
blob-repo0000.alias.sha256.2ba85baaa7922ff4c0dfdbc00fd07bd69dcb1dce745c6a8c676fe8b5642a0d66
blob-repo0000.alias.sha256.b9a294f298d0ed2b65ca4488a42b473ff5f75d0b9843cbea84e1b472f9a514d1
blob-repo0000.alias.sha256.d690916cdea320e620748799a2051a0f4e07d6d0c3e2bc199ea3c69e0c0b5e4f

View File

@ -0,0 +1,51 @@
$ . "${TEST_FIXTURES}/library.sh"
setup configuration
$ setup_common_config
$ cd $TESTTMP
setup common configuration
$ cat >> $HGRCPATH <<EOF
> [ui]
> ssh="$DUMMYSSH"
> EOF
setup repo
$ hg init repo-hg
$ cd repo-hg
$ setup_hg_server
$ hg debugdrawdag <<EOF
> C
> |
> B
> |
> A
> EOF
create master bookmark
$ hg bookmark master_bookmark -r tip
blobimport --no-create with no storage present, should fail due to missing directory
$ cd ..
$ blobimport --log repo-hg/.hg repo --no-create
* using repo "repo" repoid RepositoryId(0)* (glob)
Error: "$TESTTMP/blobstore/blobs" not found in ExistingOnly mode
[1]
blobimport, succeeding, creates directory if not existing
$ blobimport --log repo-hg/.hg repo
* using repo "repo" repoid RepositoryId(0) (glob)
* inserted commits # 0 (glob)
* finished uploading changesets and globalrevs (glob)
* uploaded chunk of 1 bookmarks (glob)
check the bookmark is there after import
$ mononoke_admin --readonly-storage bookmarks log master_bookmark 2>&1 | grep master_bookmark
(master_bookmark) 26805aba1e600a82e93661149f2313866a221a7b blobimport * (glob)
blobimport --no-create after successful import, should be fine as storage shared with previous good run
$ blobimport --log repo-hg/.hg repo --no-create
* using repo "repo" repoid RepositoryId(0) (glob)
* inserted commits # 0 (glob)
* finished uploading changesets and globalrevs (glob)
* uploaded chunk of 0 bookmarks (glob)

View File

@ -50,7 +50,7 @@ Create a new commit in repo2
Do a push, while bundle preservation is disabled
$ hgmn push -qr . --to master_bookmark
$ ls $TESTTMP/repo/blobs | grep rawbundle2
$ ls $TESTTMP/blobstore/blobs | grep rawbundle2
[1]
Restart mononoke with enabled bundle2 preservation
@ -71,7 +71,7 @@ Do a push, while bundle preservation is enabled
searching for changes
updating bookmark master_bookmark
$ ls $TESTTMP/repo/blobs | grep rawbundle2 | wc -l
$ ls $TESTTMP/blobstore/blobs | grep rawbundle2 | wc -l
1
Do a pushrebase, while preservation is enabled
@ -84,5 +84,5 @@ Do a pushrebase, while preservation is enabled
adding file changes
added 1 changesets with 0 changes to 0 files
updating bookmark master_bookmark
$ ls $TESTTMP/repo/blobs | grep rawbundle2 | wc -l
$ ls $TESTTMP/blobstore/blobs | grep rawbundle2 | wc -l
2

View File

@ -38,7 +38,7 @@ blobimport them into Mononoke storage and start Mononoke
$ blobimport repo-hg/.hg repo
Corrupt blobs by replacing one content blob with another
$ cd repo/blobs
$ cd blobstore/blobs
$ cp blob-repo0000.content.blake2.896ad5879a5df0403bfc93fc96507ad9c93b31b11f3d0fa05445da7918241e5d blob-repo0000.content.blake2.eb56488e97bb4cf5eb17f05357b80108a4a71f6c3bab52dfcaec07161d105ec9
start mononoke

View File

@ -74,7 +74,7 @@
updating bookmark master_bookmark
# Check that alias.sha1.hgfilenode -> sha256.file_content is not generated
$ ls $TESTTMP/repo/blobs | grep "alias" | sort -h
$ ls $TESTTMP/blobstore/blobs | grep "alias" | sort -h
blob-repo0000.alias.gitsha1.23c160a91fd3e722a49a86017e103e83e7965af7
blob-repo0000.alias.gitsha1.b4785957bc986dc39c629de9fac9df46972c00fc
blob-repo0000.alias.sha1.8cfc11d4c1bf45aca9412afc5b95cfd1db83e885
@ -106,7 +106,7 @@
added 1 changesets with 0 changes to 0 files
new changesets 0db8825b9792
$ ls $TESTTMP/repo/blobs | grep "alias.content" | wc -l
$ ls $TESTTMP/blobstore/blobs | grep "alias.content" | wc -l
0
$ hgmn update -r master_bookmark -v
@ -119,7 +119,7 @@
2 files updated, 0 files merged, 0 files removed, 0 files unresolved
# Check that alias.sha1.hgfilenode -> sha256.file_content is generated
$ ls $TESTTMP/repo/blobs | grep "alias" | sort -h
$ ls $TESTTMP/blobstore/blobs | grep "alias" | sort -h
blob-repo0000.alias.gitsha1.23c160a91fd3e722a49a86017e103e83e7965af7
blob-repo0000.alias.gitsha1.b4785957bc986dc39c629de9fac9df46972c00fc
blob-repo0000.alias.sha1.8cfc11d4c1bf45aca9412afc5b95cfd1db83e885

View File

@ -1,7 +1,7 @@
$ . "${TEST_FIXTURES}/library.sh"
setup configuration
$ MULTIPLEXED=1 setup_common_config
$ MULTIPLEXED=1 REPOTYPE=blob_files setup_common_config
$ cd $TESTTMP
setup common configuration

View File

@ -20,13 +20,13 @@
$ cd "$TESTTMP"
$ blobimport repo-hg/.hg repo
$ FILENODE=$(ls "$TESTTMP/repo/blobs" | grep "hgfilenode" | cut -d "." -f 4)
$ FILENODE=$(ls "$TESTTMP/blobstore/blobs" | grep "hgfilenode" | cut -d "." -f 4)
# Check that nothing happens if the filestore is not enabled
$ mononoke_rechunker "$FILENODE"
* using repo "repo" repoid RepositoryId(0) (glob)
$ ls "$TESTTMP/repo/blobs" | grep hgfilenode
$ ls "$TESTTMP/blobstore/blobs" | grep hgfilenode
blob-repo0000.hgfilenode.sha1.92c09d364cd563132d6eb5f1424ff63523d51f73
# Check that the rechunker complains about an unknown filenode
@ -43,5 +43,5 @@
$ mononoke_rechunker "$FILENODE"
* using repo "repo" repoid RepositoryId(0) (glob)
$ ls "$TESTTMP/repo/blobs" | grep chunk | wc -l
$ ls "$TESTTMP/blobstore/blobs" | grep chunk | wc -l
2