sapling/cmds/blobimport.rs
Thomas Orozco 9b90fd63cb mononoke/blobimport: allow running without re-creating store
Summary:
This allows running blobimport multiple times over the same path locally (with a blob files storage, for example), which is how we use it in prod (but there we don't use the file blobstore so it works).

This is helpful when playing around with local changes to blobimport.

Reviewed By: HarveyHunt

Differential Revision: D16580697

fbshipit-source-id: 4a62ff89542f67ce6396948c666244ef40ffe5e7
2019-07-31 11:42:36 -07:00

144 lines
4.3 KiB
Rust

// Copyright (c) 2004-present, Facebook, Inc.
// All Rights Reserved.
//
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
#![deny(warnings)]
extern crate blobimport_lib;
extern crate clap;
extern crate cloned;
extern crate cmdlib;
extern crate failure_ext as failure;
extern crate futures;
extern crate futures_ext;
extern crate mercurial_types;
#[macro_use]
extern crate slog;
extern crate phases;
extern crate tokio;
extern crate tracing;
use std::str::FromStr;
use std::sync::Arc;
use crate::failure::{Result, SlogKVError};
use clap::{App, Arg};
use cloned::cloned;
use futures::Future;
use futures_ext::FutureExt;
use phases::SqlPhases;
use tracing::{trace_args, Traced};
use cmdlib::args;
use mercurial_types::HgNodeHash;
/// Construct the clap `App` describing blobimport's command line.
///
/// Flags:
/// - `<INPUT>` (required): path to the source revlog repo.
/// - `--changeset`: import only one changeset; mutually exclusive with
///   `--skip` and `--commits-limit`.
/// - `--no-bookmark`, `--no-create`, `--lfs-helper`: optional behavior tweaks.
fn setup_app<'a, 'b>() -> App<'a, 'b> {
    // safe_writes on, advanced args visible, glog-style logging by default.
    let mononoke_app = args::MononokeApp {
        safe_writes: true,
        hide_advanced_args: false,
        default_glog: true,
    };

    // These two only make sense for bulk imports, so they conflict with
    // importing a single --changeset.
    let skip_arg = Arg::from_usage("--skip [SKIP] 'skips commits from the beginning'")
        .conflicts_with("changeset");
    let limit_arg = Arg::from_usage(
        "--commits-limit [LIMIT] 'import only LIMIT first commits from revlog repo'",
    )
    .conflicts_with("changeset");

    mononoke_app
        .build("revlog to blob importer")
        .version("0.0.0")
        .about("Import a revlog-backed Mercurial repo into Mononoke blobstore.")
        .args_from_usage(
            r#"
<INPUT> 'input revlog repo'
--changeset [HASH] 'if provided, the only changeset to be imported'
--no-bookmark 'if provided won't update bookmarks'
--no-create 'if provided won't create a new repo (only meaningful for local)'
--lfs-helper [LFS_HELPER] 'if provided, path to an executable that accepts OID SIZE and returns a LFS blob to stdout'
"#,
        )
        .arg(skip_arg)
        .arg(limit_arg)
}
/// Entry point: parse flags, open (or create) the target blobrepo, and drive
/// the blobimport future to completion on a dedicated tokio runtime.
fn main() -> Result<()> {
    let matches = setup_app().get_matches();
    let ctx = args::get_core_context(&matches);
    args::init_cachelib(&matches);

    // <INPUT> is a required positional, so clap guarantees it is present.
    let revlogrepo_path = matches
        .value_of("INPUT")
        .expect("input is not specified")
        .into();

    // Validate the --changeset hash eagerly so a bad hash fails before any
    // repo is opened or created.
    let changeset = if let Some(hash) = matches.value_of("changeset") {
        Some(HgNodeHash::from_str(hash)?)
    } else {
        None
    };

    let skip = if matches.is_present("skip") {
        Some(args::get_usize(&matches, "skip", 0))
    } else {
        None
    };

    let commits_limit = if matches.is_present("commits-limit") {
        Some(args::get_usize(&matches, "commits-limit", 0))
    } else {
        None
    };

    let no_bookmark = matches.is_present("no-bookmark");
    let lfs_helper = matches.value_of("lfs-helper").map(String::from);

    let phases_store = args::open_sql::<SqlPhases>(&matches);

    // --no-create attaches to an existing repo; otherwise a new one is
    // created. left/right unify the two distinct future types.
    let repo_fut = if matches.is_present("no-create") {
        args::open_repo(&ctx.logger(), &matches).left_future()
    } else {
        args::create_repo(&ctx.logger(), &matches).right_future()
    };

    let import_fut = repo_fut
        .join(phases_store)
        .and_then(move |(repo, phases)| {
            let repo = Arc::new(repo);
            let phases = Arc::new(phases);
            blobimport_lib::Blobimport {
                ctx: ctx.clone(),
                logger: ctx.logger().clone(),
                blobrepo: repo,
                revlogrepo_path,
                changeset,
                skip,
                commits_limit,
                no_bookmark,
                phases_store: phases,
                lfs_helper,
            }
            .import()
            .traced(ctx.trace(), "blobimport", trace_args!())
            .map_err({
                cloned!(ctx);
                // Log the failure and bail immediately with a non-zero status.
                move |err| {
                    error!(ctx.logger(), "error while blobimporting"; SlogKVError(err));
                    ::std::process::exit(1);
                }
            })
            // Upload the trace regardless of outcome, then yield the result.
            .then(move |result| args::upload_and_show_trace(ctx).then(move |_| result))
        });

    let mut runtime = tokio::runtime::Runtime::new()?;
    let result = runtime.block_on(import_fut);
    // Let the runtime drain remaining work (e.g. log/trace uploads) before exit.
    runtime.shutdown_on_idle();
    result
}