mononoke/admin: don't process::exit

Summary:
Sometimes, our Mononoke CLI tests fail in mode/opt because a command returns
an error but never actually outputs an error message. Those error messages
get buffered by our logger, and are normally only emitted when said logger is
dropped.

However, I have a suspicion that by calling process::exit(), we're not giving
Rust a chance to drop the logger (because that eventually just calls the exit
syscall), and as a result our buffer isn't getting flushed.

This patch attempts to fix this by removing all calls to process::exit in the
admin CLI, and instead returning an exit code from main, which I hope will
allow main to drop everything before it returns.

More broadly speaking, I don't think it's very good practice to exit from all
over the place in the admin CLI, so this fixes that :)

Reviewed By: HarveyHunt

Differential Revision: D16687747

fbshipit-source-id: 38e987463363e239d4f9166050a8dd26a4bef985
This commit is contained in:
Thomas Orozco 2019-08-07 06:39:41 -07:00 committed by Facebook Github Bot
parent 6011988595
commit 74b47b28da
14 changed files with 120 additions and 68 deletions

View File

@ -18,11 +18,13 @@ use mercurial_types::MPath;
use mononoke_types::{typed_hash::MononokeId, ContentId, Timestamp};
use slog::{debug, Logger};
use crate::error::SubcommandError;
pub fn subcommand_blacklist(
logger: Logger,
matches: &ArgMatches<'_>,
sub_m: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
let rev = sub_m.value_of("hash").unwrap().to_string();
let task = sub_m.value_of("task").unwrap().to_string();
@ -81,5 +83,6 @@ pub fn subcommand_blacklist(
})
}
})
.from_err()
.boxify()
}

View File

@ -29,6 +29,8 @@ use slog::{info, warn, Logger};
use std::collections::HashMap;
use std::iter::FromIterator;
use crate::error::SubcommandError;
fn get_blobconfig(blob_config: BlobConfig, inner_blobstore_id: Option<u64>) -> Result<BlobConfig> {
match inner_blobstore_id {
None => Ok(blob_config),
@ -81,7 +83,7 @@ pub fn subcommand_blobstore_fetch(
logger: Logger,
matches: &ArgMatches<'_>,
sub_m: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
let repo_id = try_boxfuture!(args::get_repo_id(&matches));
let (_, config) = try_boxfuture!(args::get_config(&matches));
let censoring = config.censoring;
@ -157,6 +159,7 @@ pub fn subcommand_blobstore_fetch(
}
}
})
.from_err()
.boxify()
}

View File

@ -7,7 +7,6 @@
use clap::ArgMatches;
use cmdlib::args;
use context::CoreContext;
use failure_ext::Error;
use futures::prelude::*;
use futures_ext::{BoxFuture, FutureExt};
use mononoke_types::{BonsaiChangeset, ChangesetId, DateTime, FileChange};
@ -16,12 +15,13 @@ use slog::Logger;
use std::collections::BTreeMap;
use crate::common::fetch_bonsai_changeset;
use crate::error::SubcommandError;
pub fn subcommand_bonsai_fetch(
logger: Logger,
matches: &ArgMatches<'_>,
sub_m: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
let rev = sub_m
.value_of("HG_CHANGESET_OR_BOOKMARK")
.unwrap()
@ -67,6 +67,7 @@ pub fn subcommand_bonsai_fetch(
}
}
})
.from_err()
.boxify()
}

View File

@ -8,7 +8,7 @@ use clap::{App, Arg, ArgMatches, SubCommand};
use cloned::cloned;
use context::CoreContext;
use failure_ext::Error;
use futures::{future, Future, Stream};
use futures::{future, Future, IntoFuture, Stream};
use futures_ext::{try_boxfuture, BoxFuture, FutureExt};
use mercurial_types::HgChangesetId;
use mononoke_types::Timestamp;
@ -19,6 +19,7 @@ use blobrepo::BlobRepo;
use bookmarks::{BookmarkName, BookmarkUpdateReason};
use crate::common::{fetch_bonsai_changeset, format_bookmark_log_entry};
use crate::error::SubcommandError;
const SET_CMD: &'static str = "set";
const GET_CMD: &'static str = "get";
@ -111,17 +112,14 @@ pub fn handle_command<'a>(
repo: BoxFuture<BlobRepo, Error>,
matches: &ArgMatches<'a>,
_logger: Logger,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
match matches.subcommand() {
(GET_CMD, Some(sub_m)) => handle_get(sub_m, ctx, repo),
(SET_CMD, Some(sub_m)) => handle_set(sub_m, ctx, repo),
(LOG_CMD, Some(sub_m)) => handle_log(sub_m, ctx, repo),
(LIST_CMD, Some(sub_m)) => handle_list(sub_m, ctx, repo),
(DEL_CMD, Some(sub_m)) => handle_delete(sub_m, ctx, repo),
_ => {
println!("{}", matches.usage());
::std::process::exit(1);
}
(GET_CMD, Some(sub_m)) => handle_get(sub_m, ctx, repo).from_err().boxify(),
(SET_CMD, Some(sub_m)) => handle_set(sub_m, ctx, repo).from_err().boxify(),
(LOG_CMD, Some(sub_m)) => handle_log(sub_m, ctx, repo).from_err().boxify(),
(LIST_CMD, Some(sub_m)) => handle_list(sub_m, ctx, repo).from_err().boxify(),
(DEL_CMD, Some(sub_m)) => handle_delete(sub_m, ctx, repo).from_err().boxify(),
_ => Err(SubcommandError::InvalidArgs).into_future().boxify(),
}
}
@ -141,7 +139,7 @@ fn handle_get<'a>(
args: &ArgMatches<'a>,
ctx: CoreContext,
repo: BoxFuture<BlobRepo, Error>,
) -> BoxFuture<(), Error> {
) -> impl Future<Item = (), Error = Error> {
let bookmark_name = args.value_of("BOOKMARK_NAME").unwrap().to_string();
let bookmark = BookmarkName::new(bookmark_name).unwrap();
let changeset_type = args.value_of("changeset-type").unwrap_or("hg");
@ -156,7 +154,7 @@ fn handle_get<'a>(
println!("{}", output);
future::ok(())
})
.boxify(),
.left_future(),
"bonsai" => repo
.and_then(move |repo| {
@ -169,7 +167,7 @@ fn handle_get<'a>(
},
)
})
.boxify(),
.right_future(),
_ => panic!("Unknown changeset-type supplied"),
}
@ -201,7 +199,7 @@ fn handle_log<'a>(
args: &ArgMatches<'a>,
ctx: CoreContext,
repo: BoxFuture<BlobRepo, Error>,
) -> BoxFuture<(), Error> {
) -> impl Future<Item = (), Error = Error> {
let bookmark_name = args.value_of("BOOKMARK_NAME").unwrap().to_string();
let bookmark = BookmarkName::new(bookmark_name).unwrap();
let changeset_type = args.value_of("changeset-type").unwrap_or("hg");
@ -300,7 +298,7 @@ fn handle_set<'a>(
args: &ArgMatches<'a>,
ctx: CoreContext,
repo: BoxFuture<BlobRepo, Error>,
) -> BoxFuture<(), Error> {
) -> impl Future<Item = (), Error = Error> {
let bookmark_name = args.value_of("BOOKMARK_NAME").unwrap().to_string();
let rev = args.value_of("HG_CHANGESET_ID").unwrap().to_string();
let bookmark = BookmarkName::new(bookmark_name).unwrap();
@ -316,14 +314,13 @@ fn handle_set<'a>(
transaction.commit().map(|_| ()).from_err().boxify()
})
})
.boxify()
}
fn handle_delete<'a>(
args: &ArgMatches<'a>,
ctx: CoreContext,
repo: BoxFuture<BlobRepo, Error>,
) -> BoxFuture<(), Error> {
) -> impl Future<Item = (), Error = Error> {
let bookmark_name = args.value_of("BOOKMARK_NAME").unwrap().to_string();
let bookmark = BookmarkName::new(bookmark_name).unwrap();
repo.and_then(move |repo| {
@ -331,7 +328,6 @@ fn handle_delete<'a>(
try_boxfuture!(transaction.force_delete(&bookmark, BookmarkUpdateReason::ManualMove));
transaction.commit().map(|_| ()).from_err().boxify()
})
.boxify()
}
#[cfg(test)]

View File

@ -19,12 +19,13 @@ use mercurial_types::{Changeset, MPath, MPathElement, Manifest};
use slog::{debug, Logger};
use crate::common::resolve_hg_rev;
use crate::error::SubcommandError;
pub fn subcommand_content_fetch(
logger: Logger,
matches: &ArgMatches<'_>,
sub_m: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
let rev = sub_m.value_of("CHANGESET_ID").unwrap().to_string();
let path = sub_m.value_of("PATH").unwrap().to_string();
@ -70,6 +71,7 @@ pub fn subcommand_content_fetch(
future::ok(()).boxify()
}
})
.map_err(|e| e.into())
.boxify()
}

18
cmds/admin/error.rs Normal file
View File

@ -0,0 +1,18 @@
// Copyright (c) 2019-present, Facebook, Inc.
// All Rights Reserved.
//
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
use failure_ext::Error;
/// Error type returned by every admin subcommand.
///
/// Distinguishes "the user passed invalid arguments" (print usage, exit
/// non-zero, no logger output) from a real runtime failure (log the error,
/// exit non-zero). `main` matches on this to pick the right exit path.
pub enum SubcommandError {
    // The subcommand was invoked with unrecognized/invalid arguments;
    // the caller should print clap's usage string rather than an error.
    InvalidArgs,
    // A genuine runtime failure, carrying the underlying error for logging.
    Error(Error),
}
impl From<Error> for SubcommandError {
fn from(err: Error) -> SubcommandError {
SubcommandError::Error(err)
}
}

View File

@ -18,11 +18,13 @@ use mercurial_types::MPath;
use mononoke_types::RepoPath;
use slog::{debug, info, Logger};
use crate::error::SubcommandError;
pub fn subcommand_filenodes(
logger: Logger,
matches: &ArgMatches<'_>,
sub_m: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
let rev = sub_m
.value_of("hg-changeset-or-bookmark")
.unwrap()
@ -105,5 +107,6 @@ pub fn subcommand_filenodes(
);
});
})
.from_err()
.boxify()
}

View File

@ -8,7 +8,7 @@ use clap::{App, Arg, ArgMatches, SubCommand};
use cloned::cloned;
use cmdlib::args;
use context::CoreContext;
use failure_ext::{err_msg, format_err, Error, Result};
use failure_ext::{err_msg, format_err, Result};
use filestore::{self, FetchKey};
use futures::{Future, IntoFuture};
use futures_ext::{BoxFuture, FutureExt};
@ -19,6 +19,8 @@ use mononoke_types::{
use slog::Logger;
use std::str::FromStr;
use crate::error::SubcommandError;
const COMMAND_METADATA: &str = "metadata";
const COMMAND_VERIFY: &str = "verify";
@ -58,7 +60,7 @@ pub fn execute_command(
logger: Logger,
matches: &ArgMatches<'_>,
sub_matches: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
let blobrepo = args::open_repo(&logger, &matches);
let ctx = CoreContext::test_mock();
@ -70,6 +72,7 @@ pub fn execute_command(
.inspect(|r| println!("{:?}", r))
.map(|_| ())
})
.from_err()
.boxify(),
(COMMAND_VERIFY, Some(matches)) => (blobrepo, extract_fetch_key(matches).into_future())
.into_future()
@ -113,11 +116,9 @@ pub fn execute_command(
println!("git_sha1: {:?}", git_sha1.is_ok());
})
})
.from_err()
.boxify(),
_ => {
eprintln!("{}", matches.usage());
::std::process::exit(1);
}
_ => Err(SubcommandError::InvalidArgs).into_future().boxify(),
}
}

View File

@ -5,7 +5,6 @@
// GNU General Public License version 2 or any later version.
use clap::ArgMatches;
use failure_ext::Error;
use futures::prelude::*;
use futures_ext::{BoxFuture, FutureExt};
use std::str::FromStr;
@ -16,11 +15,13 @@ use mercurial_types::HgChangesetId;
use mononoke_types::ChangesetId;
use slog::Logger;
use crate::error::SubcommandError;
pub fn subcommand_hash_convert(
logger: Logger,
matches: &ArgMatches<'_>,
sub_m: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
let source_hash = sub_m.value_of("HASH").unwrap().to_string();
let source = sub_m.value_of("from").unwrap().to_string();
let target = sub_m.value_of("to").unwrap();
@ -66,5 +67,6 @@ pub fn subcommand_hash_convert(
.right_future()
}
})
.from_err()
.boxify()
}

View File

@ -23,12 +23,13 @@ use std::io;
use std::str::FromStr;
use crate::cmdargs::{HG_CHANGESET_DIFF, HG_CHANGESET_RANGE};
use crate::error::SubcommandError;
pub fn subcommand_hg_changeset(
logger: Logger,
matches: &ArgMatches<'_>,
sub_m: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
match sub_m.subcommand() {
(HG_CHANGESET_DIFF, Some(sub_m)) => {
// TODO(T37478150, luk) This is not a test case, fix it up in future diffs
@ -57,6 +58,7 @@ pub fn subcommand_hg_changeset(
.map(|_| ())
.map_err(Error::from)
})
.from_err()
.boxify()
}
(HG_CHANGESET_RANGE, Some(sub_m)) => {
@ -113,12 +115,10 @@ pub fn subcommand_hg_changeset(
.map_err(Error::from)
})
})
.from_err()
.boxify()
}
_ => {
println!("{}", sub_m.usage());
::std::process::exit(1);
}
_ => Err(SubcommandError::InvalidArgs).into_future().boxify(),
}
}

View File

@ -23,13 +23,15 @@ use crate::cmdargs::{
HG_SYNC_FETCH_BUNDLE, HG_SYNC_LAST_PROCESSED, HG_SYNC_REMAINS, HG_SYNC_SHOW, HG_SYNC_VERIFY,
};
use crate::common::{format_bookmark_log_entry, LATEST_REPLAYED_REQUEST_KEY};
use crate::error::SubcommandError;
pub fn subcommand_process_hg_sync(
sub_m: &ArgMatches<'_>,
matches: &ArgMatches<'_>,
logger: Logger,
) -> BoxFuture<(), Error> {
let repo_id = try_boxfuture!(args::get_repo_id(&matches));
) -> BoxFuture<(), SubcommandError> {
let repo_id =
try_boxfuture!(args::get_repo_id(&matches).map_err(|e| SubcommandError::Error(e)));
let ctx = CoreContext::test_mock();
@ -48,11 +50,14 @@ pub fn subcommand_process_hg_sync(
sub_m.is_present("dry-run"),
) {
(Some(..), true, ..) => {
future::err(err_msg("cannot pass both --set and --skip-blobimport")).boxify()
future::err(err_msg("cannot pass both --set and --skip-blobimport"))
.from_err()
.boxify()
}
(.., false, true) => future::err(err_msg(
"--dry-run is meaningless without --skip-blobimport",
))
.from_err()
.boxify(),
(Some(new_value), false, false) => {
let new_value = i64::from_str_radix(new_value, 10).unwrap();
@ -86,6 +91,7 @@ pub fn subcommand_process_hg_sync(
}
})
})
.from_err()
.boxify()
}
(None, skip, dry_run) => mutable_counters
@ -164,6 +170,7 @@ pub fn subcommand_process_hg_sync(
}
})
})
.from_err()
.boxify(),
},
(HG_SYNC_REMAINS, Some(sub_m)) => {
@ -231,6 +238,7 @@ pub fn subcommand_process_hg_sync(
}
})
})
.from_err()
.boxify()
}
(HG_SYNC_SHOW, Some(sub_m)) => {
@ -290,6 +298,7 @@ pub fn subcommand_process_hg_sync(
Ok(())
})
})
.from_err()
.boxify()
}
(HG_SYNC_FETCH_BUNDLE, Some(sub_m)) => {
@ -298,7 +307,9 @@ pub fn subcommand_process_hg_sync(
let id = args::get_u64_opt(sub_m, "id");
let id = try_boxfuture!(id.ok_or(err_msg("--id is not specified")));
if id == 0 {
return future::err(err_msg("--id has to be greater than 0")).boxify();
return future::err(err_msg("--id has to be greater than 0"))
.from_err()
.boxify();
}
let output_file = try_boxfuture!(sub_m
@ -340,6 +351,7 @@ pub fn subcommand_process_hg_sync(
.boxify()
})
})
.from_err()
.boxify()
}
(HG_SYNC_VERIFY, Some(..)) => mutable_counters
@ -350,11 +362,9 @@ pub fn subcommand_process_hg_sync(
process_hg_sync_verify(ctx, repo_id, mutable_counters, bookmarks, logger)
}
})
.from_err()
.boxify(),
_ => {
eprintln!("{}", matches.usage());
::std::process::exit(1);
}
_ => Err(SubcommandError::InvalidArgs).into_future().boxify(),
}
}

View File

@ -5,10 +5,13 @@
// GNU General Public License version 2 or any later version.
#![deny(warnings)]
#![feature(process_exitcode_placeholder)]
use clap::{App, Arg, SubCommand};
use failure_ext::Result;
use futures::future::Future;
use futures::IntoFuture;
use futures_ext::FutureExt;
use std::process::ExitCode;
use tokio::runtime::Runtime;
use cmdlib::args;
use context::CoreContext;
@ -24,6 +27,7 @@ use crate::cmdargs::{
HG_SYNC_VERIFY, SKIPLIST, SKIPLIST_BUILD, SKIPLIST_READ,
};
use crate::content_fetch::subcommand_content_fetch;
use crate::error::SubcommandError;
use crate::filenodes::subcommand_filenodes;
use crate::hash_convert::subcommand_hash_convert;
use crate::hg_changeset::subcommand_hg_changeset;
@ -38,6 +42,7 @@ mod bookmarks_manager;
mod cmdargs;
mod common;
mod content_fetch;
mod error;
mod filenodes;
mod filestore;
mod hash_convert;
@ -310,7 +315,7 @@ fn setup_app<'a, 'b>() -> App<'a, 'b> {
.subcommand(filestore::build_subcommand(FILESTORE))
}
fn main() -> Result<()> {
fn main() -> ExitCode {
let matches = setup_app().get_matches();
let logger = args::get_logger(&matches);
@ -337,22 +342,27 @@ fn main() -> Result<()> {
(BLACKLIST, Some(sub_m)) => subcommand_blacklist(logger, &matches, sub_m),
(FILENODES, Some(sub_m)) => subcommand_filenodes(logger, &matches, sub_m),
(FILESTORE, Some(sub_m)) => filestore::execute_command(logger, &matches, sub_m),
_ => {
eprintln!("{}", matches.usage());
::std::process::exit(1);
}
_ => Err(SubcommandError::InvalidArgs).into_future().boxify(),
};
let debug = matches.is_present("debug");
tokio::run(future.map_err(move |err| {
error!(error_logger, "{:?}", err);
if debug {
error!(error_logger, "\n============ DEBUG ERROR ============");
error!(error_logger, "{:#?}", err);
}
::std::process::exit(1);
}));
let mut runtime = Runtime::new().expect("failed to initialize Tokio runtime");
let res = runtime.block_on(future);
Ok(())
match res {
Ok(_) => ExitCode::SUCCESS,
Err(SubcommandError::Error(err)) => {
error!(error_logger, "{:?}", err);
if debug {
error!(error_logger, "\n============ DEBUG ERROR ============");
error!(error_logger, "{:#?}", err);
}
ExitCode::FAILURE
}
Err(SubcommandError::InvalidArgs) => {
eprintln!("{}", matches.usage());
ExitCode::FAILURE
}
}
}

View File

@ -27,6 +27,8 @@ use mercurial_types::HgChangesetId;
use phases::SqlPhases;
use slog::{info, Logger};
use crate::error::SubcommandError;
fn add_public_phases(
ctx: CoreContext,
repo: BlobRepo,
@ -79,7 +81,7 @@ pub fn subcommand_add_public_phases(
logger: Logger,
matches: &ArgMatches<'_>,
sub_m: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
let path = String::from(sub_m.value_of("input-file").unwrap());
let chunk_size = sub_m
.value_of("chunk-size")
@ -97,5 +99,6 @@ pub fn subcommand_add_public_phases(
.and_then(move |(repo, phases)| {
add_public_phases(ctx, repo, Arc::new(phases), logger, path, chunk_size)
})
.from_err()
.boxify()
}

View File

@ -26,12 +26,13 @@ use skiplist::{deserialize_skiplist_index, SkiplistIndex, SkiplistNodeType};
use slog::{debug, info, Logger};
use crate::cmdargs::{SKIPLIST_BUILD, SKIPLIST_READ};
use crate::error::SubcommandError;
pub fn subcommand_skiplist(
logger: Logger,
matches: &ArgMatches<'_>,
sub_m: &ArgMatches<'_>,
) -> BoxFuture<(), Error> {
) -> BoxFuture<(), SubcommandError> {
match sub_m.subcommand() {
(SKIPLIST_BUILD, Some(sub_m)) => {
let key = sub_m
@ -47,6 +48,7 @@ pub fn subcommand_skiplist(
.and_then(move |(repo, sql_changesets)| {
build_skiplist_index(ctx, repo, key, logger, sql_changesets)
})
.from_err()
.boxify()
}
(SKIPLIST_READ, Some(sub_m)) => {
@ -59,12 +61,10 @@ pub fn subcommand_skiplist(
let ctx = CoreContext::test_mock();
args::open_repo(&logger, &matches)
.and_then(move |repo| read_skiplist_index(ctx.clone(), repo, key, logger))
.from_err()
.boxify()
}
_ => {
println!("{}", sub_m.usage());
::std::process::exit(1);
}
_ => Err(SubcommandError::InvalidArgs).into_future().boxify(),
}
}