sapling/eden/mononoke/cmds/upload_globalrevs.rs
David Tolnay fe65402e46 rust: Move futures-old rdeps to renamed futures-old
Summary:
In targets that depend on *both* 0.1 and 0.3 futures, this codemod renames the 0.1 dependency to be exposed as futures_old::. This is in preparation for flipping the 0.3 dependencies from futures_preview:: to plain futures::.

`.rs` changes performed by:

```
rg \
    --files-with-matches \
    --type-add buck:TARGETS \
    --type buck \
    --glob '!/experimental' \
    --regexp '(_|\b)rust(_|\b)' \
| sed 's,TARGETS$,:,' \
| xargs \
    -x \
    buck query "labels(srcs,
        rdeps(%Ss, fbsource//third-party/rust:futures-old, 1)
        intersect
        rdeps(%Ss, //common/rust/renamed:futures-preview, 1)
    )" \
| xargs sed -i 's/\bfutures::/futures_old::/'
```
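
The `buck query` selects the `srcs` of every rust target within one hop of both the futures 0.1 crate (`futures-old`) and the renamed 0.3 crate (`futures-preview`), and the final `sed` rewrites those sources. On a hypothetical call site (illustrative only, not taken from the actual diff), the rewrite looks like this:

```
// Before the codemod, `futures::` resolved to the 0.1 crate:
//
//     fn ready_fut() -> impl futures::Future<Item = u32, Error = ()> {
//         futures::future::ok(1)
//     }

// After the codemod, the 0.1 crate is reached via the `futures_old` rename,
// freeing the plain `futures::` path for the 0.3 crate:
fn ready_fut() -> impl futures_old::Future<Item = u32, Error = ()> {
    futures_old::future::ok(1)
}
```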

Reviewed By: jsgf

Differential Revision: D20168958

fbshipit-source-id: d2c099f9170c427e542975bc22fd96138a7725b0
2020-03-02 21:02:50 -08:00

/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/

use anyhow::Error;
use blobrepo::BlobRepo;
use blobstore::Loadable;
use bonsai_globalrev_mapping::{
    bulk_import_globalrevs, BonsaiGlobalrevMapping, SqlBonsaiGlobalrevMapping,
};
use bytes::Bytes;
use changesets::{deserialize_cs_entries, ChangesetEntry};
use clap::{App, Arg};
use cloned::cloned;
use cmdlib::{args, helpers::block_execute};
use context::CoreContext;
use fbinit::FacebookInit;
use futures_ext::{BoxFuture, FutureExt};
use futures_old::future::{Future, IntoFuture};
use futures_old::stream;
use futures_old::stream::Stream;
use futures_preview::compat::Future01CompatExt;
use std::fs;
use std::path::Path;
use std::sync::Arc;
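
// NOTE: This tool still runs on futures 0.1, imported as `futures_old`; the
// 0.3 crate is imported as `futures_preview`, whose `compat()` adapter is
// used in main() to bridge the final 0.1 future onto the async executor.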

fn setup_app<'a, 'b>() -> App<'a, 'b> {
    args::MononokeApp::new("Tool to upload globalrevs from commits saved in file")
        .build()
        .version("0.0.0")
        .arg(Arg::from_usage(
            "<IN_FILENAME> 'file with bonsai changesets'",
        ))
}
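
/// Deserializes a file of pre-serialized `ChangesetEntry` values (the format
/// understood by `deserialize_cs_entries` from the `changesets` crate).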
fn parse_serialized_commits<P: AsRef<Path>>(file: P) -> Result<Vec<ChangesetEntry>, Error> {
    let data = fs::read(file).map_err(Error::from)?;
    deserialize_cs_entries(&Bytes::from(data))
}
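
/// Loads every changeset listed in `in_path` from the repo's blobstore and
/// bulk-imports their globalrevs into `globalrevs_store`, processing
/// `chunk_size` changesets at a time.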
pub fn upload<P: AsRef<Path>>(
    ctx: CoreContext,
    repo: BlobRepo,
    in_path: P,
    globalrevs_store: Arc<dyn BonsaiGlobalrevMapping>,
) -> BoxFuture<(), Error> {
    let chunk_size = 1000;
    parse_serialized_commits(in_path)
        .into_future()
        .and_then(move |changesets| {
            stream::iter_ok(changesets)
                .map({
                    cloned!(ctx, repo);
                    // Fetch the full bonsai changeset for each entry.
                    move |entry| entry.cs_id.load(ctx.clone(), repo.blobstore()).from_err()
                })
                // Run up to `chunk_size` blobstore loads in parallel, then
                // group the results so globalrevs are imported in bulk.
                .buffered(chunk_size)
                .chunks(chunk_size)
                .and_then(move |chunk| {
                    bulk_import_globalrevs(
                        ctx.clone(),
                        repo.get_repoid(),
                        globalrevs_store.clone(),
                        chunk.iter(),
                    )
                })
                .for_each(|_| Ok(()))
        })
        .boxify()
}
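
// Entry point: opens the repo and the SQL-backed globalrev mapping, builds
// the upload future, and drives it to completion (bridged to futures 0.3 via
// `compat()` for `block_execute`).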
#[fbinit::main]
fn main(fb: FacebookInit) -> Result<(), Error> {
    let matches = setup_app().get_matches();
    args::init_cachelib(fb, &matches, None);
    let logger = args::init_logging(fb, &matches);
    let ctx = CoreContext::new_with_logger(fb, logger.clone());
    let globalrevs_store = args::open_sql::<SqlBonsaiGlobalrevMapping>(fb, &matches);

    let run = args::open_repo(fb, &logger, &matches)
        .join(globalrevs_store)
        .and_then({
            let matches = matches.clone();
            move |(repo, globalrevs_store)| {
                let in_filename = matches.value_of("IN_FILENAME").unwrap();
                let globalrevs_store = Arc::new(globalrevs_store);
                upload(ctx, repo, in_filename, globalrevs_store)
            }
        });

    block_execute(
        run.compat(),
        fb,
        "upload_globalrevs",
        &logger,
        &matches,
        cmdlib::monitoring::AliveService,
    )
}