2019-10-11 23:51:17 +03:00
|
|
|
/*
|
|
|
|
* Copyright (c) Facebook, Inc. and its affiliates.
|
|
|
|
*
|
|
|
|
* This software may be used and distributed according to the terms of the
|
2020-02-11 13:42:43 +03:00
|
|
|
* GNU General Public License version 2.
|
2019-10-11 23:51:17 +03:00
|
|
|
*/
|
2019-08-16 19:07:36 +03:00
|
|
|
|
|
|
|
#![deny(warnings)]
|
|
|
|
|
2019-12-07 03:26:57 +03:00
|
|
|
use anyhow::{format_err, Error};
|
2020-02-04 17:57:26 +03:00
|
|
|
use blobstore::Loadable;
|
2019-08-16 19:07:36 +03:00
|
|
|
use clap::Arg;
|
|
|
|
use cloned::cloned;
|
|
|
|
use context::CoreContext;
|
2019-09-14 06:16:08 +03:00
|
|
|
use fbinit::FacebookInit;
|
rust: Rename futures_preview:: to futures::
Summary:
Context: https://fb.workplace.com/groups/rust.language/permalink/3338940432821215/
This codemod replaces *all* dependencies on `//common/rust/renamed:futures-preview` with `fbsource//third-party/rust:futures-preview` and their uses in Rust code from `futures_preview::` to `futures::`.
This does not introduce any collisions with `futures::` meaning 0.1 futures because D20168958 previously renamed all of those to `futures_old::` in crates that depend on *both* 0.1 and 0.3 futures.
Codemod performed by:
```
rg \
--files-with-matches \
--type-add buck:TARGETS \
--type buck \
--glob '!/experimental' \
--regexp '(_|\b)rust(_|\b)' \
| sed 's,TARGETS$,:,' \
| xargs \
-x \
buck query "labels(srcs, rdeps(%Ss, //common/rust/renamed:futures-preview, 1))" \
| xargs sed -i 's,\bfutures_preview::,futures::,'
rg \
--files-with-matches \
--type-add buck:TARGETS \
--type buck \
--glob '!/experimental' \
--regexp '(_|\b)rust(_|\b)' \
| xargs sed -i 's,//common/rust/renamed:futures-preview,fbsource//third-party/rust:futures-preview,'
```
Reviewed By: k21
Differential Revision: D20213432
fbshipit-source-id: 07ee643d350c5817cda1f43684d55084f8ac68a6
2020-03-03 21:58:44 +03:00
|
|
|
use futures::{
|
2020-02-12 07:25:15 +03:00
|
|
|
compat::Future01CompatExt,
|
|
|
|
stream::{self, TryStreamExt},
|
|
|
|
};
|
|
|
|
|
2019-08-16 19:07:36 +03:00
|
|
|
use mercurial_types::{HgFileNodeId, HgNodeHash};
|
|
|
|
use std::str::FromStr;
|
|
|
|
|
2020-01-22 03:10:07 +03:00
|
|
|
use cmdlib::{args, helpers::block_execute};
|
2019-08-16 19:07:36 +03:00
|
|
|
|
|
|
|
// Name of this binary as registered with the Mononoke argument parser.
const NAME: &str = "rechunker";

// Number of filenodes rechunked concurrently when `--jobs`/-j is not
// supplied on the command line.
const DEFAULT_NUM_JOBS: usize = 10;
|
|
|
|
|
2019-09-14 06:16:08 +03:00
|
|
|
#[fbinit::main]
|
|
|
|
fn main(fb: FacebookInit) -> Result<(), Error> {
|
2019-10-28 17:57:34 +03:00
|
|
|
let matches = args::MononokeApp::new(NAME)
|
|
|
|
.with_advanced_args_hidden()
|
|
|
|
.build()
|
2019-08-16 19:07:36 +03:00
|
|
|
.version("0.0.0")
|
|
|
|
.about("Rechunk blobs using the filestore")
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("filenodes")
|
|
|
|
.value_name("FILENODES")
|
|
|
|
.takes_value(true)
|
|
|
|
.required(true)
|
|
|
|
.min_values(1)
|
|
|
|
.help("filenode IDs for blobs to be rechunked"),
|
|
|
|
)
|
|
|
|
.arg(
|
|
|
|
Arg::with_name("jobs")
|
|
|
|
.short("j")
|
|
|
|
.long("jobs")
|
|
|
|
.value_name("JOBS")
|
|
|
|
.takes_value(true)
|
|
|
|
.help("The number of filenodes to rechunk in parallel"),
|
|
|
|
)
|
|
|
|
.get_matches();
|
|
|
|
|
2020-01-31 20:50:17 +03:00
|
|
|
args::init_cachelib(fb, &matches, None);
|
2019-08-16 19:07:36 +03:00
|
|
|
|
2019-10-13 10:44:52 +03:00
|
|
|
let logger = args::init_logging(fb, &matches);
|
2019-09-14 06:16:08 +03:00
|
|
|
let ctx = CoreContext::new_with_logger(fb, logger.clone());
|
2019-08-16 19:07:36 +03:00
|
|
|
|
|
|
|
let jobs: usize = matches
|
|
|
|
.value_of("jobs")
|
|
|
|
.map_or(Ok(DEFAULT_NUM_JOBS), |j| j.parse())
|
|
|
|
.map_err(Error::from)?;
|
|
|
|
|
|
|
|
let filenode_ids: Vec<_> = matches
|
|
|
|
.values_of("filenodes")
|
|
|
|
.unwrap()
|
|
|
|
.into_iter()
|
|
|
|
.map(|f| {
|
|
|
|
HgNodeHash::from_str(f)
|
|
|
|
.map(HgFileNodeId::new)
|
2019-12-06 23:51:47 +03:00
|
|
|
.map_err(|e| format_err!("Invalid Sha1: {}", e))
|
2019-08-16 19:07:36 +03:00
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
|
2020-02-12 07:25:15 +03:00
|
|
|
let blobrepo = args::open_repo(fb, &logger, &matches);
|
|
|
|
let rechunk = async move {
|
|
|
|
let blobrepo = blobrepo.compat().await?;
|
|
|
|
stream::iter(filenode_ids)
|
|
|
|
.try_for_each_concurrent(jobs, |fid| {
|
|
|
|
cloned!(blobrepo, ctx);
|
|
|
|
async move {
|
2020-06-25 18:43:18 +03:00
|
|
|
let env = fid.load(ctx.clone(), blobrepo.blobstore()).await?;
|
2020-02-12 07:25:15 +03:00
|
|
|
let content_id = env.content_id();
|
2020-03-19 15:14:43 +03:00
|
|
|
filestore::force_rechunk(
|
2020-02-12 07:25:15 +03:00
|
|
|
blobrepo.get_blobstore(),
|
|
|
|
blobrepo.filestore_config().clone(),
|
|
|
|
ctx,
|
|
|
|
content_id,
|
|
|
|
)
|
|
|
|
.await
|
|
|
|
.map(|_| ())
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.await
|
|
|
|
};
|
2019-08-16 19:07:36 +03:00
|
|
|
|
2020-01-29 20:39:49 +03:00
|
|
|
block_execute(
|
2020-02-12 07:25:15 +03:00
|
|
|
rechunk,
|
2020-01-29 20:39:49 +03:00
|
|
|
fb,
|
|
|
|
"rechunker",
|
|
|
|
&logger,
|
|
|
|
&matches,
|
|
|
|
cmdlib::monitoring::AliveService,
|
|
|
|
)
|
2019-08-16 19:07:36 +03:00
|
|
|
}
|