mirror of
https://github.com/facebook/sapling.git
synced 2024-10-10 08:47:12 +03:00
b873f821ca
Summary: Output of BookmarkUpdateLog.list_bookmark_log_entries method in eden/mononoke/blobrepo/src/repo.rs was extended with bundle_id. This allows to improve bookmark logs in admin tool: now it prints bundle_id as a first column: ``` I0114 16:53:00.592448 413912 [main] eden/mononoke/cmdlib/src/args/mod.rs:1477] using repo "instagram-server_test" repoid RepositoryId(502) 12637312 (master) 296a8f7c7ac785eeb2437d4676aeb3fbf6e690b1 pushrebase Nov 27 09:06:23 2020 12637526 (master) 4be9b55e41af535128e53eb04fedc830522799f6 pushrebase Nov 27 09:43:10 2020 12637462 (master) ffff84c16fcc81ee5d3eb6ea037c58f2c0f7ac04 pushrebase Nov 27 09:32:14 2020 12637540 (master) 7859f7b8beefab7fb44f771fa733694e6b8fa2d4 pushrebase Nov 27 09:44:59 2020 10095754 (master) dcf7c54125b29a0d4ea404f6c056c9ccb0d27666 pushrebase Sep 10 17:41:48 2020 12637464 (master) 747f205265ca52d2fc98fbba82c18ca17b4dd02e pushrebase Nov 27 09:32:36 2020 12637527 (master) b1a1500888f4d49ad1bc29be84c285eddd26b1ce pushrebase Nov 27 09:43:25 2020 12637520 (master) 440863d2b0a43cd039e26442e9c5786a6d933b21 pushrebase Nov 27 09:42:15 2020 12637522 (master) 2349c7c21b40b06547085f2a76ddef55a1ab4379 pushrebase Nov 27 09:42:38 2020 12637466 (master) 5c607e8575b2e6488b09b1580ff614d67ab1c723 pushrebase Nov 27 09:32:51 2020 ``` Reviewed By: ikostia Differential Revision: D25918315 fbshipit-source-id: 22955e6b839a93e3e3c5b53ee73486f599cf5bda
152 lines
5.1 KiB
Rust
/*
|
|
* Copyright (c) Facebook, Inc. and its affiliates.
|
|
*
|
|
* This software may be used and distributed according to the terms of the
|
|
* GNU General Public License version 2.
|
|
*/
|
|
|
|
use anyhow::{format_err, Error};
|
|
use blobrepo::BlobRepo;
|
|
use blobstore::Loadable;
|
|
use bookmarks::{BookmarkName, BookmarkUpdateReason};
|
|
use cmdlib::{args, helpers};
|
|
use context::CoreContext;
|
|
use fbinit::FacebookInit;
|
|
use futures::TryStreamExt;
|
|
use futures::{compat::Future01CompatExt, try_join};
|
|
use manifest::ManifestOps;
|
|
use mercurial_types::{HgChangesetId, HgFileNodeId, MPath};
|
|
use mononoke_types::{BonsaiChangeset, DateTime, Timestamp};
|
|
use serde_json::{json, to_string_pretty};
|
|
use slog::{debug, Logger};
|
|
use std::collections::HashMap;
|
|
use synced_commit_mapping::SqlSyncedCommitMapping;
|
|
|
|
// Mutable-counter key name for the id of the latest replayed request.
// NOTE(review): no reader/writer of this key is visible in this file —
// presumably consumed by sync/replay tooling elsewhere; confirm at call sites.
pub const LATEST_REPLAYED_REQUEST_KEY: &str = "latest-replayed-request";
|
|
|
|
pub async fn fetch_bonsai_changeset(
|
|
ctx: CoreContext,
|
|
rev: &str,
|
|
repo: &BlobRepo,
|
|
) -> Result<BonsaiChangeset, Error> {
|
|
let csid = helpers::csid_resolve(ctx.clone(), repo.clone(), rev.to_string())
|
|
.compat()
|
|
.await?;
|
|
let cs = csid.load(&ctx, repo.blobstore()).await?;
|
|
Ok(cs)
|
|
}
|
|
|
|
pub fn print_bonsai_changeset(bcs: &BonsaiChangeset) {
|
|
println!(
|
|
"BonsaiChangesetId: {} \n\
|
|
Author: {} \n\
|
|
Message: {} \n\
|
|
FileChanges:",
|
|
bcs.get_changeset_id(),
|
|
bcs.author(),
|
|
bcs.message().lines().next().unwrap_or("")
|
|
);
|
|
|
|
for (path, file_change) in bcs.file_changes() {
|
|
match file_change {
|
|
Some(file_change) => match file_change.copy_from() {
|
|
Some(_) => println!("\t COPY/MOVE: {} {}", path, file_change.content_id()),
|
|
None => println!("\t ADDED/MODIFIED: {} {}", path, file_change.content_id()),
|
|
},
|
|
None => println!("\t REMOVED: {}", path),
|
|
}
|
|
}
|
|
}
|
|
|
|
pub fn format_bookmark_log_entry(
|
|
json_flag: bool,
|
|
changeset_id: String,
|
|
reason: BookmarkUpdateReason,
|
|
timestamp: Timestamp,
|
|
changeset_type: &str,
|
|
bookmark: BookmarkName,
|
|
bundle_id: Option<u64>,
|
|
) -> String {
|
|
let reason_str = reason.to_string();
|
|
if json_flag {
|
|
let answer = json!({
|
|
"changeset_type": changeset_type,
|
|
"changeset_id": changeset_id,
|
|
"reason": reason_str,
|
|
"timestamp_sec": timestamp.timestamp_seconds(),
|
|
"bundle_id": bundle_id,
|
|
});
|
|
to_string_pretty(&answer).unwrap()
|
|
} else {
|
|
let dt: DateTime = timestamp.into();
|
|
let dts = dt.as_chrono().format("%b %e %T %Y");
|
|
match bundle_id {
|
|
Some(bundle_id) => format!(
|
|
"{} ({}) {} {} {}",
|
|
bundle_id, bookmark, changeset_id, reason, dts
|
|
),
|
|
None => format!("({}) {} {} {}", bookmark, changeset_id, reason, dts),
|
|
}
|
|
}
|
|
}
|
|
|
|
// The function retrieves the HgFileNodeId of a file, based on path and rev.
|
|
// If the path is not valid an error is expected.
|
|
pub async fn get_file_nodes(
|
|
ctx: CoreContext,
|
|
logger: Logger,
|
|
repo: &BlobRepo,
|
|
cs_id: HgChangesetId,
|
|
paths: Vec<MPath>,
|
|
) -> Result<Vec<HgFileNodeId>, Error> {
|
|
let cs = cs_id.load(&ctx, repo.blobstore()).await?;
|
|
let root_mf_id = cs.manifestid().clone();
|
|
let manifest_entries: HashMap<_, _> = root_mf_id
|
|
.find_entries(ctx, repo.get_blobstore(), paths.clone())
|
|
.try_filter_map(|(path, entry)| async move {
|
|
let result =
|
|
path.and_then(move |path| entry.into_leaf().map(move |leaf| (path, leaf.1)));
|
|
Ok(result)
|
|
})
|
|
.try_collect()
|
|
.await?;
|
|
|
|
let mut existing_hg_nodes = Vec::new();
|
|
let mut non_existing_paths = Vec::new();
|
|
for path in paths.iter() {
|
|
match manifest_entries.get(&path) {
|
|
Some(hg_node) => existing_hg_nodes.push(*hg_node),
|
|
None => non_existing_paths.push(path.clone()),
|
|
};
|
|
}
|
|
match non_existing_paths.len() {
|
|
0 => {
|
|
debug!(logger, "All the file paths are valid");
|
|
Ok(existing_hg_nodes)
|
|
}
|
|
_ => Err(format_err!(
|
|
"failed to identify the files associated with the file paths {:?}",
|
|
non_existing_paths
|
|
)),
|
|
}
|
|
}
|
|
|
|
pub async fn get_source_target_repos_and_mapping<'a>(
|
|
fb: FacebookInit,
|
|
logger: Logger,
|
|
matches: &'a args::MononokeMatches<'_>,
|
|
) -> Result<(BlobRepo, BlobRepo, SqlSyncedCommitMapping), Error> {
|
|
let config_store = args::init_config_store(fb, &logger, matches)?;
|
|
|
|
let source_repo_id = args::get_source_repo_id(config_store, matches)?;
|
|
let target_repo_id = args::get_target_repo_id(config_store, matches)?;
|
|
|
|
let source_repo = args::open_repo_with_repo_id(fb, &logger, source_repo_id, matches);
|
|
let target_repo = args::open_repo_with_repo_id(fb, &logger, target_repo_id, matches);
|
|
// TODO(stash): in reality both source and target should point to the same mapping
|
|
// It'll be nice to verify it
|
|
let mapping = args::open_source_sql::<SqlSyncedCommitMapping>(fb, config_store, &matches);
|
|
|
|
try_join!(source_repo, target_repo, mapping)
|
|
}
|