repo_client: log full gettreepack args to scuba in verbose mode

Summary:
This will allow us to have greater visibility into what's going on when there are production issues.

Note: for getpack, the params data model is `[MPath, [Node]]`. In practice there seems to be just one node per mpath; however, to preserve the mapping, each mpath is logged in a separate sample.

Reviewed By: ahornby

Differential Revision: D26690685

fbshipit-source-id: 36616256747b61390b0435467892daeff2b4dd07
This commit is contained in:
Kostia Balytskyi 2021-04-14 08:29:00 -07:00 committed by Facebook GitHub Bot
parent 0948b82c30
commit fc3908e9fa
4 changed files with 133 additions and 17 deletions

View File

@ -9,7 +9,8 @@
use fbinit::FacebookInit;
use futures_stats::{FutureStats, StreamStats};
use observability::{ObservabilityContext, ScubaLoggingDecisionFields, ScubaVerbosityLevel};
pub use observability::ScubaVerbosityLevel;
use observability::{ObservabilityContext, ScubaLoggingDecisionFields};
use scuba::{builder::ServerData, ScubaSample, ScubaSampleBuilder};
pub use scuba::{Sampling, ScubaValue};
use sshrelay::{Metadata, Preamble};
@ -79,7 +80,7 @@ impl MononokeScubaSampleBuilder {
}
}
fn should_log_with_level(&self, level: ScubaVerbosityLevel) -> bool {
pub fn should_log_with_level(&self, level: ScubaVerbosityLevel) -> bool {
match level {
ScubaVerbosityLevel::Normal => true,
ScubaVerbosityLevel::Verbose => self

View File

@ -28,6 +28,7 @@ futures_stats = { version = "0.1.0", git = "https://github.com/facebookexperimen
getbundle_response = { version = "0.1.0", path = "getbundle_response" }
hgproto = { version = "0.1.0", path = "../hgproto" }
hostname = { version = "0.1.0", git = "https://github.com/facebookexperimental/rust-shed.git", branch = "master" }
iterhelpers = { version = "0.1.0", path = "../common/iterhelpers" }
itertools = "0.8"
lazy_static = "1.0"
load_limiter = { version = "0.1.0", path = "../load_limiter" }

View File

@ -15,9 +15,14 @@ use futures::{compat::Future01CompatExt, FutureExt, TryFutureExt};
use futures_01_ext::FutureExt as _;
use futures_old::{future, Future};
use futures_stats::{FutureStats, StreamStats};
use hgproto::GettreepackArgs;
use iterhelpers::chunk_by_accumulation;
use mercurial_types::HgManifestId;
use mononoke_types::MPath;
use rand::{distributions::Alphanumeric, thread_rng, Rng};
#[cfg(fbcode_build)]
use scribe::ScribeClient;
use scuba_ext::ScubaVerbosityLevel;
use scuba_ext::{MononokeScubaSampleBuilder, ScribeClientImplementation, ScubaValue};
use stats::prelude::*;
use std::collections::HashMap;
@ -35,6 +40,9 @@ define_stats! {
wireproto_serialization_failure: timeseries(Rate, Sum),
}
// Maximum accumulated string length allowed in a single scuba column chunk.
// NOTE(review): `500_1000` parses as 5_001_000 — possibly a typo for
// `500_000`; confirm the intended column size limit.
const COLUMN_SIZE_LIMIT: usize = 500_1000;
// Scuba message tag shared by all verbose full-args samples.
const FULL_ARGS_LOG_TAG: &str = "Full Command Args";
pub struct WireprotoLogging {
reponame: String,
scribe_args: Option<(ScribeClientImplementation, String)>,
@ -313,3 +321,113 @@ fn do_wireproto_logging<'a>(
/// Produce a random alphanumeric string of exactly `len` characters.
fn generate_random_string(len: usize) -> String {
    let rng = thread_rng();
    rng.sample_iter(&Alphanumeric).take(len).collect()
}
/// Render a directory path for logging, batch-escaping it first and
/// replacing any invalid UTF-8 with the replacement character.
fn debug_format_directory<T: AsRef<[u8]>>(directory: &T) -> String {
    let escaped = hgproto::batch::escape(directory);
    String::from_utf8_lossy(&escaped).into_owned()
}
/// Render a manifest id via its `Display` impl for logging.
pub fn debug_format_manifest(node: &HgManifestId) -> String {
    node.to_string()
}
/// Render an optional path for logging; `None` (the repo root) becomes
/// the empty string.
pub fn debug_format_path(path: &Option<MPath>) -> String {
    path.as_ref().map_or_else(String::new, |p| p.to_string())
}
/// True when the accumulated length has overflowed the scuba column limit.
fn greater_than_column_size(a: usize) -> bool {
    COLUMN_SIZE_LIMIT < a
}
/// Log the full `gettreepack` arguments to scuba, but only when verbose
/// logging is enabled for this session.
///
/// The rootdir/depth pair is small and goes out as a single sample. The
/// potentially unbounded lists (mfnodes, basemfnodes, directories) are
/// split into chunks whose accumulated length stays under
/// `COLUMN_SIZE_LIMIT`, and each chunk is logged as its own sample so
/// that no single scuba column overflows.
pub fn log_gettreepack_params_verbose(ctx: &CoreContext, args: &GettreepackArgs) {
    if !ctx
        .scuba()
        .should_log_with_level(ScubaVerbosityLevel::Verbose)
    {
        return;
    }

    // rootdir and depth are bounded in size, so one combined sample suffices.
    let mut sample = ctx.scuba().clone();
    sample.add("gettreepack_rootdir", debug_format_path(&args.rootdir));
    if let Some(depth) = args.depth {
        sample.add("gettreepack_depth", depth);
    }
    sample.log_with_msg_verbose(FULL_ARGS_LOG_TAG, "gettreepack rootdir and depth".to_string());

    log_chunked_column(
        ctx,
        "gettreepack mfnodes",
        "gettreepack_mfnode_chunk",
        chunk_by_column_size(args.mfnodes.iter().map(debug_format_manifest)),
    );
    log_chunked_column(
        ctx,
        "gettreepack basemfnodes",
        "gettreepack_basemfnode_chunk",
        chunk_by_column_size(args.basemfnodes.iter().map(debug_format_manifest)),
    );
    log_chunked_column(
        ctx,
        "gettreepack directories",
        "gettreepack_directory_chunk",
        chunk_by_column_size(args.directories.iter().map(debug_format_directory)),
    );
}

/// Split a stream of strings into chunks whose accumulated character
/// count stays within the scuba column size limit.
fn chunk_by_column_size(items: impl Iterator<Item = String>) -> Vec<Vec<String>> {
    chunk_by_accumulation(items, 0, |acc, s| acc + s.len(), greater_than_column_size)
}

/// Log each chunk as a separate verbose scuba sample, recording both the
/// chunk payload (under `column`) and its index (under `column` + "_idx")
/// so the original ordering can be reconstructed downstream.
fn log_chunked_column(ctx: &CoreContext, msg: &str, column: &str, chunks: Vec<Vec<String>>) {
    for (i, chunk) in chunks.into_iter().enumerate() {
        ctx.scuba()
            .clone()
            .add(format!("{}_idx", column), i)
            .add(column, chunk)
            .log_with_msg_verbose(FULL_ARGS_LOG_TAG, msg.to_string());
    }
}
/// Log the full `getpack` parameters to scuba, but only when verbose
/// logging is enabled for this session.
///
/// Each mpath is logged in its own sample to preserve the
/// mpath -> filenodes mapping; filenode lists are chunked so that no
/// single scuba column overflows `COLUMN_SIZE_LIMIT`.
pub fn log_getpack_params_verbose(ctx: &CoreContext, encoded_params: &[(String, Vec<String>)]) {
    let scuba = ctx.scuba();
    if !scuba.should_log_with_level(ScubaVerbosityLevel::Verbose) {
        return;
    }

    for (path, nodes) in encoded_params {
        let chunks = chunk_by_accumulation(
            nodes.iter().cloned(),
            0,
            |total, node| total + node.len(),
            greater_than_column_size,
        );
        for (chunk_idx, chunk) in chunks.into_iter().enumerate() {
            let mut sample = scuba.clone();
            sample
                .add("getpack_mpath", path.clone())
                .add("getpack_filenode_chunk_idx", chunk_idx)
                .add("getpack_filenode_chunk", chunk)
                .log_with_msg_verbose(FULL_ARGS_LOG_TAG, None);
        }
    }
}

View File

@ -93,8 +93,11 @@ mod monitor;
mod session_bookmarks_cache;
mod tests;
use logging::CommandLogger;
pub use logging::WireprotoLogging;
use logging::{
debug_format_manifest, debug_format_path, log_getpack_params_verbose,
log_gettreepack_params_verbose, CommandLogger,
};
use monitor::Monitor;
use session_bookmarks_cache::SessionBookmarkCache;
@ -162,19 +165,12 @@ fn gettreepack_scuba_sampling_rate(params: &GettreepackArgs) -> SamplingRate {
}
}
fn debug_format_path(path: &Option<MPath>) -> String {
match path {
Some(p) => format!("{}", p),
None => String::new(),
}
}
/// Render a list of changeset ids as a single space-separated string.
fn debug_format_nodes<'a>(nodes: impl IntoIterator<Item = &'a HgChangesetId>) -> String {
    let mut rendered = Vec::new();
    for node in nodes {
        rendered.push(node.to_string());
    }
    rendered.join(" ")
}
fn debug_format_manifests<'a>(nodes: impl IntoIterator<Item = &'a HgManifestId>) -> String {
nodes.into_iter().map(|node| format!("{}", node)).join(" ")
nodes.into_iter().map(debug_format_manifest).join(" ")
}
fn debug_format_directories<'a, T: AsRef<[u8]> + 'a>(
@ -894,7 +890,7 @@ impl RepoClient {
.add_value(stats.completion_time.as_millis_unchecked() as i64);
let encoded_params = {
let getpack_params = getpack_params.lock().unwrap();
let mut encoded_params = vec![];
let mut encoded_params: Vec<(String, Vec<String>)> = vec![];
for (path, filenodes) in getpack_params.iter() {
let mut encoded_filenodes = vec![];
for filenode in filenodes {
@ -913,11 +909,9 @@ impl RepoClient {
encoded_params.len() as i64,
);
command_logger.finalize_command(
ctx,
&stats,
Some(&json! {encoded_params}),
);
log_getpack_params_verbose(&ctx, &encoded_params);
let json_params = json! {encoded_params};
command_logger.finalize_command(ctx, &stats, Some(&json_params));
Ok(())
}
@ -1898,6 +1892,8 @@ impl HgCommands for RepoClient {
.add("gettreepack_directories", params.directories.len())
.log_with_msg("Gettreepack Params", None);
log_gettreepack_params_verbose(&ctx, &params);
let s = self
.gettreepack_untimed(ctx.clone(), params)
.compat()