add scuba logging to hook execution.

Reviewed By: krallin

Differential Revision: D18477057

fbshipit-source-id: 23f25ad402fa4f70fc75dced50700dc39f3d8b6a
This commit is contained in:
Andreas Backx 2019-11-15 07:04:51 -08:00 committed by Facebook GitHub Bot
parent 6724b8035f
commit b25a733b90
11 changed files with 103 additions and 2 deletions

View File

@ -142,6 +142,7 @@ impl<S> Middleware<S> for CoreContextMiddleware {
session_id,
TraceContext::default(),
None,
None,
SshEnvVars::default(),
None,
);

View File

@ -128,6 +128,7 @@ impl MononokeAPIServiceImpl {
session_id,
TraceContext::default(),
None,
None,
SshEnvVars::default(),
None,
);

View File

@ -23,6 +23,7 @@ use mercurial_types::HgChangesetId;
use metaconfig_types::RepoConfig;
use mononoke_types::ChangesetId;
use revset::AncestorsNodeStream;
use scuba_ext::ScubaSampleBuilder;
use slog::{debug, info};
use std::collections::HashSet;
use std::sync::Arc;
@ -56,6 +57,7 @@ impl Tailer {
Box::new(changeset_store),
content_store,
Default::default(),
ScubaSampleBuilder::with_discard(),
);
load_hooks(ctx.fb, &mut hook_manager, config, disabled_hooks)?;

View File

@ -35,6 +35,7 @@ use metaconfig_types::{
};
use mononoke_types::{FileType, RepositoryId};
use regex::Regex;
use scuba_ext::ScubaSampleBuilder;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
@ -1030,6 +1031,7 @@ fn hook_manager_blobrepo(fb: FacebookInit) -> HookManager {
Box::new(changeset_store),
Arc::new(content_store),
Default::default(),
ScubaSampleBuilder::with_discard(),
)
}
@ -1093,6 +1095,7 @@ fn hook_manager_inmem(fb: FacebookInit) -> HookManager {
Box::new(changeset_store),
Arc::new(content_store),
Default::default(),
ScubaSampleBuilder::with_discard(),
)
}
@ -1109,6 +1112,7 @@ fn default_repo_config() -> RepoConfig {
generation_cache_size: 1,
repoid: RepositoryId::new(1),
scuba_table: None,
scuba_table_hooks: None,
cache_warmup: None,
hook_manager_params: None,
bookmarks_cache_ttl: None,

View File

@ -31,11 +31,14 @@ pub use errors::*;
use failure_ext::{err_msg, Error, FutureFailureErrorExt};
use futures::{future, Future, IntoFuture};
use futures_ext::{try_boxfuture, BoxFuture, FutureExt};
use futures_stats::Timed;
use hooks_content_stores::{ChangedFileType, ChangesetStore, FileContentStore};
use mercurial_types::{Changeset, FileBytes, HgChangesetId, HgFileNodeId, HgParents, MPath};
use metaconfig_types::{BookmarkOrRegex, HookBypass, HookConfig, HookManagerParams};
use mononoke_types::FileType;
use regex::Regex;
use scuba::builder::ServerData;
use scuba_ext::ScubaSampleBuilder;
use slog::debug;
use std::collections::{HashMap, HashSet};
use std::fmt;
@ -58,6 +61,7 @@ pub struct HookManager {
changeset_store: Box<dyn ChangesetStore>,
content_store: Arc<dyn FileContentStore>,
reviewers_acl_checker: Arc<Option<AclChecker>>,
scuba: ScubaSampleBuilder,
}
impl HookManager {
@ -66,11 +70,20 @@ impl HookManager {
changeset_store: Box<dyn ChangesetStore>,
content_store: Arc<dyn FileContentStore>,
hook_manager_params: HookManagerParams,
mut scuba: ScubaSampleBuilder,
) -> HookManager {
let fb = ctx.fb;
let changeset_hooks = HashMap::new();
let file_hooks = HashMap::new();
scuba
.add("driver", "mononoke")
.add("scm", "hg")
.add_mapped_common_server_data(|data| match data {
ServerData::Hostname => "host",
_ => data.default_key(),
});
let reviewers_acl_checker = if !hook_manager_params.disable_acl_checker {
let identity = Identity::from_groupname(facebook::REVIEWERS_ACL_GROUP_NAME);
@ -96,6 +109,7 @@ impl HookManager {
changeset_store,
content_store,
reviewers_acl_checker: Arc::new(reviewers_acl_checker),
scuba,
}
}
@ -221,6 +235,9 @@ impl HookManager {
})
.collect();
let hooks = try_boxfuture!(hooks);
cloned!(mut self.scuba);
scuba.add("hash", changeset_id.to_hex().to_string());
self.get_hook_changeset(ctx.clone(), changeset_id)
.and_then({
cloned!(bookmark);
@ -236,6 +253,7 @@ impl HookManager {
hcs.clone(),
hooks.clone(),
bookmark,
scuba,
)
}
})
@ -260,12 +278,14 @@ impl HookManager {
changeset: HookChangeset,
hooks: Vec<(String, Arc<dyn Hook<HookChangeset>>, HookConfig)>,
bookmark: BookmarkName,
scuba: ScubaSampleBuilder,
) -> BoxFuture<Vec<(String, HookExecution)>, Error> {
futures::future::join_all(hooks.into_iter().map(move |(hook_name, hook, config)| {
HookManager::run_hook(
ctx.clone(),
hook,
HookContext::new(hook_name, config, changeset.clone(), bookmark.clone()),
scuba.clone(),
)
}))
.boxify()
@ -327,6 +347,8 @@ impl HookManager {
})
.collect();
let hooks = try_boxfuture!(hooks);
cloned!(mut self.scuba);
scuba.add("hash", changeset_id.to_hex().to_string());
self.get_hook_changeset(ctx.clone(), changeset_id)
.and_then({
@ -343,6 +365,7 @@ impl HookManager {
hcs.clone(),
hooks,
bookmark,
scuba,
)
}
})
@ -355,6 +378,7 @@ impl HookManager {
changeset: HookChangeset,
hooks: Vec<(String, Arc<dyn Hook<HookFile>>, HookConfig)>,
bookmark: BookmarkName,
scuba: ScubaSampleBuilder,
) -> BoxFuture<Vec<(FileHookExecutionID, HookExecution)>, Error> {
let v: Vec<BoxFuture<Vec<(FileHookExecutionID, HookExecution)>, _>> = changeset
.files
@ -369,6 +393,7 @@ impl HookManager {
file.clone(),
hooks.clone(),
bookmark.clone(),
scuba.clone(),
)
),
ChangedFileType::Deleted => None,
@ -386,6 +411,7 @@ impl HookManager {
file: HookFile,
hooks: Vec<(String, Arc<dyn Hook<HookFile>>, HookConfig)>,
bookmark: BookmarkName,
scuba: ScubaSampleBuilder,
) -> BoxFuture<Vec<(FileHookExecutionID, HookExecution)>, Error> {
let hook_futs = hooks.into_iter().map(move |(hook_name, hook, config)| {
let hook_context = HookContext::new(
@ -395,7 +421,10 @@ impl HookManager {
bookmark.clone(),
);
HookManager::run_hook(ctx.clone(), hook, hook_context).map({
cloned!(mut scuba);
scuba.add("hash", cs_id.to_hex().to_string());
HookManager::run_hook(ctx.clone(), hook, hook_context, scuba).map({
cloned!(file, bookmark);
move |(hook_name, exec)| {
(
@ -417,14 +446,45 @@ impl HookManager {
ctx: CoreContext,
hook: Arc<dyn Hook<T>>,
hook_context: HookContext<T>,
mut scuba: ScubaSampleBuilder,
) -> BoxFuture<(String, HookExecution), Error> {
let hook_name = hook_context.hook_name.clone();
debug!(ctx.logger(), "Running hook {:?}", hook_context.hook_name);
// Try getting the source hostname, otherwise use the unix name.
let user_option = ctx
.source_hostname()
.as_ref()
.or(ctx.user_unix_name().as_ref())
.map(|s| s.as_str());
if let Some(user) = user_option {
scuba.add("user", user);
}
scuba.add("hook", hook_name.clone());
hook.run(ctx, hook_context)
.map({
cloned!(hook_name);
move |he| (hook_name, he)
})
.timed(move |stats, result| {
if let Err(e) = result.as_ref() {
scuba.add("stderr", e.to_string());
}
let elapsed = stats.completion_time.as_millis() as i64;
scuba
.add("elapsed", elapsed)
.add("total_time", elapsed)
.add("errorcode", result.is_err() as i32)
.add("failed_hooks", result.is_err() as i32)
.log();
Ok(())
})
.with_context(move || format!("while executing hook {}", hook_name))
.from_err()
.boxify()

View File

@ -580,6 +580,7 @@ impl RepoConfigs {
let generation_cache_size = this.generation_cache_size.unwrap_or(10 * 1024 * 1024);
let repoid = RepositoryId::new(this.repoid);
let scuba_table = this.scuba_table;
let scuba_table_hooks = this.scuba_table_hooks;
let wireproto_logging = this
.wireproto_logging
@ -752,6 +753,7 @@ impl RepoConfigs {
generation_cache_size,
repoid,
scuba_table,
scuba_table_hooks,
cache_warmup,
hook_manager_params,
bookmarks,
@ -854,6 +856,7 @@ struct RawRepoConfig {
// TODO: work out what these all are
generation_cache_size: Option<usize>,
scuba_table: Option<String>,
scuba_table_hooks: Option<String>,
delay_mean: Option<u64>,
delay_stddev: Option<u64>,
cache_warmup: Option<RawCacheWarmupConfig>,
@ -1501,6 +1504,7 @@ mod test {
let www_content = r#"
repoid=1
scuba_table="scuba_table"
scuba_table_hooks="scm_hooks"
storage_config="files"
[storage.files]
@ -1540,6 +1544,7 @@ mod test {
generation_cache_size=1048576
repoid=0
scuba_table="scuba_table"
scuba_table_hooks="scm_hooks"
skiplist_index_blobstore_key="skiplist_key"
bookmarks_cache_ttl=5000
storage_config="main"
@ -1634,6 +1639,7 @@ mod test {
let www_content = r#"
repoid=1
scuba_table="scuba_table"
scuba_table_hooks="scm_hooks"
storage_config="files"
[storage.files]
@ -1705,6 +1711,7 @@ mod test {
generation_cache_size: 1024 * 1024,
repoid: RepositoryId::new(0),
scuba_table: Some("scuba_table".to_string()),
scuba_table_hooks: Some("scm_hooks".to_string()),
cache_warmup: Some(CacheWarmupParams {
bookmark: BookmarkName::new("master").unwrap(),
commit_limit: 100,
@ -1829,6 +1836,7 @@ mod test {
generation_cache_size: 10 * 1024 * 1024,
repoid: RepositoryId::new(1),
scuba_table: Some("scuba_table".to_string()),
scuba_table_hooks: Some("scm_hooks".to_string()),
cache_warmup: None,
hook_manager_params: None,
bookmarks: vec![],

View File

@ -68,6 +68,8 @@ pub struct RepoConfig {
pub repoid: RepositoryId,
/// Scuba table for logging performance of operations
pub scuba_table: Option<String>,
/// Scuba table for logging hook executions
pub scuba_table_hooks: Option<String>,
/// Parameters of how to warm up the cache
pub cache_warmup: Option<CacheWarmupParams>,
/// Configuration for bookmarks

View File

@ -241,6 +241,7 @@ impl SourceControlServiceImpl {
session_id,
TraceContext::default(),
None,
None,
SshEnvVars::default(),
None,
);

View File

@ -331,6 +331,7 @@ pub struct SessionContainerInner {
session_id: SessionId,
trace: TraceContext,
user_unix_name: Option<String>,
source_hostname: Option<String>,
ssh_env_vars: SshEnvVars,
load_limiter: Option<LoadLimiter>,
}
@ -347,6 +348,7 @@ impl SessionContainer {
session_id: SessionId,
trace: TraceContext,
user_unix_name: Option<String>,
source_hostname: Option<String>,
ssh_env_vars: SshEnvVars,
load_limiter: Option<(MononokeThrottleLimit, RateLimits, String)>,
) -> Self {
@ -358,6 +360,7 @@ impl SessionContainer {
session_id,
trace,
user_unix_name,
source_hostname,
ssh_env_vars,
load_limiter,
};
@ -374,6 +377,7 @@ impl SessionContainer {
generate_session_id(),
TraceContext::new(generate_trace_id(), Instant::now()),
None,
None,
SshEnvVars::default(),
None,
)
@ -405,6 +409,10 @@ impl SessionContainer {
&self.inner.user_unix_name
}
pub fn source_hostname(&self) -> &Option<String> {
&self.inner.source_hostname
}
pub fn ssh_env_vars(&self) -> &SshEnvVars {
&self.inner.ssh_env_vars
}
@ -486,7 +494,6 @@ pub struct CoreContext {
impl CoreContext {
pub fn new_with_logger(fb: FacebookInit, logger: Logger) -> Self {
let session = SessionContainer::new_with_defaults(fb);
session.new_context(logger, ScubaSampleBuilder::with_discard())
}
@ -496,6 +503,7 @@ impl CoreContext {
generate_session_id(),
TraceContext::default(),
None,
None,
SshEnvVars::default(),
None,
);
@ -538,6 +546,10 @@ impl CoreContext {
&self.session.user_unix_name()
}
pub fn source_hostname(&self) -> &Option<String> {
&self.session.source_hostname()
}
pub fn ssh_env_vars(&self) -> &SshEnvVars {
&self.session.ssh_env_vars()
}

View File

@ -276,6 +276,7 @@ pub fn repo_handlers(
infinitepush,
list_keys_patterns_max,
scuba_table,
scuba_table_hooks,
hash_validation_percentage,
wireproto_logging,
bundle2_replay_params,
@ -287,11 +288,19 @@ pub fn repo_handlers(
let hook_manager_params = hook_manager_params.unwrap_or(Default::default());
let mut scuba = if let Some(table_name) = scuba_table_hooks {
ScubaSampleBuilder::new(fb, table_name)
} else {
ScubaSampleBuilder::with_discard()
};
scuba.add("repo", reponame.clone());
let mut hook_manager = HookManager::new(
ctx.clone(),
Box::new(BlobRepoChangesetStore::new(blobrepo.clone())),
blobrepo_text_only_store(blobrepo.clone(), hook_max_file_size),
hook_manager_params,
scuba,
);
// TODO: Don't require full config in load_hooks so we can avoid a clone here.

View File

@ -148,6 +148,7 @@ pub fn request_handler(
session_id,
trace.clone(),
preamble.misc.get("unix_username").cloned(),
preamble.misc.get("source_hostname").cloned(),
ssh_env_vars,
load_limiting_config,
);