Replace tracing (#4017)

Logging: Replace `tracing` with an efficient logging implementation that has zero runtime cost for disabled log levels. (https://www.pivotaltracker.com/story/show/183755412)

Profiling: Support submitting `profiler` events to the User Timing Web API, so that measurements can be viewed directly in the browser. (https://www.pivotaltracker.com/story/show/184003550)

# Important Notes
Logging interface:
- The macros (`warn!`, etc.) now take standard `format_args!` arguments; the `tracing`-based implementations accepted a broader syntax. See the usage sketch below.
- Compile-time log levels can now be set through the CLI, like so:
`./run ide start --log-level=trace --uncollapsed-log-level=info`
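
A minimal usage sketch of the new interface (a hypothetical `on_resize` function; the macros are assumed to be in scope via the prelude, as at the call sites updated in this change):

```rust
use enso_prelude::*; // Re-exports `debug!`, `warn!`, `debug_span!`, etc.

fn on_resize(width: f64, height: f64) {
    // Event macros take standard `format_args!` syntax.
    debug!("Resizing to {width}px x {height}px.");
    if width <= 0.0 || height <= 0.0 {
        warn!("Ignoring resize to a non-positive size: {width} x {height}.");
    }
    // Span macros log entry, run the closure, and log exit when it returns.
    debug_span!("Applying size {}px x {}px.", width, height).in_scope(|| {
        // ... work attributed to this span ...
    });
}
```

When a level is disabled at compile time, its macros expand to a no-op that still type-checks the arguments, which is what makes disabled levels free at runtime.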

Profiling:
- The hotkey Ctrl+Alt+Shift+P submits all `profiler` events logged since the application was loaded to the Web API, so that they can be viewed with the browser's developer tools. Note that standard tools cannot represent async task lifetimes or metadata; this is a convenient interface to a subset of `profiler` data.
- As an alternative interface, a runtime flag enables continuous measurement submission. In the browser it can be set through a URL parameter, like http://localhost:8080/?emit_user_timing_measurements=true. Note that this mode significantly impacts performance.
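
For example, the flag is consumed during startup roughly as follows (a sketch mirroring the initializer change below; `ensogl_app` stands for the initialized ensogl application):

```rust
// Forward `profiler` measurements to the User Timing API when requested. This submits
// everything logged so far, then new measurements in batches after each frame.
if enso_config::ARGS.emit_user_timing_measurements.unwrap_or_default() {
    ensogl_app.display.connect_profiler_to_user_timing();
}
```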

Kaz Wesley, 2023-01-16 12:31:01 -08:00 (committed by GitHub)
Commit 662992eb37, parent b8967b96b9
45 changed files with 2004 additions and 886 deletions

Cargo.lock (generated)

@ -2201,6 +2201,25 @@ dependencies = [
"web-sys",
]
[[package]]
name = "enso-logging"
version = "0.3.1"
dependencies = [
"enso-logging-macros",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "enso-logging-macros"
version = "0.1.0"
dependencies = [
"Inflector",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "enso-macro-utils"
version = "0.2.0"
@ -2320,6 +2339,7 @@ dependencies = [
"derivative",
"derive_more",
"enclose",
"enso-logging",
"enso-reflect",
"enso-shapely",
"enso-web",
@ -2336,9 +2356,6 @@ dependencies = [
"serde_json",
"shrinkwraprs 0.3.0",
"smallvec 1.9.0",
"tracing",
"tracing-subscriber",
"tracing-wasm",
"wasm-bindgen",
"wasm-bindgen-test",
"weak-table",
@ -2525,12 +2542,12 @@ dependencies = [
"console_error_panic_hook",
"derivative",
"enso-debug-api",
"enso-logging",
"enso-shapely",
"failure",
"gloo-timers",
"js-sys",
"nalgebra",
"tracing",
"wasm-bindgen",
"wasm-bindgen-test",
"web-sys",
@ -7313,17 +7330,6 @@ dependencies = [
"tracing-log",
]
[[package]]
name = "tracing-wasm"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4575c663a174420fa2d78f4108ff68f65bf2fbb7dd89f33749b6e826b3626e07"
dependencies = [
"tracing",
"tracing-subscriber",
"wasm-bindgen",
]
[[package]]
name = "traitobject"
version = "0.1.0"


@ -69,5 +69,6 @@ ensogl::read_args! {
skip_min_version_check : bool,
preferred_engine_version : semver::Version,
enable_new_component_browser : bool,
emit_user_timing_measurements : bool,
}
}


@ -187,7 +187,7 @@ impl Group {
fn restore_initial_order(&self) {
let mut entries = self.entries.borrow_mut();
if entries.len() != self.initial_entries_order.len() {
tracing::error!(
error!(
"Tried to restore initial order in group where \
`initial_entries_order` is not initialized or up-to-date. Will keep the \
old order."


@ -75,6 +75,9 @@ impl Initializer {
config::InitialView::Project => view.switch_view_to_project(),
}
if enso_config::ARGS.emit_user_timing_measurements.unwrap_or_default() {
ensogl_app.display.connect_profiler_to_user_timing();
}
let status_bar = view.status_bar().clone_ref();
ensogl_app.display.add_child(&view);
// TODO [mwu] Once IDE gets some well-defined mechanism of reporting


@ -667,7 +667,7 @@ impl Graph {
let graph_notifications = self.model.controller.subscribe();
let weak = Rc::downgrade(&self.model);
spawn_stream_handler(weak, graph_notifications, move |notification, _model| {
tracing::debug!("Received controller notification {notification:?}");
debug!("Received controller notification {notification:?}");
match notification {
executed::Notification::Graph(graph) => match graph {
Notification::Invalidate => update_view.emit(()),
@ -709,7 +709,7 @@ impl Graph {
/// content of a node. For example, the searcher uses this to allow the edit field to have a
/// preview that is different from the actual node AST.
pub fn allow_expression_auto_updates(&self, id: AstNodeId, allow: bool) {
tracing::debug!("Setting auto updates for {id:?} to {allow}");
debug!("Setting auto updates for {id:?} to {allow}");
self.model.state.allow_expression_auto_updates(id, allow);
}
}


@ -137,10 +137,10 @@ impl Model {
self.graph.allow_expression_auto_updates(node, true);
searcher.abort_editing();
} else {
tracing::warn!("When porting editing the AST node of the node view {input_node_view} could not be found.");
warn!("When porting editing the AST node of the node view {input_node_view} could not be found.");
}
} else {
tracing::warn!("Editing aborted without searcher controller.");
warn!("Editing aborted without searcher controller.");
}
}


@ -101,7 +101,7 @@ impl Model {
#[profile(Debug)]
fn input_changed(&self, new_input: &str) {
if let Err(err) = self.controller.set_input(new_input.to_owned()) {
tracing::error!("Error while setting new searcher input: {err}.");
error!("Error while setting new searcher input: {err}.");
}
}
@ -116,7 +116,7 @@ impl Model {
Some((self.input_view, new_code_and_trees))
}
Err(err) => {
tracing::error!("Error while applying suggestion: {err}.");
error!("Error while applying suggestion: {err}.");
None
}
}
@ -126,7 +126,7 @@ impl Model {
/// API.
fn entry_selected_as_suggestion(&self, entry_id: view::searcher::entry::Id) {
if let Err(error) = self.controller.preview_entry_as_suggestion(entry_id) {
tracing::warn!("Failed to preview entry {entry_id:?} because of error: {error:?}.");
warn!("Failed to preview entry {entry_id:?} because of error: {error:?}.");
}
}
@ -136,7 +136,7 @@ impl Model {
None => self.controller.commit_node().map(Some),
};
result.unwrap_or_else(|err| {
tracing::error!("Error while executing action: {err}.");
error!("Error while executing action: {err}.");
None
})
}
@ -166,11 +166,9 @@ impl Model {
match self.suggestion_for_entry_id(entry_id) {
Ok(suggestion) =>
if let Err(error) = self.controller.preview_suggestion(suggestion) {
tracing::warn!(
"Failed to preview suggestion {entry_id:?} because of error: {error:?}."
);
warn!("Failed to preview suggestion {entry_id:?} because of error: {error:?}.");
},
Err(err) => tracing::warn!("Error while previewing suggestion: {err}."),
Err(err) => warn!("Error while previewing suggestion: {err}."),
}
}
@ -196,7 +194,7 @@ impl Model {
Some((self.input_view, new_code_and_trees))
}
Err(err) => {
tracing::error!("Error while applying suggestion: {err}.");
error!("Error while applying suggestion: {err}.");
None
}
}
@ -262,7 +260,7 @@ impl Model {
self.suggestion_accepted(entry_id);
}
self.controller.commit_node().map(Some).unwrap_or_else(|err| {
tracing::error!("Error while committing node expression: {err}.");
error!("Error while committing node expression: {err}.");
None
})
}
@ -518,9 +516,7 @@ impl Searcher {
if let Mode::NewNode { source_node, .. } = mode {
if source_node.is_none() {
if let Err(e) = searcher_controller.set_input("".to_string()) {
tracing::error!(
"Failed to clear input when creating searcher for a new node: {e:?}."
);
error!("Failed to clear input when creating searcher for a new node: {e:?}.");
}
}
}


@ -30,7 +30,7 @@ pub async fn main() {
});
// Emit profile and exit.
debug_api::save_profile(&profiler::internal::take_log());
debug_api::save_profile(&profiler::internal::get_log());
debug_api::LifecycleController::new().expect("Workflows run in Electron").quit();
}


@ -261,7 +261,7 @@ impl Model {
let text: &ImString = entry.as_ref();
entry::Model::Text(text.clone_ref())
} else {
tracing::error!("Requested entry is missing in the breadcrumbs ({col})");
error!("Requested entry is missing in the breadcrumbs ({col})");
entry::Model::default()
}
}


@ -256,6 +256,11 @@ optParser.options('skip-min-version-check', {
type: 'boolean',
})
optParser.options('emit-user-timing-measurements', {
describe: 'Forward profiler data to the User Timing web API.',
type: 'boolean',
})
// === Parsing ===
let args = optParser.parse()
@ -574,6 +579,7 @@ function createWindow() {
data_gathering: args.dataGathering,
preferred_engine_version: args.preferredEngineVersion,
enable_new_component_browser: args.enableNewComponentBrowser,
emit_user_timing_measurements: args.emitUserTimingMeasurements,
node_labels: args.nodeLabels,
verbose: args.verbose,
}


@ -774,6 +774,7 @@ class Config {
public skip_min_version_check: boolean = Versions.isDevVersion()
public preferred_engine_version: SemVer = Versions.ideVersion
public enable_new_component_browser: boolean = true
public emit_user_timing_measurements: boolean = false
updateFromObject(other: any) {
if (!ok(other)) {


@ -59,6 +59,17 @@ pub enum ProfilingLevel {
Debug,
}
#[derive(Clone, Copy, Debug, Default, strum::Display, strum::EnumString, PartialEq, Eq)]
#[strum(serialize_all = "kebab-case")]
pub enum LogLevel {
Error,
#[default]
Warn,
Info,
Debug,
Trace,
}
#[derive(clap::ArgEnum, Clone, Copy, Debug, PartialEq, Eq, strum::Display, strum::AsRefStr)]
#[strum(serialize_all = "kebab-case")]
pub enum Profile {
@ -112,13 +123,15 @@ impl Profile {
#[derivative(Debug)]
pub struct BuildInput {
/// Path to the crate to be compiled to WASM. Relative to the repository root.
pub crate_path: PathBuf,
pub wasm_opt_options: Vec<String>,
pub skip_wasm_opt: bool,
pub extra_cargo_options: Vec<String>,
pub profile: Profile,
pub profiling_level: Option<ProfilingLevel>,
pub wasm_size_limit: Option<byte_unit::Byte>,
pub crate_path: PathBuf,
pub wasm_opt_options: Vec<String>,
pub skip_wasm_opt: bool,
pub extra_cargo_options: Vec<String>,
pub profile: Profile,
pub profiling_level: Option<ProfilingLevel>,
pub log_level: Option<LogLevel>,
pub uncollapsed_log_level: Option<LogLevel>,
pub wasm_size_limit: Option<byte_unit::Byte>,
}
impl BuildInput {
@ -191,6 +204,8 @@ impl IsTarget for Wasm {
extra_cargo_options,
profile,
profiling_level,
log_level,
uncollapsed_log_level,
wasm_size_limit: _wasm_size_limit,
} = &inner;
@ -206,7 +221,6 @@ impl IsTarget for Wasm {
.current_dir(&repo_root)
.kill_on_drop(true)
.env_remove(ide_ci::programs::rustup::env::RUSTUP_TOOLCHAIN.name())
.set_env(env::ENSO_ENABLE_PROC_MACRO_SPAN, &true)?
.build()
.arg(wasm_pack::Profile::from(*profile))
.target(wasm_pack::Target::Web)
@ -220,6 +234,11 @@ impl IsTarget for Wasm {
if let Some(profiling_level) = profiling_level {
command.set_env(env::ENSO_MAX_PROFILING_LEVEL, &profiling_level)?;
}
command.set_env(env::ENSO_MAX_LOG_LEVEL, &log_level.unwrap_or_default())?;
command.set_env(
env::ENSO_MAX_UNCOLLAPSED_LOG_LEVEL,
&uncollapsed_log_level.unwrap_or_default(),
)?;
command.run_ok().await?;
Self::finalize_wasm(wasm_opt_options, *skip_wasm_opt, *profile, &temp_dist).await?;
@ -280,6 +299,8 @@ impl IsWatchable for Wasm {
extra_cargo_options,
profile,
profiling_level,
log_level,
uncollapsed_log_level,
wasm_size_limit,
} = inner;
@ -322,6 +343,11 @@ impl IsWatchable for Wasm {
if let Some(profiling_level) = profiling_level {
watch_cmd.args(["--profiling-level", profiling_level.to_string().as_str()]);
}
watch_cmd.args(["--log-level", log_level.unwrap_or_default().to_string().as_str()]);
watch_cmd.args([
"--uncollapsed-log-level",
uncollapsed_log_level.unwrap_or_default().to_string().as_str(),
]);
for wasm_opt_option in wasm_opt_options {
watch_cmd.args(["--wasm-opt-option", &wasm_opt_option]);
}


@ -1,38 +1,22 @@
//! Environment variables used by the GUI's Rust part build.
use crate::project::wasm::LogLevel;
use crate::project::wasm::ProfilingLevel;
ide_ci::define_env_var! {
/// Enable a Rust unstable feature that the `#[profile]` macro uses to obtain source-file
/// and line number information to include in generated profile files.
///
/// The IntelliJ Rust plugin does not support the `proc_macro_span` Rust feature; using it
/// causes JetBrains IDEs to become entirely unaware of the items produced by `#[profile]`.
/// (See: https://github.com/intellij-rust/intellij-rust/issues/8655)
///
/// In order to have line number information in actual usage, but keep everything
/// understandable by JetBrains IDEs, we need IntelliJ/CLion to build crates differently
/// from how they are built for the application to be run. This is accomplished by gating
/// the use of the unstable functionality by a `cfg` flag. A `cfg` flag is disabled by
/// default, so when a Rust IDE builds crates internally in order to determine macro
/// expansions, it will do so without line numbers. When this script is used to build the
/// application, it is not for the purpose of IDE macro expansion, so we can safely enable
/// line numbers.
///
/// The reason we don't use a Cargo feature for this is because this script can build
/// different crates, and we'd like to enable this feature when building any crate that
/// depends on the `profiler` crates. We cannot do something like
/// '--feature=enso_profiler/line-numbers' without causing build to fail when building a
/// crate that doesn't have `enso_profiler` in its dependency tree.
ENSO_ENABLE_PROC_MACRO_SPAN, bool;
/// Use the environment-variable API provided by the `enso_profiler_macros` library to
/// implement the public interface to profiling-level configuration (see:
/// https://github.com/enso-org/design/blob/main/epics/profiling/implementation.md)
ENSO_MAX_PROFILING_LEVEL, ProfilingLevel;
/// Set the level of logging detail that will be enabled at compile-time.
ENSO_MAX_LOG_LEVEL, LogLevel;
/// Set the level of logging detail that will be displayed initially-open in hierarchical views,
/// such as the Web Console.
ENSO_MAX_UNCOLLAPSED_LOG_LEVEL, LogLevel;
/// The timeout for `wasm-bindgen-test-runner` in seconds.
WASM_BINDGEN_TEST_TIMEOUT, u64;
}


@ -57,6 +57,28 @@ impl From<ProfilingLevel> for enso_build::project::wasm::ProfilingLevel {
}
}
// Follows hierarchy defined in lib/rust/logging/src/lib.rs
#[derive(ArgEnum, Clone, Copy, Debug, PartialEq, Eq)]
pub enum LogLevel {
Error,
Warn,
Info,
Debug,
Trace,
}
impl From<LogLevel> for enso_build::project::wasm::LogLevel {
fn from(level: LogLevel) -> Self {
match level {
LogLevel::Error => Self::Error,
LogLevel::Warn => Self::Warn,
LogLevel::Info => Self::Info,
LogLevel::Debug => Self::Debug,
LogLevel::Trace => Self::Trace,
}
}
}
#[derive(Args, Clone, Debug, PartialEq, Eq)]
pub struct BuildInput {
/// Which crate should be treated as a WASM entry point. Relative path from source root.
@ -84,6 +106,14 @@ pub struct BuildInput {
#[clap(long, arg_enum, enso_env())]
pub profiling_level: Option<ProfilingLevel>,
/// Compiles Enso with given log level. If not set, defaults to [`warn`].
#[clap(long, arg_enum, enso_env())]
pub log_level: Option<LogLevel>,
/// Compiles Enso with given uncollapsed log level. If not set, defaults to [`warn`].
#[clap(long, arg_enum, enso_env())]
pub uncollapsed_log_level: Option<LogLevel>,
/// Fail the build if compressed WASM exceeds the specified size. Supports format like
/// "4.06MiB". Pass "0" to disable check.
#[clap(long, enso_env(),


@ -620,6 +620,8 @@ impl Resolvable for Wasm {
wasm_opt_option: wasm_opt_options,
cargo_options,
profiling_level,
log_level,
uncollapsed_log_level,
wasm_size_limit,
skip_wasm_opt,
} = from;
@ -630,6 +632,8 @@ impl Resolvable for Wasm {
extra_cargo_options: cargo_options,
profile: wasm_profile,
profiling_level: profiling_level.map(into),
log_level: log_level.map(into),
uncollapsed_log_level: uncollapsed_log_level.map(into),
wasm_size_limit: wasm_size_limit.filter(|size_limit| size_limit.get_bytes() > 0),
})
}


@ -58,7 +58,7 @@ impl ColumnWidths {
if column < width_diffs.len() {
width_diffs.set(column, width_diff);
} else {
tracing::error!("Index out of bounds when setting column width");
error!("Index out of bounds when setting column width");
}
}
@ -86,7 +86,7 @@ impl ColumnWidths {
if column == 0 {
0.0
} else if column > len {
tracing::warn!(
warn!(
"Trying to get a position offset of a column that does not exist. \
{column} > {len}. Returning 0.0."
);


@ -430,7 +430,6 @@ mod tests {
#[test]
fn selecting_header() {
init_tracing(TRACE);
let app = Application::new("root");
let network = frp::Network::new("selecting_header");
let grid_view = GridViewWithHeaders::<TestEntry, TestEntry>::new(&app);
@ -465,7 +464,7 @@ mod tests {
assert_eq!(selection_state(), vec![false, true, false]);
assert_eq!(grid_view.selection_highlight_frp().position.value(), Vector2(10.0, -40.0));
tracing::debug!("About to go up");
debug!("About to go up");
grid_view.set_viewport(viewport_with_no_pushed_header);
assert_eq!(selection_state(), vec![false, true, false]);
assert_eq!(grid_view.selection_highlight_frp().position.value(), Vector2(10.0, -30.0));


@ -240,7 +240,7 @@ impl<T: FlagOps1, OnMut: FnMut0> HasSet1 for Flag<T, OnMut> {
let is_set = self.data.check(&arg);
if !is_set {
self.data.set(arg);
debug_span!("Setting to {self.data:?}.").in_scope(|| {
debug_span!("Setting to {:?}.", self.data).in_scope(|| {
if first_set {
self.on_set.call();
}


@ -906,7 +906,7 @@ impl SceneData {
// due to rounding errors. We round to the nearest integer to compensate for those errors.
let width = canvas.width.round() as i32;
let height = canvas.height.round() as i32;
debug_span!("Resized to {screen.width}px x {screen.height}px.").in_scope(|| {
debug_span!("Resized to {}px x {}px.", screen.width, screen.height).in_scope(|| {
self.dom.layers.canvas.set_attribute_or_warn("width", width.to_string());
self.dom.layers.canvas.set_attribute_or_warn("height", height.to_string());
if let Some(context) = &*self.context.borrow() {


@ -9,7 +9,6 @@ use crate::display::style::data::DataMatch;
use crate::display::style::Path;
use enso_frp as frp;
use enso_prelude::tracing::log;
@ -93,7 +92,7 @@ impl StyleWatchFrp {
let (source, current) = self.get_internal(path);
frp::extend! { network
value <- source.map(move |t| t.number().unwrap_or_else(|| {
log::warn!("{}", warning);
warn!("{}", warning);
0.0
}));
sampler <- value.sampler();
@ -110,7 +109,7 @@ impl StyleWatchFrp {
let (source, current) = self.get_internal(path);
frp::extend! { network
value <- source.map(move |t| t.color().unwrap_or_else(|| {
log::warn!("{}", warning);
warn!("{}", warning);
FALLBACK_COLOR
}));
sampler <- value.sampler();
@ -128,7 +127,7 @@ impl StyleWatchFrp {
frp::extend! { network
value <- source.map(move |t| {
t.im_string_or_else(|| {
log::warn!("{}", warning);
warn!("{}", warning);
default()
})
});


@ -226,17 +226,18 @@ pub fn scene() -> Scene {
#[allow(missing_docs)]
pub struct WorldData {
#[deref]
frp: api::private::Output,
pub default_scene: Scene,
scene_dirty: dirty::SharedBool,
uniforms: Uniforms,
display_mode: Rc<Cell<glsl::codes::DisplayModes>>,
stats: Stats,
stats_monitor: debug::monitor::Monitor,
stats_draw_handle: callback::Handle,
pub on: Callbacks,
frp: api::private::Output,
pub default_scene: Scene,
scene_dirty: dirty::SharedBool,
uniforms: Uniforms,
display_mode: Rc<Cell<glsl::codes::DisplayModes>>,
stats: Stats,
stats_monitor: debug::monitor::Monitor,
stats_draw_handle: callback::Handle,
pub on: Callbacks,
debug_hotkeys_handle: Rc<RefCell<Option<web::EventListenerHandle>>>,
garbage_collector: garbage::Collector,
garbage_collector: garbage::Collector,
emit_measurements_handle: Rc<RefCell<Option<callback::Handle>>>,
}
impl WorldData {
@ -257,6 +258,7 @@ impl WorldData {
stats_monitor.sample_and_draw(stats);
log_render_stats(*stats)
}));
let emit_measurements_handle = default();
SCENE.with_borrow_mut(|t| *t = Some(default_scene.clone_ref()));
@ -272,6 +274,7 @@ impl WorldData {
stats_monitor,
stats_draw_handle,
garbage_collector,
emit_measurements_handle,
}
.init()
}
@ -291,6 +294,7 @@ impl WorldData {
let stats_monitor = self.stats_monitor.clone_ref();
let display_mode = self.display_mode.clone_ref();
let display_mode_uniform = self.uniforms.display_mode.clone_ref();
let emit_measurements_handle = self.emit_measurements_handle.clone_ref();
let closure: Closure<dyn Fn(JsValue)> = Closure::new(move |val: JsValue| {
let event = val.unchecked_into::<web::KeyboardEvent>();
let digit_prefix = "Digit";
@ -299,9 +303,19 @@ impl WorldData {
if key == "Backquote" {
stats_monitor.toggle()
} else if key == "KeyP" {
enso_debug_api::save_profile(&profiler::internal::take_log());
if event.shift_key() {
let forwarding_incrementally = emit_measurements_handle.borrow().is_some();
// If we are submitting the data continuously, the hotkey is redundant.
let enable_hotkey = !forwarding_incrementally;
if enable_hotkey {
profiler::interval_stream()
.for_each(|interval| log_measurement(&interval));
}
} else {
enso_debug_api::save_profile(&profiler::internal::get_log());
}
} else if key == "KeyQ" {
enso_debug_api::save_profile(&profiler::internal::take_log());
enso_debug_api::save_profile(&profiler::internal::get_log());
enso_debug_api::LifecycleController::new().map(|api| api.quit());
} else if key.starts_with(digit_prefix) {
let code_value = key.trim_start_matches(digit_prefix).parse().unwrap_or(0);
@ -349,6 +363,22 @@ impl WorldData {
}
}
/// Begin incrementally submitting [`profiler`] data to the User Timing web API.
///
/// This will submit all measurements logged so far, and then periodically submit any new
/// measurements in batches.
pub fn connect_profiler_to_user_timing(&self) {
let mut handle = self.emit_measurements_handle.borrow_mut();
if handle.is_none() {
let mut intervals = profiler::interval_stream();
*handle = Some(self.on.after_frame.add(move |_| {
for interval in &mut intervals {
log_measurement(&interval);
}
}));
}
}
/// Perform to the next frame with the provided time information.
///
/// Please note that the provided time information from the [`requestAnimationFrame`] JS
@ -378,6 +408,25 @@ impl WorldData {
}
}
mod js {
#[wasm_bindgen::prelude::wasm_bindgen(inline_js = r#"
export function log_measurement(label, start, end) {
window.performance.measure(label, { "start": start, "end": end })
}
"#)]
extern "C" {
#[allow(unsafe_code)]
pub fn log_measurement(msg: String, start: f64, end: f64);
}
}
fn log_measurement(interval: &profiler::Interval) {
let label = interval.label().to_owned();
let start = interval.start();
let end = interval.end();
js::log_measurement(label, start, end);
}
// =============


@ -117,7 +117,7 @@ impl<T:Storable> {
/// Constructor.
pub fn new<OnMut:callback::NoArgs, OnResize:callback::NoArgs>
(stats:&Stats, on_mut:OnMut, on_resize:OnResize) -> Self {
debug_span!("Creating new {T::type_display()} buffer.").in_scope(|| {
debug_span!("Creating new {} buffer.", T::type_display()).in_scope(|| {
stats.inc_buffer_count();
let mut_dirty = MutDirty::new(Box::new(on_mut));
let resize_dirty = ResizeDirty::new(Box::new(on_resize));


@ -79,16 +79,16 @@ fn configure_simple_grid_view(view: &grid_view::simple::SimpleGridView) -> frp::
view.model_for_entry <+ requested_entry;
entry_hovered <- view.entry_hovered.filter_map(|l| *l);
entry_selected <- view.entry_selected.filter_map(|l| *l);
eval entry_hovered ([]((row, col)) tracing::debug!("Hovered entry ({row}, {col})."));
eval entry_selected ([]((row, col)) tracing::debug!("Selected entry ({row}, {col})."));
eval view.entry_accepted ([]((row, col)) tracing::debug!("ACCEPTED entry ({row}, {col})."));
eval entry_hovered ([]((row, col)) debug!("Hovered entry ({row}, {col})."));
eval entry_selected ([]((row, col)) debug!("Selected entry ({row}, {col})."));
eval view.entry_accepted ([]((row, col)) debug!("ACCEPTED entry ({row}, {col})."));
eval view.selection_movement_out_of_grid_prevented ([](dir)
if let Some(dir) = dir {
let msg = iformat!(
"An attempt to select an entry outside of the grid in " dir;?
" direction was prevented."
);
tracing::debug!("{msg}");
debug!("{msg}");
}
);
}


@ -169,7 +169,7 @@ impl WeakNetwork {
pub fn upgrade_or_warn(&self) -> Option<Network> {
let result = self.upgrade();
if result.is_none() {
tracing::warn!("The Network is dropped in a place where we don't expect.");
warn!("The Network is dropped in a place where we don't expect.");
}
result
}


@ -0,0 +1,22 @@
[package]
name = "enso-logging"
version = "0.3.1"
authors = ["Enso Team <contact@luna-lang.org>"]
edition = "2021"
description = "An efficient logger for writing applications in Rust."
readme = "README.md"
homepage = "https://github.com/enso-org/enso/lib/rust/logging"
repository = "https://github.com/enso-org/enso"
license-file = "../../LICENSE"
keywords = ["logging"]
categories = ["development-tools::debugging"]
[lib]
[features]
default = []
[dependencies]
wasm-bindgen = { workspace = true }
enso-logging-macros = { path = "macros" }
web-sys = { version = "0.3.4", features = ["console"] }


@ -0,0 +1,14 @@
[package]
name = "enso-logging-macros"
version = "0.1.0"
edition = "2021"
authors = ["Enso Team <contact@enso.org>"]
[lib]
proc-macro = true
[dependencies]
proc-macro2 = "1.0"
quote = "1.0"
syn = { version = "1.0", features = ["full", "visit-mut"] }
Inflector = "0.11"


@ -0,0 +1,28 @@
//! Build script for [`enso_logging_macros`]. This is needed to make cargo aware that the crate
//! depends on the values of environment variables at compile time, and that changes to those
//! variables should result in recompiling this crate and its dependents.
// === Non-Standard Linter Configuration ===
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unsafe_code)]
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]
fn main() {
declare_env_dependence("ENSO_MAX_LOGGING_LEVEL");
declare_env_dependence("ENSO_UNCOLLAPSED_LOGGING_LEVEL");
}
/// Make cargo aware that the result of compiling this crate depends on an environment variable.
fn declare_env_dependence(env: &str) {
println!("cargo:rerun-if-env-changed={}", env);
// This is a no-op assignment, except it makes cargo aware that the output depends on the env.
let value = std::env::var(env).unwrap_or_default();
println!("cargo:rustc-env={}={}", env, value);
}


@ -0,0 +1,586 @@
//! Proc macros supporting the implementation of the `enso_logging` library.
#![feature(anonymous_lifetime_in_impl_trait)]
#![feature(proc_macro_span)]
#![feature(let_chains)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
use inflector::Inflector;
use quote::quote;
/// The log levels defined in the Console Web API.
/// See: https://console.spec.whatwg.org/#loglevel-severity
const WEB_LOG_LEVELS: &[&str] = &["error", "warn", "info", "debug"];
// ==================================
// === Compile-time configuration ===
// ==================================
const LEVEL_CONFIGURATION_ENV_VARS: LevelConfiguration<&str> = LevelConfiguration {
max_enabled: "ENSO_MAX_LOG_LEVEL",
max_uncollapsed: "ENSO_MAX_UNCOLLAPSED_LOG_LEVEL",
};
// =====================
// === Helper macros ===
// =====================
macro_rules! map_fns {
($Ty:tt, [$($covariant:tt),*]) => { map_fns!($Ty, [$($covariant),*], []); };
($Ty:ident, [$($covariant:ident),*], [$($invariant:ident),*]) => {
impl<T> $Ty<T> {
#[allow(unused)]
fn map<F, U>(self, f: F) -> $Ty<U> where F: Fn(T) -> U {
let Self {
$($covariant,)*
$($invariant,)*
} = self;
$Ty {
$($covariant: f($covariant),)*
$($invariant,)*
}
}
#[allow(unused)]
fn for_each<F>(self, mut f: F) where F: FnMut(T) {
$(f(self.$covariant);)*
}
#[allow(unused)]
fn as_ref(&self) -> $Ty<&T> {
let Self {
$($covariant,)*
$($invariant,)*
} = self;
$Ty {
$($covariant,)*
$($invariant: $invariant.clone(),)*
}
}
}
};
}
// =================
// === Interface ===
// =================
/// Implement a logging API for the specified log levels.
#[proc_macro]
pub fn define_log_levels(ts: proc_macro::TokenStream) -> proc_macro::TokenStream {
use syn::parse::Parser;
let parser = syn::punctuated::Punctuated::<syn::Ident, syn::Token![,]>::parse_terminated;
let args = parser.parse(ts).unwrap();
let names: Vec<_> = args.into_iter().map(|ident| ident.to_string().to_snake_case()).collect();
let position_in_args = |name| {
names.iter().position(|arg| &name == arg).unwrap_or_else(|| {
let error = "Environment variable value must correspond to a defined log level";
panic!("{error}. Found: {name:?}, expected one of: {names:?}.")
})
};
let level_configuration = LEVEL_CONFIGURATION_ENV_VARS
.map(|var| std::env::var(var).ok().map(position_in_args).unwrap_or_default());
logging_api(names, level_configuration)
}
struct LevelConfiguration<T> {
max_enabled: T,
max_uncollapsed: T,
}
map_fns!(LevelConfiguration, [max_enabled, max_uncollapsed]);
// =====================
// === Top-level API ===
// =====================
fn logging_api(
level_names: impl IntoIterator<Item = String>,
config: LevelConfiguration<usize>,
) -> proc_macro::TokenStream {
let global_logger_ident = ident("GlobalLogger");
let global_logger_path = ident_to_path(global_logger_ident.clone());
let levels = levels(level_names, &config, global_logger_path);
let api: Api = [
span_trait(),
logger_trait(&levels),
span_api(&levels),
event_api(&levels),
global_logger(global_logger_ident, &levels),
]
.into_iter()
.collect();
api.into_library().into()
}
// === Information used to construct level-specific interfaces ===
struct Level {
// Intrinsic properties of a level:
name: String,
enabled: bool,
uncollapsed: bool,
// Identifiers for API cross-references:
trait_methods: LoggerMethods<syn::Ident>,
global_logger_methods: LoggerMethods<syn::Path>,
}
impl Level {
fn new(name: String, enabled: bool, uncollapsed: bool, global_logger_path: &syn::Path) -> Self {
let trait_methods = trait_methods(&name);
let global_logger_methods =
trait_methods.clone().map(|x| qualified(global_logger_path.clone(), x));
Level { global_logger_methods, trait_methods, enabled, uncollapsed, name }
}
}
fn levels(
names: impl IntoIterator<Item = String>,
config: &LevelConfiguration<usize>,
global_logger_path: syn::Path,
) -> Vec<Level> {
let enabled = |i| config.max_enabled >= i;
let uncollapsed = |i| config.max_uncollapsed >= i;
let level = |(i, name)| Level::new(name, enabled(i), uncollapsed(i), &global_logger_path);
names.into_iter().enumerate().map(level).collect()
}
// ==============================
// === Representation of APIs ===
// ==============================
#[derive(Default)]
struct Api {
implementation: proc_macro2::TokenStream,
exports: Vec<Export>,
}
impl Api {
fn extend(&mut self, mut other: Self) {
self.implementation.extend(other.implementation);
self.exports.append(&mut other.exports);
}
fn into_library(self) -> proc_macro2::TokenStream {
let reexport = |name: &syn::Ident| {
quote! { pub use crate::internal::#name; }
};
let prelude: proc_macro2::TokenStream =
self.exports.iter().filter_map(|e| e.prelude.then(|| reexport(&e.ident))).collect();
let exports: proc_macro2::TokenStream =
self.exports.iter().map(|e| reexport(&e.ident)).collect();
let implementation = self.implementation;
quote! {
/// Exports, intended to be used by glob-import.
pub mod prelude {
#prelude
}
#exports
/// Low-level interface, used by macro implementations.
pub mod internal {
#implementation
}
}
}
}
impl FromIterator<Api> for Api {
fn from_iter<T: IntoIterator<Item = Api>>(iter: T) -> Self {
let mut collected: Api = Default::default();
iter.into_iter().for_each(|api| collected.extend(api));
collected
}
}
impl From<proc_macro2::TokenStream> for Api {
fn from(implementation: proc_macro2::TokenStream) -> Self {
Self { implementation, ..Default::default() }
}
}
struct Export {
ident: syn::Ident,
prelude: bool,
}
impl Export {
fn prelude(ident: syn::Ident) -> Self {
Self { ident, prelude: true }
}
}
// =====================
// === LogSpan trait ===
// =====================
fn span_trait() -> Api {
let trait_name = ident("LogSpan");
let implementation = quote! {
/// Identifies a location in the source that may be traced in the logs.
pub trait #trait_name {
/// Log entry into the span, run the given closure, and log exit.
#[inline(always)]
fn in_scope<F, T>(self, f: F) -> T where Self: Sized, F: FnOnce() -> T {
let _scope = self.entered();
f()
}
/// Log entry into the span, and log exit when the returned value is dropped.
#[inline(always)]
fn entered(self) -> Entered<Self> where Self: Sized {
Entered::new(self)
}
#[allow(missing_docs)]
fn _enter(&self);
#[allow(missing_docs)]
fn _exit(&self);
}
/// RAII guard that enters a span when created and exits when dropped.
pub struct Entered<S: #trait_name>(S);
impl<S: #trait_name> Entered<S> {
#[allow(missing_docs)]
pub fn new(span: S) -> Self {
span._enter();
Self(span)
}
}
impl<S: #trait_name> Drop for Entered<S> {
fn drop(&mut self) {
self.0._exit();
}
}
};
let exports = vec![Export::prelude(trait_name)];
Api { implementation, exports }
}
// ====================
// === Logger trait ===
// ====================
fn logger_trait(levels: impl IntoIterator<Item = &Level>) -> Api {
let mut methods = proc_macro2::TokenStream::new();
for level in levels {
level.trait_methods.signatures().for_each(|x| methods.extend(x))
}
(quote! {
/// A type that serves as a destination for logging.
pub trait Logger {
#methods
}
})
.into()
}
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
struct LoggerMethods<T = proc_macro2::TokenStream> {
emit_fn: T,
enter_fn: T,
exit_fn: T,
}
map_fns!(LoggerMethods, [emit_fn, enter_fn, exit_fn]);
impl LoggerMethods<syn::Ident> {
fn signatures(&self) -> LoggerMethods {
let LoggerMethods { emit_fn, enter_fn, exit_fn } = self;
LoggerMethods {
emit_fn: quote! { #[allow(missing_docs)] fn #emit_fn(span: &str); },
enter_fn: quote! { #[allow(missing_docs)] fn #enter_fn(span: &str); },
exit_fn: quote! { #[allow(missing_docs)] fn #exit_fn(); },
}
}
fn with_bodies(
&self,
bodies: &LoggerMethods<proc_macro2::TokenStream>,
) -> LoggerMethods<proc_macro2::TokenStream> {
let LoggerMethods { emit_fn, enter_fn, exit_fn } = self;
let LoggerMethods { emit_fn: emit_body, enter_fn: enter_body, exit_fn: exit_body } = bodies;
LoggerMethods {
emit_fn: quote! { #[inline] fn #emit_fn(span: &str) { #emit_body } },
enter_fn: quote! { #[inline] fn #enter_fn(span: &str) { #enter_body } },
exit_fn: quote! { #[inline] fn #exit_fn() { #exit_body } },
}
}
}
fn trait_methods(level: &str) -> LoggerMethods<syn::Ident> {
(LoggerMethods {
emit_fn: format!("emit_{level}"),
enter_fn: format!("enter_{level}"),
exit_fn: format!("exit_{level}"),
})
.as_ref()
.map(ident)
}
// =================
// === Event API ===
// =================
fn event_api(levels: impl IntoIterator<Item = &Level>) -> Api {
levels
.into_iter()
.map(|level| event_api_for_level(&level.name, &level.global_logger_methods, level.enabled))
.collect()
}
fn event_api_for_level(level: &str, methods: &LoggerMethods<syn::Path>, enabled: bool) -> Api {
let event_macro = ident(level);
let emit_fn = &methods.emit_fn;
let level_tag = level.to_screaming_snake_case();
let body = if enabled {
quote! {
use $crate::internal::Logger;
$crate::internal::#emit_fn(
&format!(
"[{}] {}:{} {}",
#level_tag,
file!(),
line!(),
format_args!($($args)*)
));
}
} else {
quote! {
let _unused_at_this_log_level = format_args!($($args)*);
}
};
let implementation = quote! {
/// Emit a log message, if the log-level is enabled.
#[macro_export]
macro_rules! #event_macro {
($($args:tt)*) => {{ #body }};
}
};
Api { implementation, ..Default::default() }
}
// ================
// === Span API ===
// ================
fn span_api(levels: impl IntoIterator<Item = &Level>) -> Api {
levels
.into_iter()
.map(|level| span_api_for_level(&level.name, &level.global_logger_methods, level.enabled))
.collect()
}
fn span_api_for_level(level: &str, methods: &LoggerMethods<syn::Path>, enabled: bool) -> Api {
let object_name = ident(level.to_pascal_case());
let macro_name = ident(format!("{level}_span"));
let object_contents = enabled.then_some(quote! { pub String }).unwrap_or_default();
let enter_fn = &methods.enter_fn;
let exit_fn = &methods.exit_fn;
let enter_impl = enabled
.then_some(quote! {
#enter_fn(&self.0);
})
.unwrap_or_default();
let exit_impl = enabled
.then_some(quote! {
#exit_fn();
})
.unwrap_or_default();
let level_tag = level.to_screaming_snake_case();
let creation_body = if enabled {
quote! {
$crate::internal::#object_name(
format!(
"[{}] {}:{} {}",
#level_tag,
file!(),
line!(),
format_args!($($args)*)
)
)
}
} else {
quote! {
let _unused_at_this_log_level = format_args!($($args)*);
$crate::internal::#object_name()
}
};
let implementation = quote! {
/// Refers to a region in the source code that may have associated logging.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct #object_name(#object_contents);
impl LogSpan for #object_name {
#[inline(always)]
fn _enter(&self) {
#enter_impl
}
#[inline(always)]
fn _exit(&self) {
#exit_impl
}
}
/// Create an object that identifies a location in the source code for logging purposes.
#[macro_export]
macro_rules! #macro_name {
($($args:tt)*) => {{
#creation_body
}}
}
};
implementation.into()
}
// =========================
// === Global Logger API ===
// =========================
fn global_logger(logger: syn::Ident, levels: impl IntoIterator<Item = &Level>) -> Api {
let console_logger = ident("NativeConsole");
let web_logger = ident("WebConsole");
let mut web_logger_methods = proc_macro2::TokenStream::new();
let mut console_logger_methods = proc_macro2::TokenStream::new();
let mut web_level = WEB_LOG_LEVELS.iter().copied().fuse();
let mut web = web_level.next().unwrap();
for level in levels {
level
.trait_methods
.with_bodies(&level.enabled.then(console_logger_impl).unwrap_or_default())
.for_each(|x| console_logger_methods.extend(x));
level
.trait_methods
.with_bodies(&level.enabled.then_some(web_logger_impl(web, level)).unwrap_or_default())
.for_each(|x| web_logger_methods.extend(x));
web = web_level.next().unwrap_or(web);
}
(quote! {
#[cfg(target_arch = "wasm32")]
/// The currently-enabled global logger.
pub type #logger = web::#web_logger;
#[cfg(not(target_arch = "wasm32"))]
/// The currently-enabled global logger.
pub type #logger = native::#console_logger;
/// Logging support for wasm environments.
#[cfg(target_arch = "wasm32")]
pub mod web {
use super::*;
/// A [`Logger`] that emits messages to the Console Web API.
pub struct #web_logger;
impl Logger for #web_logger {
#web_logger_methods
}
}
/// Logging support for non-wasm environments.
#[cfg(not(target_arch = "wasm32"))]
pub mod native {
use super::*;
/// A [`Logger`] that emits messages to the native console.
pub struct #console_logger;
thread_local! {
static CONSOLE_INDENT: core::cell::Cell<usize> = core::cell::Cell::new(0);
}
impl Logger for #console_logger {
#console_logger_methods
}
}
})
.into()
}
// =============================
// === Native console logger ===
// =============================
fn console_logger_impl() -> LoggerMethods {
LoggerMethods {
emit_fn: quote! {
let indent = CONSOLE_INDENT.get();
println!("{:indent$}{}", "", span, indent=indent*4);
},
enter_fn: quote! {
let indent = CONSOLE_INDENT.get();
println!("{:indent$}{}", "", span, indent=indent*4);
CONSOLE_INDENT.set(indent + 1);
},
exit_fn: quote! {
let indent = CONSOLE_INDENT.get();
CONSOLE_INDENT.set(indent - 1);
},
}
}
// ======================
// === Web API logger ===
// ======================
fn web_logger_impl(web_level: &str, level: &Level) -> LoggerMethods {
let event_fn = ident(format!("{web_level}_1"));
let group_fn = ident(if level.uncollapsed { "group_1" } else { "group_collapsed_1" });
LoggerMethods {
emit_fn: quote! {
web_sys::console::#event_fn(&span.into());
},
enter_fn: quote! {
web_sys::console::#group_fn(&span.into());
},
exit_fn: quote! {
web_sys::console::group_end();
},
}
}
// ============================
// === Syn-building helpers ===
// ============================
fn qualified(mut path: syn::Path, name: syn::Ident) -> syn::Path {
path.segments.push(path_segment(name));
path
}
fn path_segment(ident: syn::Ident) -> syn::PathSegment {
syn::PathSegment { ident, arguments: Default::default() }
}
fn ident(name: impl AsRef<str>) -> syn::Ident {
syn::Ident::new(name.as_ref(), proc_macro2::Span::call_site())
}
fn ident_to_path(segment: syn::Ident) -> syn::Path {
syn::Path {
leading_colon: Default::default(),
segments: std::iter::once(path_segment(segment)).collect(),
}
}


@ -0,0 +1,21 @@
//! High-performance logging library.
// === Features ===
#![feature(local_key_cell_methods)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
enso_logging_macros::define_log_levels![Error, Warn, Info, Debug, Trace];


@ -16,6 +16,7 @@ publish = true
crate-type = ["rlib"]
[dependencies]
enso-logging = { path = "../logging" }
enso-reflect = { path = "../reflect" }
enso-shapely = { path = "../shapely" }
anyhow = "1.0.66"
@ -39,9 +40,6 @@ shrinkwraprs = "0.3.0"
serde = { version = "1.0.126", features = ["derive", "rc"] }
serde_json = { version = "1.0" }
smallvec = "1.0.0"
tracing = { version = "0.1.37", features = ["log"] }
tracing-subscriber = "0.3"
tracing-wasm = "0.2"
wasm-bindgen = { workspace = true }
weak-table = "0.3.0"
enso-web = { path = "../web" }


@ -138,60 +138,22 @@ pub mod serde_reexports {
// ===============
// === Tracing ===
// === Logging ===
// ===============
pub mod tracing {
pub use tracing::*;
pub use tracing_subscriber::*;
}
pub use ::tracing::debug;
pub use ::tracing::debug_span;
pub use ::tracing::error;
pub use ::tracing::error_span;
pub use ::tracing::info;
pub use ::tracing::info_span;
pub use ::tracing::trace;
pub use ::tracing::trace_span;
pub use ::tracing::warn;
pub use ::tracing::warn_span;
pub const ERROR: tracing::Level = tracing::Level::ERROR;
pub const WARN: tracing::Level = tracing::Level::WARN;
pub const INFO: tracing::Level = tracing::Level::INFO;
pub const DEBUG: tracing::Level = tracing::Level::DEBUG;
pub const TRACE: tracing::Level = tracing::Level::TRACE;
use std::sync::Once;
/// Tracing's `set_global_default` can be called only once. When running tests this can fail if
/// not fenced.
static TRACING_INIT_ONCE: Once = Once::new();
pub fn init_tracing(level: tracing::Level) {
TRACING_INIT_ONCE.call_once(|| {
#[cfg(not(target_arch = "wasm32"))]
let subscriber = tracing::fmt()
.compact()
.with_target(false)
.with_max_level(level)
.without_time()
.finish();
#[cfg(target_arch = "wasm32")]
let subscriber = {
use tracing_subscriber::layer::SubscriberExt;
use tracing_wasm::*;
let config = WASMLayerConfigBuilder::new().set_max_level(level).build();
tracing::Registry::default().with(WASMLayer::new(config))
};
tracing::subscriber::set_global_default(subscriber).expect("Failed to initialize logger.");
});
}
pub use enso_logging::debug;
pub use enso_logging::debug_span;
pub use enso_logging::error;
pub use enso_logging::error_span;
pub use enso_logging::info;
pub use enso_logging::info_span;
pub use enso_logging::prelude::*;
pub use enso_logging::trace;
pub use enso_logging::trace_span;
pub use enso_logging::warn;
pub use enso_logging::warn_span;
pub fn init_global() {
init_tracing(WARN);
init_global_internal();
}


@ -54,7 +54,7 @@
//! }
//!
//! // Obtain log data directly; it could also be deserialized from a file.
//! let log = profiler::internal::take_log();
//! let log = profiler::internal::get_log();
//! // Parse the log. Interpret metadata according to the enum defined above.
//! let profile: profiler_data::Profile<MyMetadata> = log.parse().unwrap();
//! // Verify the MyData objects are present and attached to the right interval.
@ -539,7 +539,7 @@ mod tests {
}
parent();
let profile: profiler_data::Profile<OpaqueMetadata> =
profiler::internal::take_log().parse().unwrap();
profiler::internal::get_log().parse().unwrap();
let roots = &profile.root_measurement().children;
assert_eq!(roots.len(), 1);
let parent = &profile[roots[0]];
@ -566,7 +566,7 @@ mod tests {
let future = parent();
futures::executor::block_on(future);
let profile: profiler_data::Profile<OpaqueMetadata> =
profiler::internal::take_log().parse().unwrap();
profiler::internal::get_log().parse().unwrap();
let roots = &profile.root_measurement().children;
assert_eq!(roots.len(), 1);
let parent = &profile[roots[0]];
@ -598,7 +598,7 @@ mod tests {
// Create a Future, but don't await it.
let _future = func();
let profile: profiler_data::Profile<OpaqueMetadata> =
profiler::internal::take_log().parse().unwrap();
profiler::internal::get_log().parse().unwrap();
let roots = &profile.root_measurement().children;
assert!(!profile[roots[0]].finished);
}
@ -609,6 +609,7 @@ mod tests {
profiler::internal::Label(label),
Some(profiler::internal::Timestamp::now()),
profiler::internal::StartState::Active,
Default::default(),
)
}
@ -616,7 +617,7 @@ mod tests {
fn unfinished_still_running() {
start_profiler("unfinished (?:?)");
let profile: profiler_data::Profile<OpaqueMetadata> =
profiler::internal::take_log().parse().unwrap();
profiler::internal::get_log().parse().unwrap();
let roots = &profile.root_measurement().children;
assert!(!profile[roots[0]].finished);
}
@ -626,7 +627,7 @@ mod tests {
let id = start_profiler("unfinished (?:?)");
profiler::internal::EventLog.pause(id, profiler::internal::Timestamp::now());
let profile: profiler_data::Profile<OpaqueMetadata> =
profiler::internal::take_log().parse().unwrap();
profiler::internal::get_log().parse().unwrap();
let roots = &profile.root_measurement().children;
assert!(!profile[roots[0]].finished);
}
@ -652,7 +653,7 @@ mod tests {
MyDataA(MyDataA),
MyDataB(MyDataBExpected),
}
let log = profiler::internal::take_log();
let log = profiler::internal::get_log();
let root: Result<profiler_data::Profile<MyMetadata>, _> = log.parse();
let root = match root {
Err(profiler_data::Error::RecoverableFormatError { errors, with_missing_data }) => {


@ -25,7 +25,7 @@ use enso_profiler::profile;
/// Create example profiling data.
pub async fn create_data() -> String {
start_project().await;
profiler::internal::take_log()
profiler::internal::get_log()
}
/// A dummy computation that is intended to take some time based on input (where a higher number


@ -118,7 +118,7 @@ impl Graph {
/// Gather and remove all logged measurements and return them as a `Graph`.
pub fn take_from_log() -> Self {
let profile: Result<data::Profile<data::OpaqueMetadata>, _> =
profiler::internal::take_log().parse();
profiler::internal::get_log().parse();
if let Ok(profile) = profile {
new_hybrid_graph(&profile)
} else {
@ -409,7 +409,7 @@ mod tests {
profiled_a();
let profile: data::Profile<data::OpaqueMetadata> =
profiler::internal::take_log().parse().unwrap();
profiler::internal::get_log().parse().unwrap();
let flame_graph = Graph::new_callgraph(&profile);
assert_eq!(flame_graph.activity_blocks.len(), 2);


@ -16,7 +16,6 @@
fn main() {
declare_env_dependence("ENSO_MAX_PROFILING_LEVEL");
declare_env_cfg_flag("ENSO_ENABLE_PROC_MACRO_SPAN", "enso_enable=\"proc_macro_span\"");
}
/// Make cargo aware that the result of compiling this crate depends on an environment variable.
@ -26,12 +25,3 @@ fn declare_env_dependence(env: &str) {
let value = std::env::var(env).unwrap_or_default();
println!("cargo:rustc-env={}={}", env, value);
}
/// Make cargo aware that the result of compiling this crate depends on an environment variable;
/// convert that variable to a `cfg` flag so that it can be used for conditional compilation.
fn declare_env_cfg_flag(env: &str, cfg: &str) {
println!("cargo:rerun-if-env-changed={}", env);
if std::env::var(env).is_ok() {
println!("cargo:rustc-cfg={}", cfg);
}
}


@ -0,0 +1,87 @@
//! Information specific to each profiling level.
use inflector::Inflector;
use quote::ToTokens;
use std::env;
use syn::parse::Parser;
use syn::punctuated;
// =============
// === Level ===
// =============
/// Information about a profiling level.
pub struct Level {
pub obj_ident: syn::Ident,
pub fn_ident: syn::Ident,
pub name: String,
pub enabled: bool,
}
/// Given syntax representing a sequence of profiling levels, produce [`Level`] objects describing
/// the levels.
pub fn parse_levels(var: &str, ts: proc_macro::TokenStream) -> Vec<Level> {
let parser = punctuated::Punctuated::<syn::Ident, syn::Token![,]>::parse_terminated;
let obj_idents: Vec<_> = parser.parse(ts).unwrap().into_iter().collect();
let level_names: Vec<_> = obj_idents.iter().map(|id| id.to_string().to_snake_case()).collect();
let max_enabled = level_from_env_var(var, &level_names);
obj_idents
.into_iter()
.enumerate()
.zip(level_names)
.map(|((i, obj_ident), name)| Level {
obj_ident,
fn_ident: syn::Ident::new(&name, proc_macro2::Span::call_site()),
name: name.clone(),
enabled: i <= max_enabled,
})
.collect()
}
/// Return the numeric Profiling/Log Level (counting from 0 = top-level only).
fn level_from_env_var(var: &str, levels: &[impl AsRef<str>]) -> usize {
let enabled = match env::var(var) {
Ok(level) => level,
// If the variable isn't set, we default to the minimum.
Err(_) => return 0,
};
for (i, name) in levels.iter().enumerate() {
if &enabled[..] == name.as_ref() {
return i;
}
}
panic!("{var} set to unknown level: {enabled}")
}
// =========================
// === `enum` Generation ===
// =========================
/// Given a collection of variant identifiers, produce syntax defining a data-less enum.
pub fn make_enum<'a>(
ident: syn::Ident,
variants: impl IntoIterator<Item = &'a syn::Ident>,
) -> proc_macro::TokenStream {
let ident_to_variant = |ident| syn::Variant {
ident,
fields: syn::Fields::Unit,
attrs: Default::default(),
discriminant: Default::default(),
};
let variants: punctuated::Punctuated<syn::Variant, syn::Token![,]> =
variants.into_iter().cloned().map(ident_to_variant).collect();
(quote::quote! {
#[allow(missing_docs)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Default, serde::Serialize)]
pub enum #ident {
#[default]
#variants
}
})
.to_token_stream()
.into()
}


@ -7,7 +7,8 @@
//! implementing this without proc macros would be complex and repetitious.
//! - To implement the [`#[profile]`](macro@profile) attribute macro.
#![cfg_attr(enso_enable = "proc_macro_span", feature(proc_macro_span))]
#![feature(proc_macro_span)]
#![feature(let_chains)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
@ -22,193 +23,35 @@
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
use inflector::Inflector;
use quote::ToTokens;
use std::env;
use std::fmt;
use syn::parse::Parser;
use syn::punctuated;
use syn::visit_mut;
use syn::visit_mut::VisitMut;
#[macro_use]
mod wrap_async;
mod level;
mod low_level;
mod profile_attribute;
// =========================
// === define_hierarchy! ===
// =========================
// ==================================
// === Compile-time configuration ===
// ==================================
/// Define a profiler type.
fn define_profiler(
level: &syn::Ident,
obj_ident: &syn::Ident,
enabled: bool,
const PROFILING_LEVEL_ENV_VAR: &str = "ENSO_MAX_PROFILING_LEVEL";
// =================
// === Interface ===
// =================
/// [Documented in `profiler`](../enso_profiler/attr.profile.html).
#[allow(missing_docs)]
#[proc_macro_attribute]
pub fn profile(
args: proc_macro::TokenStream,
ts: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let start = quote::format_ident!("start_{}", level);
let create = quote::format_ident!("create_{}", level);
let level_link = format!("[{}-level](index.html#{})", level, level);
let doc_obj = format!("Identifies a {} profiler.", level_link);
let doc_start = format!("Start a new {} profiler.", level_link);
let doc_create = format!("Create a new {} profiler, in unstarted state.", level_link);
let ts = if enabled {
quote::quote! {
// =================================
// === Profiler (e.g. Objective) ===
// =================================
#[doc = #doc_obj]
#[derive(Copy, Clone, Debug)]
pub struct #obj_ident(pub EventId);
// === Trait Implementations ===
impl Profiler for #obj_ident {
fn start(
parent: EventId,
label: Label,
time: Option<Timestamp>,
start: StartState,
) -> Self {
#obj_ident(EventLog.start(parent, label, time, start))
}
fn finish(self) {
EventLog.end(self.0, Timestamp::now())
}
fn pause(self) {
EventLog.pause(self.0, Timestamp::now());
}
fn resume(self) {
EventLog.resume(self.0, Timestamp::now());
}
}
// === Constructor macros ===
#[doc = #doc_start]
#[macro_export]
macro_rules! #start {
($parent: expr, $label: expr) => {{
use profiler::Parent;
let label = profiler::internal::Label(
concat!($label, " (", file!(), ":", line!(), ")"));
let profiler: profiler::internal::Started<profiler::#obj_ident> =
$parent.start_child(label);
profiler
}}
}
#[doc = #doc_create]
#[macro_export]
macro_rules! #create {
($label: expr) => {{
let label = profiler::internal::Label(
concat!($label, " (", file!(), ":", line!(), ")"));
let parent = profiler::internal::EventId::implicit();
let now = Some(profiler::internal::Timestamp::now());
let paused = profiler::internal::StartState::Paused;
profiler::#obj_ident::start(parent, label, now, paused)
}}
}
}
} else {
quote::quote! {
// =================================
// === Profiler (e.g. Objective) ===
// =================================
#[doc = #doc_obj]
#[derive(Copy, Clone, Debug)]
pub struct #obj_ident(pub ());
// === Trait Implementations ===
impl Profiler for #obj_ident {
fn start(
_: EventId,
_: Label,
_: Option<Timestamp>,
_: StartState,
) -> Self {
Self(())
}
fn finish(self) {}
fn pause(self) {}
fn resume(self) { }
}
// === Constructor macros ===
#[doc = #doc_start]
#[macro_export]
macro_rules! #start {
($parent: expr, $label: expr) => {{
let _unused_at_this_profiling_level = $parent;
profiler::internal::Started(profiler::#obj_ident(()))
}}
}
#[doc = #doc_create]
#[macro_export]
macro_rules! #create {
($label: expr) => {{
profiler::#obj_ident(())
}}
}
}
};
ts.into()
}
/// Generates an implementation of the [`Parent`] trait relating the given parent and child, when
/// both are enabled.
fn enabled_impl_parent(
parent_ident: &syn::Ident,
child_ident: &syn::Ident,
) -> proc_macro::TokenStream {
let ts = quote::quote! {
impl Parent<#child_ident> for #parent_ident {
fn start_child(&self, label: Label) -> Started<#child_ident> {
let start = Some(Timestamp::now());
Started(#child_ident::start(self.0, label, start, StartState::Active))
}
}
};
ts.into()
}
/// Generates an implementation of the [`Parent`] trait relating the given parent and child, when
/// the child is disabled.
fn disabled_impl_parent(
parent_ident: &syn::Ident,
child_ident: &syn::Ident,
) -> proc_macro::TokenStream {
let ts = quote::quote! {
impl Parent<#child_ident> for #parent_ident {
fn start_child(&self, label: Label) -> Started<#child_ident> {
Started(#child_ident(()))
}
}
};
ts.into()
}
/// Return the numeric Profiling Level (counting from 0 = top-level only).
fn get_enabled_level(levels: &[impl AsRef<str>]) -> usize {
const PROFILING_LEVEL_ENV_VAR: &str = "ENSO_MAX_PROFILING_LEVEL";
let enabled = match env::var(PROFILING_LEVEL_ENV_VAR) {
Ok(level) => level,
// If the variable isn't set, we default to the minimum.
Err(_) => return 0,
};
for (i, name) in levels.iter().enumerate() {
if &enabled[..] == name.as_ref() {
return i;
}
}
panic!("{} set to unknown profiling level: {}", PROFILING_LEVEL_ENV_VAR, enabled);
profile_attribute::profile(args, ts)
}
/// Defines a hierarchy of profiler levels.
@ -223,422 +66,6 @@ fn get_enabled_level(levels: &[impl AsRef<str>]) -> usize {
///
/// Profiler-level names should be given in CamelCase.
#[proc_macro]
pub fn define_hierarchy(ts: proc_macro::TokenStream) -> proc_macro::TokenStream {
let parser = punctuated::Punctuated::<syn::Ident, syn::Token![,]>::parse_terminated;
let idents: Vec<_> = parser.parse(ts).unwrap().into_iter().collect();
let level_names: Vec<_> = idents.iter().map(|id| id.to_string().to_snake_case()).collect();
let enabled_level = get_enabled_level(&level_names);
let mut out = proc_macro::TokenStream::new();
for ((i, obj_ident), level_name) in idents.iter().enumerate().zip(level_names.iter()) {
let snake_ident = syn::Ident::new(level_name, proc_macro2::Span::call_site());
out.extend(define_profiler(&snake_ident, obj_ident, enabled_level >= i));
for (j, child_ident) in idents[i..].iter().enumerate() {
let parent_impl = match enabled_level >= i + j {
true => enabled_impl_parent(obj_ident, child_ident),
false => disabled_impl_parent(obj_ident, child_ident),
};
out.extend(parent_impl);
}
}
out
}
// ================
// === profile! ===
// ================
// [Documented in `profiler`](../enso_profiler/attr.profile.html).
#[allow(missing_docs)]
#[proc_macro_attribute]
pub fn profile(
args: proc_macro::TokenStream,
ts: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let mut func = syn::parse_macro_input!(ts as syn::ItemFn);
let level: syn::Ident = syn::parse(args)
.expect("The `profile` macro requires a profiling-level argument, e.g. #[profile(Task)]");
let label = make_label(&func.sig.ident);
// Instrument awaits, whether at the top level (if this is an async fn) or in inner async
// blocks.
WrapAwait.visit_block_mut(&mut func.block);
// Different transformations for async or non-async.
let async_block_origin = match func.sig.asyncness {
// Async: transform it to a non-async fn containing an async block. The outer fn does
// not need any top-level profiling; all it does is set up the async block. We'll
// instrument the block below.
Some(_) => {
wrap_async_fn(&mut func);
AsyncBlockOrigin::FnBody
}
// Non-async: instrument the top level of the function.
None => {
profile_sync(&level, &label, &mut func);
AsyncBlockOrigin::Block
}
};
// Instrument any async blocks in the body.
let name = func.sig.ident.to_string();
let mut instrumentor = InstrumentAsync { level, func: name, origin: async_block_origin };
instrumentor.visit_block_mut(&mut func.block);
func.into_token_stream().into()
}
#[cfg(not(enso_enable = "proc_macro_span"))]
/// Decorate the input with file:line info determined by the proc_macro's call site.
fn make_label<L: fmt::Display>(name: L) -> String {
format!("{} (?:?)", name)
}
#[cfg(enso_enable = "proc_macro_span")]
/// Decorate the input with file:line info determined by the proc_macro's call site.
fn make_label<L: fmt::Display>(name: L) -> String {
let span = proc_macro::Span::call_site();
let file = span.source_file().path();
let path = file.as_path().to_string_lossy();
let line = span.start().line;
format!("{} ({}:{})", name, path, line)
}
// === VisitMut helpers ===
/// Used in an `impl VisitMut` to block descent into inner function items.
macro_rules! ignore_inner_fn_items {
() => {
fn visit_item_fn_mut(&mut self, _: &mut syn::ItemFn) {}
};
}
// === profile_sync ===
fn profile_sync(obj_ident: &syn::Ident, label: &str, func: &mut syn::ItemFn) {
let start_profiler = start_profiler(obj_ident, label, func.sig.asyncness.is_some());
let block = &func.block;
let body = quote::quote! {{
#start_profiler
let __profiler_scope = profiler::internal::Started(__profiler);
#block
}};
func.block = syn::parse2(body).unwrap();
}
fn start_profiler(
obj_ident: &syn::Ident,
label: &str,
asyncness: bool,
) -> proc_macro2::TokenStream {
let state = match asyncness {
true => quote::quote! { profiler::internal::StartState::Paused },
false => quote::quote! { profiler::internal::StartState::Active },
};
quote::quote! {
let __profiler = {
use profiler::internal::Profiler;
let parent = profiler::internal::EventId::implicit();
let now = Some(profiler::internal::Timestamp::now());
let label = profiler::internal::Label(#label);
profiler::#obj_ident::start(parent, label, now, #state)
};
}
}
// === wrap_async_fn ===
/// Convert an `async fn` into a `fn` returning a `Future`, implemented with an `async` block.
///
/// The output is functionally equivalent to the input (except the output won't `impl Send` even if
/// the original `async fn` did); this is useful as a basis for further transformation.
fn wrap_async_fn(func: &mut syn::ItemFn) {
// Wrap the body.
let block = &func.block;
let ret_ty = match &func.sig.output {
syn::ReturnType::Default => quote::quote! { () },
syn::ReturnType::Type(_, ty) => ty.into_token_stream(),
};
let body = quote::quote! {{
(async move {
let result: #ret_ty = #block;
result
})
}};
func.block = syn::parse2(body).unwrap();
// Transform the signature.
let output_ty = match &func.sig.output {
syn::ReturnType::Default => quote::quote! { () },
syn::ReturnType::Type(_, ty) => ty.to_token_stream(),
};
let output_lifetime = syn::Lifetime::new("'__profiler_out", proc_macro2::Span::call_site());
let lifetimes = explicitize_lifetimes(&mut func.sig);
let output = if lifetimes.is_empty() {
quote::quote! {
-> impl std::future::Future<Output=#output_ty>
}
} else {
// Bound all inputs on the output lifetime.
let type_bound = syn::TypeParamBound::Lifetime(output_lifetime.clone());
for param in &mut func.sig.generics.params {
match param {
syn::GenericParam::Lifetime(def) => def.bounds.push(output_lifetime.clone()),
syn::GenericParam::Type(def) => def.bounds.push(type_bound.clone()),
syn::GenericParam::Const(_) => (),
}
}
for arg in &mut func.sig.inputs {
if let syn::FnArg::Typed(syn::PatType { ty, .. }) = arg {
if let syn::Type::ImplTrait(def) = ty.as_mut() {
def.bounds.push(type_bound.clone());
}
}
}
// Add a definition for the output lifetime.
let lifetime_def = syn::LifetimeDef::new(output_lifetime.clone());
func.sig.generics.params.insert(0, syn::GenericParam::Lifetime(lifetime_def));
// Apply the output lifetime to the output.
quote::quote! {
-> impl std::future::Future<Output=#output_ty> + #output_lifetime
}
};
func.sig.asyncness = None;
func.sig.output = syn::parse2(output).unwrap();
}
/// Make all lifetimes in function signature explicit.
///
/// Returns the lifetimes used in the function signature.
fn explicitize_lifetimes(sig: &mut syn::Signature) -> Vec<syn::Lifetime> {
// Go through the args; find:
// - anonymous lifetime: '_
// - implicit lifetimes: &foo
// - explicit lifetimes: &'a
// Make all input lifetimes explicit:
// - Use new lifetime explicitly in arg list.
// - Define new lifetime in generic params.
let mut input_transformer = ExplicitizeInputLifetimes::default();
for input in &mut sig.inputs {
input_transformer.visit_fn_arg_mut(input);
}
let ExplicitizeInputLifetimes { new_lifetimes, existing_lifetimes } = input_transformer;
let mut all_lifetimes = existing_lifetimes;
all_lifetimes.extend_from_slice(&new_lifetimes);
let new_lifetimes =
new_lifetimes.into_iter().map(|lt| syn::GenericParam::Lifetime(syn::LifetimeDef::new(lt)));
sig.generics.params.extend(new_lifetimes);
// There are two cases where output lifetimes may be elided:
// - There's exactly one lifetime in the inputs.
// - There's a receiver with a lifetime.
// If either case occurs, make any implicit output lifetimes explicit.
let default_lifetime = if all_lifetimes.len() == 1 {
Some(all_lifetimes[0].clone())
} else {
get_receiver_lifetime(sig).cloned()
};
if let Some(lifetime) = default_lifetime {
ExplicitizeOutputLifetimes { lifetime }.visit_return_type_mut(&mut sig.output);
}
all_lifetimes
}
#[derive(Default)]
struct ExplicitizeInputLifetimes {
new_lifetimes: Vec<syn::Lifetime>,
existing_lifetimes: Vec<syn::Lifetime>,
}
impl ExplicitizeInputLifetimes {
fn gen_lifetime(&mut self) -> syn::Lifetime {
let name = format!("'__profiler{}", self.new_lifetimes.len());
let new = syn::Lifetime::new(&name, proc_macro2::Span::call_site());
self.new_lifetimes.push(new.clone());
new
}
fn visit_elidable_lifetime(&mut self, lifetime: &mut Option<syn::Lifetime>) {
match lifetime {
Some(lifetime) => self.visit_lifetime_mut(lifetime),
None => *lifetime = Some(self.gen_lifetime()),
}
}
}
impl visit_mut::VisitMut for ExplicitizeInputLifetimes {
ignore_inner_fn_items!();
// Handles 'x in generic parameters in types of non-self arguments.
fn visit_lifetime_mut(&mut self, lifetime: &mut syn::Lifetime) {
let name = lifetime.ident.to_string();
if &name == "_" {
*lifetime = self.gen_lifetime();
} else {
self.existing_lifetimes.push(lifetime.clone());
}
}
// Handles &self.
fn visit_receiver_mut(&mut self, receiver: &mut syn::Receiver) {
if let Some((_, lifetime)) = &mut receiver.reference {
self.visit_elidable_lifetime(lifetime);
}
}
// Handles & in types of non-self arguments.
fn visit_type_reference_mut(&mut self, type_reference: &mut syn::TypeReference) {
self.visit_elidable_lifetime(&mut type_reference.lifetime);
}
}
struct ExplicitizeOutputLifetimes {
lifetime: syn::Lifetime,
}
impl visit_mut::VisitMut for ExplicitizeOutputLifetimes {
ignore_inner_fn_items!();
// Handles 'x in generic parameters in types.
fn visit_lifetime_mut(&mut self, lifetime: &mut syn::Lifetime) {
if &lifetime.ident.to_string() == "_" {
*lifetime = self.lifetime.clone();
}
}
// Handles & in types.
fn visit_type_reference_mut(&mut self, type_reference: &mut syn::TypeReference) {
if type_reference.lifetime.is_none() {
type_reference.lifetime = Some(self.lifetime.clone());
}
}
}
fn get_receiver_lifetime(sig: &syn::Signature) -> Option<&syn::Lifetime> {
match sig.inputs.first() {
Some(syn::FnArg::Receiver(syn::Receiver {
reference: Some((_, Some(lifetime))), ..
})) => Some(lifetime),
_ => None,
}
}
// === WrapAwait ===
struct WrapAwait;
impl visit_mut::VisitMut for WrapAwait {
ignore_inner_fn_items!();
fn visit_expr_mut(&mut self, expr: &mut syn::Expr) {
match expr {
syn::Expr::Await(await_) => *expr = wrap_await(await_),
_ => syn::visit_mut::visit_expr_mut(self, expr),
}
}
}
fn wrap_await(await_: &mut syn::ExprAwait) -> syn::Expr {
let expr = &mut await_.base;
WrapAwait.visit_expr_mut(expr);
assert!(
await_.attrs.is_empty(),
"#[profile] cannot wrap a function that applies attributes to an .await expression"
);
let wrapped = quote::quote! {
({
let future = #expr;
profiler::internal::Profiler::pause(__profiler);
let result = future.await;
profiler::internal::Profiler::resume(__profiler);
result
})
};
syn::parse2(wrapped).unwrap()
}
// === InstrumentAsync ===
/// Inserts instrumentation into all async blocks in an item (ignoring inner items).
struct InstrumentAsync {
level: syn::Ident,
func: String,
origin: AsyncBlockOrigin,
}
impl visit_mut::VisitMut for InstrumentAsync {
ignore_inner_fn_items!();
fn visit_expr_mut(&mut self, expr: &mut syn::Expr) {
match expr {
syn::Expr::Async(async_) => *expr = self.instrument_async(async_),
_ => syn::visit_mut::visit_expr_mut(self, expr),
}
}
}
impl InstrumentAsync {
/// Insert instrumentation into an async block.
fn instrument_async(&self, expr: &mut syn::ExprAsync) -> syn::Expr {
self.inner_instrumentor().visit_block_mut(&mut expr.block);
assert!(
expr.attrs.is_empty(),
"#[profile] cannot wrap a function that applies attributes to an async block"
);
let label = match self.origin {
AsyncBlockOrigin::FnBody => make_label(&self.func),
AsyncBlockOrigin::Block => {
let name = format!("<async block in {}>", &self.func);
make_label(name)
}
};
let start_profiler = start_profiler(&self.level, &label, true);
let move_ = &expr.capture;
let block = &expr.block;
let wrapped = if move_.is_some() {
quote::quote! {{
#start_profiler
let __profiler_scope = profiler::internal::Started(__profiler);
async move {
profiler::internal::Profiler::resume(__profiler);
let result = #block;
std::mem::drop(__profiler_scope);
result
}
}}
} else {
// We have to move the profiler into the async block, because borrowing it would
// restrict the lifetime of the block. So we use an outer `move` block to
// capture `__profiler`, and an inner non-move block to match the behavior
// of the original non-move block.
quote::quote! {{
#start_profiler
let __profiler_scope = profiler::internal::Started(__profiler);
let inner = async #block;
async move {
profiler::internal::Profiler::resume(__profiler);
let result = inner.await;
std::mem::drop(__profiler_scope);
result
}
}}
};
syn::parse2(wrapped).unwrap()
}
/// Produce an instrumentor suitable for instrumenting blocks nested inside this block.
fn inner_instrumentor(&self) -> Self {
let level = self.level.clone();
let func = self.func.clone();
let origin = AsyncBlockOrigin::Block;
Self { level, func, origin }
}
}
/// Distinguishes between an async block that was originally the body of an `async fn`, versus an
/// async block that originated as an async block in the source.
enum AsyncBlockOrigin {
FnBody,
Block,
}
pub fn define_profiling_levels(ts: proc_macro::TokenStream) -> proc_macro::TokenStream {
low_level::define_profiling_levels(ts)
}

View File

@ -0,0 +1,202 @@
//! Generation of the Profiling Low-level API and its implementation.
use crate::level;
// =====================================
// === Low-level profiling interface ===
// =====================================
/// Produces source code defining the Low-Level Profiling API for the given hierarchy of profiling
/// levels.
pub fn define_profiling_levels(ts: proc_macro::TokenStream) -> proc_macro::TokenStream {
let mut out = proc_macro::TokenStream::new();
let levels = level::parse_levels(crate::PROFILING_LEVEL_ENV_VAR, ts);
for level in &levels {
let profiler = Profiler::new(&level.name, level.obj_ident.clone());
out.extend(if level.enabled {
define_enabled_profiler(&profiler)
} else {
define_disabled_profiler(&profiler)
});
}
for (i, parent) in levels.iter().enumerate() {
for child in &levels[i..] {
out.extend(impl_parent(&parent.obj_ident, &child.obj_ident, child.enabled));
}
}
let ident = syn::Ident::new("ProfilingLevel", proc_macro2::Span::call_site());
out.extend(level::make_enum(ident, levels.iter().map(|level| &level.obj_ident)));
out
}
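For reference, a minimal usage sketch of the generated API (assuming the hierarchy `Objective, Task, Detail, Debug` defined in `lib.rs`, and that the crate is in scope as `profiler`, as in the macro bodies above; the labels are hypothetical): each level gets a `start_*!` and a `create_*!` constructor macro.
fn usage_sketch() {
    // Start a `Task` profiler as a child of the app-lifetime `Objective` profiler;
    // the measurement finishes when `_task` is dropped.
    let _task = profiler::start_task!(profiler::APP_LIFETIME, "compile_shaders");

    // Create a `Detail` profiler in the paused state; it is logged as started-paused
    // and measures nothing until resumed.
    let _detail = profiler::create_detail!("upload_buffers");
}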
struct Profiler {
ident: syn::Ident,
start: syn::Ident,
create: syn::Ident,
doc_obj: String,
doc_start: String,
doc_create: String,
}
impl Profiler {
fn new(level: impl AsRef<str>, ident: syn::Ident) -> Self {
let level = level.as_ref();
let level_link = format!("[{level}-level](index.html#{level})");
Self {
ident,
start: quote::format_ident!("start_{level}"),
create: quote::format_ident!("create_{level}"),
doc_obj: format!("Identifies a {level_link} profiler."),
doc_start: format!("Start a new {level_link} profiler."),
doc_create: format!("Create a new {level_link} profiler, in unstarted state."),
}
}
}
fn define_enabled_profiler(profiler: &Profiler) -> proc_macro::TokenStream {
let Profiler { ident, start, create, doc_obj, doc_start, doc_create } = profiler;
let profiling_level_variant = ident;
(quote::quote! {
// =================================
// === Profiler (e.g. Objective) ===
// =================================
#[doc = #doc_obj]
#[derive(Copy, Clone, Debug)]
pub struct #ident(pub EventId);
// === Trait Implementations ===
impl Profiler for #ident {
fn start(
parent: EventId,
label: Label,
time: Option<Timestamp>,
start: StartState,
) -> Self {
let level = crate::ProfilingLevel::#profiling_level_variant;
#ident(EventLog.start(parent, label, time, start, level))
}
fn finish(self) {
EventLog.end(self.0, Timestamp::now())
}
fn pause(self) {
EventLog.pause(self.0, Timestamp::now());
}
fn resume(self) {
EventLog.resume(self.0, Timestamp::now());
}
}
// === Constructor macros ===
#[doc = #doc_start]
#[macro_export]
macro_rules! #start {
($parent: expr, $label: expr) => {{
use profiler::Parent;
let label = profiler::internal::Label(
concat!($label, " (", file!(), ":", line!(), ")"));
let profiler: profiler::internal::Started<profiler::#ident> =
$parent.start_child(label);
profiler
}}
}
#[doc = #doc_create]
#[macro_export]
macro_rules! #create {
($label: expr) => {{
let label = profiler::internal::Label(
concat!($label, " (", file!(), ":", line!(), ")"));
let parent = profiler::internal::EventId::implicit();
let now = Some(profiler::internal::Timestamp::now());
let paused = profiler::internal::StartState::Paused;
profiler::#ident::start(parent, label, now, paused)
}}
}
})
.into()
}
fn define_disabled_profiler(profiler: &Profiler) -> proc_macro::TokenStream {
let Profiler { ident, start, create, doc_obj, doc_start, doc_create, .. } = profiler;
(quote::quote! {
// =================================
// === Profiler (e.g. Objective) ===
// =================================
#[doc = #doc_obj]
#[derive(Copy, Clone, Debug)]
pub struct #ident(pub ());
// === Trait Implementations ===
impl Profiler for #ident {
fn start(
_: EventId,
_: Label,
_: Option<Timestamp>,
_: StartState,
) -> Self {
Self(())
}
fn finish(self) {}
fn pause(self) {}
fn resume(self) { }
}
// === Constructor macros ===
#[doc = #doc_start]
#[macro_export]
macro_rules! #start {
($parent: expr, $label: expr) => {{
let _unused_at_this_profiling_level = $parent;
profiler::internal::Started(profiler::#ident(()))
}}
}
#[doc = #doc_create]
#[macro_export]
macro_rules! #create {
($label: expr) => {{
profiler::#ident(())
}}
}
})
.into()
}
/// Generates an implementation of the [`Parent`] trait relating the given parent and child.
fn impl_parent(
parent_ident: &syn::Ident,
child_ident: &syn::Ident,
enabled: bool,
) -> proc_macro::TokenStream {
let body = if enabled {
quote::quote! {
let start = Some(Timestamp::now());
Started(#child_ident::start(self.0, label, start, StartState::Active))
}
} else {
quote::quote! {
Started(#child_ident(()))
}
};
(quote::quote! {
impl Parent<#child_ident> for #parent_ident {
fn start_child(&self, label: Label) -> Started<#child_ident> {
#body
}
}
})
.into()
}

View File

@ -0,0 +1,209 @@
//! Implementation of the [`#[profile]`] proc-macro.
use crate::wrap_async;
use quote::ToTokens;
use std::fmt;
use syn::visit_mut;
use syn::visit_mut::VisitMut;
/// The `#[profile]` proc-macro.
pub fn profile(
args: proc_macro::TokenStream,
ts: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
let mut func = syn::parse_macro_input!(ts as syn::ItemFn);
let level: syn::Ident = syn::parse(args)
.expect("The `profile` macro requires a profiling-level argument, e.g. #[profile(Task)]");
let label = make_label(&func.sig.ident);
// Instrument awaits, whether at the top level (if this is an async fn) or in inner async
// blocks.
WrapAwait.visit_block_mut(&mut func.block);
// Different transformations for async or non-async.
let async_block_origin = match func.sig.asyncness {
// Async: transform it to a non-async fn containing an async block. The outer fn does
// not need any top-level profiling; all it does is set up the async block. We'll
// instrument the block below.
Some(_) => {
wrap_async::wrap_async_fn(&mut func);
AsyncBlockOrigin::FnBody
}
// Non-async: instrument the top level of the function.
None => {
profile_sync(&level, &label, &mut func);
AsyncBlockOrigin::Block
}
};
// Instrument any async blocks in the body.
let name = func.sig.ident.to_string();
let mut instrumentor = InstrumentAsync { level, func: name, origin: async_block_origin };
instrumentor.visit_block_mut(&mut func.block);
func.into_token_stream().into()
}
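A minimal sketch of how this attribute is applied (the function names are hypothetical; the levels come from the hierarchy defined in the profiler crate):
#[profile(Task)]
fn prepare_scene() {
    // The body is wrapped by `profile_sync` below: a `Task` profiler starts on entry
    // and finishes when `__profiler_scope` is dropped on return.
}

#[profile(Detail)]
async fn compute_async() -> usize {
    // For an `async fn`, the outer fn is desugared via `wrap_async::wrap_async_fn`, and
    // the profiler is paused across each `.await` by the `WrapAwait` visitor below.
    std::future::ready(42).await
}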
/// Decorate the input with file:line info determined by the proc_macro's call site.
fn make_label<L: fmt::Display>(name: L) -> String {
let span = proc_macro::Span::call_site();
let file = span.source_file().path();
let path = file.as_path().to_string_lossy();
let line = span.start().line;
format!("{} ({}:{})", name, path, line)
}
// === WrapAwait ===
struct WrapAwait;
impl visit_mut::VisitMut for WrapAwait {
ignore_inner_fn_items!();
fn visit_expr_mut(&mut self, expr: &mut syn::Expr) {
match expr {
syn::Expr::Await(await_) => *expr = wrap_await(await_),
_ => syn::visit_mut::visit_expr_mut(self, expr),
}
}
}
fn wrap_await(await_: &mut syn::ExprAwait) -> syn::Expr {
let expr = &mut await_.base;
WrapAwait.visit_expr_mut(expr);
assert!(
await_.attrs.is_empty(),
"#[profile] cannot wrap a function that applies attributes to an .await expression"
);
let wrapped = quote::quote! {
({
let future = #expr;
profiler::internal::Profiler::pause(__profiler);
let result = future.await;
profiler::internal::Profiler::resume(__profiler);
result
})
};
syn::parse2(wrapped).unwrap()
}
// === profile_sync ===
fn profile_sync(obj_ident: &syn::Ident, label: &str, func: &mut syn::ItemFn) {
let start_profiler = start_profiler(obj_ident, label, func.sig.asyncness.is_some());
let block = &func.block;
let body = quote::quote! {{
#start_profiler
let __profiler_scope = profiler::internal::Started(__profiler);
#block
}};
func.block = syn::parse2(body).unwrap();
}
fn start_profiler(
obj_ident: &syn::Ident,
label: &str,
asyncness: bool,
) -> proc_macro2::TokenStream {
let state = match asyncness {
true => quote::quote! { profiler::internal::StartState::Paused },
false => quote::quote! { profiler::internal::StartState::Active },
};
quote::quote! {
let __profiler = {
use profiler::internal::Profiler;
let parent = profiler::internal::EventId::implicit();
let now = Some(profiler::internal::Timestamp::now());
let label = profiler::internal::Label(#label);
profiler::#obj_ident::start(parent, label, now, #state)
};
}
}
// === InstrumentAsync ===
/// Inserts instrumentation into all async blocks in an item (ignoring inner items).
struct InstrumentAsync {
level: syn::Ident,
func: String,
origin: AsyncBlockOrigin,
}
impl visit_mut::VisitMut for InstrumentAsync {
ignore_inner_fn_items!();
fn visit_expr_mut(&mut self, expr: &mut syn::Expr) {
match expr {
syn::Expr::Async(async_) => *expr = self.instrument_async(async_),
_ => syn::visit_mut::visit_expr_mut(self, expr),
}
}
}
impl InstrumentAsync {
/// Insert instrumentation into an async block.
fn instrument_async(&self, expr: &mut syn::ExprAsync) -> syn::Expr {
self.inner_instrumentor().visit_block_mut(&mut expr.block);
assert!(
expr.attrs.is_empty(),
"#[profile] cannot wrap a function that applies attributes to an async block"
);
let label = match self.origin {
AsyncBlockOrigin::FnBody => make_label(&self.func),
AsyncBlockOrigin::Block => {
let name = format!("<async block in {}>", &self.func);
make_label(name)
}
};
let start_profiler = start_profiler(&self.level, &label, true);
let move_ = &expr.capture;
let block = &expr.block;
let wrapped = if move_.is_some() {
quote::quote! {{
#start_profiler
let __profiler_scope = profiler::internal::Started(__profiler);
async move {
profiler::internal::Profiler::resume(__profiler);
let result = #block;
std::mem::drop(__profiler_scope);
result
}
}}
} else {
// We have to move the profiler into the async block, because borrowing it would
// restrict the lifetime of the block. So we use an outer `move` block to
// capture `__profiler`, and an inner non-move block to match the behavior
// of the original non-move block.
quote::quote! {{
#start_profiler
let __profiler_scope = profiler::internal::Started(__profiler);
let inner = async #block;
async move {
profiler::internal::Profiler::resume(__profiler);
let result = inner.await;
std::mem::drop(__profiler_scope);
result
}
}}
};
syn::parse2(wrapped).unwrap()
}
/// Produce an instrumentor suitable for instrumenting blocks nested inside this block.
fn inner_instrumentor(&self) -> Self {
let level = self.level.clone();
let func = self.func.clone();
let origin = AsyncBlockOrigin::Block;
Self { level, func, origin }
}
}
/// Distinguishes between an async block that was originally the body of an `async fn`, versus an
/// async block that originated as an async block in the source.
enum AsyncBlockOrigin {
FnBody,
Block,
}

View File

@ -0,0 +1,202 @@
//! Implementation of [`wrap_async_fn`], a helper for macros that operate on async functions.
use quote::ToTokens;
use syn::visit_mut;
use syn::visit_mut::VisitMut;
// ========================
// === VisitMut helpers ===
// ========================
/// Used in an `impl VisitMut` to block descent into inner function items.
macro_rules! ignore_inner_fn_items {
() => {
fn visit_item_fn_mut(&mut self, _: &mut syn::ItemFn) {}
};
}
// =====================
// === wrap_async_fn ===
// =====================
/// Convert an `async fn` into a `fn` returning a `Future`, implemented with an `async` block.
///
/// The output is functionally equivalent to the input (except the output won't `impl Send` even if
/// the original `async fn` did); this is useful as a basis for further transformation.
pub fn wrap_async_fn(func: &mut syn::ItemFn) {
// Wrap the body.
let block = &func.block;
let ret_ty = match &func.sig.output {
syn::ReturnType::Default => quote::quote! { () },
syn::ReturnType::Type(_, ty) => ty.into_token_stream(),
};
let body = quote::quote! {{
(async move {
let result: #ret_ty = #block;
result
})
}};
func.block = syn::parse2(body).unwrap();
// Transform the signature.
let output_ty = match &func.sig.output {
syn::ReturnType::Default => quote::quote! { () },
syn::ReturnType::Type(_, ty) => ty.to_token_stream(),
};
let output_lifetime = syn::Lifetime::new("'__profiler_out", proc_macro2::Span::call_site());
let lifetimes = explicitize_lifetimes(&mut func.sig);
let output = if lifetimes.is_empty() {
quote::quote! {
-> impl std::future::Future<Output=#output_ty>
}
} else {
// Bound all inputs on the output lifetime.
let type_bound = syn::TypeParamBound::Lifetime(output_lifetime.clone());
for param in &mut func.sig.generics.params {
match param {
syn::GenericParam::Lifetime(def) => def.bounds.push(output_lifetime.clone()),
syn::GenericParam::Type(def) => def.bounds.push(type_bound.clone()),
syn::GenericParam::Const(_) => (),
}
}
for arg in &mut func.sig.inputs {
if let syn::FnArg::Typed(syn::PatType { ty, .. }) = arg {
if let syn::Type::ImplTrait(def) = ty.as_mut() {
def.bounds.push(type_bound.clone());
}
}
}
// Add a definition for the output lifetime.
let lifetime_def = syn::LifetimeDef::new(output_lifetime.clone());
func.sig.generics.params.insert(0, syn::GenericParam::Lifetime(lifetime_def));
// Apply the output lifetime to the output.
quote::quote! {
-> impl std::future::Future<Output=#output_ty> + #output_lifetime
}
};
func.sig.asyncness = None;
func.sig.output = syn::parse2(output).unwrap();
}
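Concretely, a sketch of the transformation (the function is hypothetical; the lifetime names match the ones introduced above):
// Given `async fn first_word<'a>(text: &'a str) -> String { ... }`, the rewritten item is
// roughly:
fn first_word<'__profiler_out, 'a: '__profiler_out>(
    text: &'a str,
) -> impl std::future::Future<Output = String> + '__profiler_out {
    (async move {
        let result: String = { text.split_whitespace().next().unwrap_or("").to_owned() };
        result
    })
}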
// === Lifetime transformation ===
/// Make all lifetimes in function signature explicit.
///
/// Returns the lifetimes used in the function signature.
fn explicitize_lifetimes(sig: &mut syn::Signature) -> Vec<syn::Lifetime> {
// Go through the args; find:
// - anonymous lifetime: '_
// - implicit lifetimes: &foo
// - explicit lifetimes: &'a
// Make all input lifetimes explicit:
// - Use new lifetime explicitly in arg list.
// - Define new lifetime in generic params.
let mut input_transformer = ExplicitizeInputLifetimes::default();
for input in &mut sig.inputs {
input_transformer.visit_fn_arg_mut(input);
}
let ExplicitizeInputLifetimes { new_lifetimes, existing_lifetimes } = input_transformer;
let mut all_lifetimes = existing_lifetimes;
all_lifetimes.extend_from_slice(&new_lifetimes);
let new_lifetimes =
new_lifetimes.into_iter().map(|lt| syn::GenericParam::Lifetime(syn::LifetimeDef::new(lt)));
sig.generics.params.extend(new_lifetimes);
// There are two cases where output lifetimes may be elided:
// - There's exactly one lifetime in the inputs.
// - There's a receiver with a lifetime.
// If either case occurs, make any implicit output lifetimes explicit.
let default_lifetime = if all_lifetimes.len() == 1 {
Some(all_lifetimes[0].clone())
} else {
get_receiver_lifetime(sig).cloned()
};
if let Some(lifetime) = default_lifetime {
ExplicitizeOutputLifetimes { lifetime }.visit_return_type_mut(&mut sig.output);
}
all_lifetimes
}
#[derive(Default)]
struct ExplicitizeInputLifetimes {
new_lifetimes: Vec<syn::Lifetime>,
existing_lifetimes: Vec<syn::Lifetime>,
}
impl ExplicitizeInputLifetimes {
fn gen_lifetime(&mut self) -> syn::Lifetime {
let name = format!("'__profiler{}", self.new_lifetimes.len());
let new = syn::Lifetime::new(&name, proc_macro2::Span::call_site());
self.new_lifetimes.push(new.clone());
new
}
fn visit_elidable_lifetime(&mut self, lifetime: &mut Option<syn::Lifetime>) {
match lifetime {
Some(lifetime) => self.visit_lifetime_mut(lifetime),
None => *lifetime = Some(self.gen_lifetime()),
}
}
}
impl visit_mut::VisitMut for ExplicitizeInputLifetimes {
ignore_inner_fn_items!();
// Handles 'x in generic parameters in types of non-self arguments.
fn visit_lifetime_mut(&mut self, lifetime: &mut syn::Lifetime) {
let name = lifetime.ident.to_string();
if &name == "_" {
*lifetime = self.gen_lifetime();
} else {
self.existing_lifetimes.push(lifetime.clone());
}
}
// Handles &self.
fn visit_receiver_mut(&mut self, receiver: &mut syn::Receiver) {
if let Some((_, lifetime)) = &mut receiver.reference {
self.visit_elidable_lifetime(lifetime);
}
}
// Handles & in types of non-self arguments.
fn visit_type_reference_mut(&mut self, type_reference: &mut syn::TypeReference) {
self.visit_elidable_lifetime(&mut type_reference.lifetime);
}
}
struct ExplicitizeOutputLifetimes {
lifetime: syn::Lifetime,
}
impl visit_mut::VisitMut for ExplicitizeOutputLifetimes {
ignore_inner_fn_items!();
// Handles 'x in generic parameters in types.
fn visit_lifetime_mut(&mut self, lifetime: &mut syn::Lifetime) {
if &lifetime.ident.to_string() == "_" {
*lifetime = self.lifetime.clone();
}
}
// Handles & in types.
fn visit_type_reference_mut(&mut self, type_reference: &mut syn::TypeReference) {
if type_reference.lifetime.is_none() {
type_reference.lifetime = Some(self.lifetime.clone());
}
}
}
fn get_receiver_lifetime(sig: &syn::Signature) -> Option<&syn::Lifetime> {
match sig.inputs.first() {
Some(syn::FnArg::Receiver(syn::Receiver {
reference: Some((_, Some(lifetime))), ..
})) => Some(lifetime),
_ => None,
}
}

View File

@ -7,6 +7,7 @@
use crate::format;
use crate::log;
use crate::ProfilingLevel;
use std::fmt;
use std::rc;
@ -37,11 +38,8 @@ pub(crate) static METADATA_LOGS: log::ThreadLocalLog<rc::Rc<dyn MetadataSource>>
// =========================
/// Produce a JSON-formatted event log from the internal event logs.
///
/// Consumes all events that have happened up to this point; except in testing, this should only be
/// done once.
pub fn take_log() -> String {
let LogData { events, metadata_names, mut metadata_entries } = take_raw_log();
pub fn get_log() -> String {
let LogData { events, metadata_names, mut metadata_entries } = get_raw_log();
let mut out = LogTranslator::new();
for (id, event) in events.into_iter().enumerate() {
let id = EventId(id as u32);
@ -53,11 +51,11 @@ pub fn take_log() -> String {
let data = metadata_entries[id].next().unwrap();
out.metadata(timestamp, name, data);
}
Event::Start(Start { parent, start, label }) => {
Event::Start(Start { parent, start, label, .. }) => {
out.create(start, parent, label, id);
out.start(start.unwrap(), id);
}
Event::StartPaused(Start { parent, start, label }) =>
Event::StartPaused(Start { parent, start, label, .. }) =>
out.create(start, parent, label, id),
Event::End { id, timestamp } => out.end(timestamp, id),
Event::Pause { id, timestamp } => out.pause(timestamp, id),
@ -71,12 +69,12 @@ pub fn take_log() -> String {
// === Capture raw log data ===
/// Obtain the data from the internal event log.
pub(crate) fn take_raw_log() -> LogData {
let events = EVENTS.take_all();
let metadatas = METADATA_LOGS.clone_all();
pub(crate) fn get_raw_log() -> LogData {
let events = EVENTS.clone_all();
let metadatas: Vec<_> = METADATA_LOGS.clone_all();
let metadata_names: Vec<_> = metadatas.iter().map(|metadata| metadata.name()).collect();
let metadata_entries: Vec<_> =
metadatas.into_iter().map(|metadata| metadata.take_all()).collect();
metadatas.into_iter().map(|metadata| metadata.get_all()).collect();
LogData { events, metadata_names, metadata_entries }
}
@ -155,18 +153,18 @@ pub(crate) struct MetadataLog<T> {
pub(crate) trait MetadataSource {
fn name(&self) -> &'static str;
fn take_all(&self) -> Box<dyn Iterator<Item = Box<serde_json::value::RawValue>>>;
fn get_all(&self) -> Box<dyn Iterator<Item = Box<serde_json::value::RawValue>>>;
}
impl<T: 'static + serde::Serialize> MetadataSource for MetadataLog<T> {
fn name(&self) -> &'static str {
self.name
}
fn take_all(&self) -> Box<dyn Iterator<Item = Box<serde_json::value::RawValue>>> {
let entries = self.entries.take_all();
let entries =
entries.into_iter().map(|data| serde_json::value::to_raw_value(&data).unwrap());
Box::new(entries)
fn get_all(&self) -> Box<dyn Iterator<Item = Box<serde_json::value::RawValue>>> {
let mut entries = Vec::with_capacity(self.entries.len());
self.entries.for_each(|x| entries.push(serde_json::value::to_raw_value(&x).unwrap()));
Box::new(entries.into_iter())
}
}
@ -190,7 +188,7 @@ impl<T: 'static + serde::Serialize> MetadataLogger<T> {
pub fn new(name: &'static str) -> Self {
let id = METADATA_LOGS.len() as u32;
let entries = rc::Rc::new(log::Log::new());
METADATA_LOGS.append(rc::Rc::new(MetadataLog::<T> { name, entries: entries.clone() }));
METADATA_LOGS.push(rc::Rc::new(MetadataLog::<T> { name, entries: entries.clone() }));
Self { id, entries }
}
@ -198,7 +196,7 @@ impl<T: 'static + serde::Serialize> MetadataLogger<T> {
///
/// Returns an identifier that can be used to create references between log entries.
pub fn log(&self, t: T) -> EventId {
self.entries.append(t);
self.entries.push(t);
EventLog.metadata(self.id)
}
}
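A minimal sketch of the `MetadataLogger` API (the metadata type and values are hypothetical, and serde's derive feature is assumed):
#[derive(serde::Serialize)]
struct MouseEvent {
    x: f64,
    y: f64,
}

fn metadata_sketch() {
    // Registers a named metadata stream; the name identifies entries in the output log.
    let logger = MetadataLogger::new("MouseEvent");
    // Serializes one entry and records a corresponding metadata event in the event log.
    let _id: EventId = logger.log(MouseEvent { x: 12.0, y: 34.0 });
}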
@ -215,49 +213,54 @@ pub struct EventLog;
impl EventLog {
/// Log the beginning of a measurement.
#[inline]
pub fn start(
self,
parent: EventId,
label: Label,
start: Option<Timestamp>,
state: StartState,
level: ProfilingLevel,
) -> EventId {
let m = Start { parent, label, start };
let m = Start { parent, label, start, level };
let event = match state {
StartState::Active => Event::Start(m),
StartState::Paused => Event::StartPaused(m),
};
let id = EVENTS.len() as u32;
EVENTS.append(event);
EventId(id)
self.log_event(event)
}
/// Log the end of a measurement.
#[inline]
pub fn end(self, id: EventId, timestamp: Timestamp) {
let event = Event::End { id, timestamp };
EVENTS.append(event);
self.log_event(Event::End { id, timestamp });
}
/// Log the beginning of an interval in which the measurement is not active.
#[inline]
pub fn pause(self, id: EventId, timestamp: Timestamp) {
let event = Event::Pause { id, timestamp };
EVENTS.append(event);
self.log_event(Event::Pause { id, timestamp });
}
/// Log the end of an interval in which the measurement is not active.
#[inline]
pub fn resume(self, id: EventId, timestamp: Timestamp) {
let event = Event::Resume { id, timestamp };
EVENTS.append(event);
self.log_event(Event::Resume { id, timestamp });
}
/// Log metadata.
#[inline]
pub fn metadata(self, type_id: u32) -> EventId {
let id = EVENTS.len() as u32;
let timestamp = Timestamp::now();
let data = ExternalMetadata { type_id };
let event = Event::Metadata { timestamp, data };
EVENTS.append(event);
EventId(id)
self.log_event(Event::Metadata { timestamp, data })
}
#[inline(always)]
fn log_event(self, event: Event) -> EventId {
let id = EventId(EVENTS.len() as u32);
EVENTS.push(event);
id
}
}
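For orientation, a sketch of calling this low-level API directly (normally it is reached only through the generated `start_*!`/`create_*!` macros; the `ProfilingLevel` variant name is assumed to mirror the profiler type, and the label is hypothetical):
fn event_log_sketch() {
    let label = Label("example_measurement (internal.rs:1)");
    let id = EventLog.start(
        EventId::implicit(),
        label,
        Some(Timestamp::now()),
        StartState::Active,
        ProfilingLevel::Objective,
    );
    // ... measured work happens here ...
    EventLog.end(id, Timestamp::now());
}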
@ -316,6 +319,16 @@ pub enum Event {
},
}
impl Event {
/// Return the [`Start`] information, if this is an event that defines a profiler.
pub fn as_start(self) -> Option<Start> {
match self {
Event::Start(start) | Event::StartPaused(start) => Some(start),
_ => None,
}
}
}
// =============
@ -331,6 +344,8 @@ pub struct Start {
pub start: Option<Timestamp>,
/// Identifies where in the code this measurement originates.
pub label: Label,
/// Identifies the importance of this event.
pub level: ProfilingLevel,
}
@ -362,33 +377,39 @@ pub struct Timestamp {
}
impl Timestamp {
#[inline(always)]
/// Return the current time, relative to the time origin.
pub fn now() -> Self {
Self { ms: now() }
}
/// Return the timestamp corresponding to an offset from the time origin, in ms.
#[inline(always)]
pub fn from_ms(ms: f64) -> Self {
Self { ms }
}
/// Return the timestamp of the time origin.
#[inline(always)]
pub fn time_origin() -> Self {
Self { ms: 0.0 }
}
/// Convert to an offset from the time origin, in ms.
#[inline(always)]
pub fn into_ms(self) -> f64 {
self.ms
}
/// Return the offset of the time origin from a system timestamp.
#[inline(always)]
pub fn time_offset() -> Self {
Self::from_ms(time_origin())
}
}
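A small sketch of the round-trip between a [`Timestamp`] and raw milliseconds:
fn timestamp_sketch() {
    let t = Timestamp::now();
    // `into_ms` and `from_ms` are inverse conversions relative to the time origin.
    let ms = t.into_ms();
    assert_eq!(Timestamp::from_ms(ms).into_ms(), ms);
    assert_eq!(Timestamp::time_origin().into_ms(), 0.0);
}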
impl Default for Timestamp {
#[inline(always)]
fn default() -> Self {
Self::time_origin()
}
@ -426,6 +447,7 @@ fn time_origin() -> f64 {
// === Conversions to related types ===
impl From<Timestamp> for format::Timestamp {
#[inline(always)]
fn from(time: Timestamp) -> Self {
Self::from_ms(time.into_ms())
}
@ -438,7 +460,7 @@ impl From<Timestamp> for format::Timestamp {
// ===============
/// Identifies an event in the profiling log.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct EventId(pub u32);
impl EventId {
@ -455,6 +477,15 @@ impl EventId {
pub const fn implicit() -> Self {
Self::IMPLICIT
}
/// If the value explicitly references a profiler ID, return it; otherwise, return [`None`].
pub fn explicit(self) -> Option<Self> {
if self == Self::IMPLICIT {
None
} else {
Some(self)
}
}
}

View File

@ -127,6 +127,10 @@
// === Features ===
#![feature(test)]
#![feature(local_key_cell_methods)]
#![feature(maybe_uninit_uninit_array)]
#![feature(extend_one)]
#![feature(result_option_inspect)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
@ -323,7 +327,7 @@ pub async fn join<T: futures::Future, U: futures::Future>(t: T, u: U) -> (T::Out
#[doc(inline)]
pub use enso_profiler_macros::profile;
enso_profiler_macros::define_hierarchy![Objective, Task, Detail, Debug];
enso_profiler_macros::define_profiling_levels![Objective, Task, Detail, Debug];
// === APP_LIFETIME ===
@ -333,6 +337,153 @@ pub const APP_LIFETIME: Objective = Objective(EventId::APP_LIFETIME);
// ===================
// === EventStream ===
// ===================
/// An iterator over all logged events. This is a resumable iterator: After [`next()`] returns
/// [`None`], it can be iterated again to observe any additional events that have been logged since
/// the last time it was used.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct EventStream {
next_i: usize,
}
impl Iterator for EventStream {
type Item = Event;
fn next(&mut self) -> Option<Self::Item> {
let result = EVENTS.try_get(self.next_i, |e| *e);
result.inspect(|_| self.next_i += 1);
result
}
}
/// Return a stream that yields all logged events.
pub fn event_stream() -> EventStream {
EventStream::default()
}
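A minimal sketch of the resumable behavior:
fn event_stream_sketch() {
    let mut events = event_stream();
    // Drain everything logged so far. Note: `EventStream` is `Copy`, so iterate by
    // `&mut` to advance this instance rather than a copy.
    let logged_so_far = (&mut events).count();
    // ... more profilers run and log events ...
    // Resuming the same value yields only events logged since the first pass.
    let newly_logged = (&mut events).count();
    let _ = (logged_so_far, newly_logged);
}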
// ======================
// === IntervalStream ===
// ======================
/// Return a stream that yields an [`Interval`] for each completed run of a profiler. This is a
/// resumable iterator: After [`next()`] returns [`None`], it can be iterated again to observe any
/// additional intervals that have completed since the last time it was used.
pub fn interval_stream() -> IntervalStream {
IntervalStream {
events: EventStream::default().enumerate(),
resume: Default::default(),
parent: Default::default(),
stack: Default::default(),
}
}
/// A stream that yields the intervals in which profilers were running.
#[derive(Debug, Clone)]
pub struct IntervalStream {
events: std::iter::Enumerate<EventStream>,
// [`Timestamp`]s of events that have been paused (or started paused) and resumed. If a running
// event is not found in this collection, it has been running since its start event.
resume: std::collections::BTreeMap<EventId, Timestamp>,
parent: std::collections::BTreeMap<EventId, EventId>,
stack: Vec<EventId>,
}
impl IntervalStream {
fn resolve_start_id(&self, id: EventId) -> usize {
let non_empty_stack = "For an internal to end, the stack must be non-empty.";
id.explicit().unwrap_or_else(|| *self.stack.last().expect(non_empty_stack)).0 as usize
}
fn record_parent(&mut self, id: EventId, parent: EventId) {
let parent = parent
.explicit()
.unwrap_or_else(|| self.stack.last().copied().unwrap_or(EventId::APP_LIFETIME));
self.parent.insert(id, parent);
}
fn resolve_start_time(&self, start: &Start) -> Timestamp {
start.start.unwrap_or_else(|| self.resolve_parent_start(start.parent))
}
fn resolve_parent_start(&self, id: EventId) -> Timestamp {
let parent_was_recorded =
"If the event ID is implicit, we must have resolved its parent when we encountered it.";
let id = id.explicit().unwrap_or_else(|| *self.parent.get(&id).expect(parent_was_recorded));
EVENTS.get(id.0 as usize, |event| self.resolve_start_time(&event.as_start().unwrap()))
}
}
impl Iterator for IntervalStream {
type Item = Interval;
fn next(&mut self) -> Option<Self::Item> {
while let Some((i, event)) = self.events.next() {
let id = EventId(i as u32);
match event {
Event::Start(start) => {
self.stack.push(id);
self.record_parent(id, start.parent);
}
Event::StartPaused(start) => self.record_parent(id, start.parent),
Event::Resume { id, timestamp } => {
self.resume.insert(id, timestamp);
self.stack.push(id);
}
Event::End { id, timestamp } | Event::Pause { id, timestamp } => {
let start = self.resume.remove(&id).or_else(|| {
let id = self.resolve_start_id(id);
EVENTS.get(id, |e| match e {
Event::Start(start) => Some(self.resolve_start_time(start)),
Event::StartPaused(_) => None,
_ => unreachable!(),
})
});
if let Some(start) = start {
let label = EVENTS.get(id.0 as usize, |e| e.as_start().unwrap().label);
let end = timestamp;
return Some(Interval { label, start, end });
}
}
Event::Metadata { .. } => {}
}
}
None
}
}
/// Identifies a time period in which a particular profiler was running.
#[derive(Copy, Clone, Debug)]
pub struct Interval {
label: Label,
start: Timestamp,
end: Timestamp,
}
impl Interval {
/// Return a string identifying the profiler, usually by the location in the code where it was
/// created.
pub fn label(&self) -> &'static str {
self.label.0
}
/// Return the time the profiler began running, in the same units as the [`DOMHighResTimeStamp`]
/// web API.
pub fn start(&self) -> f64 {
self.start.into_ms()
}
/// Return the time the profiler finished running, in the same units as the
/// [`DOMHighResTimeStamp`] web API.
pub fn end(&self) -> f64 {
self.end.into_ms()
}
}
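A minimal sketch of consuming the interval stream (e.g. for ad-hoc reporting):
fn interval_stream_sketch() {
    for interval in interval_stream() {
        // `start()`/`end()` are in the same units as `DOMHighResTimeStamp` (milliseconds).
        let duration_ms = interval.end() - interval.start();
        println!("{}: {:.2} ms", interval.label(), duration_ms);
    }
}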
// =============
// === Tests ===
// =============
@ -343,7 +494,7 @@ mod log_tests {
use profiler::profile;
fn get_log() -> Vec<profiler::internal::Event> {
crate::internal::take_raw_log().events
crate::internal::get_raw_log().events
}
#[test]
@ -477,7 +628,8 @@ mod bench {
for _ in 0..count {
let _profiler = start_objective!(profiler::APP_LIFETIME, "log_measurement");
}
test::black_box(crate::EVENTS.take_all());
let events: Vec<_> = crate::EVENTS.clone_all();
test::black_box(events);
}
#[bench]
@ -497,6 +649,7 @@ mod bench {
parent: profiler::EventId::APP_LIFETIME,
start: None,
label: profiler::Label(""),
level: Default::default(),
}));
log.push(profiler::Event::End {
id: profiler::EventId::implicit(),

View File

@ -1,69 +1,136 @@
//! Data structure supporting limited interior mutability, to build up a collection.
//! Data structure supporting:
//! - O(1) append (amortized), with low overhead.
//! - O(1) random-access reads.
//! - Single-threaded shared mutability.
//!
//! # Implementation
//!
//! Note [Log Safety]
//! =============================
//! When obtaining a reference from the UnsafeCell, all accessors follow these rules:
//! - There must be a scope that the reference doesn't escape.
//! - There must be no other references obtained in the same scope.
//! Consistently following these rules ensures the no-alias rule of mutable references is
//! satisfied.
//! Soundness of shared-mutable data requires avoiding reference conflicts: The data must not be
//! mutated while a shared reference to it exists. This is ensured by:
//! - No public interface of [`Log`] allows keeping a reference with lifetime derived from the data.
//! - References taken within [`Log`]'s implementation don't overlap with other references in the
//! scope.
use std::cell;
use std::mem;
/// Allocation unit of events within a [`Log`].
const BLOCK: usize = 1024;
// ===========
// === Log ===
// ===========
/// Data structure supporting limited interior mutability, to build up a collection.
/// A shared-mutable data structure supporting append and random-access read.
#[derive(Debug)]
pub struct Log<T>(cell::UnsafeCell<Vec<T>>);
#[allow(unsafe_code)]
impl<T> Log<T> {
#[allow(clippy::new_without_default)]
/// Create a new, empty vec builder.
pub fn new() -> Self {
Self(cell::UnsafeCell::new(vec![]))
}
/// Push an element.
pub fn append(&self, element: T) {
// Note [Log Safety]
unsafe {
(*self.0.get()).push(element);
}
}
/// Return (and consume) all elements pushed so far.
pub fn take_all(&self) -> Vec<T> {
// Note [Log Safety]
unsafe { mem::take(&mut *self.0.get()) }
}
/// The number of elements that are currently available.
pub fn len(&self) -> usize {
// Note [Log Safety]
unsafe { &*self.0.get() }.len()
}
/// Returns true if no elements are available.
pub fn is_empty(&self) -> bool {
// Note [Log Safety]
unsafe { &*self.0.get() }.is_empty()
}
pub struct Log<T> {
current: cell::UnsafeCell<Box<[mem::MaybeUninit<T>; BLOCK]>>,
completed: cell::UnsafeCell<Vec<Box<[T; BLOCK]>>>,
len: cell::Cell<usize>,
}
#[allow(unsafe_code)]
impl<T> Log<T> {
/// Create a new, empty [`Log`].
pub fn new() -> Self {
Self {
current: cell::UnsafeCell::new(Box::new(mem::MaybeUninit::uninit_array())),
completed: cell::UnsafeCell::new(Default::default()),
len: Default::default(),
}
}
/// Push an element.
#[inline]
#[allow(unsafe_code)] // Note [Log Safety]
pub fn push(&self, element: T) {
unsafe {
let i = self.len.get();
(*self.current.get())[i % BLOCK].write(element);
let i1 = i + 1;
if i1 % BLOCK == 0 {
// Current gradually-initialized block is full. Read it, cast it to a
// fully-initialized block, and replace it with a new empty block.
let empty = Box::new(mem::MaybeUninit::uninit_array());
let block = self.current.get().replace(empty);
let block =
mem::transmute::<Box<[mem::MaybeUninit<T>; BLOCK]>, Box<[T; BLOCK]>>(block);
// Add the old block to our collection of completed blocks.
(*self.completed.get()).push(block);
}
self.len.set(i1);
}
}
/// Returns the number of entries in the log.
#[inline]
pub fn len(&self) -> usize {
self.len.get()
}
/// Returns true if the log contains no entries.
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Applies a function to each entry in the log.
#[allow(unsafe_code)] // Note [Log Safety]
pub fn for_each<F>(&self, mut f: F)
where F: FnMut(&T) {
unsafe {
let blocks = self.len() / BLOCK;
let n = self.len() % BLOCK;
for i in 0..blocks {
// Safety: The contents of a completed block are never modified, so we can hold a
// borrow while calling the function (which may append to the log).
let block = &(*self.completed.get())[i];
block.iter().for_each(&mut f);
}
// Safety: The elements in the completed portion of the block are never modified, so we
// can hold a borrow while calling the function (which may append to the log).
let current = &(*self.current.get())[..n];
current.iter().map(|elem| elem.assume_init_ref()).for_each(f);
}
}
#[inline]
#[allow(unsafe_code)] // Note [Log Safety]
fn get(&self, index: usize) -> Option<&T> {
unsafe {
let block_i = index / BLOCK;
let i = index % BLOCK;
let blocks = &*self.completed.get();
if let Some(block) = blocks.get(block_i) {
Some(&block[i])
} else if block_i == blocks.len() && i < self.len.get() % BLOCK {
Some((*self.current.get())[i].assume_init_ref())
} else {
None
}
}
}
}
impl<T: Clone> Log<T> {
/// Return clones of all elements pushed so far.
pub fn clone_all(&self) -> Vec<T> {
// Note [Log Safety]
unsafe { &*self.0.get() }.to_vec()
/// Return a collection of all entries currently in the log.
pub fn clone_all<C>(&self) -> C
where C: Default + Extend<T> {
let mut result = C::default();
self.for_each(|elem| result.extend_one(elem.clone()));
result
}
}
impl<T> core::ops::Index<usize> for Log<T> {
type Output = T;
#[inline]
fn index(&self, index: usize) -> &Self::Output {
self.get(index).unwrap()
}
}
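A minimal usage sketch illustrating the properties described in the module docs (append through a shared reference, random-access reads, and non-consuming traversal):
fn log_sketch() {
    let log = Log::new();
    for i in 0..3000u32 {
        // Appending needs only `&self`; entries accumulate in 1024-element blocks.
        log.push(i);
    }
    assert_eq!(log.len(), 3000);
    assert_eq!(log[2048], 2048);
    // Visit entries without consuming the log; the callback may itself push entries.
    let mut sum = 0u64;
    log.for_each(|&x| sum += u64::from(x));
    // Copy everything seen so far into an owned collection.
    let _snapshot: Vec<u32> = log.clone_all();
}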
@ -71,7 +138,7 @@ impl<T: Clone> Log<T> {
// See: https://github.com/rust-lang/rust/issues/26925
impl<T> Default for Log<T> {
fn default() -> Self {
Self(Default::default())
Log::new()
}
}
@ -81,40 +148,54 @@ impl<T> Default for Log<T> {
// === ThreadLocalLog ===
// ======================
/// Wraps a thread-local [`Log`] instance.
/// Wraps a [`Log`] for thread-local access.
#[derive(Debug)]
pub struct ThreadLocalLog<T: 'static>(std::thread::LocalKey<Log<T>>);
impl<T: 'static> ThreadLocalLog<T> {
/// New.
#[allow(missing_docs)]
pub const fn new(log: std::thread::LocalKey<Log<T>>) -> Self {
Self(log)
}
/// Push an element.
pub fn append(&'static self, element: T) {
self.0.with(|log| log.append(element))
/// Append an entry to the log.
pub fn push(&'static self, t: T) {
self.0.with(|this| this.push(t));
}
/// Return (and consume) all elements pushed so far.
pub fn take_all(&'static self) -> Vec<T> {
self.0.with(|log| log.take_all())
}
/// The number of elements that are currently available.
/// Return the number of entries in the log. Note that as the log is thread-local but
/// append-only, any function in the thread may increase this value, but it will never
/// decrease.
pub fn len(&'static self) -> usize {
self.0.with(|log| log.len())
self.0.with(|this| this.len())
}
/// Returns true if no elements are available.
/// Returns true if the log contains no entries.
pub fn is_empty(&'static self) -> bool {
self.0.with(|log| log.is_empty())
self.0.with(|this| this.is_empty())
}
/// Get the entry at the given index, and pass it to a function; return the result of the
/// function.
///
/// Panics if the index is not less than [`len`].
pub fn get<U>(&'static self, i: usize, f: impl FnOnce(&T) -> U) -> U {
self.0.with(|this| f(&this[i]))
}
/// Get the entry at the given index, and pass it to a function; return the result of the
/// function.
///
/// Returns [`None`] if the index is not less than [`len`].
pub fn try_get<U>(&'static self, i: usize, f: impl FnOnce(&T) -> U) -> Option<U> {
self.0.with(|this| this.get(i).map(f))
}
}
impl<T: Clone> ThreadLocalLog<T> {
/// Return clones of all elements pushed so far.
pub fn clone_all(&'static self) -> Vec<T> {
self.0.with(|log| log.clone_all())
impl<T: 'static + Clone> ThreadLocalLog<T> {
/// Return a collection of log entries since the program was started.
pub fn clone_all<C>(&'static self) -> C
where C: Default + Extend<T> {
self.0.with(|this| this.clone_all())
}
}

View File

@ -18,7 +18,7 @@ js-sys = { version = "0.3.28" }
nalgebra = { version = "0.26.1" }
wasm-bindgen = { workspace = true }
derivative = "2.2.0"
tracing = { version = "0.1.37" }
enso-logging = { path = "../logging" }
enso-shapely = { path = "../shapely" }
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]

View File

@ -58,6 +58,9 @@ pub mod prelude {
pub use super::JsCast;
pub use super::JsValue;
pub use super::Object;
pub use enso_logging as logging;
pub use enso_logging::debug;
pub use enso_logging::warn;
pub use enso_shapely::clone_ref::*;
pub use std::cell::RefCell;
pub use std::default::default;
@ -65,9 +68,6 @@ pub mod prelude {
pub use std::marker::PhantomData;
pub use std::ops::Deref;
pub use std::rc::Rc;
pub use tracing;
pub use tracing::debug;
pub use tracing::warn;
}
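With these re-exports in place, code using the prelude logs via standard `format_args!`-style macros, e.g. (a sketch with hypothetical values):
fn logging_sketch(item_count: usize) {
    debug!("Processed {item_count} items.");
    if item_count == 0 {
        warn!("Expected at least one item, got {item_count}.");
    }
}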
@ -253,8 +253,8 @@ macro_rules! ops {
pub mod wasm {
use super::binding::wasm::*;
use super::wasm_traits::*;
pub use tracing;
pub use tracing::warn;
pub use enso_logging as logging;
pub use enso_logging::warn;
pub use std::default::default;
/// Extensions to the [`$target`] type.
pub trait Trait $defs
@ -266,8 +266,8 @@ macro_rules! ops {
pub mod mock {
use super::binding::mock::*;
use super::mock_traits::*;
pub use tracing;
pub use tracing::warn;
pub use enso_logging as logging;
pub use enso_logging::warn;
pub use std::default::default;
/// Extensions to the [`$target`] type.
pub trait Trait $defs
@ -475,7 +475,7 @@ ops! { WindowOps for Window
fn cancel_animation_frame_or_warn(&self, id: i32) {
self.cancel_animation_frame(id).unwrap_or_else(|err| {
tracing::error!("Error when canceling animation frame: {err:?}");
logging::error!("Error when canceling animation frame: {err:?}");
});
}
@ -580,7 +580,7 @@ ops! { NodeOps for Node
fn append_or_warn(&self, node: &Self) {
let warn_msg: &str = &format!("Failed to append child {:?} to {:?}", node, self);
if self.append_child(node).is_err() {
warn!(warn_msg)
warn!("{warn_msg}")
};
}
@ -588,7 +588,7 @@ ops! { NodeOps for Node
let warn_msg: &str = &format!("Failed to prepend child \"{:?}\" to \"{:?}\"", node, self);
let first_c = self.first_child();
if self.insert_before(node, first_c.as_ref()).is_err() {
warn!(warn_msg)
warn!("{warn_msg}")
}
}
@ -596,7 +596,7 @@ ops! { NodeOps for Node
let warn_msg: &str =
&format!("Failed to insert {:?} before {:?} in {:?}", node, ref_node, self);
if self.insert_before(node, Some(ref_node)).is_err() {
warn!(warn_msg)
warn!("{warn_msg}")
}
}
@ -604,7 +604,7 @@ ops! { NodeOps for Node
if let Some(parent) = self.parent_node() {
let warn_msg: &str = &format!("Failed to remove {:?} from parent", self);
if parent.remove_child(self).is_err() {
warn!(warn_msg)
warn!("{warn_msg}")
}
}
}
@ -612,7 +612,7 @@ ops! { NodeOps for Node
fn remove_child_or_warn(&self, node: &Self) {
let warn_msg: &str = &format!("Failed to remove child {:?} from {:?}", node, self);
if self.remove_child(node).is_err() {
warn!(warn_msg)
warn!("{warn_msg}")
}
}
}
@ -636,7 +636,7 @@ ops! { ElementOps for Element
let values = format!("\"{}\" = \"{}\" on \"{:?}\"", name, value, self);
let warn_msg: &str = &format!("Failed to set attribute {}", values);
if self.set_attribute(name, value).is_err() {
warn!(warn_msg)
warn!("{warn_msg}")
}
}
}
@ -660,7 +660,7 @@ ops! { HtmlElementOps for HtmlElement
let values = format!("\"{}\" = \"{}\" on \"{:?}\"", name, value, self);
let warn_msg: &str = &format!("Failed to set style {}", values);
if self.style().set_property(name, value).is_err() {
warn!(warn_msg);
warn!("{warn_msg}");
}
}
}