Revert "Implement basic performance logging API. (#3169)"

This reverts commit 178cfb0404.
Michael Mauderer 2022-01-14 12:40:03 +00:00
parent 4fc97d5563
commit 5c525daefe
23 changed files with 98 additions and 1458 deletions

Cargo.lock generated
View File

@ -109,7 +109,7 @@ dependencies = [
"Inflector",
"enso-macro-utils",
"enso-prelude",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -120,7 +120,7 @@ version = "0.1.0"
dependencies = [
"clap",
"itertools 0.10.1",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"syn",
"uuid",
]
@ -719,7 +719,7 @@ version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -731,7 +731,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40eebddd2156ce1bb37b20bbe5151340a31828b1f2d22ba4141f3531710e38df"
dependencies = [
"convert_case",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"rustc_version 0.3.3",
"syn",
@ -903,7 +903,7 @@ dependencies = [
"enso-prelude",
"itertools 0.8.2",
"nonempty",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
"unicode-segmentation",
@ -961,6 +961,7 @@ dependencies = [
"ensogl-component",
"ensogl-drop-manager",
"ensogl-examples",
"ensogl-hardcoded-theme",
"ensogl-text-msdf-sys",
"failure",
"flo_stream",
@ -973,7 +974,6 @@ dependencies = [
"mockall",
"nalgebra 0.26.2",
"parser",
"profiling",
"regex",
"semver 1.0.4",
"serde",
@ -1010,7 +1010,7 @@ dependencies = [
name = "enso-macro-utils"
version = "0.2.0"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
"wasm-bindgen-test 0.2.50",
@ -1077,7 +1077,7 @@ dependencies = [
"boolinator",
"enso-macro-utils",
"itertools 0.8.2",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
"wasm-bindgen-test 0.2.50",
@ -1565,7 +1565,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd53b3fde38a39a06b2e66dc282f3e86191e53bd04cc499929c15742beae3df8"
dependencies = [
"once_cell",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -1593,7 +1593,7 @@ version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
"synstructure",
@ -1790,7 +1790,7 @@ checksum = "18e4a4b95cea4b4ccbcf1c5675ca7c4ee4e9e75eb79944d07defde18068f79bb"
dependencies = [
"autocfg 1.0.1",
"proc-macro-hack",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -2075,7 +2075,6 @@ dependencies = [
"nalgebra 0.26.2",
"ordered-float 2.8.0",
"parser",
"profiling",
"serde",
"serde_json",
"span-tree",
@ -2164,7 +2163,7 @@ checksum = "e50385662f423431a619ab28ba2beeab3063b581a0d1a943765e23911c502904"
dependencies = [
"lazy_static",
"proc-macro-hack",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"regex",
"syn",
@ -2388,17 +2387,6 @@ dependencies = [
"value-bag",
]
[[package]]
name = "macros"
version = "0.1.0"
dependencies = [
"proc-macro2 1.0.33",
"quote 1.0.10",
"serde",
"serde_json",
"syn",
]
[[package]]
name = "matches"
version = "0.1.9"
@ -2552,7 +2540,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a673cb441f78cd9af4f5919c28576a3cc325fb6b54e42f7047dacce3c718c17b"
dependencies = [
"cfg-if 0.1.10",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -2797,7 +2785,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "486ea01961c4a818096de679a8b740b26d9033146ac5291b1c98557658f8cdd9"
dependencies = [
"proc-macro-crate",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -3028,7 +3016,7 @@ version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e8fe8163d14ce7f0cdac2e040116f22eac817edabff0be91e8aff7e9accf389"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -3137,33 +3125,13 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.33"
version = "1.0.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb37d2df5df740e582f28f8560cf425f52bb267d872fe58358eadb554909f07a"
checksum = "ba508cc11742c0dc5c1659771673afbab7a0efab23aa17e854cbab0837ed0b43"
dependencies = [
"unicode-xid 0.2.2",
]
[[package]]
name = "profiling"
version = "0.1.0"
dependencies = [
"Inflector",
"chrono",
"enso-logger",
"enso-prelude",
"enso-web",
"js-sys",
"macros",
"ordered-float 2.8.0",
"paste 1.0.6",
"serde",
"serde_json",
"serde_plain",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "quote"
version = "0.6.13"
@ -3179,7 +3147,7 @@ version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
]
[[package]]
@ -3678,7 +3646,7 @@ version = "1.0.130"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -3694,15 +3662,6 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_plain"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95455e7e29fada2052e72170af226fbe368a4ca33dee847875325d9fdb133858"
dependencies = [
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.6.1"
@ -3754,7 +3713,7 @@ checksum = "83695fde96cbe9e08f0e4eb96b1b56fdbd44f2098ee27462dda964c7745fddc7"
dependencies = [
"bitflags",
"itertools 0.8.2",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -3767,7 +3726,7 @@ checksum = "e63e6744142336dfb606fe2b068afa2e1cca1ee6a5d8377277a92945d81fa331"
dependencies = [
"bitflags",
"itertools 0.8.2",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -3871,11 +3830,11 @@ checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
[[package]]
name = "syn"
version = "1.0.82"
version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8daf5dd0bb60cbd4137b1b587d2fc0ae729bc07cf01cd70b36a1ed5ade3b9d59"
checksum = "f2afee18b8beb5a596ecb4a2dce128c719b4ba399d34126b9e4396e3f9860966"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"unicode-xid 0.2.2",
]
@ -3886,7 +3845,7 @@ version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
"unicode-xid 0.2.2",
@ -3936,7 +3895,7 @@ version = "1.0.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -4033,7 +3992,7 @@ version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e44da00bfc73a25f814cd8d7e57a68a5c31b74b3152a0a1d1f590c97ed06265a"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
]
@ -4371,7 +4330,7 @@ dependencies = [
"bumpalo",
"lazy_static",
"log 0.4.14",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
"wasm-bindgen-shared",
@ -4418,7 +4377,7 @@ version = "0.2.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e85031354f25eaebe78bb7db1c3d86140312a911a106b2e29f9cc440ce3e7668"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
"wasm-bindgen-backend",
@ -4476,7 +4435,7 @@ version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97837a6e83ab24a4b3a38d44a257e13335b54f4b4548b2c9d71edd0bf570cb4f"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
]
@ -4489,7 +4448,7 @@ dependencies = [
"anyhow",
"heck",
"log 0.4.14",
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
"wasm-bindgen-backend",
@ -4534,7 +4493,7 @@ dependencies = [
name = "web-test-proc-macro"
version = "0.1.0"
dependencies = [
"proc-macro2 1.0.33",
"proc-macro2 1.0.32",
"quote 1.0.10",
"syn",
"wasm-bindgen-test 0.3.8",

View File

@ -65,8 +65,6 @@ members = [
"lib/rust/json-rpc",
"lib/rust/launcher-shims",
"lib/rust/logger",
"lib/rust/profiling",
"lib/rust/profiling/macros",
"lib/rust/macro-utils",
"lib/rust/optics",
"lib/rust/parser",

View File

@ -9,13 +9,9 @@ crate-type = ["cdylib", "rlib"]
[dependencies]
analytics = { version = "0.1.0", path = "analytics" }
ast = { path = "language/ast/impl" }
bimap = { version = "0.4.0" }
console_error_panic_hook = { version = "0.1.6" }
double-representation = { version = "0.1.0", path = "controller/double-representation" }
engine-protocol = { path = "controller/engine-protocol" }
enso-callback = { path = "../../lib/rust/callback" }
enso-config = { path = "config" }
enso-callback = { path = "../../lib/rust/callback" }
enso-data-structures = { path = "../../lib/rust/data-structures" }
enso-debug-scene = { path = "view/debug_scene" }
enso-frp = { path = "../../lib/rust/frp" }
@ -25,27 +21,31 @@ enso-shapely = { path = "../../lib/rust/shapely/impl"}
enso-text = { path = "../../lib/rust/text" }
enso-web = { path = "../../lib/rust/web" }
ensogl = { path = "../../lib/rust/ensogl" }
ensogl-drop-manager = { path = "../../lib/rust/ensogl/component/drop-manager" }
ensogl-examples = { path = "../../lib/rust/ensogl/example" }
ensogl-component = { path = "../../lib/rust/ensogl/component" }
ensogl-text-msdf-sys = { path = "../../lib/rust/ensogl/component/text/msdf-sys" }
ensogl-hardcoded-theme = { path = "../../lib/rust/ensogl/app/theme/hardcoded" }
ensogl-drop-manager = { path = "../../lib/rust/ensogl/component/drop-manager" }
fuzzly = { path = "../../lib/rust/fuzzly" }
ast = { path = "language/ast/impl" }
ide-view = { path = "view" }
engine-protocol = { path = "controller/engine-protocol" }
json-rpc = { path = "../../lib/rust/json-rpc" }
parser = { path = "language/parser" }
span-tree = { path = "language/span-tree" }
bimap = { version = "0.4.0" }
console_error_panic_hook = { version = "0.1.6" }
failure = { version = "0.1.6" }
flo_stream = { version = "0.4.0" }
futures = { version = "0.3.1" }
fuzzly = { path = "../../lib/rust/fuzzly" }
ide-view = { path = "view" }
itertools = { version = "0.10.0" }
js-sys = { version = "0.3.28" }
json-rpc = { path = "../../lib/rust/json-rpc" }
mockall = { version = "0.7.1", features = ["nightly"] }
nalgebra = { version = "0.26.1", features = ["serde-serialize"] }
parser = { path = "language/parser" }
profiling = { path = "../../lib/rust/profiling", default-features = false }
semver = { version = "1.0.0", features = ["serde"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0" }
sha3 = { version = "0.8.2" }
span-tree = { path = "language/span-tree" }
uuid = { version = "0.8", features = ["serde", "v4", "wasm-bindgen"] }
wasm-bindgen = { version = "=0.2.58" }
@ -73,9 +73,3 @@ features = [
'WebSocket',
'Window',
]
[features]
enable-section-profiling = ["profiling/enable-section-profiling"]
enable-task-profiling = ["profiling/enable-task-profiling"]
enable-detail-profiling = ["profiling/enable-detail-profiling"]
enable-debug-profiling = ["profiling/enable-debug-profiling"]

View File

@ -281,22 +281,6 @@ have prepared several scripts which maximally automate the process:
- **Linting** Please be sure to fix all errors reported by `node ./run lint`
before creating a pull request to this repository.
### Profiling
If you are working on performance-critical code, or on improving the performance
of parts of the GUI, we have some tools available to help you. In particular, an
integrated profiling framework can help you measure the timing of your code.
There is a JS API (`app/ide-desktop/lib/profiling/src/profiling.ts`) and a
Rust API (`lib/rust/profiling/src/lib.rs`); see their respective docs for
details on API usage. To activate the profiling framework, pass
`--profiling-level <name of the desired profiling level>` to the run script.
The available profiling levels are "Section", "Task", "Detail", and "Debug". For
example, `./run start --profiling-level debug --dev`. In the Chrome developer
console, you can then see the profiled intervals in the Performance tab when
recording performance statistics. You can also get a performance report by
running `showPerformanceReport()` in the Chrome developer console, which prints
a textual overview of the profiled intervals.
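As a rough sketch of the Rust side (based on the macros exposed by the
`profiling` crate; `init_gui` and `load_config` below are placeholder
functions):

```rust
// Manually start and end a named measurement.
let gui_init = profiling::start_task!("GUI initialization");
init_gui();
gui_init.end();

// Or measure a block of code directly; the value of the block is passed through.
let config = profiling::measure_task!("load config", { load_config() });
```

The JS API mirrors this with `profiling.task.start(...)`/`.end()` and
`profiling.task.measure(...)`.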
### Development Branches
The following branches are used to develop the product:

View File

@ -121,44 +121,37 @@ impl Project {
///
/// Returns the controllers of module and graph which should be displayed in the view.
pub async fn initialize(&self) -> FallibleResult<InitializationResult> {
profiling::measure_task!("project_initialise", {
let project = self.model.clone_ref();
let parser = self.model.parser();
let module_path = self.initial_module_path()?;
let file_path = module_path.file_path().clone();
let project = self.model.clone_ref();
let parser = self.model.parser();
let module_path = self.initial_module_path()?;
let file_path = module_path.file_path().clone();
// TODO [mwu] This solution to recreate missing main file should be considered
// provisional until proper decision is made. See: https://github.com/enso-org/enso/issues/1050
self.recreate_if_missing(&file_path, default_main_method_code()).await?;
let method = main_method_ptr(project.qualified_name(), &module_path);
let main_module_model = self.model.module(module_path.clone()).await?;
Self::add_main_if_missing(
project.qualified_name(),
&main_module_model,
&method,
&parser,
)?;
// TODO [mwu] This solution to recreate missing main file should be considered provisional
// until proper decision is made. See: https://github.com/enso-org/enso/issues/1050
self.recreate_if_missing(&file_path, default_main_method_code()).await?;
let method = main_method_ptr(project.qualified_name(), &module_path);
let main_module_model = self.model.module(module_path.clone()).await?;
Self::add_main_if_missing(project.qualified_name(), &main_module_model, &method, &parser)?;
let mut info = main_module_model.info();
info.add_module_import(
&project.qualified_module_name(&module_path),
&project.parser(),
&QualifiedName::from_text("Standard.Visualization").unwrap(),
);
main_module_model.update_ast(info.ast)?;
let mut info = main_module_model.info();
info.add_module_import(
&project.qualified_module_name(&module_path),
&project.parser(),
&QualifiedName::from_text("Standard.Visualization").unwrap(),
);
main_module_model.update_ast(info.ast)?;
// Here, we should be relatively certain (except race conditions in case of multiple
// clients that we currently do not support) that main module exists and contains main
// method. Thus, we should be able to successfully create a graph controller for it.
let main_module_text = controller::Text::new(&self.logger, &project, file_path).await?;
let main_graph = controller::ExecutedGraph::new(&self.logger, project, method).await?;
// Here, we should be relatively certain (except race conditions in case of multiple
// clients that we currently do not support) that main module exists and contains main
// method. Thus, we should be able to successfully create a graph controller for it.
let main_module_text = controller::Text::new(&self.logger, &project, file_path).await?;
let main_graph = controller::ExecutedGraph::new(&self.logger, project, method).await?;
self.init_call_stack_from_metadata(&main_module_model, &main_graph).await;
self.notify_about_compiling_process(&main_graph);
self.display_warning_on_unsupported_engine_version();
self.init_call_stack_from_metadata(&main_module_model, &main_graph).await;
self.notify_about_compiling_process(&main_graph);
self.display_warning_on_unsupported_engine_version();
Ok(InitializationResult { main_module_text, main_module_model, main_graph })
})
Ok(InitializationResult { main_module_text, main_module_model, main_graph })
}
}

View File

@ -22,7 +22,6 @@ ensogl-text-msdf-sys = { path = "../../../lib/rust/ensogl/component/text/msdf-sy
ensogl-hardcoded-theme = { path = "../../../lib/rust/ensogl/app/theme/hardcoded" }
ide-view-graph-editor = { path = "graph-editor" }
parser = { path = "../language/parser" }
profiling = { path = "../../../lib/rust/profiling"}
span-tree = { path = "../language/span-tree" }
js-sys = { version = "0.3.28" }
multi-map = { version = "1.3.0" }

View File

@ -12,7 +12,6 @@ let config = {
'copy-webpack-plugin': '^5.1.1',
'html-loader': '^1.3.2',
'mixpanel-browser': '2.40.1',
profiling: '1.0.0',
},
devDependencies: {
'compression-webpack-plugin': '^3.1.0',

View File

@ -20,9 +20,6 @@ import 'firebase/auth'
// @ts-ignore
import semver from 'semver'
// @ts-ignore
import * as profiling from '../../profiling/src/profiling'
// ==================
// === Global API ===
// ==================
@ -335,14 +332,6 @@ function showLogs() {
// @ts-ignore
window.showLogs = showLogs
function showPerformanceReport() {
const report = new profiling.Report()
report.renderToDevConsole()
}
// @ts-ignore
window.showPerformanceReport = showPerformanceReport
// ======================
// === Crash Handling ===
// ======================
@ -971,15 +960,13 @@ async function runEntryPoint(config: Config) {
}
API.main = async function (inputConfig: any) {
const config = profiling.task.measure('load_config', () => {
const urlParams = new URLSearchParams(window.location.search)
// @ts-ignore
const urlConfig = Object.fromEntries(urlParams.entries())
const config = Config.default()
config.updateFromObject(inputConfig)
config.updateFromObject(urlConfig)
return config
})
const urlParams = new URLSearchParams(window.location.search)
// @ts-ignore
const urlConfig = Object.fromEntries(urlParams.entries())
const config = Config.default()
config.updateFromObject(inputConfig)
config.updateFromObject(urlConfig)
if (await checkMinSupportedVersion(config)) {
if (config.authentication_enabled && !Versions.isDevVersion()) {

View File

@ -60,9 +60,6 @@ module.exports = {
BUILD_INFO: JSON.stringify(BUILD_INFO),
FIREBASE_API_KEY: JSON.stringify(process.env.FIREBASE_API_KEY),
}),
/// Makes the `PROFILING_LEVEL` environment variable which is set at build time accessible
/// at runtime. See: https://webpack.js.org/plugins/environment-plugin/
new webpack.EnvironmentPlugin({ PROFILING_LEVEL: '' }),
],
devtool: 'eval-source-map',
devServer: {

View File

@ -1,6 +0,0 @@
let config = {
name: 'profiling',
version: '1.0.0',
}
module.exports = { config }

View File

@ -1 +0,0 @@
["Section", "Task", "Detail", "Debug"]

View File

@ -1,415 +0,0 @@
/**
* Profiling module that supports work with the [Web Performance API]
* (https://developer.mozilla.org/en-US/docs/Web/API/Performance).
*
* Allows structured profiling of code with a hierarchical API.
*
* Example usage
* -------------
* ```
* // Manually start and end the measurement.
* const profilingHandle = profiling.task.start('doSomething')
* doSomething()
* profilingHandle.end()
*
* // Or use the `measure` method.
* profiling.task.measure('doSomething', doSomething)
*
* ```
*
* Note that this API and encoding formats for messages are synced with the rust equivalent in
* `lib/rust/profiling/src/lib.rs`.
*/
import * as assert from 'assert'
// =========================
// === Profiler Registry ===
// =========================
/// List of all instantiated profilers.
const profilerRegistry: Profiler[] = []
// ================
// === Metadata ===
// ================
type SourceLocation = {
file: String
line: number
}
type Metadata = {
source: SourceLocation
profiling_level: string
label: string
}
/**
* Create a Metadata object that gets populated with the `SourceLocation` of the caller of the
* caller of this method.
*/
function makeMetadata(profiling_level: string, label: string): Metadata {
// We need to move up the stack by 2 levels to end up at the call of the caller of this
// function.
const callerLevel = 2
const source = callerInfo(callerLevel)
return {
source,
profiling_level,
label,
}
}
/**
* Return the `SourceLocation` of this method call. By supplying the `caller_level` argument, the
* `SourceLocation` will be shifted up the call stack by that number of levels.
*
*/
function callerInfo(caller_level = 0): SourceLocation | null {
const e = new Error()
const regex = /\((?<filePath>.*):(?<line>\d+):(?<column>\d+)\)$/
const caller = e.stack.split('\n')[2 + caller_level]
if (!caller) {
return null
}
const match = regex.exec(caller)
if (!match) {
return null
}
return {
file: match.groups['filePath'],
line: Number(match.groups['line']),
}
}
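// Illustration (hypothetical example): a V8-style stack line such as
//     "    at makeMetadata (app/ide-desktop/lib/profiling/src/profiling.ts:42:15)"
// is matched by the regex above, yielding filePath = '.../profiling.ts' and line = 42.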
// ==============
// === Labels ===
// ==============
// Return a string that encodes the given profiler level and name for a mark that indicates the
// start of an interval. The label encodes the profiler level, name and event type.
function startIntervalLabel(metadata: Metadata): string {
return `${metadata.label} [START]`
}
// Return a string that encodes the given profiler level and name for a mark that indicates the
// end of an interval.
function endIntervalLabel(metadata: Metadata): string {
return `${metadata.label} [END]`
}
// Return a string that encodes the given profiler level and name for display in the developer
// console performance tool.
function performanceToolLabel(metadata: Metadata): string {
return `${metadata.label} (${metadata.source.file}:${metadata.source.line})`
}
// ================
// === Profiler ===
// ================
/**
* Class that exposes `start`, `end` and `measure` methods, as well as some utility methods for
* working with the profiler.
*/
class Profiler {
profileLevel: string
toggledInEnvironment: boolean
constructor(profilerLevel: string) {
this.profileLevel = profilerLevel
// The value `process.env.PROFILING_LEVEL` is populated through the [environment plugin]
// (https://webpack.js.org/plugins/environment-plugin/) at build time from an environment
// variable.
this.toggledInEnvironment = process.env.PROFILING_LEVEL.toLowerCase().includes(
profilerLevel.toLowerCase()
)
profilerRegistry.push(this)
}
profileLevelName(): string {
return this.profileLevel.toUpperCase()
}
// Indicate whether this profiler should perform profiling.
isActive(): boolean {
// We need to check whether this profiler, or a profiler that is higher in the hierarchy, is
// activated. The hierarchy is established by order of instantiation / registration in the
// `profilerRegistry`.
const profilerIndex = profilerRegistry.findIndex(item => item === this)
if (profilerIndex < 0) {
const profilerLevelName = this.profileLevelName()
const warning = `Looked for invalid profiler in profilerRegistry: ${profilerLevelName}`
console.warn(warning)
return false
}
for (let i = profilerIndex; i < profilerRegistry.length; i++) {
if (profilerRegistry[i].toggledInEnvironment) {
return true
}
}
return false
}
isProfileLevelName(profilerLevelName: string): boolean {
return this.profileLevelName() === profilerLevelName.toUpperCase()
}
// Start the profiling of the named interval.
start(intervalName: string): IntervalHandle {
if (this.isActive()) {
const metadata = makeMetadata(this.profileLevel, intervalName)
performance.mark(startIntervalLabel(metadata))
}
return new IntervalHandle(intervalName, this)
}
_end_with_metadata(metadata: Metadata) {
const start_label = startIntervalLabel(metadata)
const end_label = endIntervalLabel(metadata)
const measurement_label = performanceToolLabel(metadata)
if (this.isActive()) {
performance.mark(end_label)
const options = { detail: metadata, start: start_label, end: end_label }
try {
// @ts-ignore TS does not yet support typing for the User Timing Level 3
// Specification.
performance.measure(measurement_label, options)
} catch (e) {
console.warn(`Error during profiling: ${e}`)
}
}
}
/// End the profiling of the named interval.
end(interval_name: string) {
const metadata = makeMetadata(this.profileLevel, interval_name)
this._end_with_metadata(metadata)
}
/// Profile the execution of the given callable.
measure(interval_name: string, closure: CallableFunction) {
const metadata = makeMetadata(this.profileLevel, interval_name)
this.start(interval_name).release()
const ret = closure()
this._end_with_metadata(metadata)
return ret
}
}
// ================
// === Decoding ===
// ================
function parseProfilerLevel(profilerLevelName: string): Profiler | null {
for (const profiler of profilerRegistry) {
if (profiler.isProfileLevelName(profilerLevelName)) {
return profiler
}
}
console.warn(`Could not parse profiler level from "${profilerLevelName}"`)
return null
}
// ======================
// === IntervalHandle ===
// ======================
/**
* Handle that allows the setting of an `end` marker for a profiling interval that was started via
* the `start` function.
*/
class IntervalHandle {
intervalName: string
profilerLevel: Profiler
releasedStatus: ReleasedStatus
constructor(intervalName: string, profilerLevel: Profiler) {
this.intervalName = intervalName
this.profilerLevel = profilerLevel
this.releasedStatus = new ReleasedStatus()
this.releasedStatus.released = false
let entry = new IntervalFinalizationRegistryEntry(intervalName, this.releasedStatus)
if (handleRegistry) {
handleRegistry.register(this, entry)
}
}
/// Measure the interval.
end() {
const metadata = makeMetadata(this.profilerLevel.profileLevel, this.intervalName)
this.profilerLevel._end_with_metadata(metadata)
this.releasedStatus.released = true
}
/// Release the handle to manually call `end_interval` without emitting a warning.
release() {
this.releasedStatus.released = true
}
}
/**
* Class that contains the information added to the `FinalizationRegistry` to decide whether to emit
* a warning when an `IntervalHandle` is garbage collected.
*/
class IntervalFinalizationRegistryEntry {
interval_name: string
release_status: ReleasedStatus
constructor(interval_name: string, release_status: ReleasedStatus) {
this.interval_name = interval_name
this.release_status = release_status
}
}
/** Class that contains information about the release status of an `IntervalHandle`. An
* `IntervalHandle` is released by calling `end` or `release`. If it is garbage collected
* without being released, we want to emit a warning. In order to be able to keep track of the
* release status of an `IntervalHandle` we need to keep the status boolean in a separate class. */
class ReleasedStatus {
released: boolean
constructor() {
this.released = false
}
}
let finalizationRegistry
try {
// We need to ts-ignore here as `FinalizationRegistry` is not recognised by TypeScript as being in
// scope even though it is.
// @ts-ignore
finalizationRegistry = new FinalizationRegistry(heldValue => {
if (heldValue instanceof IntervalFinalizationRegistryEntry) {
if (!heldValue.release_status.released) {
const interval_name = heldValue.interval_name
console.warn(`${interval_name} was dropped without explicit end of measurement.`)
}
}
})
} catch (e) {
console.warn(`Initialisation of FinalizationRegistry failed because of ${e}.`)
}
const handleRegistry = finalizationRegistry
if (!finalizationRegistry) {
console.error(
'Browser does not support use of `FinalizationRegistry`. ' +
'Profiling might not work correctly if handles are dropped.'
)
}
// ===================
// === Measurement ===
// ===================
// Number of decimals to show in floating point output.
const OUTPUT_PRECISION = 2
/**
* Profiling measurement that contains the profiling information about a measured time interval.
*/
class Measurement {
readonly startTime: number
readonly duration: number
readonly profilerLevel: Profiler
readonly name: string
constructor(startTime: number, duration: number, profilerLevel: Profiler, name: string) {
this.startTime = startTime
this.duration = duration
this.profilerLevel = profilerLevel
this.name = name
}
static fromPerformanceEntry(entry: PerformanceEntry): Measurement | null {
const startTime = entry.startTime
const duration = entry.duration
// @ts-ignore TS does not yet support typing for the User Timing Level 3
// Specification.
const detail = entry.detail
const profilerLevel = parseProfilerLevel(detail.profiling_level)
if (profilerLevel === null) {
const profiling_level = detail.profiling_level
console.warn(`Could not parse ${profiling_level} as valid profiling level.`)
return null
}
const name = entry.name
return new Measurement(startTime, duration, profilerLevel, name)
}
endTime(): number {
return this.startTime + this.duration
}
prettyString(): string {
const start = this.startTime.toFixed(OUTPUT_PRECISION)
const end = this.endTime().toFixed(OUTPUT_PRECISION)
const duration = this.duration.toFixed(OUTPUT_PRECISION)
const name = this.name
return `[${start},${end}] (${duration}) ${name}`
}
prettyPrint() {
console.log(this.prettyString())
}
}
// ==============
// === Report ===
// ==============
/**
* Report of all measurements taken during the current session.
*/
export class Report {
measurements: Measurement[]
constructor() {
const entries = performance.getEntriesByType('measure')
this.measurements = entries.map(entry => Measurement.fromPerformanceEntry(entry))
}
singleLevelMeasurement(profiler: Profiler) {
return this.measurements.filter(m => m.profilerLevel === profiler)
}
/**
* Render the report to the developer console.
*/
renderToDevConsole() {
console.groupCollapsed('PerformanceReport')
for (const profiler of profilerRegistry) {
console.groupCollapsed(profiler.profileLevel)
this.singleLevelMeasurement(profiler).forEach(m => m.prettyPrint())
console.groupEnd()
}
console.groupEnd()
}
toString() {
let output = ''
for (const profiler of profilerRegistry) {
this.singleLevelMeasurement(profiler).forEach(
m => (output = output.concat(m.prettyString(), '\n'))
)
}
return output
}
}
// ================================
// === Public API for Profilers ===
// ================================
/// These need to be defined manually for type information generated by TS to be correct.
/// They need to be kept in sync with the `./profilers.json` and the Rust API in
/// `lib/rust/profiling/src/lib.rs`.
export const section = new Profiler('Section')
export const task = new Profiler('Task')
export const detail = new Profiler('Detail')
export const debug = new Profiler('Debug')

View File

@ -1,13 +0,0 @@
{
"compilerOptions": {
"noImplicitAny": true,
"target": "ES5",
"module": "es2020",
"declaration": true,
"outDir": "./lib",
"types": ["node"],
"lib": ["dom", "es5", "scripthost", "esnext.weakref"]
},
"exclude": ["node_modules", "**/*.spec.ts"],
"include": ["./src"]
}

View File

@ -31,6 +31,7 @@
"tar": "^6.0.5",
"to-ico": "^1.1.5",
"ts-loader": "^8.0.3",
"typescript": "^3.4",
"unzipper": "^0.10",
"uuid": "^8.3.1",
"yaml-loader": "^0.6.0",
@ -38,7 +39,6 @@
},
"devDependencies": {
"lerna": "^3.22.1",
"typescript": "^3.4",
"webpack": "^4.42.1",
"webpack-cli": "^3.3.11",
"webpack-dev-server": "^3.1.5"
@ -2410,9 +2410,9 @@
"optional": true
},
"node_modules/@types/yargs": {
"version": "17.0.8",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.8.tgz",
"integrity": "sha512-wDeUwiUmem9FzsyysEwRukaEdDNcwbROvQ9QGRKaLI6t+IltNzbn4/i4asmB10auvZGQCzSQ6t0GSczEThlUXw==",
"version": "17.0.7",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.7.tgz",
"integrity": "sha512-OvLKmpKdea1aWtqHv9bxVVcMoT6syAeK+198dfETIFkAevYRGwqh4H+KFxfjUETZuUuE5sQCAFwdOdoHUdo8eg==",
"dependencies": {
"@types/yargs-parser": "*"
}
@ -6856,9 +6856,9 @@
}
},
"node_modules/electron-builder/node_modules/yargs": {
"version": "17.3.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.3.1.tgz",
"integrity": "sha512-WUANQeVgjLbNsEmGk20f+nlHgOqzRFpiGWVaBrYGYIGANIIu3lWjoyi0fNlFmJkvfhCZ6BXINe7/W2O2bV4iaA==",
"version": "17.3.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.3.0.tgz",
"integrity": "sha512-GQl1pWyDoGptFPJx9b9L6kmR33TGusZvXIZUT+BOz9f7X2L94oeAskFYLEg/FkhV06zZPBYLvLZRWeYId29lew==",
"dependencies": {
"cliui": "^7.0.2",
"escalade": "^3.1.1",
@ -7101,9 +7101,9 @@
}
},
"node_modules/electron/node_modules/@types/node": {
"version": "12.20.39",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.39.tgz",
"integrity": "sha512-U7PMwkDmc3bnL0e4U8oA0POpi1vfsYDc+DEUS2+rPxm9NlLcW1dBa5JcRhO633PoPUcCSWMNXrMsqhmAVEo+IQ=="
"version": "12.20.38",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.38.tgz",
"integrity": "sha512-NxmtBRGipjx1B225OeMdI+CQmLbYqvvmYbukDTJGDgzIDgPQ1EcjGmYxGhOk5hTBqeB558S6RgHSpq2iiqifAQ=="
},
"node_modules/elliptic": {
"version": "6.5.3",
@ -20882,9 +20882,9 @@
"optional": true
},
"@types/yargs": {
"version": "17.0.8",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.8.tgz",
"integrity": "sha512-wDeUwiUmem9FzsyysEwRukaEdDNcwbROvQ9QGRKaLI6t+IltNzbn4/i4asmB10auvZGQCzSQ6t0GSczEThlUXw==",
"version": "17.0.7",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.7.tgz",
"integrity": "sha512-OvLKmpKdea1aWtqHv9bxVVcMoT6syAeK+198dfETIFkAevYRGwqh4H+KFxfjUETZuUuE5sQCAFwdOdoHUdo8eg==",
"requires": {
"@types/yargs-parser": "*"
}
@ -24331,9 +24331,9 @@
},
"dependencies": {
"@types/node": {
"version": "12.20.39",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.39.tgz",
"integrity": "sha512-U7PMwkDmc3bnL0e4U8oA0POpi1vfsYDc+DEUS2+rPxm9NlLcW1dBa5JcRhO633PoPUcCSWMNXrMsqhmAVEo+IQ=="
"version": "12.20.38",
"resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.38.tgz",
"integrity": "sha512-NxmtBRGipjx1B225OeMdI+CQmLbYqvvmYbukDTJGDgzIDgPQ1EcjGmYxGhOk5hTBqeB558S6RgHSpq2iiqifAQ=="
}
}
},
@ -24475,9 +24475,9 @@
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="
},
"yargs": {
"version": "17.3.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.3.1.tgz",
"integrity": "sha512-WUANQeVgjLbNsEmGk20f+nlHgOqzRFpiGWVaBrYGYIGANIIu3lWjoyi0fNlFmJkvfhCZ6BXINe7/W2O2bV4iaA==",
"version": "17.3.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.3.0.tgz",
"integrity": "sha512-GQl1pWyDoGptFPJx9b9L6kmR33TGusZvXIZUT+BOz9f7X2L94oeAskFYLEg/FkhV06zZPBYLvLZRWeYId29lew==",
"requires": {
"cliui": "^7.0.2",
"escalade": "^3.1.1",

View File

@ -7,7 +7,6 @@ let config = {
webpack: '^4.42.1',
'webpack-cli': '^3.3.11',
'webpack-dev-server': '^3.1.5',
typescript: `^3.4`,
},
scripts: {
// IMPORTANT! Please remember to run "bootstrap" before "build" or "watch" to install

View File

@ -87,45 +87,6 @@ function run_project_manager() {
})
}
// Create a feature flag that sets the desired profiling level in the build from the given
// command line argument. If the arguments contain a `profiling-level` argument that has a value
// that matches one of the profilers defined in the
// `../app/ide-desktop/lib/profiling/src/profilers.json` configuration file, that profiler will be
// enabled. Note that profilers are hierarchical and the highest level profiler will determine the
// activated profilers.
// Example Output: "enable-task-profiling"
function make_profiling_feature_string(args) {
let profiling_level = args['profiling-level']
if (profiling_level === undefined) {
return ''
}
profiling_level = profiling_level.toString().toLowerCase()
const to_check = require(`../app/ide-desktop/lib/profiling/src/profilers.json`)
const features = []
for (let feature of to_check) {
feature = feature.toLowerCase()
if (profiling_level.includes(feature)) {
features.push(`enable-${feature}-profiling`)
}
}
return features.join(' ')
}
function make_performance_logging_feature_flag(args) {
const feature_string = make_profiling_feature_string(args)
if (feature_string.length === 0) {
return ''
} else {
return '--features ' + '"' + feature_string + '"'
}
}
function set_performance_logging_env(args) {
const feature_string = make_profiling_feature_string(args)
process.env['PROFILING_LEVEL'] = feature_string
}
// ================
// === Commands ===
// ================
@ -181,7 +142,6 @@ commands.build.js = async function () {
commands.build.rust = async function (argv) {
let crate = argv.crate || DEFAULT_CRATE
let crate_sfx = crate ? ` '${crate}'` : ``
console.log(`Building WASM target${crate_sfx}.`)
let args = [
'build',
@ -197,22 +157,9 @@ commands.build.rust = async function (argv) {
if (argv.dev) {
args.push('--dev')
}
/// Set compile time features to toggle profiling mode. This is used for the Rust profiling
// crate.
const feature_flags = make_performance_logging_feature_flag(argv)
if (cargoArgs || feature_flags) {
if (cargoArgs) {
args.push('--')
}
if (feature_flags) {
cargoArgs.push(feature_flags)
}
/// Set environment variables that indicate the profiling mode. This is used in the JS profiling library.
set_performance_logging_env(argv)
await run_cargo('wasm-pack', args)
await patch_file(paths.wasm.glue, js_workaround_patcher)
await fs.rename(paths.wasm.mainRaw, paths.wasm.main)

View File

@ -27,8 +27,7 @@ impl<T: AsRef<str> + Display> GithubRelease<T> {
format!("{}/releases/download/{}/{}", self.project_url, self.version, self.filename);
let destination_file = destination_dir.join(self.filename.as_ref());
Self::remove_old_file(&destination_file);
let resp = reqwest::blocking::get(&url).expect("Download failed.");
let mut resp = resp.error_for_status().expect("Received error from server.");
let mut resp = reqwest::blocking::get(&url).expect("Download failed.");
let mut out = std::fs::File::create(destination_file).expect("Failed to create file.");
std::io::copy(&mut resp, &mut out).expect("Failed to copy file content.");
}

View File

@ -1,32 +0,0 @@
[package]
name = "profiling"
authors = ["Enso Team <contact@luna-lang.org>"]
version = "0.1.0"
edition = "2021"
[lib]
crate-type = ["cdylib", "rlib" ]
[features]
enable-section-profiling = []
enable-task-profiling = []
enable-detail-profiling = []
enable-debug-profiling = []
default = []
[dependencies]
chrono = "0.4.19"
enso-logger = { path = "../logger"}
enso-prelude = { version = "^0.2.1", path = "../prelude" }
enso-web = { path = "../web" }
Inflector = "0.11.4"
js-sys = { version = "0.3" }
ordered-float = "2.8.0"
paste = "1.0"
serde = "1.0"
serde_plain = "1.0"
serde_json = "1.0"
wasm-bindgen = {version = "0.2", features = ["serde-serialize"]}
web-sys = { version = "0.3", features=["PerformanceEntry"] }
macros = { path = "./macros" }

View File

@ -1,15 +0,0 @@
[package]
name = "macros"
authors = ["Enso Team <contact@luna-lang.org>"]
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
proc-macro2 = "1.0.33"
quote = "1.0.10"
serde = "1.0"
serde_json = "1.0"
syn = "1.0.82"

View File

@ -1,87 +0,0 @@
//! Support crate that allows defining loggers based on a configuration file.
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unsafe_code)]
#![warn(unused_import_braces)]
use proc_macro2::TokenStream;
use quote::quote;
/// Calls the given macro with each defined profiler and creates a method that checks whether
/// a given profiler is activated via the crate's feature flags.
/// The macro that is expected to be passed is `define_logger` from the profiling crate. The
/// call is built to satisfy the input requirements of this macro. The `define_logger` macro
/// needs to be defined in the profiling crate, as it exposes API-related structs that also need
/// to be part of the public API there, and to avoid circular dependencies.
#[proc_macro]
pub fn define_hierarchy(tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
let profilers: Vec<String> = parse_profilers(tokens.into());
let mut out_tokens = proc_macro::TokenStream::default();
out_tokens.extend(make_profiling_level_is_active_function(&profilers).into_iter());
out_tokens
}
fn parse_profilers(tokens: proc_macro2::TokenStream) -> Vec<String> {
let mut profiler_names = Vec::default();
for token in tokens {
if let proc_macro2::TokenTree::Ident(ident) = token {
profiler_names.push(ident.to_string());
}
}
profiler_names
}
/// Creates a function that checks whether a given profiling level name corresponds to an active
/// profiling level. The profilers are interpreted to be hierarchical, so if a profiler further
/// down the list is active, all profilers above are expected to be active, too.
fn make_profiling_level_is_active_function(profilers: &[String]) -> proc_macro::TokenStream {
let mut match_arms = TokenStream::default();
for ix in 0..profilers.len() {
let match_arm: TokenStream = if ix == (profilers.len() - 1) {
make_match_line(&profilers[ix], None)
} else {
make_match_line(&profilers[ix], Some(&profilers[ix + 1]))
};
match_arms.extend(match_arm.into_iter())
}
let is_active_fn_tokens: proc_macro::TokenStream = quote! {
fn profiling_level_is_active(log_level: ProfilingLevel) -> bool {
match log_level.as_str() {
#match_arms
invalid_level => {
WARNING!(
format!("Tried to check for invalid profiling level: {:?}", invalid_level)
);
false
}
}
}
}
.into();
is_active_fn_tokens
}
/// Helper function that creates a match arm for the `make_profiling_level_is_active_function`.
/// The match arm will check whether the given profiler is enabled, and if there is a next one,
/// insert a recursive call to check whether the next profiler in the hierarchy is enabled.
fn make_match_line(current: &str, next: Option<&str>) -> TokenStream {
let current = current.to_lowercase();
let current_profiler_mod = quote::format_ident!("{}", current);
if let Some(next_profiling_level) = next {
let next_profiling_level = next_profiling_level.to_lowercase();
quote! {
#current => {
#current_profiler_mod::ENABLED
|| profiling_level_is_active(#next_profiling_level.to_string())
}
}
} else {
quote! {
#current => #current_profiler_mod::ENABLED,
}
}
}
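For illustration, a sketch of roughly what `define_hierarchy!(section, task, detail, debug)` expands to, assuming the arms generated by `make_match_line` above:
fn profiling_level_is_active(log_level: ProfilingLevel) -> bool {
    match log_level.as_str() {
        // Each arm checks its own feature toggle and falls through to the next, finer-grained
        // level, so enabling a lower level also activates every level above it.
        "section" => section::ENABLED || profiling_level_is_active("task".to_string()),
        "task" => task::ENABLED || profiling_level_is_active("detail".to_string()),
        "detail" => detail::ENABLED || profiling_level_is_active("debug".to_string()),
        "debug" => debug::ENABLED,
        invalid_level => {
            WARNING!(format!("Tried to check for invalid profiling level: {:?}", invalid_level));
            false
        }
    }
}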

View File

@ -1,440 +0,0 @@
//! The performance-logger library allows creating performance reports via the
//! [Web Performance API](https://developer.mozilla.org/en-US/docs/Web/API/Performance).
//!
//! The API provided allows marking intervals between a start and an end, which then get
//! measured and marked in the Chrome DevTools Performance Monitor. Intervals can be assigned a
//! log level, for example, `TASK`, `SECTION` or `DEBUG`, to allow filtering based on the current
//! needs.
//!
//! Example usage
//! --------------
//! ```ignore
//! let some_task = || "DoWork";
//!
//! // Manually start and end the measurement.
//! let gui_init = profiling::start_task!("GUI initialization");
//! some_task();
//! gui_init.end();
//! // Or use the `measure` method.
//! profiling::measure_task!("GUI initialization", { some_task() });
//! ```
//!
//! Note that this API and encoding formats for messages are synced with the JS equivalent in
//! `app/ide-desktop/lib/profiling/src/profiling.ts`.
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unsafe_code)]
#![warn(unused_import_braces)]
use enso_prelude::*;
use wasm_bindgen::prelude::*;
pub mod macros;
use crate::js::*;
use ::macros::*;
use enso_prelude::fmt::Formatter;
use enso_web::performance;
use inflector::Inflector;
use ordered_float::OrderedFloat;
use serde::Deserialize;
use serde::Serialize;
use serde_plain::from_str;
use wasm_bindgen::JsValue;
use web_sys::PerformanceEntry;
// ================
// === Metadata ===
// ================
/// Source code location given as file path and line number.
#[derive(Serialize, Deserialize, Clone, Debug)]
#[allow(missing_docs)]
pub struct SourceLocation {
pub file: String,
pub line: u32,
}
/// Measurement metadata. This struct holds information about a measurement and provides
/// functionality for conversion from/to JS for use in the performance API.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Metadata {
/// Source code location of the profiling interval.
pub source: SourceLocation,
/// Profiling level of the measurement.
pub profiling_level: ProfilingLevel,
/// Label of the measurement.
pub label: String,
}
impl From<Metadata> for JsValue {
fn from(metadata: Metadata) -> JsValue {
JsValue::from_serde(&metadata).expect("Failed to serialise Metadata struct to JSON.")
}
}
impl TryFrom<JsValue> for Metadata {
type Error = serde_json::Error;
fn try_from(value: JsValue) -> Result<Self, Self::Error> {
value.into_serde()
}
}
// =================================
// === Custom JS Performance API ===
// =================================
mod js {
use super::*;
use js_sys::JsString;
use wasm_bindgen::JsValue;
#[wasm_bindgen(inline_js = "
export function mark_with_metadata(markName, markOptions) {
performance.mark(markName, markOptions)
}
export function measure_with_start_mark_and_end_mark_and_metadata(
measureName,
startMark,
endMark,
measureOptions
) {
const options = {}
options.start = startMark
options.end = endMark
options.detail = measureOptions
performance.measure(measureName, options)
}
")]
extern "C" {
#[allow(unsafe_code)]
pub fn mark_with_metadata(mark_name: JsString, mark_options: JsValue);
#[allow(unsafe_code)]
pub fn measure_with_start_mark_and_end_mark_and_metadata(
measure_name: JsString,
start_mark: JsString,
end_mark: JsString,
measure_options: JsValue,
);
}
}
// =================
// === Profilers ===
// =================
#[allow(missing_docs)]
type ProfilingLevel = String;
define_profiler! {
$, "section", Section, section, start_section, end_section, measure_section;
$, "task", Task, task, start_task, end_task, measure_task;
$, "detail", Detail, detail, start_detail, end_detail, measure_detail;
$, "debug", Debug, debug, start_debug, end_debug, measure_debug;
}
define_hierarchy!(section, task, detail, debug);
// ====================
// === Measurements ===
// ====================
/// A single interval measurement.
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct Measurement {
pub name: String,
pub start_time: f64,
pub duration: f64,
pub log_level: ProfilingLevel,
}
impl Measurement {
/// Return the timestamp of the interval's end.
pub fn end_time(&self) -> f64 {
self.start_time + self.duration
}
}
impl Display for Measurement {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.write_str(&format!(
"[{:.2}, {:.2}] ({:.2}) {}",
self.start_time,
self.end_time(),
self.duration,
self.name
))
}
}
/// Error that can occur when converting between a `PerformanceEntry` and a `Measurement`.
#[derive(Debug, Clone)]
pub enum MeasurementConversionError {
/// The name of the `PerformanceEntry` was not in a format that could be decoded to a
/// `Measurement`.
InvalidFormatting,
/// The log level encoded in the `PerformanceEntry`'s metadata is not a valid log level.
InvalidLogLevel,
}
impl TryFrom<PerformanceEntry> for Measurement {
type Error = MeasurementConversionError;
fn try_from(measure: PerformanceEntry) -> Result<Self, Self::Error> {
use MeasurementConversionError::*;
let start_time = measure.start_time();
let duration = measure.duration();
let name = measure.name();
let metadata: Metadata = js_sys::Reflect::get(&measure, &"detail".to_string().into())
.expect("Could not get details field of PerformanceEntry")
.try_into()
.or(Err(InvalidFormatting))?;
let log_level_name = metadata.profiling_level;
let log_level_name = log_level_name.to_class_case();
let log_level: ProfilingLevel = from_str(&log_level_name).or(Err(InvalidLogLevel))?;
Ok(Measurement { start_time, duration, name, log_level })
}
}
/// Error that can occur when taking a measurement.
#[derive(Clone, Debug)]
pub enum MeasurementError {
/// Parsing the measurement information from the performance API failed, for example, due to an
/// invalid log level.
InvalidMeasurementConversion,
/// No measurement was created in the performance API backend.
NoMeasurementFound,
/// A function call to the Performance API failed to execute.
PerformanceAPIExecutionFailure {
/// Underlying error returned by the JS API.
error: JsValue,
},
/// Profiling for the given profiling level was disabled.
ProfilingDisabled,
}
// ==================================
// === Internal Profiling Methods ===
// ==================================
/// Emit a warning if the given result is an error.
pub fn warn_on_error(result: Result<Measurement, MeasurementError>) {
if let Err(e) = result {
WARNING!(format!("Failed to do profiling for an interval due to error: {:?}", e));
}
}
/// Return a string that encodes the given label for a mark that indicates the start of
/// an interval.
/// Example output: "DoThing! [START]".
fn start_interval_label(metadata: &Metadata) -> String {
format!("{} [START]", metadata.label)
}
/// Return a string that encodes the given label for a mark that indicates the end of
/// an interval.
/// Example output: "DoThing! [END]".
fn end_interval_label(metadata: &Metadata) -> String {
format!("{} [END]", metadata.label)
}
/// Return a string that encodes the given label and source location for a measurement.
/// Example output: "DoThing! (FilePath:22)".
fn measure_interval_label(metadata: &Metadata) -> String {
format!("{} ({}:{})", metadata.label, metadata.source.file, metadata.source.line)
}
/// Mark the start of an interval in the JS API. Returns an `IntervalHandle` that can be used to end
/// the created interval. The interval can also be ended by calling `end_interval` with the same
/// metadata.
pub fn mark_start_interval(metadata: Metadata) -> IntervalHandle {
let interval_name = start_interval_label(&metadata);
if profiling_level_is_active(metadata.profiling_level.clone()) {
mark_with_metadata(interval_name.into(), metadata.clone().into());
}
IntervalHandle::new(metadata)
}
fn get_latest_performance_entry() -> Option<PerformanceEntry> {
let entries: js_sys::Array = performance().get_entries_by_type("measure");
if entries.length() < 1 {
return None;
}
let measure = entries.get(entries.length() - 1);
let measure: PerformanceEntry = measure.into();
Some(measure)
}
/// Mark the end of an interval in the JS API and measure it. Return the measurement taken between
/// the start and end of the interval, if possible.
pub fn mark_end_interval(metadata: Metadata) -> Result<Measurement, MeasurementError> {
let profiling_level = metadata.profiling_level.clone();
let start_label = start_interval_label(&metadata);
let end_label = end_interval_label(&metadata);
let measurement_label = measure_interval_label(&metadata);
if !profiling_level_is_active(profiling_level) {
Err(MeasurementError::ProfilingDisabled)
} else {
mark_with_metadata(end_label.clone().into(), metadata.clone().into());
measure_with_start_mark_and_end_mark_and_metadata(
measurement_label.into(),
start_label.into(),
end_label.into(),
metadata.into(),
);
let measure = get_latest_performance_entry().ok_or(MeasurementError::NoMeasurementFound)?;
let measurement: Measurement =
measure.try_into().or(Err(MeasurementError::InvalidMeasurementConversion))?;
Ok(measurement)
}
}
// ======================
// === IntervalHandle ===
// ======================
/// Handle that allows ending the interval.
#[derive(Clone, Debug)]
pub struct IntervalHandle {
metadata: Metadata,
released: bool,
}
impl IntervalHandle {
fn new(metadata: Metadata) -> Self {
IntervalHandle { metadata, released: false }
}
/// Measure the interval.
pub fn end(mut self) {
self.released = true;
warn_on_error(mark_end_interval(self.metadata.clone()));
}
/// Release the handle to prevent a warning from being emitted when it is garbage collected without
/// a call to `end`. This can be useful if one wants to call `end_interval` manually, or the
/// equivalent call to `end_interval` is in Rust code.
pub fn release(mut self) {
self.released = true;
drop(self)
}
}
impl Drop for IntervalHandle {
fn drop(&mut self) {
if !self.released {
warn_on_error(mark_end_interval(self.metadata.clone()));
WARNING!(format!(
"{} was dropped without explicitly being ended.",
self.metadata.label
));
}
}
}
// ===============
// === Reports ===
// ===============
/// Report of all measurements taken during the current session.
#[derive(Clone, Debug)]
pub struct Report {
measures: Vec<Measurement>,
}
impl Report {
/// Create a new report from the measurements registered in the Performance API.
pub fn generate() -> Self {
let js_measures = performance().get_entries_by_type("measure");
let mut measures = Vec::default();
js_measures.for_each(&mut |entry, _, _| {
let entry: PerformanceEntry = entry.into();
let measurement: Result<Measurement, _> = entry.try_into();
if let Ok(measurement) = measurement {
measures.push(measurement)
}
});
Report { measures }
}
}
impl Display for Report {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let sorted_measurements =
self.measures.iter().sorted_by_key(|measure| OrderedFloat(measure.start_time));
for to_print in sorted_measurements {
f.write_str(&format!("{}\n", &to_print))?;
}
Ok(())
}
}
/// Return a report on all recorded measurement entries.
pub fn entries() -> Report {
Report::generate()
}
// =============
// === Tests ===
// =============
#[cfg(wasm_bindgen_test)]
mod tests {
use super::*;
// Checks that macros work correctly and create valid code.
#[wasm_bindgen_test]
fn macro_expansion() {
let task_handle = start_task!("sample_task");
task_handle.release();
end_task!("sample_task");
let _value: Option<_> = measure_task!("sample_measurement", {
let a = "DummyExpression".to_string().pop();
if false {
println!("foobar")
}
a
});
}
fn early_return_case() -> Option<()> {
measure_task!("sample_measurement", {
None?;
Some(())
})
}
}
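For context, a minimal usage sketch of the `Report` API above (hypothetical call site):
// Collect all measurements recorded so far and print them as a sorted, textual overview.
let report = profiling::entries();
// `Report` implements `Display`, so it can be logged or printed directly.
println!("{}", report);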

View File

@ -1,206 +0,0 @@
//! Utilities for easy profiling usage and definition.
/// Check whether the given tokens end with a semicolon and call the first argument (a macro in
/// this crate) with `true` or `false` accordingly, followed by the second argument.
#[doc(hidden)]
#[macro_export]
macro_rules! ends_with_colon {
($f:ident $args:tt [;]) => {
$crate::$f! {true, $args}
};
($f:ident $args:tt []) => {
$crate::$f! {false, $args}
};
($f:ident $args:tt [$t:tt $($ts:tt)*]) => {
$crate::ends_with_colon! {$f $args [$($ts)*]}
};
}
/// Turn the given identifier into a function call.
#[doc(hidden)]
#[macro_export]
macro_rules! make_call {
($name:ident) => {
$name();
};
}
/// Put the given code block between the two passed functions. The result will either be of the
/// form
/// ```no_compile
/// f1();
/// foo;
/// bar;
/// f2();
/// ```
/// or
/// ```no_compile
/// f1();
/// foo;
/// out = bar;
/// f2()
/// out
/// ```
/// to allow for return values.
#[doc(hidden)]
#[macro_export]
macro_rules! wrap_block {
($fn_start:ident $fn_end:ident $($t:tt)*) => {
$crate::ends_with_colon!{apply_block_wrapper [$fn_start, $fn_end $($t)*] [$($t)*]}
};
}
#[doc(hidden)]
#[allow(missing_docs)]
#[macro_export]
macro_rules! apply_block_wrapper {
($ending:tt, [$fn_start:ident, $fn_end:ident $($expr:stmt)*]) => {
$crate::block_wrapper_start!{$ending, $fn_start, $fn_end, [$($expr;)*]}
};
}
#[doc(hidden)]
#[allow(missing_docs)]
#[macro_export]
macro_rules! block_wrapper_start {
(false, $fn_start:ident, $fn_end:ident, [$($expr:stmt;)*]) => {
$crate::make_call!{$fn_start}
$crate::block_wrapper_continue!{$fn_end, $($expr;)*}
};
(true, $fn_start:ident, $fn_end:ident, [$($expr:stmt;)*]) => {
$crate::make_call!{$fn_start}
$($expr;)*
$crate::make_call!{$fn_end}
};
}
#[allow(missing_docs)]
#[macro_export]
macro_rules! block_wrapper_continue {
($fn_end:ident, $expr:stmt;) => {
let out = { $expr };
$crate::make_call!{$fn_end}
out
};
($fn_end:ident, $expr:stmt; $($exprs:stmt;)*) => {
$expr;
$crate::block_wrapper_continue!{$fn_end:ident, $($exprs;)*}
};
}
/// Create a Metadata struct that has the source location prepopulated via the `file!` and
/// `line!` macros.
#[doc(hidden)]
#[macro_export]
macro_rules! make_metadata {
($profiling_level:expr, $interval_name:expr) => {
$crate::Metadata {
source: $crate::SourceLocation { file: file!().to_string(), line: line!() },
profiling_level: $profiling_level.to_string(),
label: $interval_name.to_string(),
}
};
}
/// Start measuring the interval of the given profiling level and name.
#[doc(hidden)]
#[macro_export]
macro_rules! start_interval {
($profiling_level:expr, $interval_name:expr) => {
$crate::mark_start_interval($crate::make_metadata!($profiling_level, $interval_name))
};
}
/// End measuring the interval of the given profiling level and name.
#[doc(hidden)]
#[macro_export]
macro_rules! end_interval {
($profiling_level:expr, $interval_name:expr) => {
$crate::warn_on_error($crate::mark_end_interval($crate::make_metadata!(
$profiling_level,
$interval_name
)))
};
}
/// Measure an interval with the given profiling level and name that starts before the given
/// expressions and ends after the given expressions.
#[doc(hidden)]
#[macro_export]
macro_rules! measure_interval {
($d:tt, $profiling_level:expr, $interval_name:expr, $($body:tt)*) => {
{
fn start() {
$crate::start_interval!($profiling_level,$interval_name).release();
}
fn end() {
$crate::end_interval!($profiling_level,$interval_name);
}
$crate::wrap_block! { start end $($body)* }
}
}
}
/// Define a new boolean variable whose value is determined by a feature flag in the crate.
/// The name of the variable is `ENABLED`; it will be true if the feature flag
/// `enable-<profiling_level_name>-profiling` is set, and false otherwise.
#[doc(hidden)]
#[macro_export]
macro_rules! define_profiling_toggle {
($profiling_level_name:ident) => {
paste::item! {
#[doc = "Defines whether the log level `" $profiling_level_name "` should be used."]
#[cfg(feature = "enable-" $profiling_level_name "-profiling")]
pub const ENABLED: bool = true;
#[doc = "Defines whether the log level `" $profiling_level_name "` should be used."]
#[cfg(not(feature = "enable-" $profiling_level_name "-profiling"))]
pub const ENABLED: bool = false;
}
};
}
/// Define a new profiling module that exposes `start`, `end` and `measure` methods. The profiling
/// can be activated and de-activated via a crate feature flag named
/// `enable-<profiling_module_name>-profiling`; when the flag is not set, the profiling methods become no-ops.
#[doc(hidden)]
#[macro_export]
macro_rules! define_profiler {
($($d:tt, $profiling_level:expr, $profiling_level_name_upper:ident, $profiling_level_name:ident,
$start:ident, $end:ident, $measure:ident ;)*) => {$(
/// Profiler module that exposes methods to measure named intervals.
pub mod $profiling_level_name {
$crate::define_profiling_toggle!($profiling_level_name);
/// Start measuring a named time interval. Return an `IntervalHandle` that can be used
/// to end the profiling.
#[macro_export]
macro_rules! $start {
($interval_name:expr) => {
$crate::start_interval!($profiling_level, $interval_name)
};
}
/// Manually end measuring a named time interval.
#[macro_export]
macro_rules! $end {
($interval_name:expr) => {
$crate::end_interval!($profiling_level, $interval_name)
};
}
/// Profile the execution of the given closure.
#[macro_export]
macro_rules! $measure {
($interval_name:expr, $d($d body:tt)*) => {
$crate::measure_interval!(
A, $profiling_level, $interval_name, $d($d body)*)
};
}
})*
}
}