remove unused rust modules (#9540)

Removes the bulk of the Rust crates that we no longer need, but that added significant install, build, and testing time to the Rust parser.
Most significantly, removed `enso-web` and `enso-shapely`, and got rid of many no longer necessary `#![feature]`s. Moved two still used proc-macros from shapely to prelude. The last remaining usage of `web-sys` is within the logger (`console.log`), but we may actually want to keep that one.
This commit is contained in:
Paweł Grabarz 2024-03-27 13:19:38 +01:00 committed by GitHub
parent f2d6079ac4
commit a509035017
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
104 changed files with 301 additions and 13714 deletions

1
.npmrc
View File

@ -1 +0,0 @@
legacy-peer-deps=true

385
Cargo.lock generated
View File

@ -156,97 +156,6 @@ dependencies = [
"tokio",
]
[[package]]
name = "async-executor"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17adb73da160dfb475c183343c8cccd80721ea5a605d3eb57125f0a7b7a92d0b"
dependencies = [
"async-lock",
"async-task",
"concurrent-queue",
"fastrand",
"futures-lite",
"slab",
]
[[package]]
name = "async-global-executor"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776"
dependencies = [
"async-channel",
"async-executor",
"async-io",
"async-lock",
"blocking",
"futures-lite",
"once_cell",
]
[[package]]
name = "async-io"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c374dda1ed3e7d8f0d9ba58715f924862c63eae6849c92d3a18e7fbde9e2794"
dependencies = [
"async-lock",
"autocfg",
"concurrent-queue",
"futures-lite",
"libc",
"log",
"parking",
"polling",
"slab",
"socket2",
"waker-fn",
"windows-sys 0.42.0",
]
[[package]]
name = "async-lock"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685"
dependencies = [
"event-listener",
"futures-lite",
]
[[package]]
name = "async-std"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d"
dependencies = [
"async-channel",
"async-global-executor",
"async-io",
"async-lock",
"crossbeam-utils",
"futures-channel",
"futures-core",
"futures-io",
"futures-lite",
"gloo-timers",
"kv-log-macro",
"log",
"memchr",
"once_cell",
"pin-project-lite",
"pin-utils",
"slab",
"wasm-bindgen-futures",
]
[[package]]
name = "async-task"
version = "4.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524"
[[package]]
name = "async-trait"
version = "0.1.78"
@ -258,12 +167,6 @@ dependencies = [
"syn 2.0.53",
]
[[package]]
name = "atomic-waker"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "debc29dde2e69f9e47506b525f639ed42300fc014a3e007832592448fa8e4599"
[[package]]
name = "atty"
version = "0.2.14"
@ -687,20 +590,6 @@ dependencies = [
"generic-array",
]
[[package]]
name = "blocking"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c67b173a56acffd6d2326fb7ab938ba0b00a71480e14902b2591c87bc5741e8"
dependencies = [
"async-channel",
"async-lock",
"async-task",
"atomic-waker",
"fastrand",
"futures-lite",
]
[[package]]
name = "boolinator"
version = "2.4.0"
@ -752,16 +641,6 @@ dependencies = [
"syn 1.0.107",
]
[[package]]
name = "buf_redux"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f"
dependencies = [
"memchr",
"safemem",
]
[[package]]
name = "bumpalo"
version = "3.12.2"
@ -801,26 +680,6 @@ dependencies = [
"syn 1.0.107",
]
[[package]]
name = "bytemuck"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea"
dependencies = [
"bytemuck_derive",
]
[[package]]
name = "bytemuck_derive"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdde5c9cd29ebd706ce1b35600920a33550e402fc998a2e53ad3b42c3c47a192"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.53",
]
[[package]]
name = "byteorder"
version = "1.4.3"
@ -1577,17 +1436,6 @@ dependencies = [
"serde",
]
[[package]]
name = "enso-debug-api"
version = "0.1.0"
dependencies = [
"derivative",
"futures",
"js-sys",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "enso-doc-parser"
version = "0.1.0"
@ -1596,7 +1444,6 @@ dependencies = [
"enso-metamodel-lexpr",
"enso-parser",
"enso-prelude",
"enso-profiler",
"enso-reflect",
"lexpr",
"pretty_assertions",
@ -1656,6 +1503,17 @@ dependencies = [
"syn 1.0.107",
]
[[package]]
name = "enso-macros"
version = "0.2.7"
dependencies = [
"Inflector",
"enso-macro-utils",
"proc-macro2",
"quote",
"syn 1.0.107",
]
[[package]]
name = "enso-metamodel"
version = "0.1.0"
@ -1689,7 +1547,7 @@ dependencies = [
"enso-parser-syntax-tree-visitor",
"enso-prelude",
"enso-reflect",
"enso-shapely-macros",
"paste",
"rand 0.8.5",
"rand_chacha 0.3.1",
"rand_distr",
@ -1755,45 +1613,16 @@ dependencies = [
[[package]]
name = "enso-prelude"
version = "0.2.6"
version = "0.2.7"
dependencies = [
"anyhow",
"boolinator",
"derivative",
"derive_more",
"enso-logging",
"enso-macros",
"enso-reflect",
"enso-shapely",
"enso-zst",
"failure",
"futures",
"itertools",
"paste",
"serde",
"serde_json",
"smallvec",
"web-sys",
]
[[package]]
name = "enso-profiler"
version = "0.1.0"
dependencies = [
"enso-profiler-macros",
"enso-web",
"futures",
"serde",
"serde_json",
]
[[package]]
name = "enso-profiler-macros"
version = "0.1.0"
dependencies = [
"Inflector",
"proc-macro2",
"quote",
"syn 2.0.53",
]
[[package]]
@ -1813,53 +1642,10 @@ dependencies = [
"syn 1.0.107",
]
[[package]]
name = "enso-shapely"
version = "0.2.0"
dependencies = [
"enso-prelude",
"enso-shapely-macros",
"enso-zst",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "enso-shapely-macros"
version = "0.2.1"
dependencies = [
"Inflector",
"boolinator",
"enso-macro-utils",
"itertools",
"proc-macro2",
"quote",
"syn 1.0.107",
]
[[package]]
name = "enso-web"
version = "0.1.0"
dependencies = [
"async-std",
"console_error_panic_hook",
"derivative",
"enso-debug-api",
"enso-logging",
"enso-shapely",
"gloo-timers",
"js-sys",
"wasm-bindgen",
"wasm-bindgen-test",
"web-sys",
]
[[package]]
name = "enso-zst"
version = "0.1.0"
dependencies = [
"bytemuck",
"paste",
"serde",
]
@ -2197,18 +1983,6 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "gloo-timers"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c"
dependencies = [
"futures-channel",
"futures-core",
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "goblin"
version = "0.6.1"
@ -2523,7 +2297,6 @@ dependencies = [
"multimap",
"new_mime_guess",
"octocrab",
"paste",
"path-absolutize",
"path-slash",
"pathdiff",
@ -2693,15 +2466,6 @@ dependencies = [
"simple_asn1",
]
[[package]]
name = "kv-log-macro"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
dependencies = [
"log",
]
[[package]]
name = "language-tags"
version = "0.3.2"
@ -2783,9 +2547,6 @@ name = "log"
version = "0.4.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
dependencies = [
"value-bag",
]
[[package]]
name = "logstat"
@ -2894,24 +2655,6 @@ dependencies = [
"serde",
]
[[package]]
name = "multipart"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00dec633863867f29cb39df64a397cdf4a6354708ddd7759f70c7fb51c5f9182"
dependencies = [
"buf_redux",
"httparse",
"log",
"mime",
"mime_guess",
"quick-error",
"rand 0.8.5",
"safemem",
"tempfile",
"twoway",
]
[[package]]
name = "nanorand"
version = "0.7.0"
@ -3352,20 +3095,6 @@ dependencies = [
"serde",
]
[[package]]
name = "polling"
version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6"
dependencies = [
"autocfg",
"cfg-if",
"libc",
"log",
"wepoll-ffi",
"windows-sys 0.42.0",
]
[[package]]
name = "port_check"
version = "0.1.5"
@ -3483,12 +3212,6 @@ dependencies = [
"unicase",
]
[[package]]
name = "quick-error"
version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quote"
version = "1.0.35"
@ -3909,12 +3632,6 @@ version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]]
name = "safemem"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
[[package]]
name = "same-file"
version = "1.0.6"
@ -4104,17 +3821,6 @@ dependencies = [
"unsafe-libyaml",
]
[[package]]
name = "sha-1"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "sha1"
version = "0.6.1"
@ -4631,18 +4337,6 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-tungstenite"
version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f714dd15bead90401d77e04243611caec13726c2408afd5b31901dfcdcb3b181"
dependencies = [
"futures-util",
"log",
"tokio",
"tungstenite",
]
[[package]]
name = "tokio-util"
version = "0.7.4"
@ -4787,34 +4481,6 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "tungstenite"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0"
dependencies = [
"base64 0.13.1",
"byteorder",
"bytes",
"http",
"httparse",
"log",
"rand 0.8.5",
"sha-1",
"thiserror",
"url",
"utf-8",
]
[[package]]
name = "twoway"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59b11b2b5241ba34be09c3cc85a36e56e48f9888862e19cedf23336d35316ed1"
dependencies = [
"memchr",
]
[[package]]
name = "typenum"
version = "1.16.0"
@ -4911,12 +4577,6 @@ version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9"
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "utf8-width"
version = "0.1.6"
@ -4945,12 +4605,6 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "value-bag"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec26a25bd6fca441cdd0f769fd7f891bae119f996de31f86a5eddccef54c1d"
[[package]]
name = "vcpkg"
version = "0.2.15"
@ -5026,7 +4680,6 @@ dependencies = [
"log",
"mime",
"mime_guess",
"multipart",
"percent-encoding",
"pin-project",
"rustls-pemfile 0.2.1",
@ -5036,7 +4689,6 @@ dependencies = [
"serde_urlencoded",
"tokio",
"tokio-stream",
"tokio-tungstenite",
"tokio-util",
"tower-service",
"tracing",
@ -5222,15 +4874,6 @@ dependencies = [
"websocket-codec",
]
[[package]]
name = "wepoll-ffi"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb"
dependencies = [
"cc",
]
[[package]]
name = "which"
version = "4.4.0"

View File

@ -113,7 +113,6 @@ derivative = { version = "2.2" }
futures = { version = "0.3" }
itertools = { version = "0.12.1" }
lazy_static = { version = "1.4" }
paste = { version = "1.0" }
serde_json = { version = "1.0", features = ["raw_value"] }
smallvec = { version = "1.0.0" }
js-sys = { version = "0.3" }
@ -150,4 +149,3 @@ quote = { version = "1.0.23" }
semver = { version = "1.0.0", features = ["serde"] }
strum = { version = "0.26.2", features = ["derive"] }
thiserror = "1.0.40"
bytemuck = { version = "1.13.1", features = ["derive"] }

View File

@ -37,7 +37,6 @@ mime = "0.3.16"
multimap = "0.8.3"
new_mime_guess = "4.0.0"
octocrab = { workspace = true }
paste = { workspace = true }
path-absolutize = "3.0.11"
pathdiff = "0.2.1"
path-slash = "0.2.1"
@ -69,7 +68,7 @@ which = "4.2.2"
zip = { version = "0.6.2", default-features = false, features = ["deflate"] }
[dev-dependencies]
warp = "0.3.2"
warp = { version = "0.3.2", default-features = false }
wiremock = "0.5.10"
[lints]

View File

@ -21,26 +21,6 @@ impl Manipulator for ServerAutostart {
}
}
/// Generates a pair of "strong typedef" string wrappers:
/// * an owned newtype `$name` over `<$inner_ty as ToOwned>::Owned` (e.g. `String` for `str`);
/// * a borrowed counterpart `[<$name Ref>]<'a>` over `&'a $inner_ty` (name built via `paste`).
macro_rules! strong_string {
    ($name:ident($inner_ty:ty)) => {
        paste::paste! {
            #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash, PartialOrd, Ord)]
            pub struct $name(pub <$inner_ty as ToOwned>::Owned);

            impl $name {
                /// Wrap any value convertible into the owned inner type.
                pub fn new(inner: impl Into<<$inner_ty as ToOwned>::Owned>) -> Self {
                    Self(inner.into())
                }
            }

            #[derive(Debug, Serialize, PartialEq, Eq, Hash, PartialOrd, Ord)]
            pub struct [<$name Ref>]<'a>(pub &'a $inner_ty);
        }
    };
}

// Strongly-typed task name: owned `Task` plus borrowed `TaskRef`.
strong_string!(Task(str));
#[derive(Clone, Copy, Debug, Default)]
pub struct Sbt;

View File

@ -9,8 +9,6 @@
//! Possible extensions, not implemented yet:
//! - Sections are automatically keeping spacing.
// === Features ===
#![feature(exit_status_error)]
// === Non-Standard Linter Configuration ===
#![allow(missing_docs)]
#![deny(keyword_idents)]

View File

@ -82,7 +82,7 @@ impl<T> NonEmpty<T> {
/// Convert this list to a vector.
pub fn to_vec(&self) -> Vec<&T> {
let mut out = vec![&self.head];
let mut out = vec![self.head()];
let mut list = self.tail();
loop {
match list.head() {

View File

@ -1,10 +1,5 @@
//! Library of general data structures.
// === Features ===
#![feature(associated_type_bounds)]
#![feature(test)]
#![feature(trait_alias)]
#![feature(cell_update)]
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_docs)]

View File

@ -1,15 +0,0 @@
[package]
name = "enso-debug-api"
version = "0.1.0"
authors = ["Enso Team <enso-dev@enso.org>"]
edition = "2021"
[dependencies]
derivative = { workspace = true }
futures = { workspace = true }
js-sys = { workspace = true }
wasm-bindgen = { workspace = true }
web-sys = { version = "0.3.4", features = ["console"] }
[lints]
workspace = true

View File

@ -1,183 +0,0 @@
//! Functionality for producing debug information.
// === Features ===
#![feature(extern_types)]
use futures::prelude::*;
use derivative::Derivative;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
// ===========================
// === LifecycleController ===
// ===========================
/// Handle to an API for managing application shutdown.
#[derive(Derivative)]
#[derivative(Debug)]
pub struct LifecycleController {
    #[derivative(Debug = "ignore")]
    // FIX: was `target_arg`, which is not a valid cfg predicate name (the intended
    // key is `target_arch`), so the attribute did not apply as intended and the
    // field produced unused warnings on non-wasm targets.
    #[cfg_attr(not(target_arch = "wasm32"), allow(unused))]
    api: js::lifecycle::Lifecycle,
}

impl LifecycleController {
    /// Try to obtain a handle. Will succeed if running in Electron.
    pub fn new() -> Option<LifecycleController> {
        lifecycle_controller().map(|api| Self { api })
    }

    /// Initiate application shutdown.
    pub fn quit(&self) {
        #[cfg(target_arch = "wasm32")]
        self.api.quit();
        // Off-wasm the FFI handle cannot exist, so this method is unreachable.
        #[cfg(not(target_arch = "wasm32"))]
        unreachable!("Instance can only be acquired under wasm32.");
    }
}
// ===========================
// === Saving profile data ===
// ===========================
/// Emit profile data.
///
/// Only the first call has an effect; subsequent calls are ignored. The data is
/// handed to the host profiling API when available, otherwise logged to the
/// browser console.
pub fn save_profile(profile: &str) {
    static PROFILE_SAVED: AtomicBool = AtomicBool::new(false);
    // `swap` atomically marks the profile as saved and tells us whether it
    // already was, so concurrent callers cannot both emit it.
    if !PROFILE_SAVED.swap(true, Ordering::Relaxed) {
        if let Some(api) = profiling_data_api() {
            api.save_profile(profile);
        } else {
            web_sys::console::log_1(&profile.into());
        }
    }
}
/// Get profile data loaded from files, if the Electron API is available.
///
/// Returns `None` when the profiling-data API is absent; otherwise returns a
/// future resolving to the contents of each profile file as a `String`.
pub fn load_profiles() -> Option<impl Future<Output = Vec<String>>> {
    let api = profiling_data_api()?;
    let (sender, receiver) = futures::channel::oneshot::channel();
    // One-shot JS callback: converts the received JS values to strings and
    // forwards them through the oneshot channel.
    let handler = wasm_bindgen::prelude::Closure::once(|profiles: Vec<wasm_bindgen::JsValue>| {
        let context = "Parsing profile file as UTF-8 String";
        let profiles: Vec<String> =
            profiles.into_iter().map(|value| value.as_string().expect(context)).collect();
        // This only fails if the receiver was dropped; in that case the data is no longer needed.
        let _result = sender.send(profiles);
    });
    api.load_profiles(&handler);
    Some(async move {
        let result = receiver.await;
        // Keep the closure alive until after the message has arrived; dropping it
        // earlier would invalidate the JS-side callback.
        drop(handler);
        // The error case (Cancelled) cannot occur, because the handler owns the sender, and we
        // ensure the handler isn't dropped until after we have received the data.
        result.unwrap()
    })
}
// ======================
// === GPU Debug Info ===
// ======================
/// Load a page displaying information used for debugging hardware-specific rendering issues.
///
/// Does nothing when the hardware-info API is not installed on `window`.
pub fn open_gpu_debug_info() {
    let Some(api) = hardware_info_api() else { return };
    api.open_gpu_debug_info();
}
// ===========
// === FFI ===
// ===========
/// Javascript FFI
///
/// Bindings to debug-related objects that the host application (presumably
/// Electron — see `LifecycleController::new`) attaches to `window`; the
/// `window_prop_getter!` accessors below retrieve them.
#[allow(clippy::empty_docs)] // https://github.com/rust-lang/rust-clippy/issues/12377
pub mod js {
    /// Enso Lifecycle API
    pub mod lifecycle {
        use wasm_bindgen::prelude::*;

        #[wasm_bindgen]
        extern "C" {
            pub type Lifecycle;

            #[wasm_bindgen(method, js_name = quit)]
            #[allow(unsafe_code)]
            pub fn quit(this: &Lifecycle);
        }
    }

    /// Enso Profiling Data API
    pub mod profiling_data {
        use wasm_bindgen::prelude::*;

        #[wasm_bindgen]
        extern "C" {
            pub type ProfilingData;

            #[wasm_bindgen(method, js_name = saveProfile)]
            #[allow(unsafe_code)]
            pub fn save_profile(this: &ProfilingData, data: &str);

            #[wasm_bindgen(method, js_name = loadProfiles)]
            #[allow(unsafe_code)]
            pub fn load_profiles(this: &ProfilingData, callback: &Closure<dyn FnMut(Vec<JsValue>)>);
        }
    }

    /// Enso Hardware Info API
    pub mod hardware_info {
        use wasm_bindgen::prelude::*;

        #[wasm_bindgen]
        extern "C" {
            pub type HardwareInfo;

            #[wasm_bindgen(method, js_name = openGpuDebugInfo)]
            #[allow(unsafe_code)]
            pub fn open_gpu_debug_info(this: &HardwareInfo);
        }
    }

    /// Enso Console API
    pub mod console {
        use wasm_bindgen::prelude::*;

        #[wasm_bindgen]
        extern "C" {
            pub type Console;

            #[wasm_bindgen(method, js_name = error)]
            #[allow(unsafe_code)]
            pub fn error(this: &Console, data: &str);
        }
    }
}
/// Defines a getter `pub fn $fun() -> Option<$ty>` that reads the `window`
/// property named `$prop` and casts it (unchecked) to the declared FFI type.
macro_rules! window_prop_getter {
    ($prop:expr; $fun:ident -> $ty:ty) => {
        /// Return a property of `window`, cast to an expected type.
        pub fn $fun() -> Option<$ty> {
            use wasm_bindgen::JsCast;
            let window = web_sys::window()?;
            let prop = $prop;
            let val = js_sys::Reflect::get(&window, &prop.into()).ok()?;
            if val.is_undefined() {
                return None;
            }
            // NOTE: `unchecked_into` performs no runtime type check; we trust the
            // host to have installed an object of the declared type.
            Some(val.unchecked_into())
        }
    };
}

// Accessors for the debug APIs the host installs on `window`.
window_prop_getter!("enso_console"; console -> js::console::Console);
window_prop_getter!("enso_lifecycle"; lifecycle_controller -> js::lifecycle::Lifecycle);
window_prop_getter!("enso_profiling_data"; profiling_data_api -> js::profiling_data::ProfilingData);
window_prop_getter!("enso_hardware_info"; hardware_info_api -> js::hardware_info::HardwareInfo);

View File

@ -1,58 +1,16 @@
//! A number of helper functions meant to be used in the procedural enso-shapely-macros
//! definitions.
// === Features ===
#![feature(trait_alias)]
// === Non-Standard Linter Configuration ===
#![warn(missing_docs)]
use proc_macro2::TokenStream;
use proc_macro2::TokenTree;
use quote::quote;
use syn::visit::Visit;
use syn::WhereClause;
use syn::WherePredicate;
use syn_1 as syn;
// ==========================
// === Token Stream Utils ===
// ==========================
/// Maps all the tokens in the stream using a given function.
pub fn map_tokens<F: Fn(TokenTree) -> TokenTree>(input: TokenStream, f: F) -> TokenStream {
    input.into_iter().map(f).collect()
}
/// Rewrites stream replacing each token with a sequence of tokens returned by
/// the given function. The groups (e.g. token tree within braces) are unpacked,
/// rewritten and repacked into groups -- the function is applied recursively.
pub fn rewrite_stream<F: Fn(TokenTree) -> TokenStream + Copy>(
    input: TokenStream,
    f: F,
) -> TokenStream {
    let mut ret = TokenStream::new();
    for token in input {
        match token {
            TokenTree::Group(group) => {
                // Recurse into the group's contents, then rebuild the group,
                // preserving the original delimiter and span.
                let delim = group.delimiter();
                let span = group.span();
                let rewritten = rewrite_stream(group.stream(), f);
                let mut new_group = proc_macro2::Group::new(delim, rewritten);
                new_group.set_span(span);
                // FIX: extend with a single token directly instead of allocating
                // a temporary `Vec` per group (needless heap allocation).
                ret.extend(std::iter::once(TokenTree::from(new_group)));
            }
            _ => ret.extend(f(token)),
        }
    }
    ret
}
// ===================
// === Token Utils ===
// ===================
@ -71,36 +29,6 @@ pub fn matching_ident(token: &TokenTree, name: &str) -> bool {
// === Repr ===
// ============
/// Obtains text representation of given `ToTokens`-compatible input.
pub fn repr<T: quote::ToTokens>(t: &T) -> String {
    // Equivalent to `quote!(#t).to_string()`: interpolation delegates to
    // `ToTokens`, so we can call the trait method directly.
    quote::ToTokens::to_token_stream(t).to_string()
}
// ===================
// === Field Utils ===
// ===================
/// Collects all fields, named or not.
pub fn fields_list(fields: &syn::Fields) -> Vec<&syn::Field> {
    match fields {
        syn::Fields::Named(named) => named.named.iter().collect(),
        syn::Fields::Unnamed(unnamed) => unnamed.unnamed.iter().collect(),
        syn::Fields::Unit => Vec::new(),
    }
}
/// Returns token that refers to the field.
///
/// It is the field name for named field and field index for unnamed fields.
pub fn field_ident_token(field: &syn::Field, index: syn::Index) -> TokenStream {
    if let Some(ident) = &field.ident {
        quote!(#ident)
    } else {
        quote!(#index)
    }
}
/// Returns names of the named fields.
pub fn field_names(fields: &syn::FieldsNamed) -> Vec<&syn::Ident> {
fields
@ -141,103 +69,6 @@ pub fn identifier_sequence(len: usize) -> Vec<syn::Ident> {
// =======================
// === Type Path Utils ===
// =======================
/// Obtain list of generic arguments on the path's segment.
pub fn path_segment_generic_args(segment: &syn::PathSegment) -> Vec<&syn::GenericArgument> {
    if let syn::PathArguments::AngleBracketed(args) = &segment.arguments {
        args.args.iter().collect()
    } else {
        // Parenthesized or absent arguments carry no angle-bracketed generics.
        Vec::new()
    }
}
/// Obtain list of generic arguments on the path's last segment.
///
/// Empty, if path contains no segments.
pub fn ty_path_generic_args(ty_path: &syn::TypePath) -> Vec<&syn::GenericArgument> {
ty_path.path.segments.last().map_or(Vec::new(), path_segment_generic_args)
}
/// Obtain list of type arguments on the path's last segment.
pub fn ty_path_type_args(ty_path: &syn::TypePath) -> Vec<&syn::Type> {
    let mut types = Vec::new();
    for arg in ty_path_generic_args(ty_path) {
        // Keep only type arguments; lifetimes, consts and bindings are skipped.
        if let syn::GenericArgument::Type(ty) = arg {
            types.push(ty);
        }
    }
    types
}
/// Last type argument of the last segment on the type path.
pub fn last_type_arg(ty_path: &syn::TypePath) -> Option<&syn::GenericArgument> {
    ty_path_generic_args(ty_path).into_iter().last()
}
// =====================
// === Collect Types ===
// =====================
/// Visitor that accumulates all visited `syn::TypePath`.
#[derive(Debug, Default)]
pub struct TypeGatherer<'ast> {
    /// Observed types accumulator.
    pub types: Vec<&'ast syn::TypePath>,
}

impl<'ast> Visit<'ast> for TypeGatherer<'ast> {
    // Record every encountered type path, then keep descending so that paths
    // nested in generic arguments are collected as well.
    fn visit_type_path(&mut self, node: &'ast syn::TypePath) {
        self.types.push(node);
        syn::visit::visit_type_path(self, node);
    }
}
/// All `TypePath`s in the given's `Type` subtree.
pub fn gather_all_types(node: &syn::Type) -> Vec<&syn::TypePath> {
    let mut gatherer = TypeGatherer::default();
    gatherer.visit_type(node);
    gatherer.types
}
/// All text representations of `TypePath`s in the given's `Type` subtree.
pub fn gather_all_type_reprs(node: &syn::Type) -> Vec<String> {
gather_all_types(node).iter().map(repr).collect()
}
// =======================
// === Type Dependency ===
// =======================
/// Naive type equality test by comparing its representation with a string.
pub fn type_matches_repr(ty: &syn::Type, target_repr: &str) -> bool {
    repr(ty).as_str() == target_repr
}
/// Naive type equality test by comparing their text representations.
pub fn type_matches(ty: &syn::Type, target_param: &syn::GenericParam) -> bool {
    let target = repr(target_param);
    type_matches_repr(ty, &target)
}
/// Does type depends on the given type parameter.
pub fn type_depends_on(ty: &syn::Type, target_param: &syn::GenericParam) -> bool {
    let target = repr(target_param);
    // The type depends on the parameter iff any path in its subtree prints
    // identically to the parameter.
    gather_all_types(ty).iter().any(|t| repr(t) == target)
}
/// Does enum variant depend on the given type parameter.
pub fn variant_depends_on(var: &syn::Variant, target_param: &syn::GenericParam) -> bool {
    for field in var.fields.iter() {
        if type_depends_on(&field.ty, target_param) {
            return true;
        }
    }
    false
}
// ===================
// === WhereClause ===
// ===================
@ -247,90 +78,3 @@ pub fn new_where_clause(predicates: impl IntoIterator<Item = WherePredicate>) ->
let predicates = syn::punctuated::Punctuated::from_iter(predicates);
WhereClause { where_token: Default::default(), predicates }
}
// =============
// === Tests ===
// =============
#[cfg(test)]
mod tests {
    use super::*;

    /// Parse `code` into the requested `syn` AST node, panicking on invalid input.
    fn parse<T: syn::parse::Parse>(code: &str) -> T {
        syn::parse_str(code).unwrap()
    }

    #[test]
    fn repr_round_trips() {
        let program = "pub fn repr<T: quote::ToTokens>(t: &T) -> String {}";
        let tokens = parse::<TokenStream>(program);
        let quoted_program = repr(&tokens);
        let tokens2 = parse::<TokenStream>(&quoted_program);
        // check only second round-trip, first is allowed to break whitespace
        assert_eq!(repr(&tokens), repr(&tokens2));
    }

    #[test]
    fn fields_list_test() {
        let tuple_like = "struct Unnamed(i32, String, T);";
        let proper_struct = "struct Named{i: i32, s: String, t: T}";
        let expected_types = vec!["i32", "String", "T"];
        // Named and unnamed structs should yield the same field-type listing.
        fn assert_field_types(program: &str, expected_types: &[&str]) {
            let tokens = parse::<syn::ItemStruct>(program);
            let fields = fields_list(&tokens.fields);
            let types = fields.iter().map(|f| repr(&f.ty));
            assert_eq!(Vec::from_iter(types), expected_types);
        }
        assert_field_types(tuple_like, &expected_types);
        assert_field_types(proper_struct, &expected_types);
    }

    #[test]
    fn type_dependency() {
        let param: syn::GenericParam = parse("T");
        let depends = |code| {
            let ty: syn::Type = parse(code);
            type_depends_on(&ty, &param)
        };
        // sample types that depend on `T`
        let dependents =
            vec!["T", "Option<T>", "Pair<T, U>", "Pair<U, T>", "Pair<U, (T,)>", "&T", "&'t mut T"];
        // sample types that do not depend on `T`
        let independents =
            vec!["Tt", "Option<Tt>", "Pair<Tt, U>", "Pair<U, Tt>", "Pair<U, Tt>", "i32", "&str"];
        for dependent in dependents {
            assert!(depends(dependent), "{} must depend on {}", repr(&dependent), repr(&param));
        }
        for independent in independents {
            assert!(
                !depends(independent),
                "{} must not depend on {}",
                repr(&independent),
                repr(&param)
            );
        }
    }

    #[test]
    fn collecting_type_path_args() {
        // Each expected argument list must match both in length and in printed form.
        fn check(expected_type_args: Vec<&str>, ty_path: &str) {
            let ty_path = parse(ty_path);
            let args = ty_path_type_args(&ty_path);
            assert_eq!(expected_type_args.len(), args.len());
            let zipped = expected_type_args.iter().zip(args.iter());
            for (expected, got) in zipped {
                assert_eq!(expected, &repr(got));
            }
        }
        check(vec!["T"], "std::Option<T>");
        check(vec!["U"], "std::Option<U>");
        check(vec!["A", "B"], "Either<A,B>");
        assert_eq!(last_type_arg(&parse("i32")), None);
        assert_eq!(repr(&last_type_arg(&parse("Foo<C>"))), "C");
    }
}

View File

@ -44,8 +44,6 @@
//! Java code after all computation is completed.
// === Features ===
#![feature(option_get_or_insert_default)]
#![feature(type_alias_impl_trait)]
#![feature(impl_trait_in_assoc_type)]
// === Non-Standard Linter Configuration ===
#![allow(clippy::option_map_unit_fn)]

View File

@ -13,8 +13,8 @@ license-file = "../../LICENSE"
enso-prelude = { path = "../prelude" }
enso-reflect = { path = "../reflect" }
enso-data-structures = { path = "../data-structures" }
enso-shapely-macros = { path = "../shapely/macros" }
enso-parser-syntax-tree-visitor = { path = "src/syntax/tree/visitor" }
paste = { version = "1.0" }
serde = { workspace = true }
serde_json = { workspace = true }
uuid = { version = "1.1", features = ["serde"] }

View File

@ -12,7 +12,6 @@ license-file = "../../LICENSE"
[dependencies]
enso-parser = { path = ".." }
enso-prelude = { path = "../../prelude" }
enso-profiler = { path = "../../profiler" }
enso-reflect = { path = "../../reflect" }
serde = { workspace = true }

View File

@ -41,7 +41,6 @@ impl DocParser {
}
/// Parse the documentation.
#[profile(Detail)]
pub fn parse(&mut self, input: &str) -> Vec<DocSection> {
for (line_number, line) in input.trim_start().lines().enumerate() {
let location = Location::start_of_line(line_number);
@ -78,10 +77,9 @@ impl Argument {
// We split by the first colon or space, whatever comes first.
// Typically a colon must be used as a separator, but in some documentation snippets we
// have there is no colon and the name of the argument is simply the first word.
let split = text.splitn(2, |c| c == ':' || c == ' ');
let (name, description) = split.collect_tuple().unwrap_or((text, ""));
let name = name.trim().to_string();
let description = description.trim().to_string();
let mut split = text.splitn(2, |c| c == ':' || c == ' ');
let name = split.next().unwrap_or(text).trim().to_string();
let description = split.next().unwrap_or_default().trim().to_string();
Self { name, description }
}
}

View File

@ -29,11 +29,6 @@ pub use doc_sections::DocSection;
pub(crate) use enso_profiler as profiler;
pub(crate) use enso_profiler::profile;
// ============
// === Tags ===
// ============

View File

@ -1,13 +1,5 @@
//! Prints a debug representation of Enso documentation found in the given Enso source file(s).
#![recursion_limit = "256"]
// === Features ===
#![feature(assert_matches)]
#![feature(allocator_api)]
#![feature(exact_size_is_empty)]
#![feature(test)]
#![feature(let_chains)]
#![feature(if_let_guard)]
// === Non-Standard Linter Configuration ===
#![allow(clippy::option_map_unit_fn)]
#![allow(clippy::precedence)]
@ -28,7 +20,7 @@ use enso_parser::prelude::*;
fn main() {
let args = std::env::args().skip(1);
if args.is_empty() {
if args.len() == 0 {
use std::io::Read;
let mut input = String::new();
std::io::stdin().read_to_string(&mut input).unwrap();

View File

@ -1502,7 +1502,7 @@ pub mod test {
let is_operator = false;
let left_offset = test_code(left_offset);
let code = test_code(code);
token::ident_(left_offset, code, is_free, lift_level, is_uppercase, is_operator, false)
ident(left_offset, code, is_free, lift_level, is_uppercase, is_operator, false).into()
}
/// Constructor.
@ -1510,7 +1510,16 @@ pub mod test {
let lift_level = code.chars().rev().take_while(|t| *t == '\'').count() as u32;
let left_offset = test_code(left_offset);
let code = test_code(code);
token::wildcard_(left_offset, code, lift_level)
wildcard(left_offset, code, lift_level).into()
}
/// Constructor.
pub fn digits_(code: &str) -> Token<'_> {
digits(test_code(""), test_code(code), None).into()
}
/// Constructor.
pub fn newline_<'s>(left_offset: &'s str, code: &'s str) -> Token<'s> {
newline(test_code(left_offset), test_code(code)).into()
}
/// Constructor.
@ -1611,52 +1620,52 @@ mod tests {
#[test]
fn test_case_block() {
let newline = newline_(empty(), test_code("\n"));
test_lexer("\n", vec![newline_(empty(), test_code("\n"))]);
let newline = newline_("", "\n");
test_lexer("\n", vec![newline_("", "\n")]);
test_lexer("\n foo\n bar", vec![
block_start_(empty(), empty()),
block_start(empty(), empty()).into(),
newline.clone(),
ident_(" ", "foo"),
newline.clone(),
ident_(" ", "bar"),
block_end_(empty(), empty()),
block_end(empty(), empty()).into(),
]);
test_lexer("foo\n +", vec![
ident_("", "foo"),
block_start_(empty(), empty()),
block_start(empty(), empty()).into(),
newline,
operator_(" ", "+"),
block_end_(empty(), empty()),
block_end(empty(), empty()).into(),
]);
}
#[test]
fn test_case_block_bad_indents() {
let newline = newline_(empty(), test_code("\n"));
let newline = newline_("", "\n");
#[rustfmt::skip]
test_lexer(" foo\n bar\nbaz", vec![
block_start_(empty(), empty()),
newline_(empty(), empty()),
block_start(empty(), empty()).into(),
newline_("", ""),
ident_(" ", "foo"),
newline.clone(), ident_(" ", "bar"),
block_end_(empty(), empty()),
block_end(empty(), empty()).into(),
newline.clone(), ident_("", "baz"),
]);
#[rustfmt::skip]
test_lexer("\n foo\n bar\nbaz", vec![
block_start_(empty(), empty()),
block_start(empty(), empty()).into(),
newline.clone(), ident_(" ", "foo"),
newline.clone(), ident_(" ", "bar"),
block_end_(empty(), empty()),
block_end(empty(), empty()).into(),
newline.clone(), ident_("", "baz"),
]);
#[rustfmt::skip]
test_lexer("\n foo\n bar\n baz", vec![
block_start_(empty(), empty()),
block_start(empty(), empty()).into(),
newline.clone(), ident_(" ", "foo"),
newline.clone(), ident_(" ", "bar"),
newline, ident_(" ", "baz"),
block_end_(empty(), empty()),
block_end(empty(), empty()).into(),
]);
}
@ -1664,8 +1673,8 @@ mod tests {
fn test_case_whitespace_only_line() {
test_lexer_many(vec![("foo\n \nbar", vec![
ident_("", "foo"),
newline_(empty(), test_code("\n")),
newline_(test_code(" "), test_code("\n")),
newline_("", "\n"),
newline_(" ", "\n"),
ident_("", "bar"),
])]);
}
@ -1690,7 +1699,7 @@ mod tests {
#[test]
fn test_numeric_literal() {
test_lexer("10", vec![digits_(empty(), test_code("10"), None)]);
test_lexer("10", vec![digits_("10")]);
}
#[test]

View File

@ -110,10 +110,10 @@ pub mod syntax;
/// Popular utilities, imported by most modules of this crate.
pub mod prelude {
pub use enso_prelude::serde_reexports::*;
pub use enso_prelude::*;
pub use enso_reflect as reflect;
pub use enso_reflect::Reflect;
pub(crate) use paste::paste;
/// Return type for functions that will only fail in case of a bug in the implementation.
#[derive(Debug, Default)]

View File

@ -349,7 +349,7 @@ impl<'s> Resolver<'s> {
fn resolve_match(&mut self, macro_def: &macros::Definition, segments_start: usize) {
let mut def_segments = macro_def.segments.to_vec().into_iter().rev();
let segments = self.segments.drain(segments_start..).rev();
let segments: NonEmptyVec<_> = segments.collect_vec().try_into().unwrap();
let segments: NonEmptyVec<_> = segments.collect::<Vec<_>>().try_into().unwrap();
let mut pattern_matched_segments = segments.mapped(|segment| {
let count_must_match =
"Internal error. Macro definition and match segments count mismatch.";

View File

@ -1,13 +1,5 @@
//! Tests for [`enso_parser`].
#![recursion_limit = "256"]
// === Features ===
#![feature(assert_matches)]
#![feature(allocator_api)]
#![feature(exact_size_is_empty)]
#![feature(test)]
#![feature(let_chains)]
#![feature(if_let_guard)]
// === Non-Standard Linter Configuration ===
#![allow(clippy::option_map_unit_fn)]
#![allow(clippy::precedence)]
@ -28,7 +20,7 @@ use enso_parser::prelude::*;
fn main() {
let args = std::env::args().skip(1);
let mut parser = enso_parser::Parser::new();
if args.is_empty() {
if args.len() == 0 {
use std::io::Read;
let mut input = String::new();
std::io::stdin().read_to_string(&mut input).unwrap();

View File

@ -197,21 +197,10 @@ impl<'s> Span<'s> {
pub fn length_including_whitespace(&self) -> code::Length {
self.left_offset.code.length() + self.code_length
}
}
impl<'s> AsRef<Span<'s>> for Span<'s> {
fn as_ref(&self) -> &Span<'s> {
self
}
}
impl<'s, 'a, T> PartialSemigroup<T> for Span<'s>
where
T: Into<Ref<'s, 'a>>,
's: 'a,
{
#[inline(always)]
fn concat_mut(&mut self, other: T) {
fn concat<'a>(mut self, other: impl Into<Ref<'s, 'a>>) -> Self
where 's: 'a {
let other = other.into();
if self.code_length.is_zero() {
self.left_offset += other.left_offset;
@ -224,6 +213,13 @@ where
self.code_length += other.left_offset.code.length();
self.code_length += other.code_length;
}
self
}
}
impl<'s> AsRef<Span<'s>> for Span<'s> {
fn as_ref(&self) -> &Span<'s> {
self
}
}

View File

@ -99,36 +99,3 @@ pub enum Ref<'s, 'a> {
Token(token::Ref<'s, 'a>),
Tree(&'a Tree<'s>),
}
// ======================
// === Variant Checks ===
// ======================
/// For each token variant, generates a function checking if the token is of the given variant. For
/// example, the `is_ident` function checks if the token is an identifier.
macro_rules! generate_variant_checks {
(
$(#$enum_meta:tt)*
pub enum $enum:ident {
$(
$(#$variant_meta:tt)*
$variant:ident $({
$($(#$field_meta:tt)* pub $field:ident : $field_ty:ty),* $(,)?
})?
),* $(,)?
}
) => { paste!{
impl<'s> Item<'s> {
$(
$(#[$($variant_meta)*])*
#[allow(missing_docs)]
pub fn [<is_ $variant:snake:lower>](&self) -> bool {
self.is_variant(token::variant::VariantMarker::$variant)
}
)*
}
}};
}
crate::with_token_definition!(generate_variant_checks());

View File

@ -96,8 +96,6 @@
use crate::prelude::*;
use crate::source::*;
use enso_shapely_macros::tagged_enum;
// =============
@ -623,15 +621,6 @@ macro_rules! generate_token_aliases {
Token(left_offset, code, variant::$variant($($($field),*)?))
}
/// Constructor.
pub fn [<$variant:snake:lower _>]<'s> (
left_offset: impl Into<Offset<'s>>,
code: Code<'s>,
$($($field : $field_ty),*)?
) -> Token<'s> {
Token(left_offset, code, variant::$variant($($($field),*)?)).into()
}
impl<'s> From<Token<'s, variant::$variant>> for Token<'s, Variant> {
fn from(token: Token<'s, variant::$variant>) -> Self {
token.map_variant(|t| t.into())

View File

@ -7,7 +7,6 @@ use crate::syntax::*;
use crate::span_builder;
use enso_parser_syntax_tree_visitor::Visitor;
use enso_shapely_macros::tagged_enum;
// ==============
@ -1078,16 +1077,13 @@ impl<'s> From<token::Ident<'s>> for Tree<'s> {
/// as AST nodes ([`TreeVisitor`]), span information ([`SpanVisitor`]), and AST nodes or tokens
/// altogether ([`ItemVisitor`]). A visitor is a struct that is modified when traversing the target
/// elements. Visitors are also capable of tracking when they entered or exited a nested
/// [`Tree`] structure, and they can control how deep the traversal should be performed. To learn
/// more, see the [`RefCollectorVisitor`] implementation, which traverses [`Tree`] and collects
/// references to all [`Tree`] nodes in a vector.
/// [`Tree`] structure, and they can control how deep the traversal should be performed.
///
/// # Visitable traits
/// This macro also defines visitable traits, such as [`TreeVisitable`] or [`SpanVisitable`], which
/// provide [`Tree`] elements with such functions as [`visit`], [`visit_mut`], [`visit_span`], or
/// [`visit_span_mut`]. These functions let you run visitors. However, as defining a visitor is
/// relatively complex, a set of traversal functions are provided, such as [`map`], [`map_mut`],
/// [`map_span`], or [`map_span_mut`].
/// provide [`Tree`] elements with such functions as [`visit`] or [`visit_span`]. These functions
/// let you run visitors. However, as defining a visitor is relatively complex, a traversal function
/// [`map`] is provided.
///
/// # Generalization of the implementation
/// The current implementation bases on a few non-generic traits. One might define a way better
@ -1113,24 +1109,12 @@ pub trait TreeVisitor<'s, 'a>: Visitor {
fn visit(&mut self, ast: &'a Tree<'s>) -> bool;
}
/// The visitor trait allowing for [`Tree`] nodes mutable traversal.
#[allow(missing_docs)]
pub trait TreeVisitorMut<'s>: Visitor {
fn visit_mut(&mut self, ast: &mut Tree<'s>) -> bool;
}
/// The visitor trait allowing for [`Span`] traversal.
#[allow(missing_docs)]
pub trait SpanVisitor<'s, 'a>: Visitor {
fn visit(&mut self, ast: span::Ref<'s, 'a>) -> bool;
}
/// The visitor trait allowing for [`Span`] mutable traversal.
#[allow(missing_docs)]
pub trait SpanVisitorMut<'s>: Visitor {
fn visit_mut(&mut self, ast: span::RefMut<'s, '_>) -> bool;
}
/// The visitor trait allowing for [`Item`] traversal.
#[allow(missing_docs)]
pub trait ItemVisitor<'s, 'a>: Visitor {
@ -1138,61 +1122,20 @@ pub trait ItemVisitor<'s, 'a>: Visitor {
}
macro_rules! define_visitor {
($name:ident, $visit:ident) => {
define_visitor_no_mut! {$name, $visit}
define_visitor_mut! {$name, $visit}
};
}
macro_rules! define_visitor_no_mut {
($name:ident, $visit:ident) => {
paste! {
define_visitor_internal! {
$name,
$visit,
[[<$name Visitor>]<'s, 'a>],
[<$name Visitable>],
}
}
};
}
macro_rules! define_visitor_mut {
($name:ident, $visit:ident) => {
paste! {
define_visitor_internal! {
[_mut mut]
$name,
[<$visit _mut>],
[[<$name VisitorMut>]<'s>],
[<$name VisitableMut>],
}
}
};
}
macro_rules! define_visitor_internal {
(
$([$pfx_mod:ident $mod:ident])?
$name:ident,
$visit:ident,
[$($visitor:tt)*],
$visitable:ident,
) => { paste! {
$visitor:ident,
$visitable:ident
) => {
/// The visitable trait. See documentation of [`define_visitor`] to learn more.
#[allow(missing_docs)]
pub trait $visitable<'s, 'a> {
fn $visit<V: $($visitor)*>(&'a $($mod)? self, _visitor: &mut V) {}
}
impl<'s, 'a, T: $visitable<'s, 'a>> $visitable<'s, 'a> for Box<T> {
fn $visit<V: $($visitor)*>(&'a $($mod)? self, visitor: &mut V) {
$visitable::$visit(& $($mod)? **self, visitor)
}
fn $visit<V: $visitor<'s, 'a>>(&'a self, _visitor: &mut V) {}
}
impl<'s, 'a, T: $visitable<'s, 'a>> $visitable<'s, 'a> for Option<T> {
fn $visit<V: $($visitor)*>(&'a $($mod)? self, visitor: &mut V) {
fn $visit<V: $visitor<'s, 'a>>(&'a self, visitor: &mut V) {
if let Some(elem) = self {
$visitable::$visit(elem, visitor)
}
@ -1202,7 +1145,7 @@ macro_rules! define_visitor_internal {
impl<'s, 'a, T: $visitable<'s, 'a>, E: $visitable<'s, 'a>> $visitable<'s, 'a>
for Result<T, E>
{
fn $visit<V: $($visitor)*>(&'a $($mod)? self, visitor: &mut V) {
fn $visit<V: $visitor<'s, 'a>>(&'a self, visitor: &mut V) {
match self {
Ok(elem) => $visitable::$visit(elem, visitor),
Err(elem) => $visitable::$visit(elem, visitor),
@ -1211,20 +1154,17 @@ macro_rules! define_visitor_internal {
}
impl<'s, 'a, T: $visitable<'s, 'a>> $visitable<'s, 'a> for Vec<T> {
fn $visit<V: $($visitor)*>(&'a $($mod)? self, visitor: &mut V) {
self.[<iter $($pfx_mod)?>]().map(|t| $visitable::$visit(t, visitor)).for_each(drop);
fn $visit<V: $visitor<'s, 'a>>(&'a self, visitor: &mut V) {
self.iter().map(|t| $visitable::$visit(t, visitor)).for_each(drop);
}
}
impl<'s, 'a, T: $visitable<'s, 'a>> $visitable<'s, 'a> for NonEmptyVec<T> {
fn $visit<V: $($visitor)*>(&'a $($mod)? self, visitor: &mut V) {
self.[<iter $($pfx_mod)?>]().map(|t| $visitable::$visit(t, visitor)).for_each(drop);
fn $visit<V: $visitor<'s, 'a>>(&'a self, visitor: &mut V) {
self.iter().map(|t| $visitable::$visit(t, visitor)).for_each(drop);
}
}
impl<'s, 'a> $visitable<'s, 'a> for &str {}
impl<'s, 'a> $visitable<'s, 'a> for str {}
}};
};
}
macro_rules! define_visitor_for_tokens {
@ -1238,13 +1178,12 @@ macro_rules! define_visitor_for_tokens {
}
) => {
impl<'s, 'a> TreeVisitable<'s, 'a> for token::$kind {}
impl<'s, 'a> TreeVisitableMut<'s, 'a> for token::$kind {}
};
}
define_visitor!(Tree, visit);
define_visitor!(Span, visit_span);
define_visitor_no_mut!(Item, visit_item);
define_visitor!(Tree, visit, TreeVisitor, TreeVisitable);
define_visitor!(Span, visit_span, SpanVisitor, SpanVisitable);
define_visitor!(Item, visit_item, ItemVisitor, ItemVisitable);
crate::with_token_definition!(define_visitor_for_tokens());
@ -1252,11 +1191,9 @@ crate::with_token_definition!(define_visitor_for_tokens());
// === Trait Implementations for Simple Leaf Types ===
macro_rules! spanless_leaf_impls {
($ty:ident) => {
($ty:ty) => {
impl<'s, 'a> TreeVisitable<'s, 'a> for $ty {}
impl<'s, 'a> TreeVisitableMut<'s, 'a> for $ty {}
impl<'a, 's> SpanVisitable<'s, 'a> for $ty {}
impl<'a, 's> SpanVisitableMut<'s, 'a> for $ty {}
impl<'a, 's> ItemVisitable<'s, 'a> for $ty {}
impl<'s> span::Builder<'s> for $ty {
fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> {
@ -1269,6 +1206,7 @@ macro_rules! spanless_leaf_impls {
spanless_leaf_impls!(u32);
spanless_leaf_impls!(bool);
spanless_leaf_impls!(VisibleOffset);
spanless_leaf_impls!(Cow<'static, str>);
// === TreeVisitable special cases ===
@ -1281,16 +1219,8 @@ impl<'s, 'a> TreeVisitable<'s, 'a> for Tree<'s> {
}
}
impl<'s, 'a> TreeVisitableMut<'s, 'a> for Tree<'s> {
fn visit_mut<V: TreeVisitorMut<'s>>(&'a mut self, visitor: &mut V) {
if visitor.visit_mut(self) {
self.variant.visit_mut(visitor)
}
}
}
impl<'s, 'a, T> TreeVisitable<'s, 'a> for Token<'s, T> {}
impl<'s, 'a, T> TreeVisitableMut<'s, 'a> for Token<'s, T> {}
// === SpanVisitable special cases ===
@ -1306,16 +1236,6 @@ impl<'s, 'a> SpanVisitable<'s, 'a> for Tree<'s> {
}
}
impl<'s, 'a> SpanVisitableMut<'s, 'a> for Tree<'s> {
fn visit_span_mut<V: SpanVisitorMut<'s>>(&'a mut self, visitor: &mut V) {
if visitor.visit_mut(span::RefMut {
left_offset: &mut self.span.left_offset,
code_length: self.span.code_length,
}) {
self.variant.visit_span_mut(visitor)
}
}
}
impl<'a, 's, T> SpanVisitable<'s, 'a> for Token<'s, T> {
fn visit_span<V: SpanVisitor<'s, 'a>>(&'a self, visitor: &mut V) {
@ -1324,12 +1244,6 @@ impl<'a, 's, T> SpanVisitable<'s, 'a> for Token<'s, T> {
}
}
impl<'a, 's, T> SpanVisitableMut<'s, 'a> for Token<'s, T> {
fn visit_span_mut<V: SpanVisitorMut<'s>>(&'a mut self, visitor: &mut V) {
let code_length = self.code.length();
visitor.visit_mut(span::RefMut { left_offset: &mut self.left_offset, code_length });
}
}
// === ItemVisitable special cases ===
@ -1351,31 +1265,6 @@ where &'a Token<'s, T>: Into<token::Ref<'s, 'a>>
}
// === String ===
impl<'s, 'a> TreeVisitable<'s, 'a> for String {}
impl<'s, 'a> TreeVisitableMut<'s, 'a> for String {}
impl<'a, 's> SpanVisitable<'s, 'a> for String {}
impl<'a, 's> SpanVisitableMut<'s, 'a> for String {}
impl<'a, 's> ItemVisitable<'s, 'a> for String {}
impl<'s> span::Builder<'s> for String {
fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> {
span
}
}
impl<'s, 'a> TreeVisitable<'s, 'a> for Cow<'static, str> {}
impl<'s, 'a> TreeVisitableMut<'s, 'a> for Cow<'static, str> {}
impl<'a, 's> SpanVisitable<'s, 'a> for Cow<'static, str> {}
impl<'a, 's> SpanVisitableMut<'s, 'a> for Cow<'static, str> {}
impl<'a, 's> ItemVisitable<'s, 'a> for Cow<'static, str> {}
impl<'s> span::Builder<'s> for Cow<'static, str> {
fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> {
span
}
}
// ==========================
// === CodePrinterVisitor ===
@ -1420,36 +1309,6 @@ impl<'s> Tree<'s> {
// ===========================
// === RefCollectorVisitor ===
// ===========================
/// A visitor collecting references to all [`Tree`] nodes.
#[derive(Debug, Default)]
#[allow(missing_docs)]
struct RefCollectorVisitor<'s, 'a> {
pub vec: Vec<&'a Tree<'s>>,
}
impl<'s, 'a> Visitor for RefCollectorVisitor<'s, 'a> {}
impl<'s, 'a> TreeVisitor<'s, 'a> for RefCollectorVisitor<'s, 'a> {
fn visit(&mut self, ast: &'a Tree<'s>) -> bool {
self.vec.push(ast);
true
}
}
impl<'s> Tree<'s> {
/// Collect references to all [`Tree`] nodes and return them in a vector.
pub fn collect_vec_ref(&self) -> Vec<&Tree<'s>> {
let mut visitor = RefCollectorVisitor::default();
self.visit(&mut visitor);
visitor.vec
}
}
// =================
// === FnVisitor ===
// =================
@ -1467,12 +1326,6 @@ impl<'s: 'a, 'a, T, F: Fn(&'a Tree<'s>) -> T> TreeVisitor<'s, 'a> for FnVisitor<
}
}
impl<'s, T, F: Fn(&mut Tree<'s>) -> T> TreeVisitorMut<'s> for FnVisitor<F> {
fn visit_mut(&mut self, ast: &mut Tree<'s>) -> bool {
(self.0)(ast);
true
}
}
impl<'s> Tree<'s> {
/// Map the provided function over each [`Tree`] node. The function results will be discarded.
@ -1480,12 +1333,6 @@ impl<'s> Tree<'s> {
let mut visitor = FnVisitor(f);
self.visit(&mut visitor);
}
/// Map the provided function over each [`Tree`] node. The function results will be discarded.
pub fn map_mut<T>(&mut self, f: impl Fn(&mut Tree<'s>) -> T) {
let mut visitor = FnVisitor(f);
self.visit_mut(&mut visitor);
}
}
@ -1510,39 +1357,3 @@ impl<'s> Tree<'s> {
self.variant.visit_item(&mut ItemFnVisitor { f });
}
}
// =================
// === Traversal ===
// =================
impl<'s> Tree<'s> {
/// Return an iterator over the operands of the given left-associative operator, in reverse
/// order.
pub fn left_assoc_rev<'t, 'o>(&'t self, operator: &'o str) -> LeftAssocRev<'o, 't, 's> {
let tree = Some(self);
LeftAssocRev { operator, tree }
}
}
/// Iterator over the operands of a particular left-associative operator, in reverse order.
#[derive(Debug)]
pub struct LeftAssocRev<'o, 't, 's> {
operator: &'o str,
tree: Option<&'t Tree<'s>>,
}
impl<'o, 't, 's> Iterator for LeftAssocRev<'o, 't, 's> {
type Item = &'t Tree<'s>;
fn next(&mut self) -> Option<Self::Item> {
if let box Variant::OprApp(OprApp { lhs, opr: Ok(opr), rhs }) = &self.tree?.variant
&& opr.code == self.operator
{
self.tree = lhs.into();
rhs.into()
} else {
self.tree.take()
}
}
}

View File

@ -43,9 +43,7 @@ pub fn derive_visitor(input: proc_macro::TokenStream) -> proc_macro::TokenStream
let ident = &decl.ident;
let (impl_generics, ty_generics, _inherent_where_clause_opt) = &decl.generics.split_for_impl();
let body = gen_body(quote!(TreeVisitable::visit), &decl.data, false);
let body_mut = gen_body(quote!(TreeVisitableMut::visit_mut), &decl.data, true);
let body_span = gen_body(quote!(SpanVisitable::visit_span), &decl.data, false);
let body_span_mut = gen_body(quote!(SpanVisitableMut::visit_span_mut), &decl.data, true);
let body_item = gen_body(quote!(ItemVisitable::visit_item), &decl.data, false);
let impl_generics_vec: Vec<_> = impl_generics.to_token_stream().into_iter().collect();
@ -71,14 +69,6 @@ pub fn derive_visitor(input: proc_macro::TokenStream) -> proc_macro::TokenStream
}
}
impl #impl_generics TreeVisitableMut #impl_generics for #ident #ty_generics {
fn visit_mut<T: TreeVisitorMut<'s>>(&'a mut self, visitor:&mut T) {
visitor.before_visiting_children();
#body_mut
visitor.after_visiting_children();
}
}
impl #impl_generics SpanVisitable #impl_generics for #ident #ty_generics {
fn visit_span<T: SpanVisitor #impl_generics>(&'a self, visitor:&mut T) {
visitor.before_visiting_children();
@ -87,14 +77,6 @@ pub fn derive_visitor(input: proc_macro::TokenStream) -> proc_macro::TokenStream
}
}
impl #impl_generics SpanVisitableMut #impl_generics for #ident #ty_generics {
fn visit_span_mut<T: SpanVisitorMut<'s>>(&'a mut self, visitor:&mut T) {
visitor.before_visiting_children();
#body_span_mut
visitor.after_visiting_children();
}
}
impl #impl_generics ItemVisitable #impl_generics for #ident #ty_generics {
fn visit_item<T: ItemVisitor #impl_generics>(&'a self, visitor:&mut T) {
visitor.before_visiting_children();
@ -104,11 +86,6 @@ pub fn derive_visitor(input: proc_macro::TokenStream) -> proc_macro::TokenStream
}
};
// #[allow(missing_docs)]
// pub trait ItemVisitable<'s, 'a> {
// fn visit_item<V: ItemVisitor<'s, 'a>>(&'a self, _visitor: &mut V) {}
// }
output.into()
}

View File

@ -1,6 +1,6 @@
[package]
name = "enso-prelude"
version = "0.2.6"
version = "0.2.7"
authors = ["Enso Team <enso-dev@enso.org>"]
edition = "2021"
description = "An augmented standard library in the vein of Haskell's prelude."
@ -18,62 +18,12 @@ crate-type = ["rlib"]
[dependencies]
enso-logging = { path = "../logging" }
enso-reflect = { path = "../reflect" }
enso-shapely = { path = "../shapely" }
enso-zst = { path = "../zst" }
anyhow = { workspace = true }
boolinator = { workspace = true }
derivative = { workspace = true }
derive_more = { workspace = true }
failure = { workspace = true }
futures = { workspace = true }
itertools = { workspace = true }
paste = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
smallvec = { workspace = true }
[dependencies.web-sys]
version = "0.3.4"
features = [
"TextMetrics",
'AddEventListenerOptions',
'CanvasRenderingContext2d',
'CssStyleDeclaration',
'Document',
'DomRect',
'Element',
'Event',
'EventTarget',
'EventTarget',
'FontFaceSet',
'HtmlCanvasElement',
'HtmlCollection',
'HtmlDivElement',
'HtmlElement',
'HtmlHeadElement',
'HtmlImageElement',
'KeyboardEvent',
'Location',
'MouseEvent',
'Node',
'Performance',
'Url',
'WebGl2RenderingContext',
'WebGlBuffer',
'WebGlFramebuffer',
'WebGlProgram',
'WebGlRenderingContext',
'WebGlShader',
'WebGlSync',
'WebGlTexture',
'WebGlUniformLocation',
'WebGlUniformLocation',
'WebGlVertexArrayObject',
'WheelEvent',
'Window',
'console',
'FontFace'
]
enso-macros = { path = "macros" }
[lints]
workspace = true

View File

@ -1,16 +1,12 @@
[package]
name = "enso-shapely-macros"
version = "0.2.1"
name = "enso-macros"
version = "0.2.7"
authors = ["Enso Team <enso-dev@enso.org>"]
edition = "2021"
description = "Automated typeclass derivation."
readme = "README.md"
homepage = "https://github.com/enso-org/enso/lib/rust/shapely/macros"
description = "Common macros used in Enso codebase."
homepage = "https://github.com/enso-org/enso"
repository = "https://github.com/enso-org/enso"
license-file = "../../../LICENSE"
keywords = ["typeclass", "deriving", "macro"]
categories = ["algorithms"]
publish = true
[lib]
proc-macro = true
@ -23,8 +19,6 @@ enso-macro-utils = { path = "../../macro-utils" }
proc-macro2 = { workspace = true }
quote = { workspace = true }
Inflector = "0.11.4"
itertools = { workspace = true }
boolinator = { workspace = true }
[dependencies.syn]
version = "1.0"

View File

@ -0,0 +1,23 @@
//! This crate defines a custom attribute macro [`tagged_enum`].
// === Non-Standard Linter Configuration ===
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(unused_qualifications)]
extern crate proc_macro;
mod tagged_enum;
/// Transforms Rust enums into enums where each variant is a separate type. It also implements
/// several traits (such as conversions between variants and the enum type) and defines utility
/// functions, such as constructors. See [`tagged_enum::run`] to learn more.
#[proc_macro_attribute]
pub fn tagged_enum(
attr: proc_macro::TokenStream,
input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
tagged_enum::run(attr, input)
}

View File

@ -1,6 +1,5 @@
use crate::prelude::*;
use inflector::cases::snakecase::to_snake_case;
use quote::quote;
use syn::AttrStyle;
use syn::Attribute;
use syn::Data;

View File

@ -7,9 +7,5 @@
// ==============
mod non_empty_vec;
pub mod semigroup;
pub mod vec_indexed_by;
pub use non_empty_vec::NonEmptyVec;
pub use semigroup::*;
pub use vec_indexed_by::VecIndexedBy;

View File

@ -12,17 +12,14 @@ use std::vec::Drain;
// ===================
/// A version of [`std::vec::Vec`] that can't be empty.
#[allow(missing_docs)]
#[derive(Clone, Debug, Eq, PartialEq, Deref, DerefMut, Reflect)]
#[derive(Clone, Debug, Eq, PartialEq, Deref, DerefMut, Reflect, Serialize, Deserialize)]
#[reflect(transparent)]
#[derive(crate::serde_reexports::Serialize)]
#[derive(crate::serde_reexports::Deserialize)]
pub struct NonEmptyVec<T, I = usize> {
#[reflect(as = "Vec<T>")]
pub elems: VecIndexedBy<T, I>,
pub struct NonEmptyVec<T> {
/// An internal vector that contains at least one element at all times.
pub elems: Vec<T>,
}
impl<T, I> NonEmptyVec<T, I> {
impl<T> NonEmptyVec<T> {
/// Construct a new non-empty vector.
///
/// # Examples
@ -32,39 +29,19 @@ impl<T, I> NonEmptyVec<T, I> {
/// use enso_prelude::NonEmptyVec;
/// let mut vec: NonEmptyVec<usize> = NonEmptyVec::new(0, vec![]);
/// ```
pub fn new(first: T, rest: Vec<T>) -> NonEmptyVec<T, I> {
let mut elems = VecIndexedBy::with_capacity(1 + rest.len());
pub fn new(first: T, rest: Vec<T>) -> NonEmptyVec<T> {
let mut elems = Vec::with_capacity(1 + rest.len());
elems.push(first);
elems.extend(rest);
NonEmptyVec { elems }
}
/// Construct a new non-empty vector.
///
/// # Examples
///
/// ```
/// #![allow(unused_mut)]
/// use enso_prelude::NonEmptyVec;
/// let mut vec: NonEmptyVec<usize> = NonEmptyVec::new_with_last(vec![], 0);
/// ```
pub fn new_with_last(mut elems: Vec<T>, last: T) -> NonEmptyVec<T, I> {
elems.push(last);
NonEmptyVec { elems: elems.into() }
}
/// Length of the vector.
#[allow(clippy::len_without_is_empty)]
pub fn len(&self) -> usize {
self.elems.len()
}
/// Return the last valid index.
pub fn last_valid_index(&self) -> I
where I: From<usize> {
(self.len() - 1).into()
}
/// Construct a `NonEmptyVec` containing a single element.
///
/// # Examples
@ -75,12 +52,12 @@ impl<T, I> NonEmptyVec<T, I> {
/// assert_eq!(vec.get(0), Some(&0));
/// assert_eq!(vec.len(), 1);
/// ```
pub fn singleton(first: T) -> NonEmptyVec<T, I> {
pub fn singleton(first: T) -> NonEmptyVec<T> {
let elems = vec![first];
Self { elems: elems.into() }
Self { elems }
}
/// Construct a new, `NonEmptyVec<T, I>` containing the provided element and with the
/// Construct a new, `NonEmptyVec<T>` containing the provided element and with the
/// provided `capacity`.
///
/// If `capacity` is 0, then the vector will be allocated with capacity for the provided `first`
@ -97,7 +74,7 @@ impl<T, I> NonEmptyVec<T, I> {
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let mut vec = NonEmptyVec::<_, usize>::with_capacity(0, 10);
/// let mut vec = NonEmptyVec::<_>::with_capacity(0, 10);
///
/// // The vector contains one item, even though it has capacity for more
/// assert_eq!(vec.len(), 1);
@ -110,9 +87,9 @@ impl<T, I> NonEmptyVec<T, I> {
/// // ...but this may make the vector reallocate
/// vec.push(11);
/// ```
pub fn with_capacity(first: T, capacity: usize) -> NonEmptyVec<T, I> {
pub fn with_capacity(first: T, capacity: usize) -> NonEmptyVec<T> {
debug_assert_ne!(capacity, 0, "Capacity must be greater than zero for a NonEmptyVec.");
let mut elems = VecIndexedBy::with_capacity(capacity);
let mut elems = Vec::with_capacity(capacity);
elems.push(first);
NonEmptyVec { elems }
}
@ -132,7 +109,7 @@ impl<T, I> NonEmptyVec<T, I> {
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![]);
/// let mut vec = NonEmptyVec::<_>::new(0, vec![]);
/// vec.reserve(10);
/// assert!(vec.capacity() >= 11);
/// ```
@ -149,7 +126,7 @@ impl<T, I> NonEmptyVec<T, I> {
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let mut vec = NonEmptyVec::<_, usize>::with_capacity(0, 10);
/// let mut vec = NonEmptyVec::<_>::with_capacity(0, 10);
/// assert_eq!(vec.capacity(), 10);
/// vec.shrink_to_fit();
/// assert!(vec.capacity() < 10);
@ -168,7 +145,7 @@ impl<T, I> NonEmptyVec<T, I> {
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]);
/// let mut vec = NonEmptyVec::<_>::new(0, vec![1, 2]);
/// vec.push(3);
/// assert_eq!(vec.len(), 4);
/// ```
@ -176,24 +153,9 @@ impl<T, I> NonEmptyVec<T, I> {
self.elems.push(value)
}
/// Remove an element from the back of the collection, returning it.
///
/// # Examples
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![1]);
/// assert!(vec.pop_if_has_more_than_1_elem().is_some());
/// assert!(vec.pop_if_has_more_than_1_elem().is_none());
/// assert_eq!(vec.len(), 1);
/// ```
pub fn pop_if_has_more_than_1_elem(&mut self) -> Option<T> {
(self.len() > 1).and_option_from(|| self.elems.pop())
}
/// Remove an element from the back of the collection, returning it and a new possibly empty
/// vector.
pub fn pop(mut self) -> (T, VecIndexedBy<T, I>) {
pub fn pop(mut self) -> (T, Vec<T>) {
let first = self.elems.pop().unwrap();
(first, self.elems)
}
@ -204,7 +166,7 @@ impl<T, I> NonEmptyVec<T, I> {
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]);
/// let vec = NonEmptyVec::<_>::new(0, vec![1, 2]);
/// assert_eq!(*vec.first(), 0);
/// ```
pub fn first(&self) -> &T {
@ -217,7 +179,7 @@ impl<T, I> NonEmptyVec<T, I> {
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]);
/// let mut vec = NonEmptyVec::<_>::new(0, vec![1, 2]);
/// assert_eq!(*vec.first_mut(), 0);
/// ```
pub fn first_mut(&mut self) -> &mut T {
@ -230,7 +192,7 @@ impl<T, I> NonEmptyVec<T, I> {
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]);
/// let vec = NonEmptyVec::<_>::new(0, vec![1, 2]);
/// assert_eq!(*vec.last(), 2)
/// ```
pub fn last(&self) -> &T {
@ -243,7 +205,7 @@ impl<T, I> NonEmptyVec<T, I> {
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]);
/// let mut vec = NonEmptyVec::<_>::new(0, vec![1, 2]);
/// assert_eq!(*vec.last_mut(), 2)
/// ```
pub fn last_mut(&mut self) -> &mut T {
@ -252,7 +214,7 @@ impl<T, I> NonEmptyVec<T, I> {
/// Convert this non-empty vector to vector.
pub fn into_vec(self) -> Vec<T> {
self.elems.into()
self.elems
}
/// Consume this non-empty vector and return it's first element. The rest will be dropped.
@ -265,11 +227,7 @@ impl<T, I> NonEmptyVec<T, I> {
let elems = self.elems.into_iter().map(f).collect();
NonEmptyVec { elems }
}
}
impl<T, I> NonEmptyVec<T, I>
where I: vec_indexed_by::Index
{
/// Obtain a mutable reference to the element in the vector at the specified `index`.
///
/// # Examples
@ -281,20 +239,18 @@ where I: vec_indexed_by::Index
/// assert!(reference.is_some());
/// assert_eq!(*reference.unwrap(), 0);
/// ```
pub fn get_mut(&mut self, index: I) -> Option<&mut T> {
pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
self.elems.get_mut(index)
}
/// Get the tail reference.
pub fn tail(&self) -> &[T]
where I: From<u8> {
&self.elems[I::from(1_u8)..]
pub fn tail(&self) -> &[T] {
&self.elems[1..]
}
/// Get the mutable tail reference.
pub fn tail_mut(&mut self) -> &mut [T]
where I: From<u8> {
&mut self.elems[I::from(1_u8)..]
pub fn tail_mut(&mut self) -> &mut [T] {
&mut self.elems[1..]
}
/// Create a draining iterator that removes the specified range in the vector and yields the
@ -316,68 +272,47 @@ where I: vec_indexed_by::Index
/// assert_eq!(drained, [1, 2, 3, 4, 5])
/// ```
pub fn drain<R>(&mut self, range: R) -> Drain<T>
where
R: RangeBounds<I>,
I: PartialOrd + Copy + From<u8>, {
if range.contains(&I::from(0_u8)) {
where R: RangeBounds<usize> {
if range.contains(&0) {
match range.end_bound() {
Bound::Included(n) => self.elems.drain(I::from(1_u8)..=*n),
Bound::Excluded(n) => self.elems.drain(I::from(1_u8)..*n),
Bound::Unbounded => self.elems.drain(I::from(1_u8)..),
Bound::Included(n) => self.elems.drain(1..=*n),
Bound::Excluded(n) => self.elems.drain(1..*n),
Bound::Unbounded => self.elems.drain(1..),
}
} else {
self.elems.drain(range)
}
}
/// Insert the contents of an iterator at a specified index in the collection.
///
/// This is optimal if:
/// - The specified index is equal to the length of the vector,
/// - or the lower bound of the iterator's `size_hint()` is exact.
///
/// Otherwise, a temporary vector is allocated and the tail is moved twice.
///
/// # Panics
///
/// Panics if the given index is greater than the length of the vector.
///
/// # Examples
///
/// ```
/// use enso_prelude::NonEmptyVec;
/// let mut vec = NonEmptyVec::new(0, vec![1, 4, 5]);
/// vec.extend_at(2, vec![2, 3]);
/// assert_eq!(&vec.as_slice(), &[0, 1, 2, 3, 4, 5])
/// ```
pub fn extend_at(&mut self, index: I, elems: impl IntoIterator<Item = T>) {
self.splice(index..index, elems);
}
}
// === Trait Impls ===
impl<T: Default, I> Default for NonEmptyVec<T, I> {
impl<T: Default> Default for NonEmptyVec<T> {
fn default() -> Self {
Self::singleton(default())
}
}
impl<T, I> TryFrom<Vec<T>> for NonEmptyVec<T, I> {
impl<T> TryFrom<Vec<T>> for NonEmptyVec<T> {
type Error = ();
fn try_from(elems: Vec<T>) -> Result<Self, Self::Error> {
(!elems.is_empty()).as_result_from(|| NonEmptyVec { elems: elems.into() }, || ())
if elems.is_empty() {
Err(())
} else {
Ok(NonEmptyVec { elems })
}
}
}
impl<T, I> From<NonEmptyVec<T, I>> for Vec<T> {
fn from(v: NonEmptyVec<T, I>) -> Self {
v.elems.into()
impl<T> From<NonEmptyVec<T>> for Vec<T> {
fn from(v: NonEmptyVec<T>) -> Self {
v.elems
}
}
impl<T, I> IntoIterator for NonEmptyVec<T, I> {
impl<T> IntoIterator for NonEmptyVec<T> {
type Item = T;
type IntoIter = std::vec::IntoIter<T>;
fn into_iter(self) -> Self::IntoIter {
@ -385,7 +320,7 @@ impl<T, I> IntoIterator for NonEmptyVec<T, I> {
}
}
impl<'a, T, I> IntoIterator for &'a NonEmptyVec<T, I> {
impl<'a, T> IntoIterator for &'a NonEmptyVec<T> {
type Item = &'a T;
type IntoIter = slice::Iter<'a, T>;
fn into_iter(self) -> Self::IntoIter {
@ -393,7 +328,7 @@ impl<'a, T, I> IntoIterator for &'a NonEmptyVec<T, I> {
}
}
impl<'a, T, I> IntoIterator for &'a mut NonEmptyVec<T, I> {
impl<'a, T> IntoIterator for &'a mut NonEmptyVec<T> {
type Item = &'a mut T;
type IntoIter = slice::IterMut<'a, T>;
fn into_iter(self) -> Self::IntoIter {

View File

@ -1,145 +0,0 @@
//! In mathematics, a semigroup is an algebraic structure consisting of a set together with an
//! associative binary operation. A semigroup generalizes a monoid in that there might not exist an
//! identity element. It also (originally) generalized a group (a monoid with all inverses) to a
//! type where every element did not have to have an inverse, thus the name semigroup.
use std::collections::HashMap;
use std::hash::BuildHasher;
use std::hash::Hash;
// =================
// === Semigroup ===
// =================
/// Mutable Semigroup definition. Impls should satisfy the associativity law:
/// `x.concat(y.concat(z)) = x.concat(y).concat(z)`, in symbolic form:
/// `x <> (y <> z) = (x <> y) <> z`
/// Mutable Semigroup definition. Impls should satisfy the associativity law:
/// `x.concat(y.concat(z)) = x.concat(y).concat(z)`, in symbolic form:
/// `x <> (y <> z) = (x <> y) <> z`
pub trait PartialSemigroup<T = Self>: Clone {
    /// An associative operation.
    fn concat_mut(&mut self, other: T);

    /// An associative operation, performed on a clone of `self`.
    fn concat_ref(&self, other: T) -> Self
    where Self: Clone {
        self.clone().concat(other)
    }

    /// An associative operation, consuming `self`.
    fn concat(mut self, other: T) -> Self {
        self.concat_mut(other);
        self
    }
}

impl<T> Semigroup for T where T: PartialSemigroup<T> + for<'t> PartialSemigroup<&'t T> {}

/// A semigroup whose operation can consume its argument both by value and by reference.
/// Automatically implemented (see the blanket impl above) for every such [`PartialSemigroup`].
pub trait Semigroup: PartialSemigroup<Self> + for<'t> PartialSemigroup<&'t Self> {
    /// Concatenate `self` with itself until it contains `n` copies in total.
    ///
    /// # Panics
    /// Panics if `n == 0`: a semigroup has no identity element, so zero repetitions are not
    /// representable. (The previous implementation iterated `0..n - 1`, which underflowed for
    /// `n == 0` — a panic in debug builds and a near-endless loop in release builds.)
    fn partial_times_mut(&mut self, n: usize) {
        assert!(n > 0, "cannot repeat a semigroup element zero times");
        let val = self.clone();
        for _ in 1..n {
            self.concat_mut(&val)
        }
    }

    /// Consuming variant of [`Self::partial_times_mut`].
    fn partial_times(mut self, n: usize) -> Self {
        self.partial_times_mut(n);
        self
    }
}
// ====================
// === Stdlib Impls ===
// ====================
// === Option ===
/// Concatenating a borrowed `Option`: `None` is absorbed, payloads are concatenated.
impl<T: Semigroup> PartialSemigroup<&Option<T>> for Option<T> {
    fn concat_mut(&mut self, other: &Self) {
        let Some(rhs) = other else { return };
        match self {
            Some(lhs) => lhs.concat_mut(rhs),
            None => *self = Some(rhs.clone()),
        }
    }
}

/// Concatenating an owned `Option`: like the borrowed variant, but without cloning.
impl<T: PartialSemigroup> PartialSemigroup<Option<T>> for Option<T> {
    fn concat_mut(&mut self, other: Self) {
        let Some(rhs) = other else { return };
        match self {
            Some(lhs) => lhs.concat_mut(rhs),
            None => *self = Some(rhs),
        }
    }
}
// === HashMap ===
/// Map union: values stored under the same key are concatenated. Borrowed variant.
impl<K, V, S> PartialSemigroup<&HashMap<K, V, S>> for HashMap<K, V, S>
where
    K: Eq + Hash + Clone,
    V: Semigroup,
    S: Clone + BuildHasher,
{
    fn concat_mut(&mut self, other: &Self) {
        for (key, rhs) in other {
            // Single hash lookup via the entry API; clone only when inserting a fresh key.
            self.entry(key.clone())
                .and_modify(|lhs| lhs.concat_mut(rhs))
                .or_insert_with(|| rhs.clone());
        }
    }
}

/// Map union consuming the right-hand side; avoids cloning the inserted values.
impl<K, V, S> PartialSemigroup<HashMap<K, V, S>> for HashMap<K, V, S>
where
    K: Eq + Hash + Clone,
    V: Semigroup,
    S: Clone + BuildHasher,
{
    fn concat_mut(&mut self, other: Self) {
        for (key, rhs) in other {
            self.entry(key)
                .and_modify(|lhs| lhs.concat_mut(&rhs))
                .or_insert(rhs);
        }
    }
}
// === Vec ===
/// Vector concatenation from a borrowed right-hand side (clones the elements).
impl<T: Clone> PartialSemigroup<&Vec<T>> for Vec<T> {
    fn concat_mut(&mut self, other: &Self) {
        self.extend_from_slice(other)
    }
}

/// Vector concatenation consuming the right-hand side (moves the elements).
impl<T: Clone> PartialSemigroup<Vec<T>> for Vec<T> {
    fn concat_mut(&mut self, other: Self) {
        self.extend(other.into_iter())
    }
}
// =============
// === Tests ===
// =============
#[cfg(test)]
mod tests {
    use super::*;

    /// The `Option` semigroup: `None` is absorbed, two `Some`s concatenate payloads.
    #[test]
    fn option() {
        let some = |xs: &[usize]| Some(xs.to_vec());
        assert_eq!(None::<Vec<usize>>.concat(&None), None);
        assert_eq!(some(&[1]).concat(&None), some(&[1]));
        assert_eq!(None.concat(&some(&[1])), some(&[1]));
        assert_eq!(some(&[1]).concat(&some(&[2])), some(&[1, 2]));
    }
}

View File

@ -1,287 +0,0 @@
use crate::*;
use std::alloc::Allocator;
use std::ops::Bound;
// =====================
// === Helper macros ===
// =====================
/// Define a method on `VecIndexedBy` that forwards to the inner `Vec` method of the same name,
/// after translating an `I`-typed range into the `usize` bounds the inner `Vec` expects.
///
/// Usage: `ranged_fn! {name[generic params](extra args) -> ReturnType}`. The generated method
/// always takes `&mut self` plus an `impl RangeBounds<I>` as its first argument.
macro_rules! ranged_fn {
    ($name:ident $([$($parm:tt)*])? ($($arg:ident : $arg_tp:ty),* ) -> $out:ty ) => {
        pub fn $name $(<$($parm)*>)?
        (&mut self, range: impl RangeBounds<I> $(,$arg:$arg_tp)* ) -> $out {
            // Convert each `I`-typed bound into the underlying `usize` index space.
            let map_bound = |bound| match bound {
                Bound::<&I>::Included(t) => Bound::Included((*t).into()),
                Bound::<&I>::Excluded(t) => Bound::Excluded((*t).into()),
                Bound::<&I>::Unbounded => Bound::Unbounded,
            };
            let start = map_bound(range.start_bound());
            let end = map_bound(range.end_bound());
            self.vec.$name((start, end) $(,$arg)*)
        }
    };
}
// ====================
// === VecIndexedBy ===
// ====================
/// Bound for index types: cheap to copy and losslessly convertible to and from `usize`.
pub trait Index = Copy + From<usize> + Into<usize>;

/// A `Vec` wrapper addressed by a dedicated index type `I` instead of a raw `usize`, so
/// different index spaces cannot be mixed up accidentally.
#[derive(crate::serde_reexports::Serialize)]
#[derive(crate::serde_reexports::Deserialize)]
#[derive(Derivative, Deref, DerefMut, From, Into)]
#[derivative(Clone(bound = "T: Clone, A: Allocator + Clone"))]
#[derivative(Debug(bound = "T: Debug, A: Allocator"))]
#[derivative(Default(bound = "A: Allocator, Vec<T, A>: Default"))]
#[derivative(PartialEq(bound = "Vec<T, A>: PartialEq"))]
#[derivative(Eq(bound = "Vec<T, A>: PartialEq"))]
pub struct VecIndexedBy<T, I = usize, A: Allocator = std::alloc::Global> {
    #[serde(bound(
        serialize = "Vec<T, A>: crate::serde_reexports::Serialize",
        deserialize = "Vec<T, A>: crate::serde_reexports::Deserialize<'de>"
    ))]
    #[deref]
    #[deref_mut]
    vec: Vec<T, A>,
    // Marker binding the index type parameter; `ZST` is presumably zero-sized, so no payload
    // is stored for it — TODO(review): confirm against the `enso_zst` definition.
    key: ZST<I>,
}
impl<T, I> VecIndexedBy<T, I> {
    /// Build an empty collection with room for `capacity` elements.
    pub fn with_capacity(capacity: usize) -> Self {
        Self::from(Vec::with_capacity(capacity))
    }
}

impl<T, I, A> VecIndexedBy<T, I, A>
where A: Allocator
{
    /// Return the last valid index, if any.
    pub fn last_valid_index(&self) -> Option<I>
    where I: From<usize> {
        // `None` for an empty vector; otherwise `len - 1` converted into the index type.
        self.vec.len().checked_sub(1).map(I::from)
    }
}
// ==============
// === Traits ===
// ==============
// Generate the local `NotSame` disambiguation trait (see `define_not_same_trait!` docs).
define_not_same_trait!();
impl<T, I, A> VecIndexedBy<T, I, A>
where
    A: Allocator,
    I: Index,
{
    /// Mutable checked access by the typed index.
    pub fn get_mut(&mut self, index: I) -> Option<&mut T> {
        self.vec.get_mut(index.into())
    }
    /// Checked access by the typed index.
    pub fn get(&self, index: I) -> Option<&T> {
        self.vec.get(index.into())
    }
    /// Insert `element` at `index`, shifting all later elements to the right.
    pub fn insert(&mut self, index: I, element: T) {
        self.vec.insert(index.into(), element)
    }
    /// Remove and return the element at `index`, shifting all later elements to the left.
    pub fn remove(&mut self, index: I) -> T {
        self.vec.remove(index.into())
    }
    // `Vec::drain` / `Vec::splice`, re-exposed with `I`-typed range bounds.
    ranged_fn! {drain() -> std::vec::Drain<'_, T, A>}
    ranged_fn! {splice[Iter: IntoIterator<Item = T>](replace_with: Iter) -> std::vec::Splice<'_, Iter::IntoIter, A>}
}
// Reuse an existing `Vec` as the backing storage; the index type is implied by context.
impl<T, I, A> From<Vec<T, A>> for VecIndexedBy<T, I, A>
where A: Allocator
{
    fn from(vec: Vec<T, A>) -> Self {
        Self { vec, key: default() }
    }
}
// Unwrap back into the plain `Vec`, discarding the index-type marker.
impl<T, I, A> From<VecIndexedBy<T, I, A>> for Vec<T, A>
where A: Allocator
{
    fn from(vec: VecIndexedBy<T, I, A>) -> Self {
        vec.vec
    }
}
// Cloning conversion from a borrowed `Vec`.
impl<T, I, A> From<&Vec<T, A>> for VecIndexedBy<T, I, A>
where
    T: Clone,
    A: Allocator + Clone,
{
    fn from(vec: &Vec<T, A>) -> Self {
        Self { vec: vec.clone(), key: default() }
    }
}
// Cloning conversion from a borrowed `VecIndexedBy`.
impl<T, I, A> From<&VecIndexedBy<T, I, A>> for VecIndexedBy<T, I, A>
where
    T: Clone,
    A: Allocator + Clone,
{
    fn from(vec: &VecIndexedBy<T, I, A>) -> Self {
        vec.clone()
    }
}
// `Index`/`IndexMut` rely on `I: Index` (`Copy + From<usize> + Into<usize>`) to translate the
// typed index into the inner `Vec`'s `usize` index space. These delegate 1:1 and therefore
// panic on out-of-bounds access, exactly like `Vec` indexing.
impl<T, I, A> std::ops::Index<I> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
{
    type Output = T;
    fn index(&self, index: I) -> &Self::Output {
        &self.vec[index.into()]
    }
}
impl<T, I, A> std::ops::Index<Range<I>> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
{
    type Output = [T];
    fn index(&self, range: Range<I>) -> &Self::Output {
        &self.vec[range.start.into()..range.end.into()]
    }
}
impl<T, I, A> std::ops::Index<RangeFrom<I>> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
{
    type Output = [T];
    fn index(&self, range: RangeFrom<I>) -> &Self::Output {
        &self.vec[range.start.into()..]
    }
}
impl<T, I, A> std::ops::Index<RangeTo<I>> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
{
    type Output = [T];
    fn index(&self, range: RangeTo<I>) -> &Self::Output {
        &self.vec[..range.end.into()]
    }
}
// `(RangeFull, I): NotSame` disambiguates this impl from `Index<I>` when `I` itself is
// `RangeFull` (see the `NotSame` docs in this crate).
impl<T, I, A> std::ops::Index<RangeFull> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
    (RangeFull, I): NotSame,
{
    type Output = [T];
    fn index(&self, _range: RangeFull) -> &Self::Output {
        &self.vec[..]
    }
}
// Mutable counterparts of the impls above.
impl<T, I, A> IndexMut<I> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
{
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        &mut self.vec[index.into()]
    }
}
impl<T, I, A> IndexMut<Range<I>> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
{
    fn index_mut(&mut self, range: Range<I>) -> &mut Self::Output {
        &mut self.vec[range.start.into()..range.end.into()]
    }
}
impl<T, I, A> IndexMut<RangeFrom<I>> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
{
    fn index_mut(&mut self, range: RangeFrom<I>) -> &mut Self::Output {
        &mut self.vec[range.start.into()..]
    }
}
impl<T, I, A> IndexMut<RangeTo<I>> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
{
    fn index_mut(&mut self, range: RangeTo<I>) -> &mut Self::Output {
        &mut self.vec[..range.end.into()]
    }
}
impl<T, I, A> IndexMut<RangeFull> for VecIndexedBy<T, I, A>
where
    I: Index,
    A: Allocator,
    (RangeFull, I): NotSame,
{
    fn index_mut(&mut self, _range: RangeFull) -> &mut Self::Output {
        &mut self.vec[..]
    }
}
// Iteration simply delegates to the inner `Vec`; the index type plays no role here.
impl<T, I, A> IntoIterator for VecIndexedBy<T, I, A>
where A: Allocator
{
    type Item = T;
    type IntoIter = std::vec::IntoIter<T, A>;
    fn into_iter(self) -> Self::IntoIter {
        self.vec.into_iter()
    }
}
impl<'a, T, I, A> IntoIterator for &'a VecIndexedBy<T, I, A>
where A: Allocator
{
    type Item = &'a T;
    type IntoIter = slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.vec.iter()
    }
}
impl<'a, T, I, A> IntoIterator for &'a mut VecIndexedBy<T, I, A>
where A: Allocator
{
    type Item = &'a mut T;
    type IntoIter = slice::IterMut<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.vec.iter_mut()
    }
}
// Collecting builds the backing `Vec` and attaches the zero-cost index marker.
impl<T, I> FromIterator<T> for VecIndexedBy<T, I> {
    fn from_iter<Iter: IntoIterator<Item = T>>(iter: Iter) -> VecIndexedBy<T, I> {
        let vec = Vec::from_iter(iter);
        Self { vec, key: default() }
    }
}

View File

@ -3,55 +3,27 @@
//! defines several aliases and utils which may find their place in new
//! libraries in the future.
// === Features ===
#![feature(trait_alias)]
#![feature(allocator_api)]
#![feature(auto_traits)]
#![feature(negative_impls)]
#![feature(pattern)]
mod data;
mod macros;
mod not_same;
mod option;
mod serde;
mod smallvec;
mod std_reexports;
mod string;
mod vec;
pub use crate::serde::*;
pub use crate::smallvec::*;
pub use enso_macros::*;
pub use enso_zst::*;
pub use anyhow;
pub use data::*;
pub use macros::*;
pub use option::*;
pub use std_reexports::*;
pub use string::*;
pub use vec::*;
pub use boolinator::Boolinator;
pub use derivative::Derivative;
pub use derive_more::*;
pub use enso_reflect::prelude::*;
pub use itertools::Itertools;
pub use paste::paste;
pub use std::ops::AddAssign;
pub use serde::Deserialize;
pub use serde::Serialize;
/// Serde reexports for the code generated by declarative macros.
///
/// They cannot be directly reexported from prelude, as the methods `serialize` and `deserialize`
/// that would be brought into scope by this, would collide with the other IDE-defined traits.
pub mod serde_reexports {
pub use serde::Deserialize;
pub use serde::Serialize;
}
// ===============
// === Logging ===

View File

@ -1,326 +0,0 @@
//! This module defines set of common macros which are useful across different projects.
// ==============
// === Export ===
// ==============
pub use enso_shapely::ForEachVariant;
/// Allows for nicer definition of impls, similar to what Haskell or Scala does. Reduces the needed
/// boilerplate. For example, the following usage:
///
/// ```text
/// struct A { name:String };
/// impls! { From<A> for String { |t| t.name.clone() } }
/// ```
///
/// compiles to:
/// ```
/// struct A {
/// name: String,
/// };
/// impl From<A> for String {
/// fn from(t: A) -> Self {
/// t.name.clone()
/// }
/// }
/// ```
///
/// This macro is meant to support many standard traits (like From) and should grow in the future.
/// Currently supported ones are:
/// * From<…>
/// * From + &From<…>
/// * Into + &Into<…>
/// * PhantomFrom<…>
#[macro_export]
macro_rules! impls {
    // `From<Ty> for Target` — a single owned conversion; the body is the given closure.
    ($([$($impl_params:tt)*])? From<$ty:ty> for $target:ty $(where [$($bounds:tt)*])? {
        |$arg:tt| $($result:tt)*
    } ) => {
        #[allow(clippy::redundant_closure_call)]
        impl <$($($impl_params)*)?> From <$ty> for $target $(where $($bounds)*)? {
            fn from (arg:$ty) -> Self {
                (|$arg:$ty| $($result)*)(arg)
            }
        }
    };
    // `From + &From<Ty>` — generates both the owned and the borrowed conversion from one body.
    ($([$($impl_params:tt)*])? From + &From <$ty:ty> for $target:ty $(where [$($bounds:tt)*])? {
        |$arg:tt| $($result:tt)*
    } ) => {
        #[allow(clippy::redundant_closure_call)]
        #[allow(clippy::identity_conversion)]
        impl <$($($impl_params)*)?> From <$ty> for $target $(where $($bounds)*)? {
            fn from (arg:$ty) -> Self {
                (|$arg:$ty| $($result)*)(arg)
            }
        }
        #[allow(clippy::redundant_closure_call)]
        #[allow(clippy::identity_conversion)]
        impl <$($($impl_params)*)?> From <&$ty> for $target $(where $($bounds)*)? {
            fn from (arg:&$ty) -> Self {
                (|$arg:&$ty| $($result)*)(arg)
            }
        }
    };
    // `Into + &Into<Ty>` — `Into` impls for the target and a reference to it.
    ($([$($impl_params:tt)*])? Into + &Into <$ty:ty> for $target:ty $(where [$($bounds:tt)*])? {
        |$arg:tt| $($result:tt)*
    } ) => {
        #[allow(clippy::redundant_closure_call)]
        #[allow(clippy::identity_conversion)]
        impl <$($($impl_params)*)?> Into <$ty> for $target $(where $($bounds)*)? {
            fn into(self) -> $ty {
                (|$arg:Self| $($result)*)(self)
            }
        }
        #[allow(clippy::redundant_closure_call)]
        #[allow(clippy::identity_conversion)]
        impl <$($($impl_params)*)?> Into <$ty> for &$target $(where $($bounds)*)? {
            fn into(self) -> $ty {
                (|$arg:Self| $($result)*)(self)
            }
        }
    };
    // `PhantomFrom<Ty>` — conversion from the zero-sized `ZST<Ty>` marker; the argument carries
    // no data, so the body is an expression rather than a closure.
    ($([$($impl_params:tt)*])? PhantomFrom<$ty:ty> for $target:ty {
        $($result:tt)*
    } ) => {
        impl <$($($impl_params)*)?> From <ZST<$ty>> for $target {
            fn from (_:ZST<$ty>) -> Self {
                $($result)*
            }
        }
    };
}
/// Define trait aliases: `alias! { Foo = { Clone + Debug } }` expands to a trait `Foo` with the
/// given supertraits plus a blanket impl for every type that satisfies them. The `no_docs`
/// variant additionally allows the generated traits to lack doc comments.
#[macro_export]
macro_rules! alias {
    ($( $(#$meta:tt)* $name:ident = {$($tok:tt)*} )*) => {$(
        $(#$meta)*
        pub trait $name: $($tok)* {}
        impl<T:$($tok)*> $name for T {}
    )*};
    (no_docs $( $(#$meta:tt)* $name:ident = {$($tok:tt)*} )*) => {$(
        $(#$meta)*
        #[allow(missing_docs)]
        pub trait $name: $($tok)* {}
        impl<T:$($tok)*> $name for T {}
    )*};
}
// ==============
// === Lambda ===
// ==============
/// Clones all arguments from the first argument list by using `CloneRef` and defines lambda with
/// arguments from the second argument list (if present). For example, the following usage
///
/// ```text
/// f! { (a,b)(c) a + b + c }
/// ```
///
/// is equivalent to:
///
/// ```text
/// {
/// let a = a.clone_ref();
/// let b = b.clone_ref();
/// move |c| { a + b + c }
/// }
/// ```
#[macro_export]
macro_rules! f {
    // Explicit capture list and argument list: `clone_ref` every capture, then build the lambda.
    ([$($name:ident),*] ($($args:tt)*) $($expr:tt)*) => {
        {
            $(let $name = $name.clone_ref();)*
            move |$($args)*| { $($expr)* }
        }
    };
    // Explicit capture list, no argument list: a zero-argument lambda.
    ([$($name:ident),*] $($expr:tt)*) => {
        {
            $(let $name = $name.clone_ref();)*
            move || { $($expr)* }
        }
    };
    // Shorthand `f!((args) target.method(...))`: the method receiver is captured automatically.
    (($($args:tt)*) $name:ident . $($toks:tt)*) => {
        f! { [$name] ($($args)*) $name . $($toks)* }
    };
    // Same shorthand with a braced body.
    (($($args:tt)*) { $name:ident . $($toks:tt)* }) => {
        f! { [$name] ($($args)*) { $name . $($toks)* } }
    };
    // Shorthand without an argument list.
    ($name:ident . $($toks:tt)*) => {
        f! { [$name] $name . $($toks)* }
    };
}
/// Variant of the `f` macro producing a lambda which drops its first argument.
#[macro_export]
macro_rules! f_ {
    ([$($name:ident),*] $($expr:tt)*) => {
        f! { [$($name),*] (_) $($expr)* }
    };
    ($name:ident . $($toks:tt)*) => {
        f_! { [$name] $name . $($toks)* }
    };
    ( { $name:ident . $($toks:tt)* } ) => {
        f_! { [$name] { $name . $($toks)* } }
    };
}
/// Variant of the `f` macro producing a lambda which drops its first and second arguments.
#[macro_export]
macro_rules! f__ {
    ([$($name:ident),*] $($expr:tt)*) => {
        f! { [$($name),*] (_,_) $($expr)* }
    };
    ($name:ident . $($toks:tt)*) => {
        f__! { [$name] $name . $($toks)* }
    };
    ( { $name:ident . $($toks:tt)* } ) => {
        f__! { [$name] { $name . $($toks)* } }
    };
}
// ===================
// === Unreachable ===
// ===================
/// A macro for use in situations where the code is unreachable.
///
/// This macro will panic in debug builds, but in release builds it expands to
/// the unsafe [`std::hint::unreachable_unchecked()`] function, which allows the
/// compiler to optimise more.
#[macro_export]
macro_rules! unreachable_panic {
    // No-argument form: delegate to the message form with a default message.
    () => {
        unreachable_panic!("This code was marked as unreachable.")
    };
    ($msg:tt) => {
        if cfg!(debug_assertions) {
            // Debug builds fail loudly so a broken "unreachable" invariant is noticed in tests.
            panic!($msg)
        } else {
            use std::hint::unreachable_unchecked;
            #[allow(unsafe_code)]
            // SAFETY: the caller asserts this branch can never execute; release builds trade
            // the runtime check for extra optimizer freedom. Reaching this at runtime is UB.
            unsafe {
                unreachable_unchecked()
            }
        }
    };
}
// ====================
// === ReflectMatch ===
// ====================
/// Used to match a value against a set of candidates, while keeping track of the candidates.
///
/// This achieves the same function as using a `HashMap` to dispatch between a set of handlers, but
/// does not require reifying the handlers, which can be inconvenient (e.g. if they contain
/// `.await`, of if they need conflicting captures from the environment).
///
/// # Example
///
/// ```
/// use enso_prelude::*;
///
/// let selected = "foo";
/// let out = reflect_match!(match selected as options {
/// "bar" => Ok(1),
/// "baz" => Ok(2),
/// _ => Err(format!("Unexpected choice: {selected}. Must be one of: {options:?}.")),
/// });
/// ```
///
/// This is functionally equivalent to:
///
/// ```
/// # use std::collections::HashMap;
///
/// let selected = "foo";
/// let mut dispatch = HashMap::new();
/// dispatch.insert("bar", 1);
/// dispatch.insert("baz", 2);
/// let options = dispatch.keys();
/// let error = format!("Unexpected choice: {selected}. Must be one of: {options:?}.");
/// let out = dispatch.get(selected).ok_or(error);
/// ```
#[macro_export]
macro_rules! reflect_match {
    // Accumulator base case: only the `_ => fallback` branch remains. Emit the dispatcher and
    // the final `match`, binding the collected candidate list for use in the fallback.
    (@acc ($dispatch:ident, $value:expr, $candidates:ident, {
        _ => $fallback:expr $(,)?
    }) -> {$( $branches:tt )*}) => {{
        let mut $dispatch = $crate::ReflectMatch::new($value);
        match () {
            $( $branches )*
            _ => {
                let $candidates = $dispatch.into_candidates();
                $fallback
            }
        }
    }};
    // Accumulator step: peel one `literal => branch` arm off and turn it into a `match` guard
    // that both tests and records the candidate.
    (@acc ($dispatch:ident, $value:expr, $candidates:ident, {
        $candidate:literal => $branch:expr,
        $( $rest:tt )*
    }) -> {$( $branches:tt )*}) => {
        reflect_match!(@acc ($dispatch, $value, $candidates, { $( $rest )* }) -> {
            $( $branches )*
            _ if $dispatch.matches($candidate) => $branch,
        })
    };
    // Public entry point: `reflect_match!(match value as candidates { ... })`.
    (match $value:tt as $candidates:tt { $( $branches:tt )* }) => {
        reflect_match!(@acc (dispatch, $value, $candidates, { $( $branches )* }) -> {})
    };
}
// === ReflectMatch Runtime Support ===
/// Match a value against a set of candidates; if no match is found, the list of candidates is
/// available. See [`reflect_match!`] for motivation and usage examples.
#[derive(Debug)]
pub struct ReflectMatch<T, U> {
    value:      T,
    candidates: Vec<U>,
}

impl<T, U> ReflectMatch<T, U> {
    /// Create a dispatcher for the given value, with an initially empty candidate list.
    pub fn new(value: T) -> Self {
        Self { value, candidates: Vec::new() }
    }

    /// Record `key` as a tried candidate and report whether it equals the value.
    pub fn matches(&mut self, key: U) -> bool
    where T: PartialEq<U> {
        let hit = self.value == key;
        self.candidates.push(key);
        hit
    }

    /// Consume the dispatcher, yielding every candidate it was tested against.
    pub fn into_candidates(self) -> Vec<U> {
        self.candidates
    }
}

View File

@ -1,18 +0,0 @@
//! [`NotSame`] trait definition.
/// Defines the [`NotSame`] trait. It can be used to disambiguate conflicting trait implementations.
/// For example, it is not allowed to implement `impl<U, T> From<MyType<U>> for MyType<T>`, because
/// Rust standard library defines `impl<T> From<T> for T`. This trait allows to disambiguate such
/// cases by writing `impl<U, T> From<MyType<U>> for MyType<T> where (U, T) : NotSame`. However,
/// because of some strange reasons, it does not work if it is defined in another crate and has to
/// be defined locally, on-demand. As soon as it will be possible to define it in prelude, it should
/// be refactored. See its usages to learn more.
#[macro_export]
macro_rules! define_not_same_trait {
    () => {
        // `NotSame` holds for every type pair by default (auto trait); the negative impl
        // removes the diagonal, so `(U, T): NotSame` implies `U` and `T` are distinct types.
        auto trait NotSame {}
        impl<T> !NotSame for (T, T) {}
    };
}

View File

@ -1,90 +0,0 @@
//! This module defines utilities for working with the [`std::option::Option`] type.
/// Adds mapping methods to the `Option` type.
/// Adds mapping methods to the `Option` type.
pub trait OptionOps {
    type Item;
    /// Run `f` for its side effect when the option is [`None`], passing the option through.
    fn map_none<F>(self, f: F) -> Self
    where F: FnOnce();
    /// Map over a borrowed payload without consuming the option.
    fn map_ref<'a, U, F>(&'a self, f: F) -> Option<U>
    where F: FnOnce(&'a Self::Item) -> U;
    /// Map the payload, or produce `U::default()` when the option is [`None`].
    fn map_or_default<U, F>(self, f: F) -> U
    where
        U: Default,
        F: FnOnce(Self::Item) -> U;
    /// Run `f` (ignoring the payload) when the option is [`Some`], else produce `U::default()`.
    fn if_some_or_default<U, F>(self, f: F) -> U
    where
        U: Default,
        F: FnOnce() -> U;
    /// Borrowing variant of [`OptionOps::map_or_default`].
    fn map_ref_or_default<'a, U, F>(&'a self, f: F) -> U
    where
        U: Default,
        F: FnOnce(&'a Self::Item) -> U;
    /// Consume the option, running `f` on the payload if there is one.
    fn for_each<U, F>(self, f: F)
    where F: FnOnce(Self::Item) -> U;
    /// Borrowing variant of [`OptionOps::for_each`].
    fn for_each_ref<'a, U, F>(&'a self, f: F)
    where F: FnOnce(&'a Self::Item) -> U;
    /// Returns true if option contains Some with value matching given predicate.
    fn contains_if<'a, F>(&'a self, f: F) -> bool
    where F: FnOnce(&'a Self::Item) -> bool;
}

impl<T> OptionOps for Option<T> {
    type Item = T;

    fn map_none<F>(self, f: F) -> Self
    where
        F: FnOnce(),
        T: Sized, {
        match &self {
            None => f(),
            Some(_) => (),
        }
        self
    }

    fn map_ref<'a, U, F>(&'a self, f: F) -> Option<U>
    where F: FnOnce(&'a Self::Item) -> U {
        match self {
            Some(item) => Some(f(item)),
            None => None,
        }
    }

    fn map_or_default<U, F>(self, f: F) -> U
    where
        U: Default,
        F: FnOnce(Self::Item) -> U, {
        match self {
            Some(item) => f(item),
            None => U::default(),
        }
    }

    fn if_some_or_default<U, F>(self, f: F) -> U
    where
        U: Default,
        F: FnOnce() -> U, {
        if self.is_some() {
            f()
        } else {
            U::default()
        }
    }

    fn map_ref_or_default<'a, U, F>(&'a self, f: F) -> U
    where
        U: Default,
        F: FnOnce(&'a Self::Item) -> U, {
        self.as_ref().map_or_else(U::default, f)
    }

    fn for_each<U, F>(self, f: F)
    where F: FnOnce(Self::Item) -> U {
        if let Some(item) = self {
            f(item);
        }
    }

    fn for_each_ref<'a, U, F>(&'a self, f: F)
    where F: FnOnce(&'a Self::Item) -> U {
        if let Some(item) = self.as_ref() {
            f(item);
        }
    }

    fn contains_if<'a, F>(&'a self, f: F) -> bool
    where F: FnOnce(&'a Self::Item) -> bool {
        match self.as_ref() {
            Some(item) => f(item),
            None => false,
        }
    }
}

View File

@ -1,96 +0,0 @@
//! Module for utilities related to serialization/deserialization using the `serde` library.
use serde::Deserialize;
/// Try to deserialize value of type `Ret`. In case of any error, it is ignored and the default
/// value is returned instead.
pub fn deserialize_or_default<'d, Ret, D>(d: D) -> Result<Ret, D::Error>
where
    for<'e> Ret: Default + Deserialize<'e>,
    D: serde::Deserializer<'d>, {
    // Parse as a raw JSON value first so the parser input is fully consumed even when the
    // typed parse below fails; parsing the target type directly and ignoring the error would
    // leave trailing characters behind in non-trivial cases.
    let raw_json = <&serde_json::value::RawValue>::deserialize(d)?;
    Ok(serde_json::from_str(raw_json.get()).unwrap_or_default())
}
/// Deserialize a JSON value that is either of `Ret` type or equals `null`. A `null` is converted
/// to a default value of `Ret` type.
///
/// Example usage:
/// ```
/// # use serde::Deserialize;
/// # use enso_prelude::deserialize_null_as_default;
/// #[derive(Debug, Deserialize, PartialEq)]
/// struct Foo {
/// #[serde(default, deserialize_with = "deserialize_null_as_default")]
/// blah: Vec<i32>,
/// }
/// fn check_deserialized_eq(code: &str, expected_deserialized: &Foo) {
/// let deserialized = serde_json::from_str::<Foo>(code).unwrap();
/// assert_eq!(&deserialized, expected_deserialized);
/// }
/// let empty_foo = Foo { blah: vec![] };
/// check_deserialized_eq(r#"{"blah" : null }"#, &empty_foo);
/// check_deserialized_eq(r#"{}"#, &empty_foo);
/// check_deserialized_eq(r#"{"blah" : [] }"#, &empty_foo);
/// check_deserialized_eq(r#"{"blah" : [1,2,3] }"#, &Foo { blah: vec![1, 2, 3] });
/// ```
pub fn deserialize_null_as_default<'d, Ret, D>(d: D) -> Result<Ret, D::Error>
where
    for<'e> Ret: Default + Deserialize<'e>,
    D: serde::Deserializer<'d>, {
    // A JSON `null` deserializes to `None`; substitute `Ret::default()` in that case.
    Option::<Ret>::deserialize(d).map(|value| value.unwrap_or_default())
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde::Serialize;
    #[test]
    fn deserialize_or_default_attribute_test() {
        // Two structures - same except for `deserialize_or_default` attribute.
        // One fails to deserialize, second one goes through.
        #[derive(Debug, Deserialize, PartialEq, Eq, Serialize)]
        struct Foo {
            blah: String,
            boom: Vec<i32>,
        }
        #[derive(Debug, Deserialize, Eq, PartialEq, Serialize)]
        struct Bar {
            #[serde(deserialize_with = "deserialize_or_default")]
            blah: String,
            boom: Vec<i32>,
        }
        // `blah` holds an object, not a string: the plain struct must fail to parse...
        let code = r#"{"blah" : {}, "boom" : [1,2,3] }"#;
        let result = serde_json::from_str::<Foo>(code);
        assert!(result.is_err());
        // ...while the attribute-guarded field falls back to `String::default()`.
        let deserialized = serde_json::from_str::<Bar>(code).unwrap();
        assert_eq!(deserialized, Bar { blah: "".into(), boom: vec![1, 2, 3] });
    }
    #[test]
    fn deserialize_or_default_attribute_for_optional_field() {
        #[derive(Debug, Deserialize, Eq, PartialEq, Serialize)]
        struct Foo {
            #[serde(default, deserialize_with = "deserialize_or_default")]
            blah: Option<String>,
            boom: Vec<i32>,
        }
        let code = r#"{"blah" : "blah", "boom" : [1,2,3] }"#;
        let deserialized = serde_json::from_str::<Foo>(code).unwrap();
        assert_eq!(deserialized, Foo { blah: Some("blah".to_owned()), boom: vec![1, 2, 3] });
        // A missing field combines `default` with the custom deserializer and yields `None`.
        let code = r#"{"boom" : [1,2,3] }"#;
        let deserialized = serde_json::from_str::<Foo>(code).unwrap();
        assert_eq!(deserialized, Foo { blah: None, boom: vec![1, 2, 3] });
    }
}

View File

@ -1,120 +0,0 @@
//! This module defines utilities for working with the [`SmallVec`] type.
use std::cmp::Ordering;
// ==============
// === Export ===
// ==============
pub use smallvec::SmallVec;
/// Adds methods to the `SmallVec` type.
pub trait SmallVecOps {
    type Item;
    /// Binary searches this sorted slice with a comparator function.
    ///
    /// The comparator function should implement an order consistent
    /// with the sort order of the underlying slice, returning an
    /// order code that indicates whether its argument is `Less`,
    /// `Equal` or `Greater` the desired target.
    ///
    /// If the value is found then [`Result::Ok`] is returned, containing the
    /// index of the matching element. If there are multiple matches, then any
    /// one of the matches could be returned. If the value is not found then
    /// [`Result::Err`] is returned, containing the index where a matching
    /// element could be inserted while maintaining sorted order.
    ///
    /// # Implementation Details
    /// Please note that the following implementation is a copy-paste from
    /// [`Vec::binary_search_by`].
    fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
    where F: FnMut(&Self::Item) -> Ordering;
    /// Binary searches this sorted slice for a given element.
    ///
    /// If the value is found then [`Result::Ok`] is returned, containing the
    /// index of the matching element. If there are multiple matches, then any
    /// one of the matches could be returned. If the value is not found then
    /// [`Result::Err`] is returned, containing the index where a matching
    /// element could be inserted while maintaining sorted order.
    ///
    /// # Implementation Details
    /// Please note that the following implementation is a copy-paste from
    /// [`Vec::binary_search`].
    fn binary_search(&self, t: &Self::Item) -> Result<usize, usize>
    where Self::Item: Ord;
}
impl<T: smallvec::Array> SmallVecOps for SmallVec<T> {
    type Item = <T as smallvec::Array>::Item;
    #[allow(unsafe_code)]
    fn binary_search_by<F>(&self, mut f: F) -> Result<usize, usize>
    where F: FnMut(&Self::Item) -> Ordering {
        let s = self;
        let mut size = s.len();
        if size == 0 {
            return Err(0);
        }
        let mut base = 0usize;
        // Bisect: each pass halves `size`, keeping `base` at the start of the half that may
        // still contain the target.
        while size > 1 {
            let half = size / 2;
            let mid = base + half;
            // SAFETY: the call is made safe by the following invariants:
            // - `mid >= 0`: by definition
            // - `mid < size`: `mid = size / 2 + size / 4 + size / 8 ...`
            let cmp = f(unsafe { s.get_unchecked(mid) });
            base = if cmp == Ordering::Greater { base } else { mid };
            size -= half;
        }
        // SAFETY: base is always in [0, size) because base <= mid.
        let cmp = f(unsafe { s.get_unchecked(base) });
        if cmp == Ordering::Equal {
            Ok(base)
        } else {
            // Not found: report the insertion index that keeps the slice sorted.
            Err(base + (cmp == Ordering::Less) as usize)
        }
    }
    fn binary_search(&self, t: &Self::Item) -> Result<usize, usize>
    where Self::Item: Ord {
        self.binary_search_by(|p| p.cmp(t))
    }
}
// =============
// === Tests ===
// =============
#[cfg(test)]
mod tests {
    use super::*;
    // Expected values mirror `slice::binary_search` semantics: `Ok(index)` on a hit,
    // `Err(insertion_index)` on a miss.
    #[test]
    fn test_binary_search_by() {
        let v = SmallVec::<[usize; 4]>::from_iter([5, 10, 20, 40].iter().copied());
        assert_eq!(v.binary_search_by(|probe| probe.cmp(&0)), Err(0));
        assert_eq!(v.binary_search_by(|probe| probe.cmp(&5)), Ok(0));
        assert_eq!(v.binary_search_by(|probe| probe.cmp(&6)), Err(1));
        assert_eq!(v.binary_search_by(|probe| probe.cmp(&9)), Err(1));
        assert_eq!(v.binary_search_by(|probe| probe.cmp(&10)), Ok(1));
        assert_eq!(v.binary_search_by(|probe| probe.cmp(&11)), Err(2));
    }
    #[test]
    fn test_binary_search() {
        let v = SmallVec::<[usize; 4]>::from_iter([5, 10, 20, 40].iter().copied());
        assert_eq!(v.binary_search(&0), Err(0));
        assert_eq!(v.binary_search(&5), Ok(0));
        assert_eq!(v.binary_search(&6), Err(1));
        assert_eq!(v.binary_search(&9), Err(1));
        assert_eq!(v.binary_search(&10), Ok(1));
        assert_eq!(v.binary_search(&11), Err(2));
    }
}

View File

@ -24,6 +24,7 @@ pub use std::iter::FromIterator;
pub use std::marker::PhantomData;
pub use std::mem;
pub use std::ops::Add;
pub use std::ops::AddAssign;
pub use std::ops::Deref;
pub use std::ops::DerefMut;
pub use std::ops::Div;

View File

@ -1,551 +0,0 @@
//! This module defines several useful string variants, including copy-on-write and immutable
//! implementations.
use derive_more::*;
use enso_shapely::clone_ref::*;
use itertools::*;
use crate::impls;
use serde::Deserialize;
use serde::Serialize;
use std::borrow::Borrow;
use std::borrow::Cow;
use std::ops::Deref;
use std::rc::Rc;
use std::str::pattern;
// =================
// === StringOps ===
// =================
/// Extension methods available on every `str`-like value (see the blanket impl below).
pub trait StringOps {
    /// Check whether the string starts with `first_char` and ends with `last_char`.
    fn is_enclosed(&self, first_char: char, last_char: char) -> bool;
    /// Splits `self` twice. Once at the first occurrence of `start_marker` and once at the first
    /// occurrence of `end_marker`. Returns a triple containing the split `self` as a prefix,
    /// middle, and suffix. If `self` could not be split twice, returns [`None`].
    ///
    /// [`None`]: ::std::option::Option::None
    fn split_twice<'a, P>(
        &'a self,
        start_marker: P,
        end_marker: P,
    ) -> Option<(&'a str, &'a str, &'a str)>
    where
        P: pattern::Pattern<'a>;
    /// Converts the camel case string to snake case. For example, converts `FooBar` to `foo_bar`.
    fn camel_case_to_snake_case(&self) -> String;
    /// Converts the first letter of the string to uppercase. For example, converts `foo` to `Foo`.
    fn capitalize_first_letter(&self) -> String;
}
impl<T: AsRef<str>> StringOps for T {
    /// Check if given string starts and ends with given characters.
    ///
    /// Optimized to be O(1) if both characters are within ASCII range.
    fn is_enclosed(&self, first_char: char, last_char: char) -> bool {
        let text = self.as_ref();
        if first_char.is_ascii() && last_char.is_ascii() {
            // Both delimiters are single-byte: compare the raw first/last bytes directly.
            let bytes = text.as_bytes();
            let starts = bytes.first() == Some(&(first_char as u8));
            let ends = bytes.last() == Some(&(last_char as u8));
            starts && ends
        } else {
            // General case: walk the characters (O(n) for multi-byte content).
            let mut chars = text.chars();
            match chars.next() {
                Some(first) => {
                    // A one-character string is its own first and last character.
                    let last = chars.last().unwrap_or(first);
                    first == first_char && last == last_char
                }
                None => false,
            }
        }
    }

    fn split_twice<'a, P>(
        &'a self,
        start_marker: P,
        end_marker: P,
    ) -> Option<(&'a str, &'a str, &'a str)>
    where
        P: pattern::Pattern<'a>,
    {
        let text = self.as_ref();
        let (prefix, tail) = text.split_once(start_marker)?;
        let (middle, suffix) = tail.split_once(end_marker)?;
        Some((prefix, middle, suffix))
    }

    fn camel_case_to_snake_case(&self) -> String {
        let mut out = String::new();
        for (i, c) in self.as_ref().chars().enumerate() {
            if i == 0 {
                // The leading letter is lowercased without a separator.
                out.push(c.to_ascii_lowercase());
            } else if c.is_uppercase() {
                out.push('_');
                out.push(c.to_ascii_lowercase());
            } else {
                out.push(c);
            }
        }
        out
    }

    fn capitalize_first_letter(&self) -> String {
        let mut chars = self.as_ref().chars();
        match chars.next() {
            Some(first) => first.to_uppercase().chain(chars).collect(),
            None => String::new(),
        }
    }
}
// ===========
// === Str ===
// ===========

/// Abstraction for any kind of string as an argument. Functions defined as
/// `fn test<S:Str>(s: Str) { ... }` can be called with `String`, `&String`, and `&str` without
/// requiring caller to know the implementation details. Moreover, the definition can decide if it
/// needs allocation or not. Calling `s.as_ref()` will never allocate, while `s.into()` will
/// allocate only when necessary.
// NOTE(review): this is a trait alias, an unstable feature; it relies on `#![feature(trait_alias)]`
// being enabled at the crate root (not visible in this view) — confirm it survived the feature
// cleanup in this commit.
pub trait Str = Into<String> + AsRef<str>;
// =================
// === CowString ===
// =================

// === Definition ===

/// A copy-on-write String implementation. It is a newtype wrapper for `Cow<'static,str>` and
/// provides many useful impls for efficient workflow. Use it whenever you want to store a string
/// but you are not sure if the string will be allocated or not. This way you can store a static
/// slice as long as you can and switch to allocated String on demand.
#[derive(Clone, Debug, Default, Display)]
pub struct CowString(Cow<'static, str>);

// === Conversions From CowString ===

// `impls!` is a conversion-generating helper macro — presumably defined elsewhere in this crate's
// prelude; TODO confirm its expansion matches a plain `impl From<..>`.
impls! { From <&CowString> for String { |t| t.clone().into() } }
impls! { From <CowString> for String { |t| t.0.into() } }

// === Conversions To CowString ===

impls! { From <Cow<'static,str>> for CowString { |t| Self(t) } }
impls! { From <&Cow<'static,str>> for CowString { |t| Self(t.clone()) } }
impls! { From <&'static str> for CowString { |t| Self(t.into()) } }
impls! { From <String> for CowString { |t| Self(t.into()) } }
// Converting from a borrowed `String` necessarily allocates an owned copy.
impls! { From <&String> for CowString { |t| t.to_string().into() } }
impls! { From <&CowString> for CowString { |t| t.clone() } }

// === Instances ===

impl Deref for CowString {
    type Target = str;
    fn deref(&self) -> &str {
        self.0.deref()
    }
}

impl AsRef<str> for CowString {
    fn as_ref(&self) -> &str {
        self.deref()
    }
}
// ================
// === ImString ===
// ================

/// Immutable string implementation with a fast clone implementation.
#[derive(Clone, CloneRef, Eq, Hash, PartialEq, Ord, PartialOrd)]
#[derive(Deserialize, Serialize)]
pub struct ImString {
    // Shared, immutable backing storage: cloning an `ImString` only bumps the `Rc` refcount.
    content: Rc<str>,
}
impl ImString {
/// Constructor.
pub fn new(content: impl Into<Rc<str>>) -> Self {
Self { content: content.into() }
}
/// Extract a string slice containing the entire string.
pub fn as_str(&self) -> &str {
&self.content
}
}
impl Default for ImString {
fn default() -> Self {
"".into()
}
}
impl std::fmt::Display for ImString {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.content, f)
}
}
impl std::fmt::Debug for ImString {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Debug::fmt(&self.content, f)
}
}
impl Deref for ImString {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.content
}
}
impl AsRef<ImString> for ImString {
fn as_ref(&self) -> &ImString {
self
}
}
impl AsRef<str> for ImString {
fn as_ref(&self) -> &str {
self.content.as_ref()
}
}
impl Borrow<str> for ImString {
fn borrow(&self) -> &str {
&self.content
}
}
impl From<Rc<str>> for ImString {
fn from(content: Rc<str>) -> Self {
Self { content }
}
}
impl From<String> for ImString {
fn from(t: String) -> Self {
Self::new(t)
}
}
impl From<&String> for ImString {
fn from(t: &String) -> Self {
Self::new(t.as_str())
}
}
impl From<&&String> for ImString {
fn from(t: &&String) -> Self {
Self::new(t.as_str())
}
}
impl From<&str> for ImString {
fn from(t: &str) -> Self {
Self::new(t)
}
}
impl From<&&str> for ImString {
fn from(t: &&str) -> Self {
Self::new(*t)
}
}
impl From<Cow<'_, str>> for ImString {
fn from(t: Cow<str>) -> Self {
t.into_owned().into()
}
}
impl From<ImString> for Rc<str> {
fn from(t: ImString) -> Self {
t.content
}
}
impl From<ImString> for String {
fn from(value: ImString) -> Self {
value.as_str().into()
}
}
impl PartialEq<&str> for ImString {
fn eq(&self, other: &&str) -> bool {
self.content.as_ref().eq(*other)
}
}
impl PartialEq<str> for ImString {
fn eq(&self, other: &str) -> bool {
self.content.as_ref().eq(other)
}
}
impl PartialEq<ImString> for &str {
fn eq(&self, other: &ImString) -> bool {
self.eq(&other.content.as_ref())
}
}
impl PartialEq<ImString> for str {
fn eq(&self, other: &ImString) -> bool {
self.eq(other.content.as_ref())
}
}
impl PartialEq<String> for ImString {
fn eq(&self, other: &String) -> bool {
self.content.as_ref().eq(other)
}
}
impl PartialEq<ImString> for String {
fn eq(&self, other: &ImString) -> bool {
self.eq(other.content.as_ref())
}
}
// ==================
// === ToImString ===
// ==================
/// Conversion of a value to [`ImString`].
#[allow(missing_docs)]
pub trait ToImString {
fn to_im_string(&self) -> ImString;
}
impl ToImString for ImString {
fn to_im_string(&self) -> ImString {
self.clone()
}
}
impl ToImString for String {
fn to_im_string(&self) -> ImString {
self.into()
}
}
impl ToImString for &String {
fn to_im_string(&self) -> ImString {
self.into()
}
}
impl ToImString for str {
fn to_im_string(&self) -> ImString {
self.into()
}
}
impl ToImString for &str {
fn to_im_string(&self) -> ImString {
self.into()
}
}
// === Macros ===
/// Defines a newtype for `ImString`.
#[macro_export]
macro_rules! im_string_newtype {
($($(#$meta:tt)* $name:ident),* $(,)?) => {
im_string_newtype_without_serde!{ $(
#[derive($crate::serde_reexports::Serialize,$crate::serde_reexports::Deserialize)]
$(#$meta)* $name
),* }
};
}
/// Defines a newtype for `ImString`, like [`im_string_newtype`], but without deriving
/// `Serialize`/`Deserialize` for the generated type.
#[macro_export]
macro_rules! im_string_newtype_without_serde {
    ($($(#$meta:tt)* $name:ident),* $(,)?) => {$(
        $(#$meta)*
        #[derive(Clone,CloneRef,Debug,Default,Eq,Hash,PartialEq)]
        pub struct $name {
            content : ImString
        }

        impl $name {
            /// Constructor.
            pub fn new(content:impl Into<ImString>) -> Self {
                let content = content.into();
                Self {content}
            }
        }

        impl Deref for $name {
            type Target = str;
            fn deref(&self) -> &Self::Target {
                &self.content
            }
        }

        impl AsRef<$name> for $name {
            fn as_ref(&self) -> &$name {
                self
            }
        }

        impl AsRef<ImString> for $name {
            fn as_ref(&self) -> &ImString {
                self.content.as_ref()
            }
        }

        impl AsRef<str> for $name {
            fn as_ref(&self) -> &str {
                self.content.as_ref()
            }
        }

        impl From<String> for $name {
            fn from(t:String) -> Self {
                Self::new(t)
            }
        }

        impl From<&String> for $name {
            fn from(t:&String) -> Self {
                Self::new(t)
            }
        }

        impl From<&&String> for $name {
            fn from(t:&&String) -> Self {
                Self::new(t)
            }
        }

        impl From<ImString> for $name {
            fn from(t:ImString) -> Self {
                Self::new(t)
            }
        }

        impl From<&str> for $name {
            fn from(t:&str) -> Self {
                Self::new(t)
            }
        }

        impl From<&&str> for $name {
            fn from(t:&&str) -> Self {
                Self::new(t)
            }
        }

        impl From<&$name> for String {
            fn from(t:&$name) -> Self {
                t.content.to_string()
            }
        }
    )*};
}
// ===============================
// === Common Pre- and Postfix ===
// ===============================
/// Return the length of the longest common prefix of the two strings. If they are completely
/// different this will be zero.
///
/// Example:
/// ```
/// # use enso_prelude::*;
/// let a = "🐁hospital";
/// let b = "🐁host";
/// let c = "🐇bunny🐇";
///
/// assert_eq!(common_prefix_length(a, b), 4);
/// assert_eq!(common_prefix_length(a, c), 0);
/// assert_eq!(common_prefix_length(a, a), 9);
/// ```
pub fn common_prefix_length(source_a: &str, source_b: &str) -> usize {
    // `zip` stops at the end of the shorter input, so counting the matching leading pairs covers
    // both the mismatch case and the case where one string is a prefix of the other.
    source_a.chars().zip(source_b.chars()).take_while(|(a, b)| a == b).count()
}
/// Return the length of the longest common postfix of the two strings. If they are completely
/// different this will be zero.
///
/// Example:
/// ```
/// # use enso_prelude::*;
/// let a = "sunny🐇yard";
/// let b = "🐇yard";
/// let c = "🐇";
///
/// assert_eq!(common_postfix_length(a, b), 5);
/// assert_eq!(common_postfix_length(a, c), 0);
/// assert_eq!(common_postfix_length(a, a), 10);
/// ```
pub fn common_postfix_length(source_a: &str, source_b: &str) -> usize {
    // Walk both strings back-to-front and count matching trailing characters; `zip` ends the walk
    // at the length of the shorter string.
    let rev_a = source_a.chars().rev();
    let rev_b = source_b.chars().rev();
    rev_a.zip(rev_b).take_while(|(a, b)| a == b).count()
}
// =============
// === Tests ===
// =============
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE(review): several non-ASCII string literals below appear to have been lost in
    // rendering (e.g. the empty-string operands in the non-ASCII and single-char sections look
    // like they once contained characters) — verify against version control before relying on
    // these cases.
    #[test]
    fn test_string_ops() {
        // === Matching against ascii ===
        assert!("{}".is_enclosed('{', '}'));
        assert!("{ }".is_enclosed('{', '}'));
        assert!(!"{".is_enclosed('{', '}'));
        assert!(!"{a".is_enclosed('{', '}'));
        assert!(!"a}".is_enclosed('{', '}'));
        assert!(!"}".is_enclosed('{', '}'));
        assert!(!"".is_enclosed('{', '}'));
        assert!("{a}".is_enclosed('{', '}'));
        assert!("{字}".is_enclosed('{', '}'));
        assert!(!"{".is_enclosed('{', '}'));
        assert!(!"{字".is_enclosed('{', '}'));
        assert!(!"字}".is_enclosed('{', '}'));
        assert!(!"}".is_enclosed('{', '}'));
        assert!(!"".is_enclosed('{', '}'));

        // === Matching against non-ascii ===
        assert!("【】".is_enclosed('【', '】'));
        assert!("【 】".is_enclosed('【', '】'));
        assert!("【 a】".is_enclosed('【', '】'));
        assert!(!"".is_enclosed('【', '】'));
        assert!(!"【a".is_enclosed('【', '】'));
        assert!(!"a】".is_enclosed('【', '】'));
        assert!(!"".is_enclosed('【', '】'));
        assert!(!"".is_enclosed('【', '】'));

        // === Edge case of matching single char string ===
        assert!("{".is_enclosed('{', '{'));
        assert!("".is_enclosed('【', '【'));

        // === Splitting a string twice ===
        assert!("a.b.c,d,e".split_twice('.', ',').unwrap() == ("a", "b.c", "d,e"));
    }
}

View File

@ -1,8 +1,5 @@
//! This module defines utilities for working with the [`std::vec::Vec`] type.
use derivative::Derivative;
use failure::_core::hint::unreachable_unchecked;
// ==============
@ -10,78 +7,6 @@ use failure::_core::hint::unreachable_unchecked;
// ==============
pub trait VecOps<T>: AsMut<Vec<T>> + Sized {
/// Pushes the provided `item` onto the [`std::vec::Vec`], and then returns an immutable
/// reference to the item.
fn push_and_get(&mut self, item: T) -> &T {
let vec = self.as_mut();
vec.push(item);
let item_ix = vec.len() - 1;
#[allow(unsafe_code)]
unsafe {
vec.get(item_ix).unwrap_or_else(|| unreachable_unchecked())
}
}
/// Pushes the provided `item` onto the [`std::vec::Vec`], and then returns a mutable reference
/// to the item.
fn push_and_get_mut(&mut self, item: T) -> &mut T {
let vec = self.as_mut();
vec.push(item);
let item_ix = vec.len() - 1;
#[allow(unsafe_code)]
unsafe {
vec.get_mut(item_ix).unwrap_or_else(|| unreachable_unchecked())
}
}
/// Extend the vector with the provided `iter`.
fn extended<I: IntoIterator<Item = T>>(mut self, iter: I) -> Self {
self.as_mut().extend(iter);
self
}
/// Push element to the vector.
fn pushed(mut self, item: T) -> Self {
self.as_mut().push(item);
self
}
/// Self but reversed.
fn reversed(mut self) -> Self {
self.as_mut().reverse();
self
}
/// Remove first element equal to `item` and returns it if any.
fn remove_item(&mut self, item: &T) -> Option<T>
where T: PartialEq<T> {
let vec = self.as_mut();
let index = vec.iter().position(|x| *x == *item);
index.map(|i| vec.remove(i))
}
/// Attempts to remove `T` if its `index` is valid. If not, it returns `None`.
fn try_remove(&mut self, index: usize) -> Option<T> {
let vec = self.as_mut();
if index < vec.len() {
Some(vec.remove(index))
} else {
None
}
}
/// Attempts to remove the first element of `Vec<T>`, returns `None` if its length is zero.
fn pop_front(&mut self) -> Option<T> {
self.try_remove(0)
}
/// Removes the last `n` elements from the vector. Returns true if the elements were removed.
fn remove_last_n(&mut self, n: usize) -> bool {
let vec = self.as_mut();
let new_size = vec.len().checked_sub(n);
new_size.map(|new_size| vec.truncate(new_size)).is_some()
}
/// Pop and return the last element, if the vector is non-empty and the given predicate returns
/// true when applied to the last element.
fn pop_if<F>(&mut self, f: F) -> Option<T>
@ -94,23 +19,6 @@ pub trait VecOps<T>: AsMut<Vec<T>> + Sized {
}
None
}
/// Index the vector. If it is too short, extend it with default value.
fn index_or_resize_mut(&mut self, index: usize) -> &mut T
where T: Clone + Default {
self.index_or_resize_with_mut(index, || Default::default())
}
/// Index the vector. If it is too short, extend it with the provided default value.
#[allow(unsafe_code)]
fn index_or_resize_with_mut(&mut self, index: usize, cons: impl Fn() -> T) -> &mut T
where T: Clone {
let vec = self.as_mut();
if index >= vec.len() {
vec.resize(index + 1, cons());
}
unsafe { vec.get_mut(index).unwrap_or_else(|| unreachable_unchecked()) }
}
}
impl<T> VecOps<T> for Vec<T> {}
@ -164,12 +72,17 @@ impl<T> VecOps<T> for Vec<T> {}
/// }
/// }
/// ```
#[derive(Clone, Debug, Derivative, Eq, PartialEq)]
#[derivative(Default(bound = ""))]
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct VecAllocation<T> {
    data: Vec<T>,
}

// `Default` is implemented by hand (instead of `#[derive(Default)]`) so that it is available for
// every `T`, without requiring `T: Default`.
impl<T> Default for VecAllocation<T> {
    fn default() -> Self {
        let data = Vec::new();
        Self { data }
    }
}
impl<T> VecAllocation<T> {
/// Create a new, empty allocation.
pub fn new() -> Self {
@ -190,35 +103,3 @@ impl<T> VecAllocation<T> {
std::mem::take(&mut self.data)
}
}
// =============
// === Tests ===
// =============
#[cfg(test)]
mod tests {
    use super::*;

    // Simple payload type used to observe values through the returned references.
    struct Test {
        pub item: usize,
    }

    #[test]
    fn test_push_and_get() {
        let mut vec = Vec::new();
        let item = Test { item: 10 };
        // `push_and_get` returns an immutable reference to the just-pushed element.
        let item_in_vec = vec.push_and_get(item);
        assert_eq!(item_in_vec.item, 10)
    }

    #[test]
    fn test_push_and_get_mut() {
        let mut vec = Vec::new();
        let item = Test { item: 10 };
        // `push_and_get_mut` returns a mutable reference, so the element can be updated in place.
        let item_in_vec = vec.push_and_get_mut(item);
        item_in_vec.item = 20;
        assert_eq!(item_in_vec.item, 20);
    }
}

View File

@ -1,15 +0,0 @@
[package]
name = "enso-profiler"
version = "0.1.0"
edition = "2021"
authors = ["Enso Team <contact@enso.org>"]
[dependencies]
futures = { workspace = true }
serde = { workspace = true }
serde_json = { version = "1.0.59", features = ["raw_value"] }
enso-profiler-macros = { path = "macros" }
enso-web = { path = "../web" }
[lints]
workspace = true

View File

@ -1,18 +0,0 @@
[package]
name = "enso-profiler-data"
version = "0.1.0"
edition = "2021"
authors = ["Enso Team <contact@enso.org>"]
[dependencies]
derivative = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
enso-prelude = { path = "../../prelude" }
enso-profiler = { path = "../" }
[dev-dependencies]
futures = { workspace = true }
[lints]
workspace = true

View File

@ -1,129 +0,0 @@
//! Supports aggregating interval data by profiler to analyze total time spent, abstracting away
//! *when* intervals occurred.
use enso_prelude::*;
use crate::Class;
use std::collections::HashMap;
// =====================
// === Configuration ===
// =====================
/// Frames shorter than this duration, and all their children, will be excluded from interval
/// reports.
///
/// Some operations are not very expensive, but are repeated in many frames. These operations add
/// noise to the analysis: Their total duration can be high even if they have no actual performance
/// impact, and their total duration will vary depending on how long the profile is recorded.
/// Filtering them out makes profiling results more consistent, and more focused on the costs that
/// matter.
// This could logically be a configuration option, but in practice we'll probably never want to turn
// it off or change it.
const SKIP_FRAMES_BELOW_MS: f64 = 16.6;
// ==================
// === Aggregator ===
// ==================
/// Aggregate time spent in different functions.
#[derive(Default, Debug)]
pub struct Aggregator {
    // Stack of profiler labels leading to the interval currently being visited.
    stack: Vec<ImString>,
    // Root of the aggregation tree; its children are the top-level profilers.
    root: Frame,
}

impl Aggregator {
    /// Add data from a profile to the tree.
    pub fn add_profile<Metadata>(&mut self, profile: &crate::Profile<Metadata>) {
        // Filter predicate: exclude `OnFrame` intervals shorter than `SKIP_FRAMES_BELOW_MS`
        // (and, by not recursing into them, their whole subtree) — see the configuration note.
        let not_short_frame = |&&child: &&crate::IntervalId| {
            let interval = &profile[child];
            let measurement = &profile[interval.measurement];
            match measurement.classify() {
                Class::OnFrame => interval
                    .interval
                    .duration_ms()
                    .map_or(true, |duration| duration >= SKIP_FRAMES_BELOW_MS),
                _ => true,
            }
        };
        for &child in profile.root_interval().children.iter().filter(not_short_frame) {
            self.visit_interval(profile, child);
        }
    }

    /// Add the interval to a [`Frame`]; recurse into children.
    fn visit_interval<Metadata>(
        &mut self,
        profile: &crate::Profile<Metadata>,
        active: crate::IntervalId,
    ) {
        let active = &profile[active];
        let label = profile[active.measurement].label.to_string().into();
        // The label stays on the stack while this interval's subtree is visited, so children are
        // attributed to the full profiler path.
        self.stack.push(label);
        match active.interval.duration_ms() {
            // Zero-length and still-open intervals are not logged, and their children are skipped.
            Some(duration) if duration > 0.0 => {
                self.log_interval(duration);
                for child in &active.children {
                    self.visit_interval(profile, *child);
                }
            }
            _ => (),
        };
        self.stack.pop();
    }

    /// Add the interval to the total for the current stack.
    fn log_interval(&mut self, duration: f64) {
        let stack = &self.stack;
        let mut frame = &mut self.root;
        // Walk (creating nodes on demand) the tree path corresponding to the current label stack.
        for id in stack {
            frame = frame.children.entry(id.clone()).or_default();
        }
        frame.duration += duration;
        frame.intervals += 1;
    }
}
impl From<Aggregator> for Frame {
fn from(Aggregator { root, .. }: Aggregator) -> Self {
root
}
}
// =============
// === Frame ===
// =============
/// Aggregated info about all occurrences of a particular stack of profilers.
#[derive(Default, Debug)]
pub struct Frame {
    // Total duration (ms) logged for this stack, inclusive of time spent in child frames —
    // see `self_duration`, which subtracts the children's totals.
    duration: f64,
    /// Aggregated intervals that ran as children of this profiler.
    pub children: HashMap<ImString, Self>,
    // Number of intervals aggregated into `duration`.
    intervals: usize,
}
impl Frame {
/// Return the duration spent in this profiler's intervals, exclusive of time in child
/// intervals.
pub fn self_duration(&self) -> f64 {
let children_duration: f64 = self.children.values().map(Frame::total_duration).sum();
self.duration - children_duration
}
/// Return the duration spent in this profiler's intervals.
pub fn total_duration(&self) -> f64 {
self.duration
}
/// Return the number of intervals this aggregate represents.
pub fn interval_count(&self) -> usize {
self.intervals
}
}

View File

@ -1,132 +0,0 @@
//! Tool that generates Chrome DevTools-compatible files from profiling interval data.
//!
//! The Chrome DevTools profile format has no official publicly available documentation.
//! Someone's description of it is available here:
//! https://docs.google.com/document/d/1lieZBBXZiEKOVk5vLCGmMT99_O-5lv9cGXoKnhqlY4g/preview
//!
//! # Usage
//!
//! The tool reads a
//! [JSON-formatted event log](https://github.com/enso-org/design/blob/main/epics/profiling/implementation.md#file-format)
//! from stdin, and writes a report to stdout.
//!
//! For example:
//!
//! ```console
//! ~/git/enso/data $ cargo run --bin intervals < profile.json > devtools.json
//! ```
// === Features ===
#![feature(test)]
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
use enso_profiler::format::AnyMetadata;
use enso_profiler_data as data;
/// Support for the Chrome DevTools profile format.
mod devtools {
    // =============
    // === Event ===
    // =============

    /// DevTools-profile interval.
    ///
    /// The `serde` renames produce the short keys the DevTools trace format expects
    /// (`cat`, `ph`, `ts`, `dur`, `pid`, `tid`).
    #[derive(serde::Serialize)]
    pub struct Event {
        pub name: String,
        #[serde(rename = "cat")]
        pub category: String,
        #[serde(rename = "ph")]
        pub event_type: EventType,
        /// Start time, in microseconds.
        #[serde(rename = "ts")]
        pub timestamp_us: u64,
        /// Duration, in microseconds.
        #[serde(rename = "dur")]
        pub duration_us: u64,
        #[serde(rename = "pid")]
        pub process_id: u32,
        #[serde(rename = "tid")]
        pub thread_id: u32,
        // Actually a type of map, but we don't need to write anything there.
        pub args: Option<()>,
    }

    /// Information about type of event in DevTools profiling interval.
    #[derive(Clone, Copy, Eq, PartialEq, serde::Serialize)]
    pub enum EventType {
        /// A "complete" event: carries both a start timestamp and a duration.
        #[serde(rename = "X")]
        Complete,
    }
}
// ============
// === main ===
// ============
fn main() {
    use std::io::Read;
    // Read the whole JSON event log from stdin and parse it into a profile.
    let mut log = String::new();
    std::io::stdin().read_to_string(&mut log).unwrap();
    let profile: data::Profile<AnyMetadata> = log.parse().unwrap();
    // Translate intervals and write the DevTools-compatible event list to stdout as JSON.
    let events = IntervalTranslator::run(&profile);
    serde_json::to_writer(std::io::stdout(), &events).unwrap();
}
// ==========================
// === IntervalTranslator ===
// ==========================
/// Translates `profiler` data to the Chrome DevTools format.
struct IntervalTranslator<'p, Metadata> {
    profile: &'p data::Profile<Metadata>,
    events: Vec<devtools::Event>,
}

impl<'p, Metadata> IntervalTranslator<'p, Metadata> {
    /// Translate `profiler` data to the Chrome DevTools format.
    fn run(profile: &'p data::Profile<Metadata>) -> Vec<devtools::Event> {
        let mut translator = Self { profile, events: Vec::new() };
        // We skip the root node APP_LIFETIME, which is not a real measurement.
        for &child in &profile.root_interval().children {
            translator.visit_interval(child);
        }
        translator.events
    }
}
impl<'p, Metadata> IntervalTranslator<'p, Metadata> {
    /// Translate an interval, and its children.
    fn visit_interval(&mut self, active: data::IntervalId) {
        let active = &self.profile[active];
        let measurement = &self.profile[active.measurement];
        let start = active.interval.start.into_ms();
        // DevTools ignores open intervals.
        if let Some(duration_ms) = active.interval.duration_ms() {
            // Times are converted from milliseconds to whole microseconds (truncating cast).
            let duration_us = (duration_ms * 1000.0) as u64;
            let event = devtools::Event {
                name: measurement.label.to_string(),
                event_type: devtools::EventType::Complete,
                category: "interval".to_owned(),
                duration_us,
                timestamp_us: (start * 1000.0) as u64,
                // All events are attributed to a single nominal process/thread.
                process_id: 1,
                thread_id: 1,
                args: None,
            };
            self.events.push(event);
        }
        // Children are visited even when the parent interval itself was skipped (open interval).
        for child in &active.children {
            self.visit_interval(*child);
        }
    }
}

View File

@ -1,135 +0,0 @@
//! Tool that generates interval reports from profiling data.
//!
//! # Usage
//!
//! The tool reads a
//! [JSON-formatted event log](https://github.com/enso-org/design/blob/main/epics/profiling/implementation.md#file-format)
//! from stdin, and writes a report to stdout.
//!
//! For example:
//!
//! ```console
//! ~/git/enso/data $ cargo run --bin intervals < profile.json | less
//! ```
// === Features ===
#![feature(test)]
#![feature(let_chains)]
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
use enso_prelude::*;
use std::collections::HashMap;
use enso_profiler::format::AnyMetadata;
use enso_profiler_data as data;
// =====================
// === Configuration ===
// =====================
/// Set this to filter the output to matching profilers and their children.
const INCLUDE_ONLY_SUBTREES_MATCHING_PREFIX: Option<&str> = None;
// ============
// === main ===
// ============
fn main() {
    use std::io::Read;
    // Read the whole JSON event log from stdin and parse it into a profile.
    let mut log = String::new();
    std::io::stdin().read_to_string(&mut log).unwrap();
    let profile: data::Profile<AnyMetadata> = log.parse().unwrap();
    // Aggregate intervals by profiler stack, then flatten the tree to per-profiler totals.
    let mut aggregator = data::aggregate::Aggregator::default();
    aggregator.add_profile(&profile);
    let root = data::aggregate::Frame::from(aggregator);
    let funcs = FuncCollector::run(&root);
    let kv_to_func = |(label, timings)| Func { label, timings };
    let mut funcs: Vec<_> = funcs.into_iter().map(kv_to_func).collect();
    // Sort ascending by self-duration; printed in reverse, so the most expensive come first.
    funcs.sort_unstable_by(|a, b| a.timings.self_duration.total_cmp(&b.timings.self_duration));
    println!("self_duration,total_duration,count,profiler");
    for Func { label, timings } in funcs.iter().rev() {
        let FuncTimings { total_duration, self_duration, count } = timings;
        println!("{self_duration:>6.1},{total_duration:>6.1},{count},{label}");
    }
    // Final CSV row: the sum of all self-durations, reported as a pseudo-profiler entry.
    let mut total_duration = 0.0;
    for Func { timings, .. } in funcs.iter() {
        total_duration += timings.self_duration;
    }
    println!("0.0,{total_duration:>6.1},1,(total_self_duration)");
}
// =====================
// === FuncCollector ===
// =====================
/// Aggregates all intervals created by a particular profiler, abstracting away where in the stack
/// it occurs.
#[derive(Default)]
struct FuncCollector {
    funcs: HashMap<Label, FuncTimings>,
}

impl FuncCollector {
    /// Aggregate all intervals created by a particular profiler.
    fn run(root: &data::aggregate::Frame) -> HashMap<Label, FuncTimings> {
        let mut collector = Self::default();
        // Visit every top-level frame; prefix-based filtering (if configured) starts disabled.
        for (label, frame) in &root.children {
            collector.visit(label, frame, false);
        }
        collector.funcs
    }
}
impl FuncCollector {
    /// Add time spent in an interval to the running sums; recurse into children.
    fn visit(&mut self, label: &Label, frame: &data::aggregate::Frame, enable: bool) {
        // Once a subtree matches the configured prefix (or when no filter is configured), it
        // stays enabled for all descendants.
        let enable = enable
            || INCLUDE_ONLY_SUBTREES_MATCHING_PREFIX
                .map_or(true, |prefix| label.starts_with(prefix));
        if enable {
            let func = self.funcs.entry(label.clone()).or_default();
            func.self_duration += frame.self_duration();
            func.total_duration += frame.total_duration();
            func.count += frame.interval_count();
        }
        for (label, frame) in &frame.children {
            self.visit(label, frame, enable);
        }
    }
}

// A profiler label, used as the aggregation key.
type Label = ImString;
// ===================
// === FuncTimings ===
// ===================
/// Aggregate of all time spent in a particular profiler's intervals.
#[derive(Default)]
struct FuncTimings {
    // Total duration summed over all intervals, including time in child profilers.
    total_duration: f64,
    // Total duration excluding time attributed to child profilers.
    self_duration: f64,
    // Number of intervals summed.
    count: usize,
}

// ============
// === Func ===
// ============

/// Identifies a profiler, and contains information about the time spent in its intervals.
struct Func {
    label: Label,
    timings: FuncTimings,
}

View File

@ -1,86 +0,0 @@
//! Tool that generates measurement hierarchy reports from profiling data.
//!
//! # Usage
//!
//! The tool reads a JSON-formatted event log from stdin, and writes a report to stdout.
//!
//! For example:
//!
//! ```console
//! ~/git/enso/data $ cargo run --bin measurements < profile.json | less
//! ```
// === Features ===
#![feature(test)]
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
use enso_profiler::format::AnyMetadata;
use enso_profiler_data as profiler_data;
// =========================
// === print_measurement ===
// =========================
/// Pretty-print a [`profiler_data::Measurement`], including all children, in a way that illustrates
/// the hierarchy of the data. Results will be written to stdout.
// NOTE(review): the whitespace string literals below may have been narrowed by rendering —
// verify the indent width against version control.
fn print_measurement<Metadata: std::fmt::Display>(
    profile: &profiler_data::Profile<Metadata>,
    measurement: profiler_data::MeasurementId,
    i: usize,
) {
    let measurement = &profile[measurement];
    // Indentation is proportional to the depth `i` in the measurement tree.
    let mut indent = String::new();
    for _ in 0..i {
        indent.push_str(" ");
    }
    println!("{}{}", indent, measurement.label);
    print!("{indent}");
    // Second line: creation time, followed by each interval's start-end range.
    print!(" {:.1}", measurement.created.into_ms());
    for active in &measurement.intervals {
        let interval = &profile[*active];
        print!(" {}", fmt_interval(interval.interval));
    }
    println!();
    // Any metadata attached to the intervals is printed beneath the measurement.
    for active in &measurement.intervals {
        let interval = &profile[*active];
        for metadata in &interval.metadata {
            println!("{} {}", indent, metadata.data);
        }
    }
    // Recurse into child measurements at increased depth.
    for child in &measurement.children {
        print_measurement(profile, *child, i + 1);
    }
}
// === formatting ===
/// Format a [`profiler_data::Interval`] in an easy-to-read way.
fn fmt_interval(interval: profiler_data::Interval) -> String {
    let start = interval.start.into_ms();
    // Open intervals have no end; they render as e.g. `12.3-`.
    let end = match interval.end {
        Some(end) => format!("{:.1}", end.into_ms()),
        None => String::new(),
    };
    format!("{start:.1}-{end}")
}
// ============
// === main ===
// ============
fn main() {
    use std::io::Read;
    // Read the whole JSON event log from stdin and parse it into a profile.
    let mut log = String::new();
    std::io::stdin().read_to_string(&mut log).unwrap();
    let profile: profiler_data::Profile<AnyMetadata> = log.parse().unwrap();
    // The root measurement itself is skipped; only its children are printed, starting at depth 0.
    for root in &profile.root_measurement().children {
        print_measurement(&profile, *root, 0);
    }
}

View File

@ -1,259 +0,0 @@
//! Tool for comparing the latencies of different processes in reacting to an event.
//!
//! # Usage
//!
//! The tool reads a JSON-formatted event log from stdin, and writes CSV data to stdout.
//!
//! For example:
//!
//! ```console
//! ~/git/enso/data $ cargo run --bin processes compile_new_shaders,backend_execution < profile.json
//! ```
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
use enso_profiler_data as profiler_data;
use profiler_data::Class;
use profiler_data::MeasurementId;
use profiler_data::OpaqueMetadata;
use profiler_data::Profile;
use profiler_data::Timestamp;
use std::collections::HashMap;
use std::default::Default;
use std::path::Path;
use std::str::FromStr;
// ===============
// === Process ===
// ===============
/// Used to classify work into sets that are executed in parallel with each other.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Process(usize);

// === Processes ===

/// A profile's [`Process`]es.
#[derive(Debug)]
pub struct Processes {
    in_order: Vec<Process>,
    names: HashMap<Process, String>,
    by_label: HashMap<String, Process>,
}

impl FromStr for Processes {
    type Err = ();
    /// Parse a comma-separated list of process labels; processes are numbered in input order.
    fn from_str(labels: &str) -> Result<Self, Self::Err> {
        let mut in_order = Vec::new();
        let mut names = HashMap::new();
        let mut by_label = HashMap::new();
        for (index, label) in labels.split(',').enumerate() {
            let process = Process(index);
            in_order.push(process);
            names.insert(process, label.to_owned());
            by_label.insert(label.to_owned(), process);
        }
        Ok(Self { in_order, names, by_label })
    }
}
// =================================
// === Categorizing measurements ===
// =================================
/// Categorize the given profile's measurements by process.
pub fn categorize_measurements(
    profile: &Profile<OpaqueMetadata>,
    process_by_label: &HashMap<String, Process>,
) -> HashMap<MeasurementId, Process> {
    let root = profile.root_measurement_id();
    let mut measurement_process = Default::default();
    // The root is not attributed to any process.
    let current = Default::default();
    categorize_subtree(&mut measurement_process, profile, process_by_label, root, current);
    measurement_process
}

// Recursive helper: record a process for every measurement whose label names one, and thread the
// innermost matching process down through the subtree.
// NOTE(review): `current` is propagated but never recorded — only measurements whose own label
// matches are inserted into the map; confirm this is intended.
fn categorize_subtree(
    measurement_process: &mut HashMap<MeasurementId, Process>,
    profile: &Profile<OpaqueMetadata>,
    process_by_label: &HashMap<String, Process>,
    measurement_id: MeasurementId,
    current: Option<Process>,
) {
    let measurement = &profile[measurement_id];
    let new = process_by_label.get(&measurement.label.name).cloned();
    if let Some(process) = new {
        measurement_process.insert(measurement_id, process);
    }
    let current = new.or(current);
    for &child in &measurement.children {
        categorize_subtree(measurement_process, profile, process_by_label, child, current);
    }
}
// =========================
// === Process end times ===
// =========================
/// Find the end of each process, i.e. when the last work attributed to it is completed.
///
/// Returned end times are in ms, relative to the creation time of `root`.
pub fn process_ends(
    profile: &Profile<OpaqueMetadata>,
    measurement_process: &HashMap<MeasurementId, Process>,
    root: MeasurementId,
) -> Vec<(Process, f64)> {
    let mut ends = Default::default();
    for &child in &profile[root].children {
        gather_ends(&mut ends, profile, measurement_process, child);
    }
    // Report each end relative to the root measurement's creation time.
    let root_start = profile[root].created;
    ends.into_iter()
        .map(|(process, end)| {
            let end = end.into_ms() - root_start.into_ms();
            (process, end)
        })
        .collect()
}

// Recursive helper: track the latest interval end seen for each categorized measurement.
fn gather_ends(
    ends: &mut HashMap<Process, Timestamp>,
    profile: &Profile<OpaqueMetadata>,
    measurement_process: &HashMap<MeasurementId, Process>,
    measurement_id: MeasurementId,
) {
    let measurement = &profile[measurement_id];
    if let Some(process) = measurement_process.get(&measurement_id) {
        // Only the last interval's end matters; open intervals (no end) are ignored.
        let last_interval = measurement.intervals.last();
        let end = last_interval.and_then(|&i| profile[i].interval.end);
        if let Some(new_end) = end {
            let end = ends.entry(*process).or_default();
            if new_end > *end {
                *end = new_end;
            }
        }
    }
    for &child in &measurement.children {
        gather_ends(ends, profile, measurement_process, child);
    }
}
// ====================
// === Working time ===
// ====================
/// Sum the time any profiler not attributable to a foreign process is active during the given
/// interval.
pub fn working_time_in_interval(
    profile: &Profile<OpaqueMetadata>,
    measurement_process: &HashMap<MeasurementId, Process>,
    interval_start: Timestamp,
    interval_end: Timestamp,
) -> f64 {
    let mut total = 0.0;
    for &i in &profile.root_interval().children {
        let interval = &profile[i];
        // Skip intervals whose measurement was attributed to a foreign process.
        if measurement_process.contains_key(&interval.measurement) {
            continue;
        }
        let interval = interval.interval;
        let start = interval.start;
        // Open intervals (no end) contribute nothing.
        if let Some(end) = interval.end {
            // Clamp the interval to [interval_start, interval_end] before measuring it.
            let start = std::cmp::max(start, interval_start).into_ms();
            let end = std::cmp::min(end, interval_end).into_ms();
            let duration = end - start;
            // A negative duration means the interval lies entirely outside the window.
            if duration.is_sign_positive() {
                total += duration;
            }
        }
    }
    total
}
// ===========================
// === Highlighted regions ===
// ===========================
/// Get the region of interest in the profile, identified by a special profiler that must be present
/// in the data.
///
/// # Panics
/// Panics if the profile does not contain exactly one highlighted region, or if that region
/// contains no intervals or was never ended.
pub fn get_highlighted_region(
    profile: &Profile<OpaqueMetadata>,
) -> (MeasurementId, Timestamp, Timestamp) {
    let is_highlight = |&m: &MeasurementId| profile[m].classify() == Class::Highlight;
    let mut highlights: Vec<_> = profile.measurement_ids().filter(is_highlight).collect();
    let mut highlights = highlights.drain(..);
    let head = highlights.next();
    let rest = highlights.len();
    // Exactly one highlighted region is supported.
    let m_id = match (head, rest) {
        (Some(first), 0) => first,
        _ => {
            let clause1 = "This tool currently only supports profiles of batch-mode workflows";
            let clause2 = "which should all have exactly one highlighted region";
            unimplemented!("{}, {}.", clause1, clause2);
        }
    };
    let measurement = &profile[m_id];
    let start = measurement.created;
    // The region spans from its creation to the end of its last interval.
    let non_empty_highlight_required = "Incomplete profile: Highlighted region contains no data.";
    let last_interval = measurement.intervals.last().expect(non_empty_highlight_required);
    let end = profile[*last_interval].interval.end;
    let complete_profile_required = "Incomplete profile: Highlighted region was not ended.";
    let end = end.expect(complete_profile_required);
    (m_id, start, end)
}
// ============
// === Main ===
// ============
/// Entry point: parse the foreign-process labels from the first argument, then print a CSV
/// header followed by one row of results per profile file.
fn main() {
    let mut args = std::env::args();
    let argv0 = args.next().unwrap();
    let labels = "foreign_process_label1,foreign_process_label2,...";
    let profiles = "profile1.json profile2.json ...";
    let usage = &format!("Usage: {argv0} {labels} {profiles}");
    let processes = Processes::from_str(&args.next().expect(usage)).expect(usage);
    // Header row: profile name, the main process, then each foreign process in declaration order.
    let mut header = vec!["profile".to_string(), "main".to_string()];
    header.extend(processes.in_order.iter().map(|p| processes.names[p].clone()));
    println!("{}", header.join(","));
    // Every remaining argument is a profile file; one output row each.
    for path in args {
        let path = Path::new(&path);
        let cells: Vec<_> =
            analyze_file(path, &processes).iter().map(ToString::to_string).collect();
        let stem = path.file_stem().unwrap().to_str().unwrap();
        println!("{},{}", stem, cells.join(","));
    }
}
/// Compute the result columns for one profile file: the main-process working time within the
/// highlighted region, followed by the end-latency of each foreign process in declaration order.
fn analyze_file(path: &Path, processes: &Processes) -> Vec<f64> {
    let log = std::fs::read_to_string(path).unwrap();
    let profile: Profile<OpaqueMetadata> = log.parse().unwrap();
    let measurement_process = categorize_measurements(&profile, &processes.by_label);
    let (root, root_start, root_end) = get_highlighted_region(&profile);
    // Latency of each foreign process, measured relative to the start of the highlighted region.
    // (`process_ends` already yields `(process, end)` pairs; no per-element mapping is needed.)
    let other_process_latencies: HashMap<_, _> =
        process_ends(&profile, &measurement_process, root).into_iter().collect();
    let main_process_time =
        working_time_in_interval(&profile, &measurement_process, root_start, root_end);
    // Processes that never produced a closed interval default to 0.0.
    let process_latency = |p| other_process_latencies.get(p).cloned().unwrap_or_default();
    let mut results = vec![main_process_time];
    results.extend(processes.in_order.iter().map(process_latency));
    results
}

View File

@ -1,660 +0,0 @@
//! Interface to profile data.
//!
//! # Overview
//!
//! Usage of this API starts with applying [`str::parse`] to JSON profiling data, returning a
//! [`Measurement`] which is the root of the hierarchy of profiler outputs.
//!
//! Parsing is robust to changes in the definitions of metadata types; if deserialization of some
//! metadata entries fails, the resulting error type provides access to the result of deserializing
//! all the data that succeeded (see [`Error::RecoverableFormatError`]).
//!
//! # Usage example: storing and retrieving metadata
//!
//! ```
//! use enso_profiler as profiler;
//! use enso_profiler_data as profiler_data;
//! use profiler::profile;
//!
//! // Some metadata types.
//! #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
//! struct MyDataA(u32);
//! profiler::metadata_logger!("MyDataA", log_data_a(u32));
//!
//! #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
//! struct MyDataB(String);
//! profiler::metadata_logger!("MyDataB", log_data_b(String));
//!
//! #[profile(Objective)]
//! fn action_producing_metadata() {
//! log_data_a(23);
//! log_data_b("5".into());
//! }
//!
//! fn store_and_retrieve_metadata() {
//! action_producing_metadata();
//!
//! // To deserialize, we define a metadata type as an enum.
//! //
//! // Each variant has a name and type that match the string-argument and type-parameter that
//! // match the `profiler::metadata_logger!` definition. If the type is a newtype, the
//! // metadata logger may accept the wrapped type for convenience; a newtype and its contents
//! // have the same serialized form.
//! #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
//! enum MyMetadata {
//! MyDataA(MyDataA),
//! MyDataB(MyDataB),
//! // In this case we've handled everything.
//! // If we intended to handle some metadata and silently ignore anything else, we could
//! // include a catch-all variant like:
//! // `#[serde(other)] Other`
//! // On the other hand, if we intend to handle every type of metadata, we can omit the
//! // catch-all variant; unknown metadata will produce an
//! // [`Error::RecoverableFormatError`], which we can use to emit a warning and continue.
//! }
//!
//! // Obtain log data directly; it could also be deserialized from a file.
//! let log = profiler::internal::get_log();
//! // Parse the log. Interpret metadata according to the enum defined above.
//! let profile: profiler_data::Profile<MyMetadata> = log.parse().unwrap();
//! // Verify the MyData objects are present and attached to the right interval.
//! let interval = &profile[profile.root_interval().children[0]];
//! let action = &profile[interval.measurement];
//! assert_eq!(&action.label.name, "action_producing_metadata");
//! assert_eq!(interval.metadata[0].data, MyMetadata::MyDataA(MyDataA(23)));
//! assert_eq!(interval.metadata[1].data, MyMetadata::MyDataB(MyDataB("5".into())));
//! // Timestamps can be used to compare the order of events.
//! assert!(interval.metadata[0].time < interval.metadata[1].time);
//! }
//!
//! store_and_retrieve_metadata();
//! ```
// === Features ===
#![feature(test)]
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
use enso_profiler as profiler;
use profiler::format;
use std::error;
use std::fmt;
use std::rc::Rc;
// ==============
// === Export ===
// ==============
pub mod aggregate;
pub mod parse;
// =============
// === Error ===
// =============
/// Describes an error and where it occurred.
pub enum Error<M> {
    /// Failed to deserialize the event log at all. The file is corrupt, or in a completely
    /// incompatible format.
    FormatError(serde_json::Error),
    /// Failed to deserialize some events; if this is caused by a change to a metadata type, the
    /// core data and metadata of unaffected types will still be available.
    ///
    /// For an example of handling a recoverable failure, see `tests::skip_failed_metadata`.
    RecoverableFormatError {
        /// Deserialization errors for each metadata Event that failed to parse.
        errors: Vec<EventError<serde_json::Error>>,
        /// A profile with metadata of one or more types excluded due to format incompatibility.
        /// There is one missing metadata object for each value in `errors`.
        with_missing_data: Profile<M>,
    },
    /// Failed to interpret the event log data.
    DataError(EventError<parse::DataError>),
}

// `Display` delegates to the hand-written `Debug` implementation below.
impl<M> fmt::Display for Error<M> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{self:?}")
    }
}

// This cannot be derived because: https://github.com/rust-lang/rust/issues/26925
// Also, the debug output doesn't need to include the entire with_missing_data.
impl<M> fmt::Debug for Error<M> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Error::FormatError(e) => e.fmt(f),
            Error::RecoverableFormatError { errors, .. } => errors.fmt(f),
            Error::DataError(e) => e.fmt(f),
        }
    }
}

impl<M> error::Error for Error<M> {
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        Some(match self {
            Error::FormatError(e) => e,
            // NOTE(review): assumes `errors` is non-empty; this holds because the variant is
            // only constructed when at least one metadata error occurred (see `FromStr`).
            Error::RecoverableFormatError { errors, .. } => &errors[0],
            Error::DataError(e) => e,
        })
    }
}

/// An error associated with a particular event in the log.
#[derive(Debug)]
pub struct EventError<E> {
    #[allow(unused)] // displayed by Debug
    /// The event's index in the log.
    log_pos: usize,
    #[allow(unused)] // displayed by Debug
    /// The error.
    error: E,
}

// `Display` reuses the derived `Debug` output, which includes the log position.
impl<E: fmt::Debug> fmt::Display for EventError<E> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{self:?}")
    }
}

impl<E: error::Error> error::Error for EventError<E> {
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        self.error.source()
    }
}
// ==============================
// === Multi-process profiles ===
// ==============================
/// Parse data representing profiling information collected by multiple processes.
pub fn parse_multiprocess_profile<M: serde::de::DeserializeOwned>(
    data: &str,
) -> impl Iterator<Item = Result<Profile<M>, Error<M>>> + '_ {
    // The input is a sequence of concatenated JSON values; each value is first captured as a
    // `RawValue`, deferring interpretation of its contents to `str::parse`.
    let values = serde_json::Deserializer::from_str(data)
        .into_iter::<Box<serde_json::value::RawValue>>();
    values.map(|value| {
        let raw_parse_error = "Cannot parse input as sequence of JSON values!";
        value.expect(raw_parse_error).get().parse()
    })
}
// ===============
// === Profile ===
// ===============
/// All the profiling information captured by one process during one run of the application.
///
/// This is parameterized by a type that determines how metadata is interpreted. The type must be
/// an enum, with a variant for each type of metadata that is handled. Each variant's name and type
/// should correspond to the parameters supplied to [`profiler::metadata_logger`]. For an example,
/// see the docs for the [`crate`].
#[derive(Clone, Debug)]
pub struct Profile<M> {
    /// The hierarchy of profilers. A parent-child relationship indicates that the child was
    /// started while the parent was running.
    ///
    /// The final element is a synthetic root measurement (see [`Profile::root_measurement`]).
    pub measurements: Vec<Measurement>,
    /// The hierarchy of intervals. A parent-child relationship indicates that the child is
    /// contained within the parent.
    ///
    /// The final element is a synthetic root interval (see [`Profile::root_interval`]).
    pub intervals: Vec<ActiveInterval<M>>,
    /// Optional information about this profile.
    pub headers: Headers,
}
impl<M> Profile<M> {
    /// A virtual measurement containing the top-level measurements as children.
    pub fn root_measurement(&self) -> &Measurement {
        self.measurements.last().unwrap()
    }

    /// A virtual interval containing the top-level intervals as children.
    pub fn root_interval(&self) -> &ActiveInterval<M> {
        self.intervals.last().unwrap()
    }

    /// Id of a virtual measurement containing the top-level measurements as children.
    pub fn root_measurement_id(&self) -> MeasurementId {
        // The synthetic root is always stored in the final position.
        let index = self.measurements.len() - 1;
        MeasurementId(index)
    }

    /// Id of a virtual interval containing the top-level intervals as children.
    pub fn root_interval_id(&self) -> IntervalId {
        // The synthetic root is always stored in the final position.
        let index = self.intervals.len() - 1;
        IntervalId(index)
    }

    /// Iterate over all metadata in the profile.
    pub fn metadata(&self) -> impl Iterator<Item = &Timestamped<M>> {
        self.intervals.iter().flat_map(|interval| &interval.metadata)
    }

    /// Iterate over the IDs of all measurements.
    pub fn measurement_ids(&self) -> impl Iterator<Item = MeasurementId> {
        (0..self.measurements.len()).map(MeasurementId)
    }
}
// === Headers ===
/// Information about the profile.
#[derive(Clone, Debug, Default)]
pub struct Headers {
    /// A value that can be used to translate a timestamp to system time.
    pub time_offset: Option<format::Timestamp>,
    /// An application-specific identifier used to distinguish logs from different processes.
    pub process: Option<String>,
}

// === IDs and indexing ===

/// Identifies a measurement in a particular profile.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct MeasurementId(pub(crate) usize);

/// Identifies an interval in a particular profile.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct IntervalId(pub(crate) usize);

// Ids are plain indices into `Profile::measurements`; indexing with an id from a different
// profile is a logic error and may panic or return the wrong measurement.
impl<M> std::ops::Index<MeasurementId> for Profile<M> {
    type Output = Measurement;
    fn index(&self, MeasurementId(index): MeasurementId) -> &Self::Output {
        &self.measurements[index]
    }
}

// Ids are plain indices into `Profile::intervals`; same caveat as above.
impl<M> std::ops::Index<IntervalId> for Profile<M> {
    type Output = ActiveInterval<M>;
    fn index(&self, IntervalId(index): IntervalId) -> &Self::Output {
        &self.intervals[index]
    }
}
// ===================
// === Measurement ===
// ===================
/// All the information produced by a profiler.
#[derive(Clone, Debug)]
pub struct Measurement {
    /// Identifies the profiler's source and scope to the user.
    pub label: Rc<Label>,
    /// Profilers started by this profiler, ordered by time created.
    pub children: Vec<MeasurementId>,
    /// When the profiler was created.
    pub created: Timestamp,
    /// Whether the profiler logged its completion at the end of its last active interval.
    pub finished: bool,
    /// When the profiler was running.
    pub intervals: Vec<IntervalId>,
}

impl Measurement {
    /// Distinguish between classes of profilers that may need to be handled differently.
    pub fn classify(&self) -> Class {
        // Classification is derived entirely from the label text (see `Label::classify`).
        self.label.classify()
    }
}

// == Class ==

/// Distinguishes special profilers from normal profilers.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Class {
    /// Profiler that is used to mark a region of interest in the profile.
    Highlight,
    /// Profiler that is active during the execution of anything else, after early startup.
    OnFrame,
    /// Profiler that is run when a WebGL context is acquired or lost.
    SetContext,
    /// Any profiler that doesn't need special treatment.
    Normal,
}
// ===================
// === Timestamped ===
// ===================
/// Wrapper adding a timestamp to contents.
///
/// Used for metadata entries attached to intervals (see [`ActiveInterval::metadata`]).
#[derive(Clone, Debug)]
pub struct Timestamped<M> {
    /// Time the data was logged.
    pub time: Timestamp,
    /// The actual data.
    pub data: M,
}
impl<M> Timestamped<M> {
    /// Convert from &[`Timestamped<M>`] to [`Timestamped<&M>`].
    pub fn as_ref(&self) -> Timestamped<&M> {
        // The timestamp is `Copy`; only the data is borrowed.
        Timestamped { time: self.time, data: &self.data }
    }

    /// Use a function to transform the contained data, preserving the timestamp.
    pub fn map<F, N>(self, f: F) -> Timestamped<N>
    where F: FnOnce(M) -> N {
        Timestamped { time: self.time, data: f(self.data) }
    }
}
// === OpaqueMetadata ===

/// Black-box metadata object, for ignoring metadata contents.
pub type OpaqueMetadata = format::AnyMetadata;

// =================
// === Timestamp ===
// =================

/// A timestamp. Supports distinguishing order of all events within a process.
///
/// Note that while an [`Ord`] implementation is provided for convenience (e.g. for use with
/// data structures that require it), the results of comparisons should only be considered
/// meaningful when comparing [`Timestamp`]s that were recorded by the same process.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
pub struct Timestamp {
    /// The time.
    time: format::Timestamp,
    /// Indicates event order; used to resolve timestamp collisions.
    seq: Seq,
}

impl Timestamp {
    /// The origin of the time axis; equal to the `Default` value.
    fn time_origin() -> Self {
        Self::default()
    }

    /// Offset from the time origin, in milliseconds.
    pub fn into_ms(self) -> f64 {
        self.time.into_ms()
    }
}

// === Seq ===

/// A value that can be used to compare the order of events within a process.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
pub(crate) struct Seq(usize);

impl Seq {
    // Maps a log-event index to a sequence number, reserving `Seq(0)` for the time origin.
    fn runtime_event(event_index: usize) -> Self {
        // Seq(0) is the time origin.
        Seq(event_index.checked_add(1).unwrap())
    }
}

// ======================
// === ActiveInterval ===
// ======================

/// Represents the tree of profilers active during some interval.
#[derive(Clone, Debug)]
pub struct ActiveInterval<M> {
    /// The profiler instance that this interval belongs to.
    pub measurement: MeasurementId,
    /// The time spanned by this interval.
    pub interval: Interval,
    /// Active intervals that occurred during this interval.
    pub children: Vec<IntervalId>,
    /// Metadata emitted while this was the active interval.
    pub metadata: Vec<Timestamped<M>>,
}

// ================
// === Interval ===
// ================

/// A start time and an optional end time.
#[derive(Copy, Clone, Debug)]
pub struct Interval {
    /// The time the interval began.
    pub start: Timestamp,
    /// The time the interval ended, or None if no end was logged.
    pub end: Option<Timestamp>,
}
impl Interval {
    /// Return whether this interval has a known end.
    pub fn closed(self) -> bool {
        matches!(self.end, Some(_))
    }

    /// Return the duration from start to end in milliseconds, if the end is known.
    pub fn duration_ms(self) -> Option<f64> {
        let end = self.end?;
        Some(end.into_ms() - self.start.into_ms())
    }
}
// =============
// === Label ===
// =============
/// A measurement label.
///
/// Rendered by its [`fmt::Display`] implementation as `name (file:line)` when a code position
/// is available, or just `name` otherwise.
#[derive(Debug, Clone)]
pub struct Label {
    /// The name of the measurement, usually a function.
    pub name: String,
    /// Location in the code the measurement originated, if compiled with line numbers enabled.
    pub pos: Option<CodePos>,
}
impl Label {
    /// Recognize profilers with special names.
    fn classify(&self) -> Class {
        let name = self.name.as_str();
        if name == "@highlight" {
            Class::Highlight
        } else if name == "@on_frame" {
            Class::OnFrame
        } else if name == "@set_context" {
            Class::SetContext
        } else if name.starts_with('@') {
            // Data producer is probably newer than consumer. Forward compatibility isn't necessary.
            panic!("Unrecognized special profiler: {name:?}")
        } else {
            Class::Normal
        }
    }
}

impl fmt::Display for Label {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.pos.as_ref() {
            Some(pos) => write!(f, "{} ({}:{})", self.name, pos.file, pos.line),
            None => write!(f, "{}", self.name),
        }
    }
}
// === CodePos ===
/// Identifies a position within a specific file.
///
/// Both fields come from the profiler label text (see [`Label`]).
#[derive(Debug, Clone)]
pub struct CodePos {
    /// The path to the file.
    pub file: String,
    /// A line number within the file.
    pub line: u32,
}
// ==================
// === Unit tests ===
// ==================
#[cfg(test)]
mod tests {
    use crate as profiler_data;
    use crate::OpaqueMetadata;
    use enso_profiler as profiler;
    use profiler::profile;

    #[test]
    fn profile_sync() {
        #[profile(Objective)]
        fn parent() -> u32 {
            child()
        }
        #[profile(Objective)]
        fn child() -> u32 {
            4
        }
        parent();
        let profile: profiler_data::Profile<OpaqueMetadata> =
            profiler::internal::get_log().parse().unwrap();
        // `child` was called while `parent` was running, so it appears as its child.
        let roots = &profile.root_measurement().children;
        assert_eq!(roots.len(), 1);
        let parent = &profile[roots[0]];
        assert!(parent.finished);
        assert_eq!(parent.label.name, "parent");
        assert_eq!(parent.children.len(), 1);
        let child = &profile[parent.children[0]];
        assert!(child.finished);
        assert_eq!(child.label.name, "child");
        assert_eq!(child.children.len(), 0);
    }

    #[test]
    fn profile_async() {
        #[profile(Objective)]
        async fn parent() -> u32 {
            child().await
        }
        #[profile(Objective)]
        async fn child() -> u32 {
            let block = async { 4 };
            block.await
        }
        let future = parent();
        futures::executor::block_on(future);
        let profile: profiler_data::Profile<OpaqueMetadata> =
            profiler::internal::get_log().parse().unwrap();
        let roots = &profile.root_measurement().children;
        assert_eq!(roots.len(), 1);
        let parent = &profile[roots[0]];
        assert!(parent.finished);
        // An async profiler is paused at its await point, so it records two active intervals,
        // both of which must have been closed.
        let parent_intervals = &parent.intervals;
        assert_eq!(parent_intervals.len(), 2);
        for interval in parent_intervals {
            assert!(profile[*interval].interval.closed());
        }
        assert!(parent.finished);
        assert_eq!(parent.label.name, "parent");
        assert_eq!(parent.children.len(), 1);
        let child = &profile[parent.children[0]];
        assert!(child.finished);
        let child_intervals = &child.intervals;
        assert_eq!(child_intervals.len(), 2);
        for interval in child_intervals {
            assert!(profile[*interval].interval.closed());
        }
        assert!(child.finished);
        assert_eq!(child.label.name, "child");
        assert_eq!(child.children.len(), 1, "{:?}", &profile);
    }

    #[test]
    fn unfinished_never_started() {
        #[profile(Objective)]
        async fn func() {}
        // Create a Future, but don't await it.
        let _future = func();
        let profile: profiler_data::Profile<OpaqueMetadata> =
            profiler::internal::get_log().parse().unwrap();
        let roots = &profile.root_measurement().children;
        assert!(!profile[roots[0]].finished);
    }

    /// Log a profiler-start directly via the low-level event log, bypassing the `#[profile]`
    /// macro.
    fn start_profiler(label: &'static str) -> profiler::internal::EventId {
        profiler::internal::EventLog.start(
            profiler::internal::EventId::implicit(),
            profiler::internal::Label(label),
            Some(profiler::internal::Timestamp::now()),
            profiler::internal::StartState::Active,
            Default::default(),
        )
    }

    #[test]
    fn unfinished_still_running() {
        start_profiler("unfinished (?:?)");
        let profile: profiler_data::Profile<OpaqueMetadata> =
            profiler::internal::get_log().parse().unwrap();
        let roots = &profile.root_measurement().children;
        assert!(!profile[roots[0]].finished);
    }

    #[test]
    fn unfinished_paused_never_resumed() {
        let id = start_profiler("unfinished (?:?)");
        profiler::internal::EventLog.pause(id, profiler::internal::Timestamp::now());
        let profile: profiler_data::Profile<OpaqueMetadata> =
            profiler::internal::get_log().parse().unwrap();
        let roots = &profile.root_measurement().children;
        assert!(!profile[roots[0]].finished);
    }

    /// Simulate a change to the format of a type of metadata; ensure the error is reported
    /// correctly, and all other data is still readable.
    #[test]
    fn skip_failed_metadata() {
        #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
        struct MyDataA(u32);
        profiler::metadata_logger!("MyDataA", log_data_a(u32));
        // The logger writes a `String`, but the consumer expects a `u32`; deserialization of
        // `MyDataB` entries must therefore fail while `MyDataA` entries remain readable.
        #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
        struct MyDataBExpected(u32);
        #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
        struct MyDataBActual(String);
        profiler::metadata_logger!("MyDataB", log_data_b(String));

        log_data_a(23);
        log_data_b("bad".into());

        #[derive(serde::Deserialize, PartialEq, Eq, Debug)]
        enum MyMetadata {
            MyDataA(MyDataA),
            MyDataB(MyDataBExpected),
        }
        let log = profiler::internal::get_log();
        let root: Result<profiler_data::Profile<MyMetadata>, _> = log.parse();
        let root = match root {
            Err(profiler_data::Error::RecoverableFormatError { errors, with_missing_data }) => {
                assert_eq!(errors.len(), 1);
                with_missing_data
            }
            other => panic!("Expected RecoverableFormatError, found: {other:?}"),
        };
        assert_eq!(root.root_interval().metadata.len(), 1);
        assert_eq!(root.root_interval().metadata[0].data, MyMetadata::MyDataA(MyDataA(23)));
    }
}

View File

@ -1,522 +0,0 @@
//! Parsing implementation. `pub` contents are low-level error details.
use enso_profiler as profiler;
use profiler::format;
use std::collections;
use std::error;
use std::fmt;
use std::mem;
use std::rc::Rc;
use std::str;
// ===========================
// === Parse and interpret ===
// ===========================
impl<M: serde::de::DeserializeOwned> str::FromStr for crate::Profile<M> {
type Err = crate::Error<M>;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let events: Result<Vec<format::Event>, _> = serde_json::from_str(s);
let events = events.map_err(crate::Error::FormatError)?;
let Interpreted { profile, metadata_errors } =
interpret(events).map_err(crate::Error::DataError)?;
if metadata_errors.is_empty() {
Ok(profile)
} else {
let errors = metadata_errors;
Err(crate::Error::RecoverableFormatError { errors, with_missing_data: profile })
}
}
}
// === interpret ===
/// Process a log of events, producing a hierarchy of measurements and a hierarchy of active
/// intervals.
///
/// Returns an error if the log cannot be interpreted.
pub(crate) fn interpret<'a, M: serde::de::DeserializeOwned>(
    events: impl IntoIterator<Item = format::Event<'a>>,
) -> Result<Interpreted<M>, crate::EventError<DataError>> {
    // Process log into data about each measurement, and data about relationships.
    let LogVisitor {
        builders,
        order,
        intervals,
        metadata_errors,
        headers,
        root_intervals,
        root_metadata,
        ..
    } = LogVisitor::visit(events)?;
    // Build measurements from accumulated measurement data.
    let extra_measurements = 1; // Root measurement.
    let mut measurements = Vec::with_capacity(builders.len() + extra_measurements);
    let mut builders: Vec<_> = builders.into_iter().collect();
    // Measurement ids were assigned densely in creation order, so after sorting by id each
    // measurement's vec index equals its id.
    builders.sort_unstable_by_key(|(k, _)| *k);
    measurements.extend(builders.into_iter().map(|(_, b)| b.into()));
    let mut root = crate::Measurement {
        label: Rc::new(crate::Label { name: "APP_LIFETIME (?:?)".into(), pos: None }),
        children: Default::default(),
        intervals: Default::default(),
        finished: Default::default(),
        created: crate::Timestamp::time_origin(),
    };
    let root_measurement_id = crate::MeasurementId(measurements.len());
    // Attach each measurement to its parent (an ordinary measurement, or the synthetic root).
    for (child, (log_pos, parent)) in order.into_iter().enumerate() {
        let log_pos = log_pos.0;
        let parent = match parent {
            format::ParentId::Measurement(id) => measurements
                .get_mut(id.0)
                .ok_or(DataError::MeasurementNotFound(id))
                .map_err(|e| crate::EventError { log_pos, error: e })?,
            format::ParentId::Root => &mut root,
        };
        parent.children.push(crate::MeasurementId(child));
    }
    // The synthetic root goes last; `Profile::root_measurement` relies on this position.
    measurements.push(root);
    let extra_intervals = 1; // Root interval.
    let mut intervals_ = Vec::with_capacity(intervals.len() + extra_intervals);
    for builder in intervals.into_iter() {
        let IntervalBuilder { measurement, interval, children, metadata } = builder;
        let id = crate::IntervalId(intervals_.len());
        let format::MeasurementId(measurement) = measurement;
        let measurement = crate::MeasurementId(measurement);
        intervals_.push(crate::ActiveInterval { measurement, interval, children, metadata });
        // Also link the interval back to the measurement it belongs to.
        measurements[measurement.0].intervals.push(id);
    }
    // The synthetic root interval spans from the time origin with no end, and likewise goes
    // last; `Profile::root_interval` relies on this position.
    let root = crate::ActiveInterval {
        measurement: root_measurement_id,
        interval: crate::Interval {
            start: crate::Timestamp::time_origin(),
            end: Default::default(),
        },
        children: root_intervals,
        metadata: root_metadata,
    };
    intervals_.push(root);
    let profile = crate::Profile { measurements, intervals: intervals_, headers };
    Ok(Interpreted { profile, metadata_errors })
}

/// Result of a successful [`interpret()`].
pub(crate) struct Interpreted<M> {
    profile: crate::Profile<M>,
    metadata_errors: Vec<MetadataError>,
}
// =================
// === DataError ===
// =================
/// A problem with the input data.
#[derive(Debug)]
pub enum DataError {
    /// A profiler was in the wrong state for a certain event to occur.
    UnexpectedState(State),
    /// An ID referred to an undefined measurement.
    MeasurementNotFound(format::MeasurementId),
    /// A parse error.
    UnexpectedToken(Expected),
    /// An event that should only occur during the lifetime of a profiler didn't find any profiler.
    ActiveProfilerRequired,
    /// An event expected to refer to a certain measurement referred to a different measurement.
    /// This can occur for events that include a measurement ID as a consistency check, but only
    /// have one valid referent (e.g. [`profiler::Event::End`] must end the current measurement.)
    WrongProfiler {
        /// The measurement that was referred to.
        found: format::MeasurementId,
        /// The only valid measurement for the event to refer to.
        expected: format::MeasurementId,
    },
    /// Profiler(s) were active at a time none were expected.
    ExpectedEmptyStack(Vec<format::MeasurementId>),
    /// A profiler was expected to have started before a related event occurred.
    ExpectedStarted,
    /// A label ID referred beyond the end of the label table (at the time is was used).
    ///
    /// This could only occur due to a logic error in the application that wrote the profile.
    UndefinedLabel(usize),
}

// `Display` reuses the derived `Debug` output.
impl fmt::Display for DataError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{self:?}")
    }
}

impl error::Error for DataError {}

// Allows `?` to lift low-level token errors into `DataError`.
impl From<Expected> for DataError {
    fn from(inner: Expected) -> Self {
        DataError::UnexpectedToken(inner)
    }
}
// === Expected ===
/// Parsing error: expected a different token.
#[derive(Debug, Copy, Clone)]
pub struct Expected(pub(crate) &'static str);

impl fmt::Display for Expected {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render the expected token the way `Debug` would, i.e. quoted.
        f.write_fmt(format_args!("{:?}", self.0))
    }
}

impl error::Error for Expected {}
// ==========================
// === MeasurementBuilder ===
// ==========================
/// Used while gathering information about a profiler.
struct MeasurementBuilder {
    label: Rc<crate::Label>,
    created: crate::Timestamp,
    state: State,
    finished: bool,
}

impl From<MeasurementBuilder> for crate::Measurement {
    fn from(builder: MeasurementBuilder) -> Self {
        let MeasurementBuilder { label, created, finished, state: _ } = builder;
        // Parent/child relationships and intervals are attached later, during `interpret`.
        let children = Vec::new();
        let intervals = Vec::new();
        Self { label, children, created, intervals, finished }
    }
}

// === State ===

/// Used to validate state transitions.
#[derive(Debug, Copy, Clone)]
pub enum State {
    /// Started and not paused or ended; id of most recent Start or Resume event is included.
    Active(EventId),
    /// Paused. Id of Pause or StartPaused event is included.
    Paused(EventId),
    /// Ended. Id of End event is included.
    Ended(EventId),
}

/// An index into the event log. Mainly used for error reporting.
#[derive(Debug, Copy, Clone)]
pub struct EventId(usize);

impl From<EventId> for crate::Seq {
    fn from(id: EventId) -> Self {
        crate::Seq::runtime_event(id.0)
    }
}

// =======================
// === IntervalBuilder ===
// =======================

/// Holds information gathered during visitation.
struct IntervalBuilder<M> {
    // The interval itself.
    measurement: format::MeasurementId,
    interval: crate::Interval,
    // Data attached to interval.
    children: Vec<crate::IntervalId>,
    metadata: Vec<crate::Timestamped<M>>,
}

// ==================
// === LogVisitor ===
// ==================

/// Gathers data while visiting a series of [`format::Event`]s.
#[derive(derivative::Derivative)]
#[derivative(Default(bound = ""))]
struct LogVisitor<M> {
    /// Accumulated data pertaining to each profiler.
    builders: collections::HashMap<format::MeasurementId, MeasurementBuilder>,
    /// Ids and parents, in same order as event log.
    order: Vec<(EventId, format::ParentId)>,
    /// Intervals ended, in arbitrary order.
    intervals: Vec<IntervalBuilder<M>>,
    /// Intervals currently open, as a LIFO stack.
    active: Vec<IntervalBuilder<M>>,
    /// Top-level intervals.
    root_intervals: Vec<crate::IntervalId>,
    /// Top-level metadata.
    root_metadata: Vec<crate::Timestamped<M>>,
    /// Errors for metadata objects that could not be deserialized as type [`M`].
    metadata_errors: Vec<MetadataError>,
    /// References to the locations in code that measurements measure.
    profilers: Vec<Rc<crate::Label>>,
    /// Properties of the whole profile.
    headers: crate::Headers,
}

/// Shorthand for a metadata deserialization failure tied to its position in the log.
type MetadataError = crate::EventError<serde_json::Error>;
impl<M: serde::de::DeserializeOwned> LogVisitor<M> {
    /// Convert the log into data about each measurement.
    fn visit<'a>(
        events: impl IntoIterator<Item = format::Event<'a>>,
    ) -> Result<Self, crate::EventError<DataError>> {
        let mut visitor = Self::default();
        let mut event_count = 0;
        for (i, event) in events.into_iter().enumerate() {
            let log_pos = EventId(i);
            let result = match event {
                // A `Start` event activates a profiler that was created paused, so it is
                // handled by the same logic as a resume.
                format::Event::Start { id, timestamp } =>
                    visitor.visit_resume(log_pos, id, timestamp),
                format::Event::Create(event) => visitor.visit_create(log_pos, event),
                format::Event::End { id, timestamp } => visitor.visit_end(log_pos, id, timestamp),
                format::Event::Pause { id, timestamp } =>
                    visitor.visit_pause(log_pos, id, timestamp),
                format::Event::Metadata(metadata) => visitor.visit_metadata(log_pos, metadata),
                format::Event::Label { label } => visitor.visit_label(log_pos, label.as_ref()),
            };
            // Any handler failure is reported together with the offending event's position.
            result.map_err(|error| crate::EventError { log_pos: i, error })?;
            event_count += 1;
        }
        visitor.finish().map_err(|error| crate::EventError { log_pos: event_count, error })?;
        Ok(visitor)
    }

    /// Perform any finalization, e.g. ending intervals implicitly if their ends weren't logged.
    fn finish(&mut self) -> Result<(), DataError> {
        // Build any ActiveIntervals that didn't have ends logged. This will always include at
        // least the root interval.
        while let Some(builder) = self.active.pop() {
            let id = crate::IntervalId(self.intervals.len());
            self.intervals.push(builder);
            // Only the root interval has no parent; the root interval is found in the last
            // position in the intervals vec.
            if let Some(parent) = self.active.last_mut() {
                parent.children.push(id);
            }
        }
        Ok(())
    }
}
// === Handlers for each event ===
impl<M: serde::de::DeserializeOwned> LogVisitor<M> {
    /// Handle a profiler-creation event: build the measurement record and note its parent.
    fn visit_create(&mut self, pos: EventId, event: format::Start) -> Result<(), DataError> {
        let parent = match event.parent {
            format::Parent::Explicit(parent) => parent,
            format::Parent::Implicit => self.current_profiler(),
        };
        let start = match event.start {
            Some(time) => crate::Timestamp { seq: pos.into(), time },
            None => self.inherit_start(parent)?,
        };
        let label = event.label.id();
        let label = self.profilers.get(label).ok_or(DataError::UndefinedLabel(label))?.clone();
        // New profilers begin paused; a subsequent Start/Resume event activates them.
        let builder = MeasurementBuilder {
            label,
            created: start,
            state: State::Paused(pos),
            finished: Default::default(),
        };
        self.order.push((pos, parent));
        // Measurement ids are assigned densely, in creation order.
        let id = format::MeasurementId(self.builders.len());
        let old = self.builders.insert(id, builder);
        assert!(old.is_none());
        Ok(())
    }

    /// Handle an end event: mark the measurement finished and close its interval if active.
    fn visit_end(
        &mut self,
        pos: EventId,
        id: format::MeasurementId,
        time: format::Timestamp,
    ) -> Result<(), DataError> {
        let measurement = self.measurement_mut(id)?;
        measurement.finished = true;
        let end = crate::Timestamp { seq: pos.into(), time };
        match mem::replace(&mut measurement.state, State::Ended(pos)) {
            // Typical case: The current profiler ends.
            State::Active(_) => self.end_interval(id, end)?,
            // Edge case: A profiler can be dropped without ever being started if an async block
            // is created, but dropped without ever being awaited.
            State::Paused(_) => (),
            state => return Err(DataError::UnexpectedState(state)),
        }
        Ok(())
    }

    /// Handle a pause event: close the profiler's open interval; it must have been active.
    fn visit_pause(
        &mut self,
        pos: EventId,
        id: format::MeasurementId,
        time: format::Timestamp,
    ) -> Result<(), DataError> {
        let time = crate::Timestamp { seq: pos.into(), time };
        self.end_interval(id, time)?;
        // Only an `Active` profiler may be paused.
        match mem::replace(&mut self.measurement_mut(id)?.state, State::Paused(pos)) {
            State::Active(_) => (),
            state => return Err(DataError::UnexpectedState(state)),
        }
        Ok(())
    }

    /// Handle a start/resume event: open a new interval; the profiler must have been paused.
    fn visit_resume(
        &mut self,
        pos: EventId,
        id: format::MeasurementId,
        time: format::Timestamp,
    ) -> Result<(), DataError> {
        let time = crate::Timestamp { seq: pos.into(), time };
        self.start_interval(id, time);
        // Only a `Paused` profiler may be resumed.
        match mem::replace(&mut self.measurement_mut(id)?.state, State::Active(pos)) {
            State::Paused(_) => (),
            state => return Err(DataError::UnexpectedState(state)),
        }
        Ok(())
    }
fn visit_metadata(
&mut self,
pos: EventId,
metadata: format::Timestamped<format::AnyMetadata>,
) -> Result<(), DataError> {
let format::Timestamped { time, data } = metadata;
let time = crate::Timestamp { seq: pos.into(), time };
if let Ok(data) = serde_json::from_str(data.get()) {
match data {
format::Header::Process(process) => self.headers.process = Some(process),
format::Header::TimeOffset(offset) => self.headers.time_offset = Some(offset),
}
} else {
match serde_json::from_str(data.get()) {
Ok(data) => {
let container = match self.active.last_mut() {
Some(parent) => &mut parent.metadata,
None => &mut self.root_metadata,
};
container.push(crate::Timestamped { time, data });
}
Err(error) => {
let log_pos = pos.0;
self.metadata_errors.push(MetadataError { log_pos, error })
}
}
}
Ok(())
}
fn visit_label(&mut self, _pos: EventId, label: &'_ str) -> Result<(), DataError> {
let label = label.parse()?;
self.profilers.push(Rc::new(label));
Ok(())
}
}
// === Visitation helpers ===

impl<M> LogVisitor<M> {
    /// Open a new interval for the given measurement and push it onto the active stack.
    fn start_interval(&mut self, measurement: format::MeasurementId, start: crate::Timestamp) {
        let end = Default::default();
        let children = Default::default();
        let metadata = Default::default();
        let interval = crate::Interval { start, end };
        self.active.push(IntervalBuilder { measurement, interval, children, metadata });
    }
    /// Close the innermost active interval; it must belong to the given measurement (intervals
    /// are strictly nested, so only the top of the stack may be ended).
    fn end_interval(
        &mut self,
        id: format::MeasurementId,
        end: crate::Timestamp,
    ) -> Result<(), DataError> {
        let mut builder = self.active.pop().ok_or(DataError::ActiveProfilerRequired)?;
        builder.interval.end = Some(end);
        let expected = builder.measurement;
        if id != expected {
            let found = id;
            return Err(DataError::WrongProfiler { found, expected });
        }
        let id = crate::IntervalId(self.intervals.len());
        self.intervals.push(builder);
        // Attach the finished interval to its enclosing interval, or to the root if none.
        let container = match self.active.last_mut() {
            Some(parent) => &mut parent.children,
            None => &mut self.root_intervals,
        };
        container.push(id);
        Ok(())
    }
    /// The measurement owning the innermost active interval, or the root if none is active.
    fn current_profiler(&self) -> format::ParentId {
        match self.active.last() {
            Some(interval) => format::ParentId::Measurement(interval.measurement),
            None => format::ParentId::Root,
        }
    }
    /// Start time to use for a measurement that logged no explicit start: its parent's creation
    /// time (or the time origin for root children).
    fn inherit_start(&self, parent: format::ParentId) -> Result<crate::Timestamp, DataError> {
        Ok(match parent {
            format::ParentId::Root => crate::Timestamp::time_origin(),
            format::ParentId::Measurement(pos) => self.measurement(pos)?.created,
        })
    }
    /// Look up a measurement builder by id.
    fn measurement(&self, id: format::MeasurementId) -> Result<&MeasurementBuilder, DataError> {
        self.builders.get(&id).ok_or(DataError::MeasurementNotFound(id))
    }
    /// Look up a measurement builder by id, mutably.
    fn measurement_mut(
        &mut self,
        id: format::MeasurementId,
    ) -> Result<&mut MeasurementBuilder, DataError> {
        self.builders.get_mut(&id).ok_or(DataError::MeasurementNotFound(id))
    }
}
// ======================
// === String parsing ===
// ======================

impl str::FromStr for crate::Label {
    type Err = Expected;
    /// Parse a measurement label of the form `"name (file:line)"`; if the trailing token is not
    /// a valid code position, the whole string is treated as the name.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let with_pos = s.rsplit_once(' ').and_then(|(name, pos)| {
            let pos = crate::CodePos::parse(pos).ok()?;
            Some(Self { name: name.to_owned(), pos })
        });
        Ok(with_pos.unwrap_or_else(|| Self { name: s.to_owned(), pos: None }))
    }
}
impl crate::CodePos {
    /// Parse a code position of the form `(file:line)`. The placeholder file `"?"` yields
    /// `Ok(None)`; any structural mismatch reports the token that was expected.
    fn parse(s: &str) -> Result<Option<Self>, Expected> {
        let (file, line) = s.rsplit_once(':').ok_or(Expected(":"))?;
        let file = file.strip_prefix('(').ok_or(Expected("("))?;
        let line = line.strip_suffix(')').ok_or(Expected(")"))?;
        if file == "?" {
            return Ok(None);
        }
        let line = line.parse().map_err(|_| Expected("line number"))?;
        Ok(Some(Self { file: file.to_owned(), line }))
    }
}

View File

@ -1,12 +0,0 @@
[package]
name = "enso-profiler-demo-data"
version = "0.1.0"
edition = "2021"
authors = ["Enso Team <contact@enso.org>"]
[dependencies]
futures = { workspace = true }
enso-profiler = { path = ".." }
[lints]
workspace = true

View File

@ -1,108 +0,0 @@
//! Produces example profile data.
// === Non-Standard Linter Configuration ===
#![allow(unused_qualifications)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
use enso_profiler as profiler;
use enso_profiler::profile;
// ==========================
// === Dummy Computations ===
// ==========================
/// Create example profiling data.
///
/// Runs the dummy `start_project` workload to completion and returns the profiler's accumulated
/// event log as a string.
pub async fn create_data() -> String {
    start_project().await;
    profiler::internal::get_log()
}
/// A dummy computation that is intended to take some time based on input (where a higher
/// number takes longer).
fn work(n: u32) {
    // Accumulate x*y*z over the full cube of indices; wrapping addition keeps the fold
    // well-defined for any accumulated total.
    let digest = (0..n)
        .flat_map(|x| (0..n).flat_map(move |y| (0..n).map(move |z| x * y * z)))
        .fold(n, u32::wrapping_add);
    // Create a side effect to avoid optimising away the computation.
    println!("{}", digest % 7)
}
// The functions below form a dummy task hierarchy; each is instrumented with `#[profile]` so
// that running `start_project` produces a representative, nested event log containing both
// synchronous calls and awaited async tasks.

#[profile(Objective)]
async fn start_project() {
    wake_dragon().await;
    feed_troll();
    ride_rainbow();
}
#[profile(Objective)]
fn ride_rainbow() {
    work(333)
}
#[profile(Objective)]
fn feed_troll() {
    gather_herbs_and_spices();
    cook_troll_food();
    run_away();
}
#[profile(Objective)]
fn run_away() {
    work(100)
}
#[profile(Objective)]
fn cook_troll_food() {
    work(100)
}
#[profile(Objective)]
fn gather_herbs_and_spices() {
    walk_to_woods();
    search_stuff();
    find_stuff();
    gather_stuff();
}
#[profile(Objective)]
fn gather_stuff() {
    work(100)
}
#[profile(Objective)]
fn find_stuff() {
    work(100)
}
#[profile(Objective)]
fn search_stuff() {
    work(100)
}
#[profile(Objective)]
fn walk_to_woods() {
    work(100)
}
#[profile(Objective)]
async fn wake_dragon() {
    gather_gold().await;
    bake_gold_cake().await;
    start_tea_party().await;
}
#[profile(Objective)]
async fn start_tea_party() {
    work(100)
}
#[profile(Objective)]
async fn bake_gold_cake() {
    work(100)
}
#[profile(Objective)]
fn pick_coin() {
    work(75)
}
#[profile(Objective)]
async fn gather_gold() {
    // Multiple intervals within one async measurement: each iteration does synchronous work.
    for _ in 0..5 {
        pick_coin()
    }
}

View File

@ -1,15 +0,0 @@
[package]
name = "enso-profiler-flame-graph"
version = "0.1.0"
edition = "2021"
authors = ["Enso Team <contact@enso.org>"]
[dependencies]
enso-profiler = { path = "../" }
enso-profiler-data = { path = "../data" }
[dev-dependencies]
futures = { workspace = true }
[lints]
workspace = true

View File

@ -1,415 +0,0 @@
//! This module contains functionality that allows the profiling framework to
//! generate the data required to render a flame graph. This means creating data for each block
//! that is supposed to be rendered, with start time, end time and labels.
use enso_profiler as profiler;
use enso_profiler_data as data;
// =================
// === Constants ===
// =================

type RowNumber = i32;

// ========================
// === Label Formatting ===
// ========================

/// Render a label with its `[start,end]` interval (two decimal places) appended on a new line.
fn with_timing_info(base: &str, [t1, t2]: [f64; 2]) -> String {
    format!("{}\n[{:.2},{:.2}]", base, t1, t2)
}

// ==================
// === Block Data ===
// ==================

/// Whether a block covers a period where the task was running or suspended.
#[derive(Copy, Clone, Debug)]
pub enum Activity {
    /// The task was running.
    Active,
    /// The task was suspended.
    Paused,
}

/// Coarse performance classification of a block.
#[derive(Copy, Clone, Debug)]
pub enum Performance {
    /// Good performance.
    Good,
    /// Medium performance.
    Medium,
    /// Bad performance.
    Bad,
}

/// A `Block` contains the data required to render a single block of a frame graph.
#[derive(Clone, Debug)]
pub struct Block<T> {
    /// Start x coordinate of the block.
    pub start: f64,
    /// End x coordinate of the block.
    pub end: f64,
    /// Row that the block should be placed in.
    pub row: RowNumber,
    /// The label to be displayed with the block.
    pub label: String,
    /// Indicates the type of the block.
    pub block_type: T,
}

impl<T> Block<T> {
    /// Width of the block (horizontal extent, end minus start).
    pub fn width(&self) -> f64 {
        let Self { start, end, .. } = self;
        end - start
    }
}
// ==================
// === Mark Data ===
// ==================

/// A `Mark` contains the data required to render a mark that indicates a labeled point in time.
#[derive(Clone, Debug)]
pub struct Mark {
    /// X coordinate of the mark.
    pub position: f64,
    /// The label to be displayed with the mark.
    pub label: String,
}

// ==================
// === Graph Data ===
// ==================

/// Contains the information required to render a graph, i.e., the data for all blocks that make up
/// the graph.
#[derive(Debug, Default)]
pub struct Graph {
    /// Collection of all blocks making up the flame graph.
    pub activity_blocks: Vec<Block<Activity>>,
    /// Collection of all blocks indicating performance characteristics.
    pub performance_blocks: Vec<Block<Performance>>,
    /// Collection of marks that can be shown in the flame graph.
    pub marks: Vec<Mark>,
}
impl Graph {
    /// Create a callgraph from the given data.
    pub fn new_callgraph<Metadata>(profile: &data::Profile<Metadata>) -> Self {
        CallgraphBuilder::run(profile)
    }
    /// Create a rungraph from the given data.
    pub fn new_rungraph<Metadata>(profile: &data::Profile<Metadata>) -> Self {
        RungraphBuilder::run(profile)
    }
    /// Create a hybrid rungraph-callgraph from the given data.
    pub fn new_hybrid_graph<Metadata>(profile: &data::Profile<Metadata>) -> Self {
        new_hybrid_graph(profile)
    }
    /// Gather and remove all logged measurements and return them as a `Graph`.
    pub fn take_from_log() -> Self {
        let profile: Result<data::Profile<data::OpaqueMetadata>, _> =
            profiler::internal::get_log().parse();
        if let Ok(profile) = profile {
            new_hybrid_graph(&profile)
        } else {
            // Best-effort: an unparseable log yields an empty graph instead of a panic.
            eprintln!("Failed to deserialize profiling event log.");
            Graph::default()
        }
    }
    /// Height of the graph in rows.
    ///
    /// NOTE(review): this returns the maximum row *index* (0-based) over all blocks, not a row
    /// count — an empty graph and a single-row graph both report 0. Confirm callers expect this.
    pub fn height(&self) -> RowNumber {
        let performance_rows = self.performance_blocks.iter().map(|mark| mark.row);
        let activity_rows = self.activity_blocks.iter().map(|mark| mark.row);
        performance_rows.chain(activity_rows).max().unwrap_or_default()
    }
}
// ==================
// === Callgraphs ===
// ==================

/// Build a graph that illustrates the call stack over time.
struct CallgraphBuilder<'p, Metadata> {
    /// The profile whose intervals are visited.
    profile: &'p data::Profile<Metadata>,
    /// Output blocks accumulated during the traversal.
    blocks: Vec<Block<Activity>>,
}
impl<'p, Metadata> CallgraphBuilder<'p, Metadata> {
    /// Create a callgraph for the given profile.
    fn run(profile: &'p data::Profile<Metadata>) -> Graph {
        let blocks = Default::default();
        let mut builder = Self { profile, blocks };
        // We skip the root node APP_LIFETIME, which is not a real measurement.
        for child in &profile.root_interval().children {
            builder.visit_interval(*child, 0);
        }
        let Self { blocks, .. } = builder;
        Graph {
            activity_blocks: blocks,
            marks: Vec::default(),
            performance_blocks: Vec::default(),
        }
    }
}
impl<'p, Metadata> CallgraphBuilder<'p, Metadata> {
    /// Create a block for an interval; recurse into children (one row deeper per call level).
    fn visit_interval(&mut self, active: data::IntervalId, row: RowNumber) {
        let active = &self.profile[active];
        let start = active.interval.start.into_ms();
        // An interval with no logged end is treated as extending indefinitely.
        let end = active.interval.end.map(|time| time.into_ms()).unwrap_or(f64::MAX);
        // Optimization: can't draw zero-width blocks anyway.
        if end == start {
            return;
        }
        let label = self.profile[active.measurement].label.to_string();
        let label = with_timing_info(&label, [start, end]);
        self.blocks.push(Block { start, end, label, row, block_type: Activity::Active });
        for child in &active.children {
            self.visit_interval(*child, row + 1);
        }
    }
}
// =================
// === Rungraphs ===
// =================

/// Build a graph that illustrates async tasks over time.
struct RungraphBuilder<'p, Metadata> {
    /// The profile whose measurements are visited.
    profile: &'p data::Profile<Metadata>,
    /// Output blocks accumulated during the traversal.
    blocks: Vec<Block<Activity>>,
    /// Next free row; each multi-interval task claims its own row.
    next_row: RowNumber,
}
impl<'p, Metadata> RungraphBuilder<'p, Metadata> {
    /// Create a rungraph for the given profile.
    fn run(profile: &'p data::Profile<Metadata>) -> Graph {
        let blocks = Default::default();
        let next_row = Default::default();
        let mut builder = Self { profile, blocks, next_row };
        // We skip the root node APP_LIFETIME, which is not a real measurement.
        for child in &profile.root_measurement().children {
            builder.visit_measurement(*child);
        }
        let Self { blocks, .. } = builder;
        Graph {
            activity_blocks: blocks,
            marks: Vec::default(),
            performance_blocks: Vec::default(),
        }
    }
}
impl<'p, Metadata> RungraphBuilder<'p, Metadata> {
    /// Create blocks for a measurement's intervals; recurse into children.
    ///
    /// For each pair of consecutive intervals, an `Active` block covers the first interval and a
    /// `Paused` block covers the gap up to the next one; the leading pause (creation to first
    /// interval) and the final active interval are added separately.
    fn visit_measurement(&mut self, measurement: data::MeasurementId) {
        let measurement = &self.profile[measurement];
        // We're only interested in tasks that await other tasks, i.e. have at least 2 intervals.
        if measurement.intervals.len() >= 2 {
            let row = self.next_row;
            self.next_row += 1;
            let window_size = 2; // Current and next element.
            for window in measurement.intervals.windows(window_size) {
                if let [current, next] = window {
                    let current = &self.profile[*current];
                    let next = &self.profile[*next];
                    let current_start = current.interval.start.into_ms();
                    let current_end =
                        current.interval.end.map(|time| time.into_ms()).unwrap_or(f64::MAX);
                    let next_start = next.interval.start.into_ms();
                    let active_interval = [current_start, current_end];
                    let sleep_interval = [current_end, next_start];
                    let label_active = self.profile[current.measurement].label.to_string();
                    let label_active = with_timing_info(&label_active, active_interval);
                    let label_sleep =
                        format!("{} (inactive)", self.profile[current.measurement].label);
                    let label_sleep = with_timing_info(&label_sleep, sleep_interval);
                    self.blocks.push(Block {
                        start: active_interval[0],
                        end: active_interval[1],
                        label: label_active,
                        row,
                        block_type: Activity::Active,
                    });
                    self.blocks.push(Block {
                        start: sleep_interval[0],
                        end: sleep_interval[1],
                        label: label_sleep,
                        row,
                        block_type: Activity::Paused,
                    });
                }
            }
            // Add first inactive interval.
            let first = measurement.intervals.first().unwrap(); // There are at least two intervals.
            let first = &self.profile[*first];
            let inactive_interval = [measurement.created.into_ms(), first.interval.start.into_ms()];
            let label = with_timing_info(
                &self.profile[first.measurement].label.to_string(),
                inactive_interval,
            );
            self.blocks.push(Block {
                start: inactive_interval[0],
                end: inactive_interval[1],
                label,
                row,
                block_type: Activity::Paused,
            });
            // Add last active interval.
            let last = measurement.intervals.last().unwrap(); // There are at least two intervals.
            let last = &self.profile[*last];
            let active_interval = [
                last.interval.start.into_ms(),
                last.interval.end.map(|end| end.into_ms()).unwrap_or(f64::INFINITY),
            ];
            let label = with_timing_info(
                &self.profile[last.measurement].label.to_string(),
                active_interval,
            );
            self.blocks.push(Block {
                start: active_interval[0],
                end: active_interval[1],
                label,
                row,
                block_type: Activity::Active,
            });
        }
        // Recurse through children.
        for child in &measurement.children {
            self.visit_measurement(*child);
        }
    }
}
// === hybrid graph ===

/// Create a rungraph+callgraph for the given profile.
///
/// The rungraph's task rows are laid out first; the callgraph is then rendered below them,
/// starting at the first row the rungraph did not claim.
fn new_hybrid_graph<Metadata>(profile: &data::Profile<Metadata>) -> Graph {
    let blocks = Default::default();
    let next_row = Default::default();
    let mut rungraph = RungraphBuilder { profile, blocks, next_row };
    for child in &profile.root_measurement().children {
        rungraph.visit_measurement(*child);
    }
    // Reuse the rungraph's block vector and continue numbering rows where it left off.
    let RungraphBuilder { blocks, next_row, .. } = rungraph;
    let mut callgraph = CallgraphBuilder { profile, blocks };
    for child in &profile.root_interval().children {
        callgraph.visit_interval(*child, next_row);
    }
    let CallgraphBuilder { blocks, .. } = callgraph;
    Graph {
        activity_blocks: blocks,
        marks: Vec::default(),
        performance_blocks: Vec::default(),
    }
}
// ===================
// === Flamegraphs ===
// ===================

/// Build a graph that illustrates aggregate time spent in different functions.
#[derive(Default)]
pub struct FlamegraphBuilder {
    // Accumulates per-frame aggregate timings across one or more profiles.
    aggregator: data::aggregate::Aggregator,
}
impl FlamegraphBuilder {
    /// Add data from a profile to the graph.
    pub fn add_profile<Metadata>(&mut self, profile: &data::Profile<Metadata>) {
        self.aggregator.add_profile(profile);
    }
}
impl From<FlamegraphBuilder> for Graph {
    /// Convert the aggregated frames into renderable blocks, one row per stack depth.
    fn from(builder: FlamegraphBuilder) -> Self {
        let mut grapher = FlamegraphGrapher::default();
        let root = data::aggregate::Frame::from(builder.aggregator);
        for (label, frame) in &root.children {
            grapher.visit_frame(frame, label.to_string(), 0);
        }
        let FlamegraphGrapher { blocks, .. } = grapher;
        Graph {
            activity_blocks: blocks,
            marks: Vec::default(),
            performance_blocks: Vec::default(),
        }
    }
}
/// Builds a flamegraph [`Graph`] from [`data::aggregate::Frame`]s.
#[derive(Default)]
struct FlamegraphGrapher {
    // Blocks produced so far.
    blocks: Vec<Block<Activity>>,
    // Current x position; siblings are laid out left-to-right in visitation order.
    time: f64,
}
impl FlamegraphGrapher {
    /// Emit a block spanning the frame's total duration, then lay out its children within it.
    fn visit_frame(&mut self, frame: &data::aggregate::Frame, label: String, row: RowNumber) {
        let start = self.time;
        let end = self.time + frame.total_duration();
        self.blocks.push(Block { start, end, label, row, block_type: Activity::Active });
        for (label, frame) in &frame.children {
            self.visit_frame(frame, label.to_string(), row + 1);
        }
        // Advance past this frame only after its children were laid out inside it.
        self.time = end;
    }
}
// =============
// === Tests ===
// =============

#[cfg(test)]
mod tests {
    use super::*;
    use profiler::profile;

    #[profile(Objective)]
    pub fn profiled_a() {
        profiled_b()
    }
    #[profile(Objective)]
    pub fn profiled_b() {}

    /// A call `profiled_a -> profiled_b` should yield exactly two blocks, with `profiled_b`
    /// nested one row below `profiled_a`.
    #[test]
    fn check_flame_graph_creation() {
        profiled_a();
        let profile: data::Profile<data::OpaqueMetadata> =
            profiler::internal::get_log().parse().unwrap();
        let flame_graph = Graph::new_callgraph(&profile);
        assert_eq!(flame_graph.activity_blocks.len(), 2);
        assert_eq!(flame_graph.activity_blocks[1].row, 1);
        assert!(flame_graph.activity_blocks[1].label.contains("profiled_b"));
        assert_eq!(flame_graph.activity_blocks[0].row, 0);
        assert!(flame_graph.activity_blocks[0].label.contains("profiled_a"));
    }
}

View File

@ -1,17 +0,0 @@
[package]
name = "enso-profiler-macros"
version = "0.1.0"
edition = "2021"
authors = ["Enso Team <contact@enso.org>"]
[lib]
proc-macro = true
[dependencies]
proc-macro2 = { workspace = true }
quote = { workspace = true }
syn = { workspace = true }
Inflector = "0.11"
[lints]
workspace = true

View File

@ -1,27 +0,0 @@
//! Build script for [`enso_profiler_macros`]. This is needed to make cargo aware that
//! the crate depends on the values of environment variables at compile time, and changes to those
//! variables should result in recompiling this crate and its dependents.
// === Non-Standard Linter Configuration ===
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unsafe_code)]
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]
fn main() {
    declare_env_dependence("ENSO_MAX_PROFILING_LEVEL");
}

/// Make cargo aware that the result of compiling this crate depends on an environment variable.
fn declare_env_dependence(env: &str) {
    // Rebuild whenever the variable changes.
    println!("cargo:rerun-if-env-changed={env}");
    // This is a no-op assignment, except it makes cargo aware that the output depends on the env;
    // an unset variable is forwarded as an empty string.
    let value = std::env::var(env).unwrap_or_default();
    println!("cargo:rustc-env={env}={value}");
}

View File

@ -1,87 +0,0 @@
//! Information specific to each profiling level.
use inflector::Inflector;
use quote::ToTokens;
use std::env;
use syn::parse::Parser;
use syn::punctuated;
// =============
// === Level ===
// =============
/// Information about a profiling level.
pub struct Level {
    /// CamelCase identifier of the profiler type for this level, e.g. `Objective`.
    pub obj_ident: syn::Ident,
    /// snake_case identifier derived from `obj_ident`, used for per-level functions/macros.
    pub fn_ident: syn::Ident,
    /// The level's snake_case name.
    pub name: String,
    /// Whether this level is enabled under the current compile-time configuration.
    pub enabled: bool,
}
/// Given syntax representing a sequence of profiling levels, produce [`Level`] objects describing
/// the levels.
///
/// `var` names the environment variable (e.g. `ENSO_MAX_PROFILING_LEVEL`) selecting the finest
/// enabled level; that level and every coarser one are marked enabled.
pub fn parse_levels(var: &str, ts: proc_macro::TokenStream) -> Vec<Level> {
    // The macro input is a comma-separated list of CamelCase identifiers.
    let parser = punctuated::Punctuated::<syn::Ident, syn::Token![,]>::parse_terminated;
    let obj_idents: Vec<_> = parser.parse(ts).unwrap().into_iter().collect();
    let level_names: Vec<_> = obj_idents.iter().map(|id| id.to_string().to_snake_case()).collect();
    let max_enabled = level_from_env_var(var, &level_names);
    obj_idents
        .into_iter()
        .enumerate()
        .zip(level_names)
        .map(|((i, obj_ident), name)| Level {
            obj_ident,
            fn_ident: syn::Ident::new(&name, proc_macro2::Span::call_site()),
            name: name.clone(),
            enabled: i <= max_enabled,
        })
        .collect()
}
/// Return the numeric Profiling/Log Level (counting from 0 = top-level only).
///
/// Reads the level name from the environment variable `var` and resolves it to its index within
/// `levels`. An unset variable selects the minimum level (0); a set-but-unrecognized value is a
/// configuration error and panics.
fn level_from_env_var(var: &str, levels: &[impl AsRef<str>]) -> usize {
    // If the variable isn't set, we default to the minimum.
    let Ok(enabled) = env::var(var) else { return 0 };
    levels
        .iter()
        .position(|name| name.as_ref() == enabled)
        .unwrap_or_else(|| panic!("{} set to unknown level: {}", var, enabled))
}
// =========================
// === `enum` Generation ===
// =========================
/// Given a collection of variant identifiers, produce syntax defining a data-less enum.
///
/// The emitted `#[default]` attribute attaches to the first interpolated variant, so the first
/// identifier becomes the enum's `Default`.
pub fn make_enum<'a>(
    ident: syn::Ident,
    variants: impl IntoIterator<Item = &'a syn::Ident>,
) -> proc_macro::TokenStream {
    // Wrap each bare identifier as a unit variant with no attributes or discriminant.
    let ident_to_variant = |ident| syn::Variant {
        ident,
        fields: syn::Fields::Unit,
        attrs: Default::default(),
        discriminant: Default::default(),
    };
    let variants: punctuated::Punctuated<syn::Variant, syn::Token![,]> =
        variants.into_iter().cloned().map(ident_to_variant).collect();
    (quote::quote! {
        #[allow(missing_docs)]
        #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Default, serde::Serialize)]
        pub enum #ident {
            #[default]
            #variants
        }
    })
    .to_token_stream()
    .into()
}

View File

@ -1,65 +0,0 @@
//! Proc macros supporting the implementation of the `enso_profiler` library.
//!
//! The profiler API uses procedural macros for two reasons:
//! - To define the hierarchy of profiler types ([`define_hierarchy!`]). Each profiler type (e.g.
//! [`Objective`](../enso_profiler/struct.Objective.html)) needs an implementation of
//! [`Parent`](../enso_profiler/trait.Parent.html) for each finer-grained profiler type;
//! implementing this without proc macros would be complex and repetitious.
//! - To implement the [`#[profile]`](macro@profile) attribute macro.
// === Features ===
#![feature(proc_macro_span)]
#![feature(let_chains)]
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
#[macro_use]
mod wrap_async;
mod level;
mod low_level;
mod profile_attribute;
// ==================================
// === Compile-time configuration ===
// ==================================
const PROFILING_LEVEL_ENV_VAR: &str = "ENSO_MAX_PROFILING_LEVEL";
// =================
// === Interface ===
// =================
/// [Documented in `profiler`](../enso_profiler/attr.profile.html).
#[allow(missing_docs)]
#[proc_macro_attribute]
pub fn profile(
    args: proc_macro::TokenStream,
    ts: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    // Thin wrapper: the actual transformation lives in `profile_attribute`.
    profile_attribute::profile(args, ts)
}

/// Defines a hierarchy of profiler levels.
///
/// # Usage
///
/// ```text
/// enso_profiler_macros::define_hierarchy![Objective, Task, Detail, Debug];
/// ```
///
/// Profiler-levels must be specified from coarsest to finest.
///
/// Profiler-level names should be given in CamelCase.
#[proc_macro]
pub fn define_profiling_levels(ts: proc_macro::TokenStream) -> proc_macro::TokenStream {
    low_level::define_profiling_levels(ts)
}

View File

@ -1,213 +0,0 @@
//! Generation of the Profiling Low-level API and its implementation.
use crate::level;
// =====================================
// === Low-level profiling interface ===
// =====================================
/// Produces source code defining the Low-Level Profiling API for the given hierarchy of profiling
/// levels.
pub fn define_profiling_levels(ts: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let mut out = proc_macro::TokenStream::new();
    let levels = level::parse_levels(crate::PROFILING_LEVEL_ENV_VAR, ts);
    // Emit one profiler type per level; disabled levels get a zero-cost stub implementation.
    for level in &levels {
        let profiler = Profiler::new(&level.name, level.obj_ident.clone());
        out.extend(if level.enabled {
            define_enabled_profiler(&profiler)
        } else {
            define_disabled_profiler(&profiler)
        });
    }
    // Each level is a valid parent of itself and every finer (later-listed) level.
    for (i, parent) in levels.iter().enumerate() {
        for child in &levels[i..] {
            out.extend(impl_parent(&parent.obj_ident, &child.obj_ident, child.enabled));
        }
    }
    // Generate a `ProfilingLevel` enum with one variant per level.
    let ident = syn::Ident::new("ProfilingLevel", proc_macro2::Span::call_site());
    out.extend(level::make_enum(ident, levels.iter().map(|level| &level.obj_ident)));
    out
}
/// Pre-computed identifiers and doc-strings used to generate one profiler type.
struct Profiler {
    /// Type name of the profiler, e.g. `Objective`.
    ident: syn::Ident,
    /// Name of the `start_<level>!` constructor macro.
    start: syn::Ident,
    /// Name of the `create_<level>!` constructor macro.
    create: syn::Ident,
    /// Doc-string for the profiler type.
    doc_obj: String,
    /// Doc-string for the `start_<level>!` macro.
    doc_start: String,
    /// Doc-string for the `create_<level>!` macro.
    doc_create: String,
}
impl Profiler {
    /// Derive all identifiers and documentation from a level's snake_case name.
    fn new(level: impl AsRef<str>, ident: syn::Ident) -> Self {
        let level = level.as_ref();
        let level_link = format!("[{level}-level](index.html#{level})");
        Self {
            ident,
            start: quote::format_ident!("start_{level}"),
            create: quote::format_ident!("create_{level}"),
            doc_obj: format!("Identifies a {level_link} profiler."),
            doc_start: format!("Start a new {level_link} profiler."),
            doc_create: format!("Create a new {level_link} profiler, in unstarted state."),
        }
    }
}
// Generate a fully-functional profiler type, plus its `start_*!`/`create_*!` constructor macros,
// for an enabled level. NB: only `//` comments may appear inside `quote!` below — `///` would
// become `#[doc]` attributes in the generated code.
fn define_enabled_profiler(profiler: &Profiler) -> proc_macro::TokenStream {
    let Profiler { ident, start, create, doc_obj, doc_start, doc_create } = profiler;
    let profiling_level_variant = ident;
    (quote::quote! {
        // =================================
        // === Profiler (e.g. Objective) ===
        // =================================
        #[doc = #doc_obj]
        #[derive(Copy, Clone, Debug)]
        pub struct #ident(pub EventId);
        // === Trait Implementations ===
        impl Profiler for #ident {
            fn start(
                parent: EventId,
                label: Label,
                time: Option<Timestamp>,
                start: StartState,
            ) -> Self {
                let level = crate::ProfilingLevel::#profiling_level_variant;
                #ident(EventLog.start(parent, label, time, start, level))
            }
            fn finish(self) {
                EventLog.end(self.0, Timestamp::now())
            }
            fn pause(self) {
                EventLog.pause(self.0, Timestamp::now());
            }
            fn resume(self) {
                EventLog.resume(self.0, Timestamp::now());
            }
        }
        // === Constructor macros ===
        #[doc = #doc_start]
        #[macro_export]
        macro_rules! #start {
            ($parent: expr, $label: expr) => {{
                use profiler::Parent;
                let label = profiler::internal::Label(
                    concat!($label, " (", file!(), ":", line!(), ")"));
                let profiler: profiler::internal::Started<profiler::#ident> =
                    $parent.start_child(label);
                profiler
            }};
            ($label: expr) => {{
                let label = profiler::internal::Label(
                    concat!($label, " (", file!(), ":", line!(), ")"));
                let parent = profiler::internal::EventId::implicit();
                let now = Some(profiler::internal::Timestamp::now());
                let started = profiler::internal::StartState::Active;
                let profiler = profiler::#ident::start(parent, label, now, started);
                profiler::internal::Started(profiler)
            }};
        }
        #[doc = #doc_create]
        #[macro_export]
        macro_rules! #create {
            ($label: expr) => {{
                let label = profiler::internal::Label(
                    concat!($label, " (", file!(), ":", line!(), ")"));
                let parent = profiler::internal::EventId::implicit();
                let now = Some(profiler::internal::Timestamp::now());
                let paused = profiler::internal::StartState::Paused;
                profiler::#ident::start(parent, label, now, paused)
            }}
        }
    })
    .into()
}
// Generate a zero-cost stub profiler type and matching constructor macros for a disabled level:
// the type is a unit wrapper and all trait methods are no-ops.
fn define_disabled_profiler(profiler: &Profiler) -> proc_macro::TokenStream {
    let Profiler { ident, start, create, doc_obj, doc_start, doc_create, .. } = profiler;
    (quote::quote! {
        // =================================
        // === Profiler (e.g. Objective) ===
        // =================================
        #[doc = #doc_obj]
        #[derive(Copy, Clone, Debug)]
        pub struct #ident(pub ());
        // === Trait Implementations ===
        impl Profiler for #ident {
            fn start(
                _: EventId,
                _: Label,
                _: Option<Timestamp>,
                _: StartState,
            ) -> Self {
                Self(())
            }
            fn finish(self) {}
            fn pause(self) {}
            fn resume(self) { }
        }
        // === Constructor macros ===
        #[doc = #doc_start]
        #[macro_export]
        macro_rules! #start {
            ($parent: expr, $label: expr) => {{
                let _unused_at_this_profiling_level = $parent;
                profiler::internal::Started(profiler::#ident(()))
            }};
            ($label: expr) => {{
                profiler::internal::Started(profiler::#ident(()))
            }};
        }
        #[doc = #doc_create]
        #[macro_export]
        macro_rules! #create {
            ($label: expr) => {{
                profiler::#ident(())
            }}
        }
    })
    .into()
}
/// Generates an implementation of the [`Parent`] trait relating the given parent and child.
fn impl_parent(
    parent_ident: &syn::Ident,
    child_ident: &syn::Ident,
    enabled: bool,
) -> proc_macro::TokenStream {
    // For an enabled child level `start_child` starts a real profiler; for a disabled one it
    // returns the unit stub.
    let body = if enabled {
        quote::quote! {
            let start = Some(Timestamp::now());
            Started(#child_ident::start(self.0, label, start, StartState::Active))
        }
    } else {
        quote::quote! {
            Started(#child_ident(()))
        }
    };
    (quote::quote! {
        impl Parent<#child_ident> for #parent_ident {
            fn start_child(&self, label: Label) -> Started<#child_ident> {
                #body
            }
        }
    })
    .into()
}

View File

@ -1,208 +0,0 @@
//! Implementation of the [`#[profile]`] proc-macro.
use crate::wrap_async;
use quote::ToTokens;
use std::fmt;
use syn::visit_mut;
use syn::visit_mut::VisitMut;
/// The `#[profile]` proc-macro.
///
/// Transforms the annotated function so that its execution (including suspension and resumption
/// across `.await` points) is recorded by a profiler of the given level.
pub fn profile(
    args: proc_macro::TokenStream,
    ts: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let mut func = syn::parse_macro_input!(ts as syn::ItemFn);
    let level: syn::Ident = syn::parse(args)
        .expect("The `profile` macro requires a profiling-level argument, e.g. #[profile(Task)]");
    let label = make_label(&func.sig.ident);
    // Instrument awaits, whether at the top level (if this is an async fn) or in inner async
    // blocks.
    WrapAwait.visit_block_mut(&mut func.block);
    // Different transformations for async or non-async.
    let async_block_origin = match func.sig.asyncness {
        // Async: transform it to an non-async fn containing an async block. The outer fn does
        // not need any top-level profiling; all it does is set up the async block. We'll
        // instrument the block below.
        Some(_) => {
            wrap_async::wrap_async_fn(&mut func);
            AsyncBlockOrigin::FnBody
        }
        // Non-async: instrument the top level of the function.
        None => {
            profile_sync(&level, &label, &mut func);
            AsyncBlockOrigin::Block
        }
    };
    // Instrument any async blocks in the body.
    let name = func.sig.ident.to_string();
    let mut instrumentor = InstrumentAsync { level, func: name, origin: async_block_origin };
    instrumentor.visit_block_mut(&mut func.block);
    func.into_token_stream().into()
}

/// Decorate the input with file:line info determined by the proc_macro's call site.
// NOTE: relies on the nightly `proc_macro_span` feature for `source_file`/`start`.
fn make_label<L: fmt::Display>(name: L) -> String {
    let span = proc_macro::Span::call_site();
    let file = span.source_file().path();
    let path = file.as_path().to_string_lossy();
    let line = span.start().line();
    format!("{name} ({path}:{line})")
}

// === WrapAwait ===

// Syntax visitor that rewrites every `.await` expression (ignoring inner `fn` items) via
// `wrap_await` so the active profiler is paused across the suspension.
struct WrapAwait;
impl VisitMut for WrapAwait {
    ignore_inner_fn_items!();
    fn visit_expr_mut(&mut self, expr: &mut syn::Expr) {
        match expr {
            syn::Expr::Await(await_) => *expr = wrap_await(await_),
            _ => visit_mut::visit_expr_mut(self, expr),
        }
    }
}

// Rewrite `expr.await` so the profiler referenced by `__profiler` (bound by the surrounding
// instrumentation) is paused while awaiting and resumed afterwards. The awaited expression is
// itself visited first, so nested awaits are wrapped too.
fn wrap_await(await_: &mut syn::ExprAwait) -> syn::Expr {
    let expr = &mut await_.base;
    WrapAwait.visit_expr_mut(expr);
    assert!(
        await_.attrs.is_empty(),
        "#[profile] cannot wrap a function that applies attributes to an .await expression"
    );
    let wrapped = quote::quote! {
        ({
            let future = #expr;
            profiler::internal::Profiler::pause(__profiler);
            let result = future.await;
            profiler::internal::Profiler::resume(__profiler);
            result
        })
    };
    syn::parse2(wrapped).unwrap()
}
// === profile_sync ===

/// Instrument the body of a synchronous function: start a profiler on entry and end the
/// measurement when the function's scope is left (via the `Started` RAII guard).
fn profile_sync(obj_ident: &syn::Ident, label: &str, func: &mut syn::ItemFn) {
    let is_async = func.sig.asyncness.is_some();
    let start_profiler = start_profiler(obj_ident, label, is_async);
    let original_body = &func.block;
    let instrumented = quote::quote! {{
        #start_profiler
        let __profiler_scope = profiler::internal::Started(__profiler);
        #original_body
    }};
    func.block = syn::parse2(instrumented).unwrap();
}
/// Produce the statements that create a profiler object bound to `__profiler`.
///
/// Async profilers begin in the paused state (they are resumed when first polled); synchronous
/// ones begin active.
fn start_profiler(
    obj_ident: &syn::Ident,
    label: &str,
    asyncness: bool,
) -> proc_macro2::TokenStream {
    let state = if asyncness {
        quote::quote! { profiler::internal::StartState::Paused }
    } else {
        quote::quote! { profiler::internal::StartState::Active }
    };
    quote::quote! {
        let __profiler = {
            use profiler::internal::Profiler;
            let parent = profiler::internal::EventId::implicit();
            let now = Some(profiler::internal::Timestamp::now());
            let label = profiler::internal::Label(#label);
            profiler::#obj_ident::start(parent, label, now, #state)
        };
    }
}
// === InstrumentAsync ===

/// Inserts instrumentation into all async block in an item (ignoring inner items).
struct InstrumentAsync {
    /// Identifier of the profiler type to instantiate (the configured profiling level).
    level: syn::Ident,
    /// Name of the function being instrumented; used when building block labels.
    func: String,
    /// Whether the next async block encountered is the desugared fn body or a nested block.
    origin: AsyncBlockOrigin,
}
impl VisitMut for InstrumentAsync {
    ignore_inner_fn_items!();

    fn visit_expr_mut(&mut self, expr: &mut syn::Expr) {
        if let syn::Expr::Async(async_) = expr {
            *expr = self.instrument_async(async_);
        } else {
            visit_mut::visit_expr_mut(self, expr);
        }
    }
}
impl InstrumentAsync {
    /// Insert instrumentation into an async block.
    fn instrument_async(&self, expr: &mut syn::ExprAsync) -> syn::Expr {
        // Instrument nested async blocks first; they are labeled as anonymous blocks.
        self.inner_instrumentor().visit_block_mut(&mut expr.block);
        assert!(
            expr.attrs.is_empty(),
            "#[profile] cannot wrap a function that applies attributes to an async block"
        );
        // A block that was originally an `async fn` body is labeled after the fn itself;
        // a source-level async block gets an anonymous label within the fn.
        let label = match self.origin {
            AsyncBlockOrigin::FnBody => make_label(&self.func),
            AsyncBlockOrigin::Block => {
                let name = format!("<async block in {}>", &self.func);
                make_label(name)
            }
        };
        // `true`: the profiler starts paused and is resumed when the block is first polled.
        let start_profiler = start_profiler(&self.level, &label, true);
        let move_ = &expr.capture;
        let block = &expr.block;
        let wrapped = if move_.is_some() {
            // The profiler is created outside the block; dropping `__profiler_scope` inside the
            // block ends the measurement just before the result is returned.
            quote::quote! {{
                #start_profiler
                let __profiler_scope = profiler::internal::Started(__profiler);
                async move {
                    profiler::internal::Profiler::resume(__profiler);
                    let result = #block;
                    std::mem::drop(__profiler_scope);
                    result
                }
            }}
        } else {
            // We have to move the profiler into the async block, because borrowing it would
            // restrict the lifetime of the block. So we use an outer `move` block to
            // capture `__profiler`, and an inner non-move block to match the behavior
            // of the original non-move block.
            quote::quote! {{
                #start_profiler
                let __profiler_scope = profiler::internal::Started(__profiler);
                let inner = async #block;
                async move {
                    profiler::internal::Profiler::resume(__profiler);
                    let result = inner.await;
                    std::mem::drop(__profiler_scope);
                    result
                }
            }}
        };
        syn::parse2(wrapped).unwrap()
    }

    /// Produce an instrumentor suitable for instrumenting blocks nested inside this block.
    fn inner_instrumentor(&self) -> Self {
        let level = self.level.clone();
        let func = self.func.clone();
        let origin = AsyncBlockOrigin::Block;
        Self { level, func, origin }
    }
}
/// Distinguishes between an async block that was originally the body of an `async fn`, versus an
/// async block that was originated as an async block in the source.
enum AsyncBlockOrigin {
    /// The async block produced by desugaring an `async fn` body (see `wrap_async_fn`).
    FnBody,
    /// An async block written as such in the original source.
    Block,
}

View File

@ -1,201 +0,0 @@
//! Implementation of [`wrap_async_fn`], a helper for macros that operate on async functions.
use quote::ToTokens;
use syn::visit_mut::VisitMut;
// ========================
// === VisitMut helpers ===
// ========================

/// Used in a `impl VisitMut` to block descent into inner function items.
///
/// Expands to an empty override of `visit_item_fn_mut`, so `fn` items nested inside the item
/// being visited are left untouched by the visitor.
macro_rules! ignore_inner_fn_items {
    () => {
        fn visit_item_fn_mut(&mut self, _: &mut syn::ItemFn) {}
    };
}
// =====================
// === wrap_async_fn ===
// =====================

/// Convert an `async fn` into a `fn` returning a `Future`, implemented with an `async` block.
///
/// The output is functionally equivalent to the input (except the output won't `impl Send` even if
/// the original `async fn` did); this is useful as a basis for further transformation.
pub fn wrap_async_fn(func: &mut syn::ItemFn) {
    // Tokens of the declared return type; `()` when elided. Computed once and used both for the
    // async block's result annotation and for the new `impl Future` output type. (The original
    // implementation computed this twice; `func.sig.output` is not modified until the end of
    // this function, so the two computations were always identical.)
    let output_ty = match &func.sig.output {
        syn::ReturnType::Default => quote::quote! { () },
        syn::ReturnType::Type(_, ty) => ty.to_token_stream(),
    };
    // Wrap the body in an async block, pinning its result to the declared return type.
    let block = &func.block;
    let body = quote::quote! {{
        (async move {
            let result: #output_ty = #block;
            result
        })
    }};
    func.block = syn::parse2(body).unwrap();
    // Transform the signature.
    let output_lifetime = syn::Lifetime::new("'__profiler_out", proc_macro2::Span::call_site());
    let lifetimes = explicitize_lifetimes(&mut func.sig);
    let output = if lifetimes.is_empty() {
        quote::quote! {
            -> impl std::future::Future<Output=#output_ty>
        }
    } else {
        // Bound all inputs on the output lifetime.
        let type_bound = syn::TypeParamBound::Lifetime(output_lifetime.clone());
        for param in &mut func.sig.generics.params {
            match param {
                syn::GenericParam::Lifetime(def) => def.bounds.push(output_lifetime.clone()),
                syn::GenericParam::Type(def) => def.bounds.push(type_bound.clone()),
                syn::GenericParam::Const(_) => (),
            }
        }
        for arg in &mut func.sig.inputs {
            if let syn::FnArg::Typed(syn::PatType { ty, .. }) = arg {
                if let syn::Type::ImplTrait(def) = ty.as_mut() {
                    def.bounds.push(type_bound.clone());
                }
            }
        }
        // Add a definition for the output lifetime.
        let lifetime_def = syn::LifetimeParam::new(output_lifetime.clone());
        func.sig.generics.params.insert(0, syn::GenericParam::Lifetime(lifetime_def));
        // Apply the output lifetime to the output.
        quote::quote! {
            -> impl std::future::Future<Output=#output_ty> + #output_lifetime
        }
    };
    func.sig.asyncness = None;
    func.sig.output = syn::parse2(output).unwrap();
}
// === Lifetime transformation ===

/// Make all lifetimes in function signature explicit.
///
/// Returns the lifetimes used in the function signature.
fn explicitize_lifetimes(sig: &mut syn::Signature) -> Vec<syn::Lifetime> {
    // Go through the args; find:
    // - anonymous lifetime: '_
    // - implicit lifetimes: &foo
    // - explicit lifetimes: &'a
    // Make all input lifetimes explicit:
    // - Use new lifetime explicitly in arg list.
    // - Define new lifetime in generic params.
    let mut input_transformer = ExplicitizeInputLifetimes::default();
    for input in &mut sig.inputs {
        input_transformer.visit_fn_arg_mut(input);
    }
    let ExplicitizeInputLifetimes { new_lifetimes, existing_lifetimes } = input_transformer;
    // Collect every lifetime used (existing first, then generated), for the caller and for the
    // output-elision check below.
    let mut all_lifetimes = existing_lifetimes;
    all_lifetimes.extend_from_slice(&new_lifetimes);
    // Declare the generated lifetimes in the generic-parameter list.
    let new_lifetimes = new_lifetimes
        .into_iter()
        .map(|lt| syn::GenericParam::Lifetime(syn::LifetimeParam::new(lt)));
    sig.generics.params.extend(new_lifetimes);
    // There are two cases where output lifetimes may be elided:
    // - There's exactly one lifetime in the inputs.
    // - There's a receiver with a lifetime.
    // If either case occurs, make any implicit output lifetimes explicit.
    let default_lifetime = if all_lifetimes.len() == 1 {
        Some(all_lifetimes[0].clone())
    } else {
        get_receiver_lifetime(sig).cloned()
    };
    if let Some(lifetime) = default_lifetime {
        ExplicitizeOutputLifetimes { lifetime }.visit_return_type_mut(&mut sig.output);
    }
    all_lifetimes
}
/// Collects the lifetimes appearing in a function's inputs, generating fresh names for any that
/// are elided or anonymous.
#[derive(Default)]
struct ExplicitizeInputLifetimes {
    /// Lifetimes invented by this visitor for elided/anonymous positions.
    new_lifetimes: Vec<syn::Lifetime>,
    /// Lifetimes that were already written explicitly in the signature.
    existing_lifetimes: Vec<syn::Lifetime>,
}

impl ExplicitizeInputLifetimes {
    /// Invent a fresh lifetime (named `'__profilerN`), record it, and return it.
    fn gen_lifetime(&mut self) -> syn::Lifetime {
        let ident = format!("'__profiler{}", self.new_lifetimes.len());
        let lifetime = syn::Lifetime::new(&ident, proc_macro2::Span::call_site());
        self.new_lifetimes.push(lifetime.clone());
        lifetime
    }

    /// Handle a lifetime position the source may have omitted: record it if present, otherwise
    /// fill it in with a freshly-generated lifetime.
    fn visit_elidable_lifetime(&mut self, lifetime: &mut Option<syn::Lifetime>) {
        if let Some(lifetime) = lifetime.as_mut() {
            self.visit_lifetime_mut(lifetime);
        } else {
            *lifetime = Some(self.gen_lifetime());
        }
    }
}
impl VisitMut for ExplicitizeInputLifetimes {
    ignore_inner_fn_items!();

    // Handles 'x in generic parameters in types of non-self arguments.
    fn visit_lifetime_mut(&mut self, lifetime: &mut syn::Lifetime) {
        if lifetime.ident == "_" {
            // Anonymous lifetime: replace it with a generated one.
            *lifetime = self.gen_lifetime();
        } else {
            // Named lifetime: just record it.
            self.existing_lifetimes.push(lifetime.clone());
        }
    }

    // Handles &self.
    fn visit_receiver_mut(&mut self, receiver: &mut syn::Receiver) {
        if let Some((_, lifetime)) = receiver.reference.as_mut() {
            self.visit_elidable_lifetime(lifetime);
        }
    }

    // Handles & in types of non-self arguments.
    fn visit_type_reference_mut(&mut self, type_reference: &mut syn::TypeReference) {
        self.visit_elidable_lifetime(&mut type_reference.lifetime);
    }
}
/// Rewrites elided lifetimes in a return type to one specific lifetime.
struct ExplicitizeOutputLifetimes {
    /// The lifetime substituted into every elided position.
    lifetime: syn::Lifetime,
}

impl VisitMut for ExplicitizeOutputLifetimes {
    ignore_inner_fn_items!();

    // Handles 'x in generic parameters in types.
    fn visit_lifetime_mut(&mut self, lifetime: &mut syn::Lifetime) {
        if lifetime.ident == "_" {
            *lifetime = self.lifetime.clone();
        }
    }

    // Handles & in types.
    fn visit_type_reference_mut(&mut self, type_reference: &mut syn::TypeReference) {
        if type_reference.lifetime.is_none() {
            type_reference.lifetime = Some(self.lifetime.clone());
        }
    }
}
/// Return the explicit lifetime of the `&self` receiver, if the signature has one.
fn get_receiver_lifetime(sig: &syn::Signature) -> Option<&syn::Lifetime> {
    if let Some(syn::FnArg::Receiver(syn::Receiver { reference: Some((_, Some(lifetime))), .. })) =
        sig.inputs.first()
    {
        Some(lifetime)
    } else {
        None
    }
}

View File

@ -1,311 +0,0 @@
//! Defines the JSON-based profile event-log format.
//!
//! See: https://github.com/enso-org/design/blob/main/epics/profiling/implementation.md#profiling-data
use serde::Deserialize;
use serde::Serialize;
use std::borrow::Cow;
// ==============
// === Export ===
// ==============

pub mod builder;

pub use builder::Builder;

/// Metadata of any type.
///
/// Stored as raw JSON, so this layer needs no knowledge of the concrete metadata types.
pub type AnyMetadata = Box<serde_json::value::RawValue>;
// =============
// === Event ===
// =============

/// An entry in the profiling log.
///
/// Variants and fields are serialized under single-letter names to keep profiles compact.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Event<'a> {
    /// Registers a label to be referenced by ID.
    #[serde(rename = "L")]
    Label {
        /// The text content of the label. Might need to be an owned string, if the original data
        /// contains an escape sequence.
        #[serde(rename = "l")]
        label: Cow<'a, str>,
    },
    /// The beginning of a measurement that starts in the paused state.
    #[serde(rename = "C")]
    Create(Start),
    /// The beginning of a measurement, or the continuation after an interruption.
    #[serde(rename = "S")]
    Start {
        /// Identifies the measurement.
        #[serde(rename = "i")]
        id: MeasurementId,
        /// When the event occurred.
        #[serde(rename = "t")]
        timestamp: Timestamp,
    },
    /// The end of a measurement.
    #[serde(rename = "E")]
    End {
        /// Identifies the measurement.
        #[serde(rename = "i")]
        id: MeasurementId,
        /// When the event occurred.
        #[serde(rename = "t")]
        timestamp: Timestamp,
    },
    /// The beginning of an interruption to a measurement, e.g. an await point.
    #[serde(rename = "P")]
    Pause {
        /// Identifies the measurement.
        #[serde(rename = "i")]
        id: MeasurementId,
        /// When the event occurred.
        #[serde(rename = "t")]
        timestamp: Timestamp,
    },
    /// Metadata: wrapper with dependency-injected contents.
    #[serde(rename = "X")]
    Metadata(Timestamped<AnyMetadata>),
}
// =============
// === Start ===
// =============

/// A measurement-start entry in the profiling log.
///
/// Serialized with single-letter field names; an implicit parent is omitted entirely.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct Start {
    /// Specifies parent measurement.
    #[serde(rename = "p")]
    #[serde(skip_serializing_if = "Parent::is_implicit")]
    pub parent: Parent,
    /// Start time, or None to indicate it is the same as `parent`.
    #[serde(rename = "t")]
    pub start: Option<Timestamp>,
    /// Identifies where in the code this measurement originates.
    #[serde(rename = "l")]
    pub label: Label,
}
// === Label ===

/// The label of a profiler; this includes the name given at its creation, along with file and
/// line-number information.
///
/// Stored as an index into the profile's label table (see [`Event::Label`]).
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct Label(usize);

impl Label {
    /// Return an index into the label table.
    pub fn id(self) -> usize {
        self.0
    }
}
// ==============
// === Parent ===
// ==============

/// Specifies how the parent of a measurement is identified.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Parent {
    /// Unspecified parent, to be identified from context.
    Implicit,
    /// Specific parent.
    Explicit(ParentId),
}

impl Parent {
    /// Unspecified parent, to be identified from context.
    pub fn implicit() -> Self {
        Self::Implicit
    }

    /// Return whether the parent is implicit.
    pub fn is_implicit(&self) -> bool {
        matches!(self, Self::Implicit)
    }

    /// Returns the special parent of top-level measurements.
    pub fn root() -> Self {
        Self::Explicit(ParentId::Root)
    }
}
impl From<MeasurementId> for Parent {
    /// A measurement ID denotes an explicit parent measurement.
    fn from(id: MeasurementId) -> Self {
        Self::Explicit(ParentId::Measurement(id))
    }
}
// === ParentId ===

/// Identifies a parent for a measurement.
///
/// Serialized as `-1` for [`ParentId::Root`] and as the measurement index otherwise (see the
/// custom [`Serialize`]/[`Deserialize`] impls for [`Parent`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum ParentId {
    /// The root of top-level measurements.
    Root,
    /// A runtime measurement.
    Measurement(MeasurementId),
}
// === Serialized representation ===

impl Serialize for Parent {
    fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error>
    where S: serde::ser::Serializer {
        // Implicit parents serialize to `null`, the root to `-1`, and any other parent to its
        // measurement index.
        match self {
            Self::Implicit => ser.serialize_none(),
            Self::Explicit(ParentId::Root) => ser.serialize_i64(-1),
            Self::Explicit(ParentId::Measurement(MeasurementId(id))) =>
                ser.serialize_u64(*id as u64),
        }
    }
}
impl<'de> Deserialize<'de> for Parent {
    fn deserialize<D>(deserializer: D) -> Result<Parent, D::Error>
    where D: serde::de::Deserializer<'de> {
        // `null` encodes an implicit parent, `-1` the root, and any other integer a measurement
        // index (which must fit in `usize`).
        match Option::<i64>::deserialize(deserializer)? {
            None => Ok(Parent::Implicit),
            Some(-1) => Ok(Parent::Explicit(ParentId::Root)),
            Some(id) => {
                let id = usize::try_from(id).map_err(|_| {
                    let found = serde::de::Unexpected::Signed(id);
                    let wanted =
                        format!("an integer between 0 and {}, or the special value -1", usize::MAX);
                    serde::de::Error::invalid_value(found, &wanted.as_str())
                })?;
                Ok(Parent::Explicit(ParentId::Measurement(MeasurementId(id))))
            }
        }
    }
}
// =====================
// === MeasurementId ===
// =====================

/// ID of a measurement (runtime instance of a profiler).
///
/// Assigned sequentially, in creation order (see [`Builder::create`]).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize, PartialOrd, Ord)]
pub struct MeasurementId(pub usize);
// =================
// === Timestamp ===
// =================

/// A relative time; when added to this profile's [`Header::TimeOffset`] (if present), yields an
/// offset from the unix epoch.
///
/// Stored in microseconds; this provides plenty of range and precision, and unlike a float supports
/// [`Cmp`] and related traits easily.
#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Debug, Serialize, Deserialize)]
pub struct Timestamp(u64);

impl Timestamp {
    /// Return the timestamp corresponding to an offset from the time origin, in ms.
    pub fn from_ms(ms: f64) -> Self {
        // Round to the nearest microsecond tick.
        Timestamp((ms * 1000.0).round() as u64)
    }

    /// Convert to an offset from the time origin, in ms.
    pub fn into_ms(self) -> f64 {
        self.0 as f64 / 1000.0
    }
}
// === Timestamped ===

/// Wrapper adding a timestamp to an object.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct Timestamped<T> {
    /// When the event occurred.
    #[serde(rename = "t")]
    pub time: Timestamp,
    /// The data.
    #[serde(rename = "d")]
    pub data: T,
}
// ==============
// === Header ===
// ==============

/// Standard file headers.
///
/// Headers are recorded as metadata events whose names begin with `$`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Header {
    /// Value that can be added to a [`Timestamp`] to translate it to an offset from the Unix
    /// Epoch.
    #[serde(rename = "$TimeOffset")]
    TimeOffset(Timestamp),
    /// Application-specific identifier used to distinguish log data from different processes.
    #[serde(rename = "$Process")]
    Process(String),
}
// =============
// === Tests ===
// =============

#[cfg(test)]
mod tests {
    use crate::format;

    /// Verify that the current implementation can still deserialize the format as of the first
    /// stable release.
    #[test]
    fn format_stability() {
        // Example data containing every type of event and every encoding of each field.
        const LOG: &str = "[\
        {\"C\":{\"p\":-1,\"t\":5210200,\"l\":3}},\
        {\"C\":{\"p\":0,\"t\":5210000,\"l\":1}},\
        {\"C\":{\"t\":5196300,\"l\":0}},\
        {\"C\":{\"t\":null,\"l\":0}},\
        {\"E\":{\"i\":0,\"t\":5199800}},\
        {\"L\":{\"l\":\"entry_point_ide (app/gui/src/lib.rs:134)\"}},\
        {\"P\":{\"i\":2,\"t\":5210200}},\
        {\"S\":{\"i\":0,\"t\":5196300}},\
        {\"X\":{\"t\":0,\"d\":{\"$Process\":\"Ide\"}}},\
        {\"X\":{\"t\":0,\"d\":{\"$TimeOffset\":1650900741301300}}}\
        ]";
        // Check that we can deserialize the data.
        let events: Vec<format::Event> = serde_json::from_str(LOG).unwrap();
        // Check that the deserialized structures contain all the information that was in the JSON.
        // As an easy way of implementing this, we check a stricter property here: That
        // re-serializing the data structures produces the same blob as the input.
        assert_eq!(LOG, serde_json::to_string(&events).unwrap());
    }

    /// Verify that the current implementation can deserialize escaped paths in the json file.
    #[test]
    fn escaped_json() {
        // Example data containing a string with escaped characters.
        const LOG: &str = r#"[{"L":{"l":"entry_point_ide (app\\ui\\src\\lib.rs:134)"}}]"#;
        // Check that we can deserialize the data.
        let _events: Vec<format::Event> = serde_json::from_str(LOG).unwrap();
    }
}

View File

@ -1,118 +0,0 @@
//! Supports constructing a document in the JSON format (see [`crate::format`]).
use crate::format;
use std::collections::HashMap;
// ===============
// === Builder ===
// ===============

/// Constructs a profile document for serialization into the JSON format (see [`crate::format`]).
#[derive(Debug, Default)]
pub struct Builder<'a> {
    /// Profile events, in the order they were logged.
    events: Vec<format::Event<'a>>,
    /// The ID that will be assigned to the next measurement created.
    next_measurement: usize,
    /// Maps label text to its interned index in the label table.
    labels: HashMap<&'a str, format::Label>,
}
impl<'a> Builder<'a> {
    #[allow(missing_docs)]
    pub fn new() -> Self {
        Default::default()
    }

    /// Log a metadata event to the profile.
    pub fn metadata<M>(&mut self, time: format::Timestamp, name: &'static str, data: M)
    where M: serde::Serialize {
        let payload = serde_json::value::to_raw_value(&Variant { name, data }).unwrap();
        self.events.push(format::Event::Metadata(format::Timestamped { time, data: payload }));
    }

    /// Log a profiler-creation event to the profile.
    pub fn create<'b: 'a>(
        &mut self,
        time: Option<format::Timestamp>,
        parent: format::Parent,
        label: &'b str,
    ) -> format::MeasurementId {
        // Intern the label: reuse its table entry if seen before; otherwise emit a `Label` event
        // and assign it the next table index.
        let fresh_id = self.labels.len();
        let label = *self.labels.entry(label).or_insert_with(|| {
            self.events.push(format::Event::Label { label: label.into() });
            format::Label(fresh_id)
        });
        // Emit the creation event and allocate the next measurement ID for it.
        self.events.push(format::Event::Create(format::Start { parent, start: time, label }));
        let id = self.next_measurement;
        self.next_measurement += 1;
        format::MeasurementId(id)
    }

    /// Log a profiler-start event to the profile.
    pub fn start(&mut self, timestamp: format::Timestamp, id: format::MeasurementId) {
        self.events.push(format::Event::Start { id, timestamp });
    }

    /// Log a profiler-end event to the profile.
    pub fn end(&mut self, timestamp: format::Timestamp, id: format::MeasurementId) {
        self.events.push(format::Event::End { id, timestamp });
    }

    /// Log a profiler-pause event to the profile.
    pub fn pause(&mut self, timestamp: format::Timestamp, id: format::MeasurementId) {
        self.events.push(format::Event::Pause { id, timestamp });
    }

    /// Attach a header to the profile indicating the offset of the file's timestamps from system
    /// time.
    pub fn time_offset(&mut self, offset: format::Timestamp) {
        self.header(format::Header::TimeOffset(offset));
    }

    /// Attach a header to the profile identifying its process.
    pub fn process<'b: 'a>(&mut self, process: &'b str) {
        self.header(format::Header::Process(process.to_string()));
    }

    /// Record a header as a metadata event at the default (zero) timestamp.
    fn header(&mut self, data: format::Header) {
        let data = serde_json::value::to_raw_value(&data).unwrap();
        let time = format::Timestamp::default();
        self.events.push(format::Event::Metadata(format::Timestamped { time, data }));
    }

    /// Render the profile to a file.
    pub fn build_string(self) -> String {
        serde_json::to_string(&self.events).unwrap()
    }
}
// ===============
// === Variant ===
// ===============

/// Wrapper for serializing an object as if it were a particular variant of some unspecified enum.
///
/// This allows serializing instances of one variant of an enum without knowledge of the other
/// variants.
struct Variant<T> {
    /// The variant (tag) name to serialize under.
    name: &'static str,
    /// The payload serialized as the variant's contents.
    data: T,
}

impl<T: serde::Serialize> serde::Serialize for Variant<T> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where S: serde::Serializer {
        // The enum name and variant index are not emitted by self-describing formats such as
        // JSON; only the variant name tags the output.
        serializer.serialize_newtype_variant("", 0, self.name, &self.data)
    }
}

View File

@ -1,548 +0,0 @@
//! Implementation details not used directly in normal usage of the Profiling API.
//!
//! `pub` items in this module support two uses:
//! - They support profiling crates in interpreting raw profiling data.
//! - They are used by [the macros](../index.html#macros) that provide the public interface to
//! `profiler`.
use crate::format;
use crate::log;
use crate::ProfilingLevel;
use std::fmt;
use std::rc;
// ======================================================
// === The global logs (EVENTS and the METADATA_LOGS) ===
// ======================================================

thread_local! {
    // Thread-local storage backing the global event log.
    static EVENT_LOG: log::Log<Event> = log::Log::new();
}
/// Global log of [`Events`]s.
pub(crate) static EVENTS: log::ThreadLocalLog<Event> = log::ThreadLocalLog::new(EVENT_LOG);

thread_local! {
    // Thread-local storage backing the global registry of metadata logs.
    static METADATA_LOG_LOG: log::Log<rc::Rc<dyn MetadataSource>> = log::Log::new();
}
/// Global registry of metadata logs.
pub(crate) static METADATA_LOGS: log::ThreadLocalLog<rc::Rc<dyn MetadataSource>> =
    log::ThreadLocalLog::new(METADATA_LOG_LOG);
// =========================
// === Capturing the log ===
// =========================

/// Produce a JSON-formatted event log from the internal event logs.
pub fn get_log() -> String {
    let LogData { events, metadata_names, mut metadata_entries } = get_raw_log();
    let mut out = LogTranslator::new();
    // An event's ID is its index in the log, so enumerate as we replay.
    for (id, event) in events.into_iter().enumerate() {
        let id = EventId(id as u32);
        match event {
            Event::Metadata { timestamp, data } => {
                let ExternalMetadata { type_id } = data;
                let id = type_id as usize;
                let name = metadata_names[id];
                // Entries of each metadata type are consumed in log order, matching the order in
                // which their placeholder events were recorded.
                let data = metadata_entries[id].next().unwrap();
                out.metadata(timestamp, name, data);
            }
            // An active start produces both a creation and a start transition.
            Event::Start(Start { parent, start, label, .. }) => {
                out.create(start, parent, label, id);
                out.start(start.unwrap(), id);
            }
            // A paused start is only a creation; a later `Resume` starts it.
            Event::StartPaused(Start { parent, start, label, .. }) =>
                out.create(start, parent, label, id),
            Event::End { id, timestamp } => out.end(timestamp, id),
            Event::Pause { id, timestamp } => out.pause(timestamp, id),
            Event::Resume { id, timestamp } => out.start(timestamp, id),
        }
    }
    out.finish()
}
// === Capture raw log data ===

/// Obtain the data from the internal event log.
pub(crate) fn get_raw_log() -> LogData {
    let events = EVENTS.clone_all();
    let metadatas: Vec<_> = METADATA_LOGS.clone_all();
    let metadata_names: Vec<_> = metadatas.iter().map(|metadata| metadata.name()).collect();
    let metadata_entries: Vec<_> =
        metadatas.into_iter().map(|metadata| metadata.get_all()).collect();
    LogData { events, metadata_names, metadata_entries }
}

/// A snapshot of the internal event log.
/// Contains all the information necessary to produce a profile.
pub(crate) struct LogData {
    /// The profiling events, in the order they were logged.
    pub events: Vec<Event>,
    /// The name of each registered metadata type, indexed by `type_id`.
    metadata_names: Vec<&'static str>,
    /// Iterators over the serialized entries of each metadata type, indexed by `type_id`.
    metadata_entries: Vec<Box<dyn Iterator<Item = Box<serde_json::value::RawValue>>>>,
}
// =====================
// === LogTranslator ===
// =====================

/// Translates [`profiler::internal`] types and IDs to [`profiler::format`] equivalents.
#[derive(Debug)]
struct LogTranslator<'a> {
    /// The output document being built.
    profile: format::Builder<'a>,
    /// Maps internal event IDs to the output format's measurement IDs.
    ids: std::collections::HashMap<EventId, format::MeasurementId>,
}

/// Generates a method forwarding a state-transition event (start/end/pause) to the builder,
/// translating the timestamp and measurement ID on the way.
macro_rules! translate_transition {
    ($name:ident) => {
        fn $name(&mut self, time: Timestamp, id: EventId) {
            self.profile.$name(time.into(), self.ids[&id]);
        }
    };
}
impl<'a> LogTranslator<'a> {
    /// Create a translator whose output document carries this process's standard headers.
    fn new() -> Self {
        let mut profile = format::Builder::new();
        profile.time_offset(Timestamp::time_offset().into());
        profile.process("Ide");
        Self { profile, ids: Default::default() }
    }

    /// Consume the translator, producing the serialized profile.
    fn finish(self) -> String {
        self.profile.build_string()
    }

    /// Forward a metadata event.
    fn metadata(&mut self, time: Timestamp, name: &'static str, data: format::AnyMetadata) {
        self.profile.metadata(time.into(), name, data);
    }

    /// Forward a profiler-creation event, recording the mapping from its event ID to the newly
    /// assigned measurement ID.
    fn create(&mut self, time: Option<Timestamp>, parent: EventId, label: Label, id: EventId) {
        let parent = if parent == EventId::IMPLICIT {
            format::Parent::implicit()
        } else if parent == EventId::APP_LIFETIME {
            format::Parent::root()
        } else {
            self.ids[&parent].into()
        };
        let interval = self.profile.create(time.map(|t| t.into()), parent, label.0);
        self.ids.insert(id, interval);
    }

    translate_transition!(start);
    translate_transition!(end);
    translate_transition!(pause);
}
// ===================
// === MetadataLog ===
// ===================

/// A log of metadata entries of one type, together with the name identifying that type.
pub(crate) struct MetadataLog<T> {
    /// The name identifying this metadata type.
    pub name: &'static str,
    /// The logged entries; shared with the [`MetadataLogger`] that writes them.
    pub entries: rc::Rc<log::Log<T>>,
}

/// Type-erased view of a [`MetadataLog`], able to yield its entries as raw JSON.
pub(crate) trait MetadataSource {
    /// The name identifying this metadata type.
    fn name(&self) -> &'static str;
    /// Serialize all entries logged so far, returning an iterator over the results.
    fn get_all(&self) -> Box<dyn Iterator<Item = Box<serde_json::value::RawValue>>>;
}

impl<T: 'static + serde::Serialize> MetadataSource for MetadataLog<T> {
    fn name(&self) -> &'static str {
        self.name
    }
    fn get_all(&self) -> Box<dyn Iterator<Item = Box<serde_json::value::RawValue>>> {
        let mut entries = Vec::with_capacity(self.entries.len());
        self.entries.for_each(|x| entries.push(serde_json::value::to_raw_value(&x).unwrap()));
        Box::new(entries.into_iter())
    }
}
// ======================
// === MetadataLogger ===
// ======================

/// An object that supports writing a specific type of metadata to the profiling log.
#[derive(Debug)]
pub struct MetadataLogger<T> {
    /// Index of this metadata type in the global [`METADATA_LOGS`] registry.
    id: u32,
    /// The entries logged so far; shared with the registered [`MetadataLog`].
    entries: rc::Rc<log::Log<T>>,
}

impl<T: 'static + serde::Serialize> MetadataLogger<T> {
    /// Create a MetadataLogger for logging a particular type.
    ///
    /// The name given here must match the name used for deserialization.
    pub fn new(name: &'static str) -> Self {
        let id = METADATA_LOGS.len() as u32;
        let entries = rc::Rc::new(log::Log::new());
        METADATA_LOGS.push(rc::Rc::new(MetadataLog::<T> { name, entries: entries.clone() }));
        Self { id, entries }
    }

    /// Write a metadata object to the profiling event log.
    ///
    /// Returns an identifier that can be used to create references between log entries.
    pub fn log(&self, t: T) -> EventId {
        // The entry itself lives in `self.entries`; a placeholder referencing this metadata type
        // is recorded in the main event log.
        self.entries.push(t);
        EventLog.metadata(self.id)
    }
}
// ================
// === EventLog ===
// ================

/// The log of profiling events. Data is actually stored globally.
#[derive(Copy, Clone, Debug)]
pub struct EventLog;

impl EventLog {
    /// Log the beginning of a measurement.
    #[inline]
    pub fn start(
        self,
        parent: EventId,
        label: Label,
        start: Option<Timestamp>,
        state: StartState,
        level: ProfilingLevel,
    ) -> EventId {
        let measurement = Start { parent, label, start, level };
        self.log_event(match state {
            StartState::Active => Event::Start(measurement),
            StartState::Paused => Event::StartPaused(measurement),
        })
    }

    /// Log the end of a measurement.
    #[inline]
    pub fn end(self, id: EventId, timestamp: Timestamp) {
        self.log_event(Event::End { id, timestamp });
    }

    /// Log the beginning of an interval in which the measurement is not active.
    #[inline]
    pub fn pause(self, id: EventId, timestamp: Timestamp) {
        self.log_event(Event::Pause { id, timestamp });
    }

    /// Log the end of an interval in which the measurement is not active.
    #[inline]
    pub fn resume(self, id: EventId, timestamp: Timestamp) {
        self.log_event(Event::Resume { id, timestamp });
    }

    /// Log metadata.
    #[inline]
    pub fn metadata(self, type_id: u32) -> EventId {
        let timestamp = Timestamp::now();
        let data = ExternalMetadata { type_id };
        self.log_event(Event::Metadata { timestamp, data })
    }

    /// Append an event to the global log; its index in the log serves as its ID.
    #[inline(always)]
    fn log_event(self, event: Event) -> EventId {
        let id = EventId(EVENTS.len() as u32);
        EVENTS.push(event);
        id
    }
}
// === StartState ===

/// Specifies the initial state of a profiler.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum StartState {
    /// The profiler starts in the running state.
    Active,
    /// The profiler starts in the paused state.
    /// Used for async measurements, which are resumed when first polled.
    Paused,
}
// =============
// === Event ===
// =============

/// An entry in the profiling log.
///
/// Events carry no ID of their own; an event is identified by its index in the log (see
/// [`EventLog::log_event`]).
#[derive(Debug, Clone, Copy)]
pub enum Event {
    /// The beginning of a measurement.
    Start(Start),
    /// The beginning of a measurement that starts in the paused state.
    StartPaused(Start),
    /// The end of a measurement.
    End {
        /// Identifies the measurement by the ID of its Start event.
        id: EventId,
        /// When the event occurred.
        timestamp: Timestamp,
    },
    /// The beginning of an interruption to a measurement, e.g. an await point.
    Pause {
        /// Identifies the measurement by the ID of its Start event.
        id: EventId,
        /// When the event occurred.
        timestamp: Timestamp,
    },
    /// The end of an interruption to an a measurement, e.g. an await point.
    Resume {
        /// Identifies the measurement by the ID of its Start event.
        id: EventId,
        /// When the event occurred.
        timestamp: Timestamp,
    },
    /// Metadata: wrapper with dependency-injected contents.
    Metadata {
        /// Application-specific data associated with a point in time.
        data: ExternalMetadata,
        /// When the event occurred.
        timestamp: Timestamp,
    },
}
impl Event {
    /// Return the [`Start`] information, if this is an event that defines a profiler.
    pub fn as_start(self) -> Option<Start> {
        if let Event::Start(start) | Event::StartPaused(start) = self {
            Some(start)
        } else {
            None
        }
    }
}
// =============
// === Start ===
// =============

/// A measurement-start entry in the profiling log.
#[derive(Debug, Clone, Copy)]
pub struct Start {
    /// Specifies parent measurement by its [`Start`].
    pub parent: EventId,
    /// Start time, or None to indicate it is the same as `parent`.
    pub start: Option<Timestamp>,
    /// Identifies where in the code this measurement originates.
    pub label: Label,
    /// Identifies the importance of this event.
    pub level: ProfilingLevel,
}
// === Label ===

/// The label of a profiler; this includes the name given at its creation, along with file and
/// line-number information.
#[derive(Debug, Clone, Copy)]
pub struct Label(pub &'static str);

impl fmt::Display for Label {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the inner string so formatter flags (width, fill, etc.) are honored.
        fmt::Display::fmt(self.0, f)
    }
}
// =================
// === Timestamp ===
// =================

/// Time elapsed since the [time origin](https://www.w3.org/TR/hr-time-2/#sec-time-origin).
///
/// Stored as the raw output of performance.now() (floating-point milliseconds).
#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
pub struct Timestamp {
    ms: f64,
}

impl Timestamp {
    /// Return the current time, relative to the time origin.
    #[inline(always)]
    pub fn now() -> Self {
        Self::from_ms(now())
    }

    /// Return the timestamp corresponding to an offset from the time origin, in ms.
    #[inline(always)]
    pub fn from_ms(ms: f64) -> Self {
        Self { ms }
    }

    /// Return the timestamp of the time origin.
    #[inline(always)]
    pub fn time_origin() -> Self {
        Self::from_ms(0.0)
    }

    /// Convert to an offset from the time origin, in ms.
    #[inline(always)]
    pub fn into_ms(self) -> f64 {
        self.ms
    }

    /// Return the offset of the time origin from a system timestamp.
    #[inline(always)]
    pub fn time_offset() -> Self {
        Self::from_ms(time_origin())
    }
}

impl Default for Timestamp {
    #[inline(always)]
    fn default() -> Self {
        Self::time_origin()
    }
}
// === FFI ===

/// Current time in ms, relative to the time origin.
#[cfg(target_arch = "wasm32")]
fn now() -> f64 {
    use enso_web as web;
    use enso_web::traits::*;
    web::window.performance_or_panic().now()
}

/// Current time in ms, relative to the time origin.
#[cfg(not(target_arch = "wasm32"))]
fn now() -> f64 {
    // Monotonically-increasing timestamp, providing slightly more realistic data for tests than
    // a constant.
    thread_local! {
        static NEXT_TIMESTAMP: std::cell::Cell<f64> = Default::default();
    }
    NEXT_TIMESTAMP.with(|timestamp| {
        let now = timestamp.get();
        timestamp.set(now + 0.1);
        now
    })
}

/// Offset of the time origin from the system epoch, in ms.
#[cfg(target_arch = "wasm32")]
fn time_origin() -> f64 {
    use enso_web as web;
    use enso_web::traits::*;
    web::window.performance_or_panic().time_origin()
}

/// Offset of the time origin from the system epoch, in ms.
///
/// Previously this path also called into `enso_web` unconditionally, making non-wasm builds
/// depend on the web crate even though `now()` above already has a native implementation. Native
/// timestamps count from zero, so the corresponding offset is zero.
#[cfg(not(target_arch = "wasm32"))]
fn time_origin() -> f64 {
    0.0
}
// === Conversions to related types ===

impl From<Timestamp> for format::Timestamp {
    #[inline(always)]
    fn from(time: Timestamp) -> Self {
        // Both types measure from the time origin; only the representation differs.
        format::Timestamp::from_ms(time.into_ms())
    }
}
// ===============
// === EventId ===
// ===============
/// Identifies an event in the profiling log.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct EventId(pub u32);
impl EventId {
    /// Special value indicating that an EventId is to be inferred from context.
    pub const IMPLICIT: EventId = EventId(u32::MAX);
    /// Special value indicating the root pseudo-profiler (the parent of runtime root profilers).
    pub const APP_LIFETIME: EventId = EventId(u32::MAX - 1);
    /// Special value indicating that no explicit prior event is associated.
    ///
    /// When used to identify a parent, this indicates that the parent can be inferred to be the
    /// current profiler.
    pub const fn implicit() -> Self {
        Self::IMPLICIT
    }
    /// If the value explicitly references a profiler ID, return it; otherwise, return [`None`].
    pub fn explicit(self) -> Option<Self> {
        match self {
            Self::IMPLICIT => None,
            explicit_id => Some(explicit_id),
        }
    }
}
// ========================
// === ExternalMetadata ===
// ========================
/// Indicates where in the event log metadata from a particular external source should be inserted.
#[derive(Debug, Copy, Clone)]
pub struct ExternalMetadata {
    // Identifies the external metadata source; presumably an index assigned when the source is
    // registered — TODO(review): confirm against `MetadataLogger`.
    type_id: u32,
}
// ================
// === Profiler ===
// ================
/// The interface supported by profilers of all profiling levels.
//
// Note: `finish`/`pause`/`resume` take `self` by value; implementing types are expected to be
// cheap handles (the rest of this file bounds them with `Copy`).
pub trait Profiler {
    /// Log the beginning of a measurement.
    ///
    /// Return an object that can be used to manage the measurement's lifetime.
    fn start(parent: EventId, label: Label, time: Option<Timestamp>, start: StartState) -> Self;
    /// Log the end of a measurement.
    fn finish(self);
    /// Log the beginning of an interval in which the profiler is not active.
    fn pause(self);
    /// Log the end of an interval in which the profiler is not active.
    fn resume(self);
}
// ===============
// === Started ===
// ===============
/// A profiler that has a start time set, and will complete its measurement when dropped.
#[derive(Debug)]
pub struct Started<T: Profiler + Copy>(pub T);
impl<T: Profiler + Copy> Drop for Started<T> {
    fn drop(&mut self) {
        // Log the end of the measurement when the guard goes out of scope.
        T::finish(self.0);
    }
}
// A running profiler guard can parent a child profiler whenever its inner profiler type can;
// delegate to the wrapped value.
impl<T, U> crate::Parent<T> for Started<U>
where
    U: crate::Parent<T> + Profiler + Copy,
    T: Profiler + Copy,
{
    fn start_child(&self, label: Label) -> Started<T> {
        self.0.start_child(label)
    }
}

View File

@ -1,744 +0,0 @@
//! Instrumentation for timing execution of code.
//!
//! Supports the
//! [Profiling](https://github.com/enso-org/design/blob/main/epics/profiling/implementation.md)
//! design.
//!
//! # Profiler hierarchy
//!
//! Every profiler has a parent, except the special profiler value [`APP_LIFETIME`]. Each of its
//! children is considered a *root profiler*.
//!
//! # Profiling levels
//!
//! Profiling has performance overhead; to support fine-grained measurement when it is needed, but
//! avoid its costs when it is not, measurements are classified into *profiling levels*.
//!
//! This API only allows creating a profiler of the same or finer-grained level than its parent.
//!
//! #### Objective
//! Measurements that correspond directly to aspects of the user experience. An *objective* can
//! contain other *objective*s, e.g. *GUI initialization* (which might be defined as: time from
//! opening the app until the app is ready to receive user input) contains *time until the loading
//! spinner finishes*.
//! #### Task
//! Coarse-grained tasks, such as app window initialization, GUI downloading, or WASM compilation. A
//! *task* can contain other *task*s e.g. GUI initialization contains GUI downloading.
//! #### Detail
//! All processes which can be removed in compile-time for the official stable release for users. We
//! might provide some users with special releases with enabled *detailed* profiling, however, it
//! should be possible to debug and understand most of user-provided logs with disabled *details*
//! view.
//! #### Debug
//! All processes which should be removed in compile-time by default even during app development. It
//! applies to every heavy-usage of the profiling framework, such as per-frame rendering profiling.
//!
//! ## Conditional compilation
//!
//! The level of profiling detail is set at compile time with an environment variable, e.g.
//! `ENSO_MAX_PROFILING_LEVEL=task`. When using the `run` script, this can be accomplished by
//! passing the argument `--profiling-level=task`.
//!
//! If the environment variable is not set, the level will default to the minimum supported,
//! *objective*.
//!
//! # Structured measurement
//!
//! This API can be used to make arbitrary measurements; in order to ensure measurements are easy to
//! interpret, the intervals selected for measurement should correspond as much as possible to the
//! units of organization of the code.
//!
//! To support such structured measurement, the **primary interface is a
//! [`#[profile]`](macro@profile) attribute macro**, which instruments a whole function.
//!
//! # Low-level: RAII interface
//!
//! When it is not feasible to measure at the function level (for example if moving the section of
//! interest into its own function would divide the code into unreasonably small functions), or a
//! measurement needs to be made with special properties (e.g. with its start time inherited from
//! its parent), a *RAII interface* supports **instrumenting a block of code**.
//!
//! The core of the interface is a set of [macros](index.html#macros) that create a new profiler,
//! and return a *RAII guard* object of a type like [`Started<Task>`]. The guard object will
//! automatically log the end of a measurement when it is dropped.
//!
//! In rare cases, it will be necessary to measure an interval that doesn't correspond to a block at
//! any level of the code. This can be achieved using the RAII interface by allowing the guard
//! object to escape the scope in which it is created to control its `drop()` time.
//!
//! ## Basic usage
//!
//! ```
//! # use enso_profiler as profiler;
//! # use profiler::profile;
//! async fn using_low_level_api(input: u32, profiler: impl profiler::Parent<profiler::Task>) {
//! if input == 4 {
//! let _profiler = profiler::start_task!(profiler, "subtask_4");
//! // ...
//! } else {
//! let _profiler = profiler::start_task!(profiler, "subtask_other");
//! profiler::await_!(callee(input), _profiler);
//! // ...
//! }
//! }
//!
//! #[profile(Detail)]
//! async fn callee(input: u32) {}
//! ```
//!
//! ### Measuring a block
//!
//! When a measurement is ended by implicitly dropping its profiler at the end of a block, the
//! profiler should be created as **the first line of the block**; it measures one full block, and
//! the line containing [`start_task!`] (or the like) acts as a title for the block.
//!
//! In this case, the binding used to control the scope of the measurement should have a **name
//! beginning with an underscore**, even if it is referenced (e.g. to create a child profiler). This
//! indicates that the binding is used to identify a scope, even if it is *also* used for its
//! value.
//!
//! ### Accepting a parent argument
//!
//! A function using the low-level API may need to accept a profiler argument to use as the parent
//! for a new profiler. The function should be able to accept any type of profiler that is of a
//! suitable level to be a parent of the profiler it is creating. This is supported by accepting an
//! **argument that is generic over the [`Parent`] trait**.
//!
//! ### Profiling `.await`
//!
//! Within a profiled scope, `.await` should not be used directly. The wrapper [`await_!`] is
//! provided to await a future while making the profiling framework aware of the start and end times
//! of the await-interval.
//!
//! ## Advanced Example: creating a root profiler
//!
//! ```
//! # use enso_profiler as profiler;
//! fn root_objective() {
//! let _profiler = profiler::start_objective!(profiler::APP_LIFETIME, "root_objective");
//! // ...
//! }
//! ```
//!
//! ### Root profilers
//!
//! The profiler constructor macros require a parent. To create a *root profiler*, specify the
//! special value [`APP_LIFETIME`] as the parent.
// === Features ===
#![feature(test)]
#![feature(maybe_uninit_uninit_array)]
#![feature(extend_one)]
// === Non-Standard Linter Configuration ===
#![deny(unconditional_recursion)]
#![warn(missing_docs)]
#![warn(trivial_casts)]
// ==============
// === Export ===
// ==============
pub mod format;
pub mod internal;
pub mod log;
extern crate test;
use internal::*;
// ===============
// === prelude ===
// ===============
/// Widely-used exports.
pub mod prelude {
    // Re-export the core profiler trait and the attribute macro for convenient glob-importing.
    pub use crate::internal::Profiler;
    pub use crate::profile;
}
// ========================
// === Logging Metadata ===
// ========================
/// Define a function that writes a specific type of metadata to the profiling log.
#[macro_export]
macro_rules! metadata_logger {
    ($name:expr, $fun:ident($ty:ty)) => {
        /// Write a metadata object to the profiling event log.
        pub fn $fun(data: $ty) {
            // One logger per expansion site, registered lazily under `$name` on first use.
            thread_local! {
                static LOGGER: $crate::internal::MetadataLogger<$ty> =
                    $crate::internal::MetadataLogger::new($name);
            }
            LOGGER.with(|logger| logger.log(data));
        }
    };
}
// ==============
// === Parent ===
// ==============
/// Any object representing a profiler that is a valid parent for a profiler of type T.
pub trait Parent<T: Profiler + Copy> {
    /// Start a new profiler, with `self` as its parent.
    ///
    /// The returned guard logs the end of the child's measurement when dropped.
    fn start_child(&self, label: Label) -> Started<T>;
}
// ===============================================
// === Wrappers instrumenting async operations ===
// ===============================================
/// Await a future, logging appropriate await events for the given profiler.
#[macro_export]
macro_rules! await_ {
    ($evaluates_to_future:expr, $profiler:ident) => {{
        let future = $evaluates_to_future;
        // Mark the profiler paused for the duration of the await, then resume it afterwards.
        profiler::internal::Profiler::pause($profiler.0);
        let result = future.await;
        profiler::internal::Profiler::resume($profiler.0);
        result
    }};
}
/// Await two futures concurrently, like [`futures::join`], but with more accurate profiling.
// NOTE(review): the body currently just delegates to `futures::join!`; the "more accurate
// profiling" promised above is not visible here — confirm whether instrumentation was intended.
pub async fn join<T: futures::Future, U: futures::Future>(t: T, u: U) -> (T::Output, U::Output) {
    futures::join!(t, u)
}
// ===================================
// === profiler_macros Invocations ===
// ===================================
/// Instruments a function.
///
/// For each call to the function, a measurement of the time interval corresponding to the
/// function's body is logged under the name of the function, with file:line information
/// attached.
///
/// # Usage
///
/// The argument to the macro is a profiler type name, identifying the
/// [profiling level](#profiling-levels) at which to instrument the function.
///
/// ```
/// # use enso_profiler as profiler;
/// # use enso_profiler::profile;
/// #[profile(Detail)]
/// fn example(input: u32) -> u32 {
/// input
/// }
/// ```
///
/// This will expand to the equivalent of:
///
/// ```
/// # use enso_profiler as profiler;
/// # use enso_profiler::profile;
/// fn example(input: u32) -> u32 {
/// let __profiler_scope = {
/// use profiler::internal::Profiler;
/// let parent = profiler::internal::EventId::implicit();
/// let now = Some(profiler::internal::Timestamp::now());
/// let label = profiler::internal::Label("example (profiler/src/lib.rs:78)");
/// let profiler =
/// profiler::Detail::start(parent, label, now, profiler::internal::StartState::Active);
/// profiler::internal::Started(profiler)
/// };
/// {
/// input
/// }
/// }
/// ```
///
/// The macro is used the same way with async functions:
///
/// ```
/// # use enso_profiler as profiler;
/// # use enso_profiler::profile;
/// #[profile(Detail)]
/// async fn example(input: u32) -> u32 {
/// input
/// }
/// ```
///
/// The implementation for async is a little more complicated:
///
/// ```
/// # use enso_profiler as profiler;
/// # use enso_profiler::profile;
/// fn async_example(input: u32) -> impl std::future::Future<Output = u32> {
/// let __profiler_scope = {
/// use profiler::internal::Profiler;
/// let parent = profiler::internal::EventId::implicit();
/// let now = Some(profiler::internal::Timestamp::now());
/// let label = profiler::internal::Label("async_example (lib.rs:571)");
/// let profiler =
/// profiler::Task::start(parent, label, now, profiler::internal::StartState::Paused);
/// profiler::internal::Started(profiler)
/// };
/// async move {
/// profiler::internal::Profiler::resume(__profiler_scope.0);
/// let result = { input };
/// std::mem::drop(__profiler_scope);
/// result
/// }
/// }
/// ```
///
/// # Limitations
///
/// ## `.await` expressions with attributes
///
/// `#[profile]` must rewrite `.await` expressions; it separates the base expression from the
/// `.await` in order to insert instrumentation between them. Since the literal expression the
/// attribute was applied to does not exist in the output, there is no way to handle the
/// attribute that would be correct for any type of attribute.
///
/// ## Send approximation
///
/// `#[profile]` breaks
/// [Send approximation](https://rust-lang.github.io/async-book/07_workarounds/03_send_approximation.html);
/// when it is applied to an `async fn`, the `Future` returned will always be `!Send`.
#[doc(inline)]
pub use enso_profiler_macros::profile;
// Generate the profiler types for each supported profiling level, from coarsest to finest.
enso_profiler_macros::define_profiling_levels![Objective, Task, Detail, Debug];
// === APP_LIFETIME ===
/// Pseudo-profiler serving as the root of the measurement hierarchy.
pub const APP_LIFETIME: Objective = Objective(EventId::APP_LIFETIME);
// ===================
// === EventStream ===
// ===================
/// An iterator over all logged events. This is a resumable iterator: After [`next()`] returns
/// [`None`], it can be iterated again to observe any additional events that have been logged since
/// the last time it was used.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct EventStream {
    // Index into the global event log of the next event to yield.
    next_i: usize,
}
impl Iterator for EventStream {
    type Item = Event;
    fn next(&mut self) -> Option<Self::Item> {
        // Copy the event out of the global log, if one exists at the cursor position.
        let event = EVENTS.try_get(self.next_i, |e| *e);
        // Only advance past events actually observed; a later call may see newly-logged events.
        if event.is_some() {
            self.next_i += 1;
        }
        event
    }
}
/// Return a stream that yields all logged events.
pub fn event_stream() -> EventStream {
EventStream::default()
}
// ======================
// === IntervalStream ===
// ======================
/// Return a stream that yields all logged events. This is a resumable iterator: After [`next()`]
/// returns [`None`], it can be iterated again to observe any additional events that have been
/// logged since the last time it was used.
pub fn interval_stream() -> IntervalStream {
    // Pair each event with its index, which doubles as its ID.
    let events = EventStream::default().enumerate();
    IntervalStream {
        events,
        resume: Default::default(),
        parent: Default::default(),
        stack: Vec::new(),
    }
}
/// A stream that yields all logged events.
#[derive(Debug, Clone)]
pub struct IntervalStream {
    // The raw event stream, paired with each event's index (which serves as its ID).
    events: std::iter::Enumerate<EventStream>,
    // [`Timestamp`]s of events that have been paused (or started paused) and resumed. If a running
    // event is not found in this collection, it has been running since its start event.
    resume: std::collections::BTreeMap<EventId, Timestamp>,
    // Maps an event to its resolved parent; used to resolve implicitly-inherited start times.
    parent: std::collections::BTreeMap<EventId, EventId>,
    // IDs of profilers observed started/resumed so far, most recent last.
    stack: Vec<EventId>,
}
impl IntervalStream {
    /// Resolve an event ID that may be implicit to the log index of its start event.
    ///
    /// An implicit ID refers to the profiler on top of the stack, so the stack must be non-empty
    /// in that case.
    fn resolve_start_id(&self, id: EventId) -> usize {
        // Fixed typo in the panic message: "internal" -> "interval".
        let non_empty_stack = "For an interval to end, the stack must be non-empty.";
        id.explicit().unwrap_or_else(|| *self.stack.last().expect(non_empty_stack)).0 as usize
    }
    /// Record the parent of `id`, resolving an implicit parent to the top of the current stack
    /// (or to [`EventId::APP_LIFETIME`] when the stack is empty).
    fn record_parent(&mut self, id: EventId, parent: EventId) {
        let parent = parent
            .explicit()
            .unwrap_or_else(|| self.stack.last().copied().unwrap_or(EventId::APP_LIFETIME));
        self.parent.insert(id, parent);
    }
    /// Return a start event's timestamp, inheriting it from the parent when not set explicitly.
    fn resolve_start_time(&self, start: &Start) -> Timestamp {
        start.start.unwrap_or_else(|| self.resolve_parent_start(start.parent))
    }
    /// Return the start timestamp of the (possibly implicit) parent event `id`.
    fn resolve_parent_start(&self, id: EventId) -> Timestamp {
        let parent_was_recorded =
            "If the event ID is implicit, we must have resolved its parent when we encountered it.";
        let id = id.explicit().unwrap_or_else(|| *self.parent.get(&id).expect(parent_was_recorded));
        EVENTS.get(id.0 as usize, |event| self.resolve_start_time(&event.as_start().unwrap()))
    }
}
impl Iterator for IntervalStream {
    type Item = Interval;
    fn next(&mut self) -> Option<Self::Item> {
        // Scan forward through the raw event log until a completed run-interval is found.
        // NOTE(review): `stack` is pushed by Start/Resume but never popped; this appears to rely
        // on End/Pause events carrying explicit IDs in practice — confirm against the log format.
        while let Some((i, event)) = self.events.next() {
            // An event's ID is its index in the log.
            let id = EventId(i as u32);
            match event {
                Event::Start(start) => {
                    self.stack.push(id);
                    self.record_parent(id, start.parent);
                }
                // A paused start is not running, so it does not go on the stack yet.
                Event::StartPaused(start) => self.record_parent(id, start.parent),
                Event::Resume { id, timestamp } => {
                    self.resume.insert(id, timestamp);
                    self.stack.push(id);
                }
                Event::End { id, timestamp } | Event::Pause { id, timestamp } => {
                    // The interval began either at the most recent resume, or — if the profiler
                    // has been running since its start event — at its (possibly inherited) start.
                    let start = self.resume.remove(&id).or_else(|| {
                        let id = self.resolve_start_id(id);
                        EVENTS.get(id, |e| match e {
                            Event::Start(start) => Some(self.resolve_start_time(start)),
                            // Started-paused and never resumed: no running interval to report.
                            Event::StartPaused(_) => None,
                            _ => unreachable!(),
                        })
                    });
                    if let Some(start) = start {
                        let label = EVENTS.get(id.0 as usize, |e| e.as_start().unwrap().label);
                        let end = timestamp;
                        return Some(Interval { label, start, end });
                    }
                }
                // Metadata events do not contribute to intervals.
                Event::Metadata { .. } => {}
            }
        }
        None
    }
}
/// Identifies a time period in which a particular profiler was running.
#[derive(Copy, Clone, Debug)]
pub struct Interval {
    // Label of the profiler that was running.
    label: Label,
    // When the profiler started or resumed running.
    start: Timestamp,
    // When the profiler ended or paused.
    end: Timestamp,
}
impl Interval {
/// Return a string identifying profiler, usually by the location in the code that it was
/// created.
pub fn label(&self) -> &'static str {
self.label.0
}
/// Return the time the profiler began running, in the same units as the [`DOMHighResTimeStamp`]
/// web API.
pub fn start(&self) -> f64 {
self.start.into_ms()
}
/// Return the time the profiler finished running, in the same units as the
/// [`DOMHighResTimeStamp`] web API.
pub fn end(&self) -> f64 {
self.end.into_ms()
}
}
// =============
// === Tests ===
// =============
#[cfg(test)]
mod log_tests {
    use crate as profiler;
    use profiler::profile;
    /// Return all events logged so far by the profiling framework.
    fn get_log() -> Vec<profiler::internal::Event> {
        crate::internal::get_raw_log().events
    }
    #[test]
    fn root() {
        {
            // In any other crate we would refer to the macro as `profiler::start_objective!`, but
            // "macro-expanded `macro_export` macros from the current crate cannot be referred to
            // by absolute paths" (<https://github.com/rust-lang/rust/issues/52234>).
            let _profiler = start_objective!(profiler::APP_LIFETIME, "test");
        }
        // The dropped guard must have produced exactly one Start/End pair.
        let log = get_log();
        match &log[..] {
            [profiler::Event::Start(m0), profiler::Event::End { id, timestamp: end_time }] => {
                assert_eq!(m0.parent, profiler::APP_LIFETIME.0);
                assert_eq!(id.0, 0);
                assert!(m0.label.0.starts_with("test "));
                assert!(*end_time >= m0.start.unwrap());
            }
            _ => panic!("log: {log:?}"),
        }
    }
    #[test]
    fn profile() {
        #[profile(Objective)]
        fn profiled() {}
        profiled();
        let log = get_log();
        match &log[..] {
            [profiler::Event::Start(m0), profiler::Event::End { id: id0, .. }] => {
                assert!(m0.start.is_some());
                assert_eq!(m0.parent, profiler::EventId::IMPLICIT);
                assert_eq!(id0.0, 0);
            }
            _ => panic!("log: {log:?}"),
        }
    }
    #[test]
    fn profile_async() {
        #[profile(Objective)]
        async fn profiled() -> u32 {
            let block = async { 4 };
            block.await + 1
        }
        let future = profiled();
        futures::executor::block_on(future);
        let log = get_log();
        #[rustfmt::skip]
        match &log[..] {
            [
                // outer async fn: create, then start profiler
                profiler::Event::StartPaused(_),
                profiler::Event::Resume { id: profiler::internal::EventId(0), .. },
                // inner async block: create profiler
                profiler::Event::StartPaused(_),
                // block.await: pause the fn, start the block
                profiler::Event::Pause { id: profiler::internal::EventId(0), .. },
                profiler::Event::Resume { id: profiler::internal::EventId(2), .. },
                // block completes, resume fn, fn completes
                profiler::Event::End { id: profiler::internal::EventId(2), .. },
                profiler::Event::Resume { id: profiler::internal::EventId(0), .. },
                profiler::Event::End { id: profiler::internal::EventId(0), .. },
            ] => (),
            _ => panic!("log: {log:#?}"),
        };
    }
    #[test]
    fn non_move_async_block() {
        // The generated async wrapper must not force a `move` block onto user code.
        #[profile(Objective)]
        async fn profiled() {
            let mut i = 0;
            let block = async { i = 1 };
            block.await;
            assert_eq!(i, 1);
        }
        futures::executor::block_on(profiled());
        let _ = get_log();
    }
    #[test]
    fn inner_items() {
        // Ensure items inside profiled function are not transformed by the outer profiler.
        // This prevents profiled items inside profiled items from being doubly-profiled,
        // and allows deciding whether and how to profile inner items separately from outer.
        #[profile(Objective)]
        async fn outer() {
            async fn inner() {
                let block = async move {};
                block.await
            }
            inner().await
        }
        futures::executor::block_on(outer());
        let log = get_log();
        #[rustfmt::skip]
        match &log[..] {
            [
                profiler::Event::StartPaused(_),
                profiler::Event::Resume { id: profiler::internal::EventId( 0, ), .. },
                profiler::Event::Pause { id: profiler::internal::EventId( 0, ), .. },
                profiler::Event::Resume { id: profiler::internal::EventId( 0, ), .. },
                profiler::Event::End { id: profiler::internal::EventId( 0, ), .. },
            ] => (),
            _ => panic!("log: {log:#?}"),
        };
    }
}
// Performance analysis [KW]
//
// Performance impact: Except at low numbers of measurements, run time is dominated by growing the
// vector. I'm measuring about 1.6ns per logged measurement [Ryzen 5950X], when accumulating 10k
// measurements.
// I think the cost of the unavoidable performance.now() will be on the order of 1μs, in which case
// the overhead of #[profile] is within 0.1 % of an optimal implementation.
//
// Performance variability impact: There's no easy way to measure this, so I'm speaking
// theoretically here. The only operation expected to have a significantly variable cost is the
// Vec::push to grow the EVENTS log; it sometimes needs to reallocate. However even at its
// most expensive, it should be on the order of a 1μs (for reasonable numbers of measurements); so
// the variance introduced by this framework shouldn't disturb even very small measurements (I
// expect <1% added variability for a 1ms measurement).
#[cfg(test)]
mod bench {
    use crate as profiler;
    /// Perform a specified number of measurements, for benchmarking.
    fn log_measurements(count: usize) {
        for _ in 0..count {
            let _profiler = start_objective!(profiler::APP_LIFETIME, "log_measurement");
        }
        // Read the log back so the logging work cannot be optimized away.
        let events: Vec<_> = crate::EVENTS.clone_all();
        test::black_box(events);
    }
    #[bench]
    fn log_measurements_1000(b: &mut test::Bencher) {
        b.iter(|| log_measurements(1000));
    }
    #[bench]
    fn log_measurements_10_000(b: &mut test::Bencher) {
        b.iter(|| log_measurements(10_000));
    }
    /// For comparison with time taken by [`log_measurements`].
    fn push_vec(count: usize, log: &mut Vec<profiler::Event>) {
        // Simulate the same two events per measurement with plain Vec pushes; this is the
        // baseline against which the profiler's overhead is judged.
        for _ in 0..count {
            log.push(profiler::Event::Start(profiler::Start {
                parent: profiler::EventId::APP_LIFETIME,
                start: None,
                label: profiler::Label(""),
                level: Default::default(),
            }));
            log.push(profiler::Event::End {
                id: profiler::EventId::implicit(),
                timestamp: Default::default(),
            });
        }
        test::black_box(&log);
        log.clear();
    }
    #[bench]
    fn push_vec_1000(b: &mut test::Bencher) {
        let mut log = vec![];
        b.iter(|| push_vec(1000, &mut log));
    }
    #[bench]
    fn push_vec_10_000(b: &mut test::Bencher) {
        let mut log = vec![];
        b.iter(|| push_vec(10_000, &mut log));
    }
}
#[cfg(test)]
#[allow(unused)]
mod compile_tests {
    use crate as profiler;
    use profiler::profile;
    // These items only need to compile; together they check that `#[profile]` composes with a
    // variety of signature shapes (impl-trait, generics, borrows, lifetimes, receivers).
    /// Decorating a pub fn.
    #[profile(Task)]
    pub fn profiled_pub() {}
    #[profile(Objective)]
    async fn profiled_async() {}
    #[profile(Objective)]
    async fn polymorphic_return() -> Box<dyn PartialEq<u32>> {
        Box::new(23)
    }
    #[profile(Objective)]
    async fn input_impl_trait(foo: impl PartialEq<u32>, bar: &u32) -> bool {
        foo.eq(bar)
    }
    #[profile(Objective)]
    async fn input_t_trait<T: PartialEq<u32>>(foo: T, bar: &u32) -> bool {
        foo.eq(bar)
    }
    #[profile(Objective)]
    async fn borrows_input_references_in_output(x: &u32) -> &u32 {
        x
    }
    #[profile(Objective)]
    #[allow(clippy::needless_lifetimes)]
    async fn borrows_input_references_in_output_explicitly<'a>(x: &'a u32) -> &'a u32 {
        x
    }
    #[profile(Objective)]
    async fn borrows_input_doesnt_use(_: &u32) -> u32 {
        4
    }
    #[profile(Objective)]
    async fn borrows_input_uses(x: &u32) -> u32 {
        *x
    }
    #[profile(Objective)]
    async fn borrows_two_args(x: &u32, y: &u32) -> u32 {
        *x + *y
    }
    struct Foo(u32);
    impl Foo {
        #[profile(Objective)]
        async fn borrows_self_and_arg(&self, x: &u32) -> u32 {
            self.0 + *x
        }
    }
    #[profile(Detail)]
    #[allow(unsafe_code)]
    unsafe fn profiled_unsafe() {}
    fn mut_binding() {
        #[profile(Objective)]
        fn profiled(mut _x: u32) {
            _x = 4;
        }
    }
    #[profile(Task)]
    fn profiled_destructuring((_x, _y): (u32, u32)) {}
    fn root() {
        let _profiler = start_task!(profiler::APP_LIFETIME, "test");
    }
}

View File

@ -1,280 +0,0 @@
//! Data structure supporting:
//! - O(1) append (amortized), with low overhead.
//! - O(1) random-access reads.
//! - Single-threaded shared mutability.
//!
//! # Implementation
//!
//! Note [Log Safety]
//! =============================
//! Soundness of shared-mutable data requires avoiding reference conflicts: The data must not be
//! mutated while a shared-reference to it exists. This is ensured by:
//! - No public interface of [`Log`] allows keeping a reference with lifetime derived from the data.
//! - References taken within [`Log`]'s implementation don't overlap with other references in the
//! scope.
use std::cell;
/// Allocation unit of events within a [`Log`].
// Only the `fast` implementation uses this; `dead_code` is allowed so debug builds (which use the
// checked implementation) compile cleanly.
#[cfg_attr(debug_assertions, allow(dead_code))]
const BLOCK: usize = 1024;
// ===========
// === Log ===
// ===========
#[cfg(not(debug_assertions))]
pub use fast::Log;
#[cfg(debug_assertions)]
pub use safe::Log;
/// Fast implementation used when debug assertions are disabled.
#[cfg(not(debug_assertions))]
mod fast {
    use super::*;
    use std::mem;
    /// A shared-mutable data structure supporting append and random-access read.
    #[derive(Debug)]
    pub struct Log<T> {
        // Partially-initialized block currently being appended to.
        current: cell::UnsafeCell<Box<[mem::MaybeUninit<T>; BLOCK]>>,
        // Blocks that have been completely filled; every element is initialized.
        completed: cell::UnsafeCell<Vec<Box<[T; BLOCK]>>>,
        // Total number of elements pushed so far.
        len: cell::Cell<usize>,
    }
    impl<T> Log<T> {
        /// Create a new, empty [`Log`].
        pub fn new() -> Self {
            Self {
                current: cell::UnsafeCell::new(Box::new(mem::MaybeUninit::uninit_array())),
                completed: cell::UnsafeCell::new(Default::default()),
                len: Default::default(),
            }
        }
        /// Push an element.
        #[inline]
        #[allow(unsafe_code)] // Note [Log Safety]
        pub fn push(&self, element: T) {
            unsafe {
                let i = self.len.get();
                (*self.current.get())[i % BLOCK].write(element);
                let i1 = i + 1;
                if i1 % BLOCK == 0 {
                    // Current gradually-initialized block is full. Read it, cast it to a
                    // fully-initialized block, and replace it with a new empty block.
                    let empty = Box::new(mem::MaybeUninit::uninit_array());
                    let block = self.current.get().replace(empty);
                    let block =
                        mem::transmute::<Box<[mem::MaybeUninit<T>; BLOCK]>, Box<[T; BLOCK]>>(block);
                    // Add the old block to our collection of completed blocks.
                    (*self.completed.get()).push(block);
                }
                self.len.set(i1);
            }
        }
        /// Returns the number of entries in the log.
        #[inline]
        pub fn len(&self) -> usize {
            self.len.get()
        }
        /// Returns true if the log contains no entries.
        #[inline]
        pub fn is_empty(&self) -> bool {
            self.len() == 0
        }
        /// Applies a function to each entry in the log.
        #[allow(unsafe_code)] // Note [Log Safety]
        pub fn for_each<F>(&self, mut f: F)
        where F: FnMut(&T) {
            unsafe {
                let blocks = self.len() / BLOCK;
                let n = self.len() % BLOCK;
                for i in 0..blocks {
                    // Safety: The contents of a completed block are never modified, so we can hold
                    // a borrow while calling the function (which may append to
                    // the log).
                    let block = &(*self.completed.get())[i];
                    block.iter().for_each(&mut f);
                }
                // Safety: The elements in the completed portion of the block are never modified, so
                // we can hold a borrow while calling the function (which may append
                // to the log).
                let current = &(*self.current.get())[..n];
                current.iter().map(|elem| elem.assume_init_ref()).for_each(f);
            }
        }
        /// Pass the element at the specified index to the given function. Returns `None` if the
        /// index is out of bounds.
        #[inline]
        #[allow(unsafe_code)] // Note [Log Safety]
        pub fn get<U>(&self, index: usize, f: impl FnOnce(&T) -> U) -> Option<U> {
            unsafe {
                let block_i = index / BLOCK;
                let i = index % BLOCK;
                let blocks = &*self.completed.get();
                let value = if let Some(block) = blocks.get(block_i) {
                    Some(&block[i])
                } else if block_i == blocks.len() && i < self.len.get() % BLOCK {
                    Some((*self.current.get())[i].assume_init_ref())
                } else {
                    None
                };
                // Safety: Whether the element is in a completed block, or in the completed portion
                // of the current block, it will never be moved or modified; so we
                // can hold a borrow while calling the function (which may append to
                // the log).
                value.map(f)
            }
        }
    }
    impl<T: Clone> Log<T> {
        /// Return a collection of all entries currently in the log.
        pub fn clone_all<C>(&self) -> C
        where C: Default + Extend<T> {
            let mut result = C::default();
            self.for_each(|elem| result.extend_one(elem.clone()));
            result
        }
    }
}
/// Checked implementation used when debug assertions are enabled.
#[cfg(debug_assertions)]
mod safe {
    use super::*;
    /// A shared-mutable data structure supporting append and random-access read.
    ///
    /// This variant backs the log with a [`RefCell`], so borrow-rule violations panic instead of
    /// causing undefined behavior.
    #[derive(Debug)]
    pub struct Log<T> {
        data: cell::RefCell<Vec<T>>,
    }
    impl<T> Log<T> {
        /// Create a new, empty [`Log`].
        #[inline]
        pub fn new() -> Self {
            Self { data: cell::RefCell::new(Vec::new()) }
        }
        /// Push an element.
        #[inline]
        pub fn push(&self, element: T) {
            self.data.borrow_mut().push(element);
        }
        /// Returns the number of entries in the log.
        #[inline]
        pub fn len(&self) -> usize {
            self.data.borrow().len()
        }
        /// Returns true if the log contains no entries.
        #[inline]
        pub fn is_empty(&self) -> bool {
            self.data.borrow().is_empty()
        }
        /// Applies a function to each entry in the log.
        #[inline]
        pub fn for_each<F>(&self, f: F)
        where F: FnMut(&T) {
            self.data.borrow().iter().for_each(f);
        }
        /// Pass the element at the specified index to the given function. Returns `None` if the
        /// index is out of bounds.
        #[inline]
        pub fn get<U>(&self, index: usize, f: impl FnOnce(&T) -> U) -> Option<U> {
            self.data.borrow().get(index).map(f)
        }
    }
    impl<T: Clone> Log<T> {
        /// Return a collection of all entries currently in the log.
        pub fn clone_all<C>(&self) -> C
        where C: Default + Extend<T> {
            let mut collected = C::default();
            collected.extend(self.data.borrow().iter().cloned());
            collected
        }
    }
}
// This can't be derived without requiring T: Default, which is not otherwise needed.
// See: https://github.com/rust-lang/rust/issues/26925
impl<T> Default for Log<T> {
fn default() -> Self {
Log::new()
}
}
// ======================
// === ThreadLocalLog ===
// ======================
/// Wraps a [`Log`] for thread-local access.
// Holds the `LocalKey` itself, so methods require a `&'static self` receiver.
#[derive(Debug)]
pub struct ThreadLocalLog<T: 'static>(std::thread::LocalKey<Log<T>>);
impl<T: 'static> ThreadLocalLog<T> {
#[allow(missing_docs)]
pub const fn new(log: std::thread::LocalKey<Log<T>>) -> Self {
Self(log)
}
/// Append an entry to the log.
pub fn push(&'static self, t: T) {
self.0.with(|this| this.push(t));
}
/// Return the number of entries in the log. Note that as the log is thread-local but
/// append-only, any function in the thread may increase this value, but it will never
/// decrease.
pub fn len(&'static self) -> usize {
self.0.with(|this| this.len())
}
/// Returns true if the log contains no entries.
pub fn is_empty(&'static self) -> bool {
self.0.with(|this| this.is_empty())
}
/// Get the entry at the given index, and pass it to a function; return the result of the
/// function.
///
/// Panics if the index is not less than [`len`].
pub fn get<U>(&'static self, i: usize, f: impl FnOnce(&T) -> U) -> U {
self.try_get(i, f).unwrap()
}
/// Get the entry at the given index, and pass it to a function; return the result of the
/// function.
///
/// Returns [`None`] if the index is not less than [`len`].
pub fn try_get<U>(&'static self, i: usize, f: impl FnOnce(&T) -> U) -> Option<U> {
self.0.with(|this| this.get(i, f))
}
}
impl<T: 'static + Clone> ThreadLocalLog<T> {
    /// Return a collection of log entries since the program was started.
    pub fn clone_all<C>(&'static self) -> C
    where C: Default + Extend<T> {
        self.0.with(|log| log.clone_all())
    }
}

View File

@ -1,33 +0,0 @@
[package]
name = "enso-shapely"
version = "0.2.0"
authors = ["Enso Team <enso-dev@enso.org>"]
edition = "2021"
description = "Automated typeclass derivation."
readme = "README.md"
homepage = "https://github.com/enso-org/enso/lib/rust/shapely"
repository = "https://github.com/enso-org/enso"
license-file = "../../../LICENSE"
keywords = ["typeclass", "deriving"]
categories = ["algorithms"]
publish = true
[lib]
crate-type = ["rlib"]
[features]
default = []
[dependencies]
enso-zst = { path = "../zst" }
enso-shapely-macros = { path = "macros" }
wasm-bindgen = { workspace = true }
[dependencies.web-sys]
version = "0.3.4"
[dev-dependencies]
enso-prelude = { path = "../prelude" }
[lints]
workspace = true

View File

@ -1,3 +0,0 @@
# Shapely
This crate provides automatic derivation for useful type classes.

View File

@ -1,3 +0,0 @@
# Shapely Macros
This crate provides macros for typeclass derivation.

View File

@ -1,67 +0,0 @@
//! A macro allowing running functions after WASM initialization, before the main function.
use crate::prelude::*;
use crate::root_call_path::root_call_path;
// ========================
// === Main entry point ===
// ========================
/// Mangle a name so it contains only ASCII alphanumerics: every other character `c` (including
/// `_` itself, which becomes `_95_`) is replaced with `_<code>_`, where `<code>` is the decimal
/// Unicode code point of `c`. The JS code contains a counterpart function that un-mangles the
/// names, so after changing this code, the JS one has to be updated as well.
fn mangle_name(name: &str) -> String {
    let mut mangled = String::with_capacity(name.len());
    for c in name.chars() {
        if c.is_ascii_alphanumeric() {
            mangled.push(c);
        } else {
            mangled.push_str(&format!("_{}_", c as u32));
        }
    }
    mangled
}
/// Functions exposed in WASM have to have unique names. This utility creates a name based on the
/// location the function was defined at (module path, line number, column number).
fn unique_name(name: &syn::Ident) -> String {
    let location = root_call_path();
    mangle_name(&format!("{location} ({name})"))
}
/// The prefix of the before-main entry point function in WASM. The JS code contains a code
/// referring to that name as well, so if you change it, you have to update the JS code as well.
const BEFORE_MAIN_ENTRY_POINT_PREFIX: &str = "before_main_entry_point";

/// The default priority of the before-main entry point. This number will be used as part of the
/// function name. Before-main entry points are sorted by name before being run, so a smaller
/// number means an earlier run.
const DEFAULT_PRIORITY: usize = 100;
/// Convert the function to a before-main entry point. Please note that the [`wasm-bindgen`] macro
/// has to be in scope for the result to compile.
///
/// The attribute accepts at most one argument: a numeric priority. When absent,
/// [`DEFAULT_PRIORITY`] is used. The annotated function is renamed to
/// `<prefix>_<priority>_<mangled location>` and exported via `wasm_bindgen`.
pub fn run(
    args: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let mut args_iter = args.into_iter();
    // Parse the optional priority argument; anything beyond a single token is an error.
    let priority = match args_iter.next() {
        None => DEFAULT_PRIORITY,
        Some(token) => {
            if args_iter.next().is_some() {
                panic!(
                    "Expected maximum one argument, the entry point priority. If missing, the \
                    default priority will be used ({DEFAULT_PRIORITY})."
                );
            }
            match token.to_string().parse::<usize>() {
                Ok(priority) => priority,
                Err(_) => panic!("The priority must be a number."),
            }
        }
    };
    let mut input_fn = syn::parse_macro_input!(input as syn::ImplItemMethod);
    // The priority is embedded in the name so that entry points sort into execution order.
    let name =
        format!("{BEFORE_MAIN_ENTRY_POINT_PREFIX}_{priority}_{}", unique_name(&input_fn.sig.ident));
    input_fn.sig.ident = quote::format_ident!("{name}");
    let output = quote! {
        #[wasm_bindgen::prelude::wasm_bindgen]
        #input_fn
    };
    output.into()
}

View File

@ -1,219 +0,0 @@
use crate::prelude::*;
use enso_macro_utils::field_names;
use enso_macro_utils::identifier_sequence;
use enso_macro_utils::index_sequence;
use enso_macro_utils::path_matching_ident;
use syn::Attribute;
use syn::Data;
use syn::DataEnum;
use syn::DataStruct;
use syn::DeriveInput;
use syn::Fields;
use syn::Ident;
use syn::Lit;
use syn::Meta;
use syn::MetaNameValue;
use syn::NestedMeta;
use syn::Variant;
use syn::WhereClause;
use syn::WherePredicate;
// ==============
// === Consts ===
// ==============

/// Name of the custom attribute allowing customizing behavior of the generated `CloneRef`
/// implementation, i.e. `#[clone_ref(...)]`.
const CLONE_REF_ATTR: &str = "clone_ref";

/// Name of the property within customization attribute that allows defining custom bounds for
/// the generated `CloneRef` implementation, i.e. `#[clone_ref(bound = "...")]`.
const BOUND_NAME: &str = "bound";
// ============================
// === CloneRef for structs ===
// ============================
/// `clone_ref` function body for a given `struct` definition: every field is `clone_ref`ed.
pub fn body_for_struct(ident: &Ident, data: &DataStruct) -> TokenStream {
    match &data.fields {
        // Unit struct: `Foo`
        Fields::Unit => quote!( #ident ),
        // Tuple struct: `Foo(self.0.clone_ref())`
        Fields::Unnamed(fields) => {
            let indices = index_sequence(fields.unnamed.len());
            quote!(
                #ident(#(self.#indices.clone_ref()),*)
            )
        }
        // Named struct: `Foo { field0 : self.field0.clone_ref() }`
        Fields::Named(fields) => {
            let names = field_names(fields);
            quote!(
                #ident {
                    #(#names : self.#names.clone_ref()),*
                }
            )
        }
    }
}
// ==========================
// === CloneRef for enums ===
// ==========================
/// Prepares a match arm for a single variant that `clone_ref`s such value.
///
/// The arm destructures the variant and rebuilds it with every field `clone_ref`ed.
pub fn arm_for_variant(data_ident: &Ident, variant: &Variant) -> TokenStream {
    let fields = &variant.fields;
    let variant_ident = &variant.ident;
    match fields {
        Fields::Unit => {
            // Enum::Var => Enum::Var
            quote!(
                #data_ident::#variant_ident => #data_ident::#variant_ident
            )
        }
        Fields::Named(fields) => {
            let names = field_names(fields);
            // Enum::Var {field0} => Enum::Var {field0 : field0.clone_ref()}
            quote!(
                #data_ident::#variant_ident { #(#names),* } =>
                    #data_ident::#variant_ident {
                        #( #names : #names.clone_ref() ),*
                    }
            )
        }
        Fields::Unnamed(fields) => {
            // Fresh identifiers (`field0`, `field1`, ...) to bind the tuple fields.
            let names = identifier_sequence(fields.unnamed.len());
            // Enum::Var(field0) => Enum::Var(field0.clone_ref())
            quote!(
                #data_ident::#variant_ident(#(#names),*) =>
                    #data_ident::#variant_ident(
                        #( #names.clone_ref() ),*
                    )
            )
        }
    }
}
/// `clone_ref` function body for a given `enum` definition: a `match` with one arm per variant.
/// An empty enum has no values, so its body is an unconditional panic.
pub fn body_for_enum(ident: &Ident, data: &DataEnum) -> TokenStream {
    if data.variants.is_empty() {
        return quote!(panic!(
            "There cannot exist value of empty enum, so its clone_ref must not be called."
        ));
    }
    let arms = data.variants.iter().map(|variant| arm_for_variant(ident, variant));
    quote!(
        match self { #(#arms),* }
    )
}
// ============================
// === Bounds customization ===
// ============================
/// Checks if the given attribute is our customization attribute, i.e. `#[clone_ref(...)]`.
pub fn is_clone_ref_customization(attr: &Attribute) -> bool {
    path_matching_ident(&attr.path, CLONE_REF_ATTR)
}
/// Checks if the given Meta name-val pair defines user-provided bounds, i.e. `bound = "..."`.
pub fn is_custom_bound(name_val: &MetaNameValue) -> bool {
    path_matching_ident(&name_val.path, BOUND_NAME)
}
/// If this is our customization attribute, we retrieve user-provided bounds for the generated
/// `CloneRef` implementation.
///
/// Returns `None` is this is third-party attribute.
/// Panics if this is our attribute but the syntax is not correct. The expected shape is
/// `#[clone_ref(bound = "T: SomeTrait")]` — a single `bound` assignment whose value is a string
/// of `where` predicates.
pub fn clone_ref_bounds(attr: &Attribute) -> Option<Vec<WherePredicate>> {
    // Silently ignore foreign attributes. Be picky only about our one.
    is_clone_ref_customization(attr).then_some(())?;
    let meta = attr.parse_meta().expect("Failed to parse attribute contents.");
    let list = match meta {
        Meta::List(ml) => ml.nested,
        _ => panic!("Attribute contents does not conform to meta item."),
    };
    assert!(list.len() <= 1, "Only a single entry within `{CLONE_REF_ATTR}` attribute is allowed.");
    // Extract the literal on the right-hand side of `bound = ...`.
    let bound_value = match list.first() {
        Some(NestedMeta::Meta(Meta::NameValue(name_val))) =>
            if is_custom_bound(name_val) {
                &name_val.lit
            } else {
                panic!("`{CLONE_REF_ATTR}` attribute can define value only for `{BOUND_NAME}`.")
            },
        Some(_) =>
            panic!("`{CLONE_REF_ATTR}` attribute must contain a single name=value assignment."),
        None => panic!("`{CLONE_REF_ATTR}` attribute must not be empty."),
    };
    let bound_str = if let Lit::Str(lit_str) = bound_value {
        lit_str
    } else {
        panic!("`{BOUND_NAME}` value must be a string literal describing `where` predicates.")
    };
    // Parse the string as a `where` clause and return its predicates.
    let bounds_text = format!("where {}", bound_str.value());
    let bounds = syn::parse_str::<WhereClause>(&bounds_text);
    let bounds = bounds
        .unwrap_or_else(|_| panic!("Failed to parse user-provided where clause: `{bounds_text}`."));
    let ret = bounds.predicates.into_iter().collect();
    Some(ret)
}
// ===================
// === Entry Point ===
// ===================

/// Derives `CloneRef` implementation, refer to `crate::derive_clone_ref` for details.
///
/// Besides `CloneRef`, it also generates `impl From<&X> for X` delegating to `clone_ref`.
pub fn derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let decl = syn::parse_macro_input!(input as DeriveInput);
    let ident = &decl.ident;
    let body = match &decl.data {
        Data::Struct(data_struct) => body_for_struct(ident, data_struct),
        Data::Enum(data_enum) => body_for_enum(ident, data_enum),
        Data::Union(_) => panic!("CloneRef cannot be derived for an untagged union input."),
    };
    let (impl_generics, ty_generics, inherent_where_clause_opt) = &decl.generics.split_for_impl();
    // Where clause must contain both user-provided bounds and bounds inherent due to type
    // declaration-level where clause.
    let user_requested_bounds = decl.attrs.iter().filter_map(clone_ref_bounds).flatten();
    let mut where_clause = enso_macro_utils::new_where_clause(user_requested_bounds);
    for inherent_where_clause in inherent_where_clause_opt {
        where_clause.predicates.extend(inherent_where_clause.predicates.iter().cloned())
    }
    let output = quote! {
        impl #impl_generics CloneRef for #ident #ty_generics
        #where_clause {
            fn clone_ref(&self) -> Self {
                #body
            }
        }
        impl #impl_generics From<& #ident #ty_generics> for #ident #ty_generics
        #where_clause {
            fn from(t:& #ident #ty_generics) -> Self {
                t.clone_ref()
            }
        }
    };
    output.into()
}

View File

@ -1,44 +0,0 @@
//! This module contains the [`derive`] function (implementing the [`crate::ForEachVariant`] derive
//! macro) as well as its helper functions.
use inflector::cases::snakecase::to_snake_case;
use proc_macro2::TokenStream;
use quote::quote;
use syn::punctuated::Punctuated;
use syn::Token;
// ======================
// === ForEachVariant ===
// ======================
/// Implementation of the `ForEachVariant` derive macro. For details, see the documentation of the
/// [`crate::derive_for_each_variant`] function.
///
/// Panics when applied to anything that is not an enum.
pub fn derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let decl = syn::parse_macro_input!(input as syn::DeriveInput);
    let output = match &decl.data {
        syn::Data::Enum(data) => derive_for_enum(&decl, data),
        _ => panic!("The `ForEachVariant` derive macro only works on enums."),
    };
    output.into()
}
/// Generate the `for_each_<enum>_variant!` loop-macro for the given enum definition.
fn derive_for_enum(decl: &syn::DeriveInput, data: &syn::DataEnum) -> TokenStream {
    let enum_name = &decl.ident;
    // The macro name embeds the enum name in snake case, e.g. `for_each_foo_bar_variant`.
    let enum_snake_name = to_snake_case(&enum_name.to_string());
    let macro_name = quote::format_ident!("for_each_{}_variant", enum_snake_name);
    let variant_names: Punctuated<_, Token![,]> = data.variants.iter().map(|v| &v.ident).collect();
    quote! {
        /// Calls `f!` passing to it a comma-separated list of names of variants of [`#enum_name`]
        /// enclosed in square brackets. The extra `args` are passed to `f!` verbatim after the
        /// closing square bracket. For more details, see the documentation of the
        /// [`ForEachVariant`] derive macro.
        #[macro_export]
        macro_rules! #macro_name {
            ( $f:ident($( $args:tt )*) ) => { $f!([ #variant_names ] $($args)*) }
        }
        pub use #macro_name;
    }
}

View File

@ -1,22 +0,0 @@
use crate::prelude::*;
use syn::DeriveInput;
// ===================
// === Entry Point ===
// ===================
/// Makes sure that the structure does not derive [`Clone`] and that it implements custom [`Drop`]
/// implementation.
///
/// Emits a negative `!Clone` impl (a nightly feature) and a marker `ImplementsDrop` impl for the
/// annotated type.
pub fn derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let decl = syn::parse_macro_input!(input as DeriveInput);
    let ident = &decl.ident;
    let (impl_generics, ty_generics, _) = &decl.generics.split_for_impl();
    let output = quote! {
        impl #impl_generics !Clone for #ident #ty_generics {}
        impl #impl_generics ImplementsDrop for #ident #ty_generics {}
    };
    output.into()
}

View File

@ -1,147 +0,0 @@
//! This crate defines a custom derive macro `Iterator`. Should not be used
//! directly, but only through `enso-shapely` crate, as it provides utilities
//! necessary for the generated code to compile.
// === Features ===
#![feature(exact_size_is_empty)]
#![feature(proc_macro_span)]
#![feature(proc_macro_def_site)]
// === Non-Standard Linter Configuration ===
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(unused_qualifications)]
extern crate proc_macro;
mod before_main;
mod derive_clone_ref;
mod derive_for_each_variant;
mod derive_no_clone;
mod overlappable;
mod root_call_path;
mod tagged_enum;
/// Common names used by the implementation modules of this crate.
mod prelude {
    pub use enso_macro_utils::repr;
    pub use proc_macro2::Span;
    pub use proc_macro2::TokenStream;
    pub use quote::quote;
}
/// Derives `CloneRef` implementation for given type. It performs `clone_ref` on every member
/// field. The input type must implement `Clone` and its every field must implement `CloneRef`.
///
/// For generic types no bounds are introduced in the generated implementation. To customize this
/// behavior user might add `#[clone_ref(bound="…")]` attribute. Then the generated implementation
/// will use the provided bounds.
///
/// Moreover, for a given struct `X` this macro generates also `impl From<&X> for X` which uses
/// `CloneRef` under the hood. The semantics of `CloneRef` makes each object to naturally provide
/// transformation from reference to an owned type.
///
/// Supported inputs are structs (unit, named, unnamed), enums (with unit, named, unnamed and no
/// variants at all). Unions are currently not supported.
#[proc_macro_derive(CloneRef, attributes(clone_ref))]
pub fn derive_clone_ref(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // Thin proc-macro entry point; the implementation lives in the `derive_clone_ref` module.
    derive_clone_ref::derive(input)
}
/// Makes sure that the structure does not derive [`Clone`] and that it implements custom [`Drop`]
/// implementation.
///
/// For the given input
/// ```text
/// #[derive(NoCloneBecauseOfCustomDrop)]
/// struct Test {}
/// ```
///
/// The following output will be generated:
/// ```text
/// struct Test {}
/// impl !Clone for Test {}
/// impl ImplementsDrop for Test {}
/// ```
#[proc_macro_derive(NoCloneBecauseOfCustomDrop)]
pub fn derive_no_clone(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    derive_no_clone::derive(input)
}
/// Implements the `ForEachVariant` derive macro which creates a helper for iterating over each
/// variant of an enum at compile time. The derive panics if used on non-enum types.
///
/// The derive creates a macro (hereafter called loop-macro) named `for_each_NAME_variant` where
/// `NAME` is replaced with the name of the enum converted to snake case. The loop-macro takes a
/// name of another macro (hereafter called iterator-macro) as an argument followed by a
/// parenthesized list of extra arguments. The loop-macro expands to a call of the iterator-macro
/// with a list of comma-separated names of the enum variants wrapped in square brackets, followed
/// by the extra arguments defined above.
///
/// For example, the following code:
/// ```no_compile
/// #[derive(ForEachVariant)]
/// pub enum FooBar {
/// Foo,
/// Bar,
/// }
/// ```
/// results in the following macro being defined:
/// ```
/// #[macro_export]
/// macro_rules! for_each_foo_bar_variant {
/// ( $f:ident($( $args:tt )*) ) => { $f!([Foo, Bar] $($args)*) }
/// }
///
/// pub(crate) use for_each_foo_bar_variant;
/// ```
#[proc_macro_derive(ForEachVariant)]
pub fn derive_for_each_variant(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // Thin proc-macro entry point; the implementation lives in the `derive_for_each_variant`
    // module.
    derive_for_each_variant::derive(input)
}
/// Attribute macro stub; see the `overlappable` module. NOTE(review): its current implementation
/// discards the annotated impl entirely — confirm intended before use.
#[allow(missing_docs)]
#[proc_macro_attribute]
pub fn overlappable(
    attrs: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    overlappable::overlappable(attrs, input)
}
/// Transforms Rust enums into enums where each variant is a separate type. It also implements
/// several traits (such as conversions between variants and the enum type) and defines utility
/// functions, such as constructors. See [`tagged_enum::run`] to learn more.
#[proc_macro_attribute]
pub fn tagged_enum(
    attr: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    // Thin proc-macro entry point; the implementation lives in the `tagged_enum` module.
    tagged_enum::run(attr, input)
}
/// A macro allowing running functions after WASM initialization, before the main function. In order
/// to run a function before main, simply use this attribute (please note that the function has to
/// be public, as otherwise it can't be exported to WASM):
///
/// ```text
/// #[before_main]
/// pub fn any_name {
/// println!("I'm running before main!");
/// }
/// ```
#[proc_macro_attribute]
pub fn before_main(
    attr: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    // Thin proc-macro entry point; the implementation lives in the `before_main` module.
    before_main::run(attr, input)
}
/// Macro reporting the root call path of itself. If it was used inside another macro "A", the
/// reported path will be the place where "A" was called. Expands to a string literal; the input
/// tokens are ignored.
#[proc_macro]
pub fn root_call_path(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    root_call_path::run(input)
}

View File

@ -1,49 +0,0 @@
use crate::prelude::*;
use proc_macro2::Ident;
/// Implementation of the `overlappable` attribute.
///
/// NOTE(review): this function parses the impl, renames the trait segment to `MarketCtx_<name>`
/// into `_output_tmp`, and then emits an EMPTY token stream — the annotated impl is removed from
/// the output. The commented-out code below suggests unfinished work; confirm intent before
/// relying on this macro.
pub fn overlappable(
    attrs: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
    let _attrs: TokenStream = attrs.into();
    let decl = syn::parse_macro_input!(input as syn::ItemImpl);
    // let mut path = decl.trait_.unwrap().1.clone();
    // let path = path.segments.last_mut().iter().map(|ident| {
    //     Ident::new(&format!("MarketCtx_{}", repr(ident)) , Span::call_site());
    // });
    let mut marker_ctx_impl = decl;
    let mut trait_ = marker_ctx_impl.trait_.as_mut();
    // Rename the implemented trait's last path segment to `MarketCtx_<original repr>`.
    trait_.iter_mut().for_each(|t| {
        let path = &mut t.1;
        path.segments.last_mut().iter_mut().for_each(|s| {
            let rr = repr(&s);
            s.ident = Ident::new(&format!("MarketCtx_{rr}"), Span::call_site());
        });
    });
    // let mut marker_ctx_impl = decl.clone();
    // let path = &mut marker_ctx_impl.trait_.as_mut().unwrap().1;
    // path.segments.last_mut().iter_mut().for_each(|s| {
    //     let rr = repr(&s);
    //     s.ident = Ident::new(&format!("MarketCtx_{}", rr) , Span::call_site());
    // });
    // let name = repr(path);
    // let marker_ctx_impl = syn::ItemImpl {
    //     .. decl
    // };
    let _output_tmp = quote! {
        #marker_ctx_impl
    };
    // The transformed impl is intentionally NOT emitted (see NOTE above).
    let output = quote! {};
    // println!("------------------");
    // println!("{}", output_tmp);
    output.into()
}

View File

@ -1,32 +0,0 @@
//! Macro reporting the root call path of itself. If it was used inside another macro "A", the
//! reported path will be the place where "A" was called.
use crate::prelude::*;
// ========================
// === Main entry point ===
// ========================
/// Get the root call path of the call side at compile time.
pub fn root_call_path() -> String {
    // Walk up through nested macro expansions to the outermost call site.
    let mut site = proc_macro::Span::call_site();
    while let Some(outer) = site.parent() {
        site = outer;
    }
    let file = site.source_file().path().to_str().unwrap_or_default().to_string();
    let position = site.start();
    format!("{file}:{}:{}", position.line(), position.column())
}
/// Macro reporting the root call path of itself. If it was used inside another macro "A", the
/// reported path will be the place where "A" was called.
pub fn run(_input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let path = root_call_path();
    quote! { #path }.into()
}

View File

@ -1,45 +0,0 @@
/// Computes a cartesian product of the provided input.
///
/// The following expression:
/// ```text
/// cartesian!([macro_path::f [args]] [a b c] [x y z]);
/// ```
///
/// ... expands to:
/// ```text
/// macro_path::f! { [args] [ [a x] [a y] [a z] [b x] [b y] [b z] [c x] [c y] [c z] ] }
/// ```
///
/// The `[args]` part is optional. The following expression:
///
/// ```text
/// cartesian!([macro_path::f] [a b c] [x y z]);
/// ```
///
/// ... expands to:
/// ```text
/// macro_path::f! { [ [a x] [a y] [a z] [b x] [b y] [b z] [c x] [c y] [c z] ] }
/// ```
#[macro_export]
macro_rules! cartesian {
    // Delegate to the recursive helper. The second list is passed twice: one copy is consumed
    // while pairing, the other is kept pristine so it can be replayed for every element of the
    // first list.
    ($f:tt [$($a:tt)*] [$($b:tt)*]) => {
        $crate::_cartesian_impl!{ $f [] [$($a)*] [$($b)*] [$($b)*] }
    };
}
/// Internal helper for `cartesian` macro. The state is
/// `$f $accumulated_pairs $remaining_a $remaining_b $pristine_b`.
#[macro_export]
macro_rules! _cartesian_impl {
    // Done (no extra args): invoke the target macro with the accumulated pairs.
    ([$f:path] $out:tt [] $b:tt $init_b:tt) => {
        $f!{ $out }
    };
    // Done (with extra args): prepend the args before the accumulated pairs.
    ([$f:path [$($args:tt)*]] $out:tt [] $b:tt $init_b:tt) => {
        $f!{ [$($args)*] $out }
    };
    // Current `a` exhausted its `b` list: drop it and restart `b` from the pristine copy.
    ($f:tt $out:tt [$a:tt $($at:tt)*] [] $init_b:tt) => {
        $crate::_cartesian_impl!{ $f $out [$($at)*] $init_b $init_b }
    };
    // Emit the pair `[$a $b]` and continue with the rest of `b`.
    ($f:tt [$($out:tt)*] [$a:tt $($at:tt)*] [$b:tt $($bt:tt)*] $init_b:tt) => {
        $crate::_cartesian_impl!{ $f [$($out)* [$a $b]] [$a $($at)*] [$($bt)*] $init_b }
    };
}

View File

@ -1,110 +0,0 @@
// ==============
// === Export ===
// ==============
pub use crate::CloneRef;
pub use crate::NoCloneBecauseOfCustomDrop;
// ================
// === CloneRef ===
// ================
/// Clone for internal-mutable structures. This trait can be implemented only if mutating one
/// structure will be reflected in all of its clones. Please note that it does not mean that all the
/// fields needs to provide internal mutability as well. For example, a structure can remember it's
/// creation time and store it as `f32`. As long as it cannot be mutated, the structure can
/// implement `CloneRef`. In order to guide the auto-deriving mechanism, it is advised to wrap all
/// immutable fields in the `Immutable` newtype.
pub trait CloneRef: Sized + Clone {
    /// Clone the value such that the clone shares (refers to) the same underlying data.
    fn clone_ref(&self) -> Self;
}
// === Macros ===

/// Implements [`CloneRef`] (and a matching `From<&T>`) for the given type by delegating to
/// [`Clone`]. The optional leading `[bounds]` token list is spliced into the impl generics.
#[macro_export]
macro_rules! impl_clone_ref_as_clone {
    ([$($bounds:tt)*] $($toks:tt)*) => {
        impl <$($bounds)*> CloneRef for $($toks)* {
            fn clone_ref(&self) -> Self {
                self.clone()
            }
        }

        impl <$($bounds)*> From<&$($toks)*> for $($toks)* {
            fn from(t:&$($toks)*) -> Self {
                t.clone_ref()
            }
        }
    };

    ($($toks:tt)*) => {
        impl CloneRef for $($toks)* {
            fn clone_ref(&self) -> Self {
                self.clone()
            }
        }

        impl From<&$($toks)*> for $($toks)* {
            fn from(t:&$($toks)*) -> Self {
                t.clone_ref()
            }
        }
    };
}
/// Same as [`impl_clone_ref_as_clone`], but without the `From<&T>` implementation. Useful for
/// foreign types where the `From` impl would violate the orphan rules.
#[macro_export]
macro_rules! impl_clone_ref_as_clone_no_from {
    ([$($bounds:tt)*] $($toks:tt)*) => {
        impl <$($bounds)*> CloneRef for $($toks)* {
            fn clone_ref(&self) -> Self {
                self.clone()
            }
        }
    };

    ($($toks:tt)*) => {
        impl CloneRef for $($toks)* {
            fn clone_ref(&self) -> Self {
                self.clone()
            }
        }
    };
}
// === Prim Impls ===

// Primitive and std/web types for which `clone_ref` is simply `clone` (either `Copy` types or
// reference-counted/JS-handle types whose `clone` already shares the underlying data).
impl_clone_ref_as_clone_no_from!(());
impl_clone_ref_as_clone_no_from!(f32);
impl_clone_ref_as_clone_no_from!(f64);
impl_clone_ref_as_clone_no_from!(i32);
impl_clone_ref_as_clone_no_from!(i64);
impl_clone_ref_as_clone_no_from!(u32);
impl_clone_ref_as_clone_no_from!(u64);
impl_clone_ref_as_clone_no_from!(usize);
impl_clone_ref_as_clone_no_from!(std::any::TypeId);
impl_clone_ref_as_clone_no_from!([T] std::marker::PhantomData<T>);
impl_clone_ref_as_clone_no_from!([T] enso_zst::ZST<T>);
impl_clone_ref_as_clone_no_from!([T:?Sized] std::rc::Rc<T>);
impl_clone_ref_as_clone_no_from!([T:?Sized] std::rc::Weak<T>);
impl_clone_ref_as_clone_no_from!(wasm_bindgen::JsValue);
impl_clone_ref_as_clone_no_from!(web_sys::Element);
impl_clone_ref_as_clone_no_from!(web_sys::HtmlDivElement);
impl_clone_ref_as_clone_no_from!(web_sys::HtmlElement);
impl_clone_ref_as_clone_no_from!(web_sys::Performance);
impl_clone_ref_as_clone_no_from!(web_sys::WebGl2RenderingContext);
impl_clone_ref_as_clone_no_from!(web_sys::HtmlCanvasElement);
impl_clone_ref_as_clone_no_from!(web_sys::EventTarget);
// === Option ===
impl<T: CloneRef> CloneRef for Option<T> {
fn clone_ref(&self) -> Self {
self.as_ref().map(|t| t.clone_ref())
}
}

View File

@ -1,207 +0,0 @@
// README README README README README README README README README README README
// README README README README README README README README README README README
// README README README README README README README README README README README
// This library is in a very early stage. It will be refactored and improved
// soon. It should not be reviewed now.
// === Features ===
#![feature(type_ascription)]
#![feature(marker_trait_attr)]
// ==============
// === Export ===
// ==============
pub mod cartesian;
pub mod clone_ref;
pub mod shared;
pub mod singleton;
pub use enso_shapely_macros::*;
/// A macro which passes its input to its output unchanged. Useful as a no-op transformation in
/// higher-order macro plumbing.
#[macro_export]
macro_rules! identity {
    ($($ts:tt)*) => {$($ts)*}
}
/// Replaces the first argument with the second one. It is useful when creating macros which match
/// a pattern and you want to generate as many repetitions of a token as there was matches. For
/// example, when matching `$($name:ident)*`, you may want to generate as many empty tuples as
/// the number of names matched. You can do it by using `$(replace!{$name,()})*`.
#[macro_export]
macro_rules! replace {
    // `$a` is discarded; only the replacement tokens are emitted.
    ($a:tt,$($b:tt)*) => {$($b)*}
}
/// The same as [`newtype_prim`] but does not generate derive clauses. For each `Name(Type);`
/// entry, it generates the newtype struct, a `new` constructor, `checked_sub`,
/// `Deref`/`DerefMut`, `From` conversions in both directions (by value and by reference), and a
/// transparent `Debug` impl.
#[macro_export]
macro_rules! newtype_prim_no_derives {
    ($( $(#$meta:tt)* $name:ident($type:ty); )*) => {$(
        $(#$meta)*
        pub struct $name {
            raw:$type
        }

        impl $name {
            /// Constructor.
            pub const fn new(raw:$type) -> Self {
                Self {raw}
            }

            /// Checked subtraction. Returns [`None`] if the result would be negative.
            pub fn checked_sub(self, rhs: Self) -> Option<Self> {
                self.raw.checked_sub(rhs.raw).map(Self::new)
            }
        }

        impl Deref for $name {
            type Target = $type;
            fn deref(&self) -> &Self::Target {
                &self.raw
            }
        }

        impl DerefMut for $name {
            fn deref_mut(&mut self) -> &mut Self::Target {
                &mut self.raw
            }
        }

        impl From<$type> for $name { fn from(t:$type) -> Self { Self::new(t) } }
        impl From<&$type> for $name { fn from(t:&$type) -> Self { Self::new(*t) } }
        impl From<&&$type> for $name { fn from(t:&&$type) -> Self { Self::new(**t) } }

        impl From<$name> for $type { fn from(t:$name) -> Self { t.raw } }
        impl From<&$name> for $type { fn from(t:&$name) -> Self { t.raw } }
        impl From<&&$name> for $type { fn from(t:&&$name) -> Self { t.raw } }

        // Debug prints as a tuple struct (`Name(raw)`), hiding the `raw` field name.
        impl fmt::Debug for $name {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.debug_tuple(stringify!($name))
                    .field(&self.raw)
                    .finish()
            }
        }
    )*}
}
/// Generates a newtype wrapper for the provided types. It also generates a lot of impls,
/// including Copy, Clone, Debug, Default, Display, From, Into, Deref, and DerefMut.
///
/// For the following input:
/// ```text
/// newtype_prim! {
/// AttributeIndex(usize);
/// }
/// ```
///
/// The following code is generated:
/// ```text
/// #[derive(Copy, Clone, CloneRef, Debug, Default, Display, Eq, Hash, Ord, PartialOrd, PartialEq)]
/// pub struct AttributeIndex {
/// raw: usize,
/// }
/// impl AttributeIndex {
/// /// Constructor.
/// pub fn new(raw: usize) -> Self {
/// Self { raw }
/// }
/// }
/// impl Deref for AttributeIndex {
/// type Target = usize;
/// fn deref(&self) -> &Self::Target {
/// &self.raw
/// }
/// }
/// impl DerefMut for AttributeIndex {
/// fn deref_mut(&mut self) -> &mut Self::Target {
/// &mut self.raw
/// }
/// }
/// impl From<usize> for AttributeIndex {
/// fn from(t: usize) -> Self {
/// Self::new(t)
/// }
/// }
/// impl From<&usize> for AttributeIndex {
/// fn from(t: &usize) -> Self {
/// Self::new(*t)
/// }
/// }
/// impl From<&&usize> for AttributeIndex {
/// fn from(t: &&usize) -> Self {
/// Self::new(**t)
/// }
/// }
/// impl From<AttributeIndex> for usize {
/// fn from(t: AttributeIndex) -> Self {
/// t.raw
/// }
/// }
/// impl From<&AttributeIndex> for usize {
/// fn from(t: &AttributeIndex) -> Self {
/// t.raw
/// }
/// }
/// impl From<&&AttributeIndex> for usize {
/// fn from(t: &&AttributeIndex) -> Self {
/// t.raw
/// }
/// }
/// ```
#[macro_export]
macro_rules! newtype_prim {
    // Attach the full derive set, then delegate the code generation to
    // `newtype_prim_no_derives`.
    ($( $(#$meta:tt)* $name:ident($type:ty); )*) => {
        $crate::newtype_prim_no_derives! {
            $(
                $(#$meta)*
                #[derive(Copy,Clone,CloneRef,Default,Display,Eq,Hash,Ord,PartialOrd,PartialEq)]
                $name($type);
            )*
        }
    }
}
/// The same as [`newtype_prim`] but does not generate [`Default`] derive clause.
#[macro_export]
macro_rules! newtype_prim_no_default {
    ($( $(#$meta:tt)* $name:ident($type:ty); )*) => {
        $crate::newtype_prim_no_derives! {
            $(
                $(#$meta)*
                #[derive(Copy,Clone,CloneRef,Display,Eq,Hash,Ord,PartialOrd,PartialEq)]
                $name($type);
            )*
        }
    }
}
/// The same as [`newtype_prim`] but does not generate [`Default`] and [`Display`] derive clauses.
#[macro_export]
macro_rules! newtype_prim_no_default_no_display {
    ($( $(#$meta:tt)* $name:ident($type:ty); )*) => {
        $crate::newtype_prim_no_derives! {
            $(
                $(#$meta)*
                #[derive(Copy,Clone,CloneRef,Eq,Hash,Ord,PartialOrd,PartialEq)]
                $name($type);
            )*
        }
    }
}
/// Implements `From<&T>` for `$name` for every `T` that is `Clone` and convertible into `$name`,
/// by cloning the reference and converting the owned value.
#[macro_export]
macro_rules! derive_clone_plus {
    ($name:ident) => {
        impl<T: Clone + Into<$name>> From<&T> for $name {
            fn from(t: &T) -> Self {
                t.clone().into()
            }
        }
    };
}

View File

@ -1,494 +0,0 @@
/// This module implements the `shared` macro, an utility allowing for easy definition of
/// `Rc<RefCell<...>>` wrappers.
/// This macro provides an easy way to define secure `Rc<RefCell<...>>` wrappers for a given struct.
///
/// This macro accepts a body which is very similar to normal struct definition. There are a few
/// notable differences:
/// - The first token this macro accepts should be the name of the wrapped structure.
/// - The implementation block does not have a name. It is always implemented for the struct. You
/// are allowed to provide multiple impl blocks.
///
/// This macro traverses the definition and for each function, it generates a borrowing counterpart.
/// It also handles the `new` function in a special way. Please note, that this macro generates
/// only safe bindings. If your original function returns a reference, the generated code will fail.
/// If you want to return references with some custom guard system, implement that outside of this
/// macro usage.
///
/// For the given input:
/// ```text
/// shared! { Uniform
///
/// #[derive(Clone,Copy,Debug)]
/// pub struct UniformData<Value> {
/// value: Value,
/// dirty: bool,
/// }
///
/// impl<Value:UniformValue> {
/// /// Constructor.
/// pub fn new(value:Value) -> Self {
/// let dirty = false;
/// Self {value,dirty}
/// }
///
/// /// Checks whether the uniform was changed and not yet updated.
/// pub fn check_dirty(&self) -> bool {
/// self.dirty
/// }
///
/// /// Modifies the value stored by the uniform.
/// pub fn modify<F:FnOnce(&mut Value)>(&mut self, f:F) {
/// self.set_dirty();
/// f(&mut self.value);
/// }
/// }}
/// ```
///
/// The following output will be generated:
///
/// ```text
/// #[derive(Clone,Copy,Debug)]
/// pub struct UniformData<Value> {
/// value: Value,
/// dirty: bool,
/// }
///
/// impl<Value:UniformValue> for UniformData<Value> {
/// #[doc = r###"Constructor."###]
/// pub fn new(value:Value) -> Self {
/// let dirty = false;
/// Self {value,dirty}
/// }
///
/// #[doc = r###"Checks whether the uniform was changed and not yet updated."###]
/// pub fn check_dirty(&self) -> bool {
/// self.dirty
/// }
///
/// #[doc = r###"Modifies the value stored by the uniform."###]
/// pub fn modify<F:FnOnce(&mut Value)>(&mut self, f:F) {
/// self.set_dirty();
/// f(&mut self.value);
/// }
/// }
///
/// #[derive(Clone,Copy,Debug)]
/// pub struct Uniform<Value> {
/// rc: Rc<RefCell<UniformData<Value>>>
/// }
///
/// impl<Value:UniformValue> for Uniform<Value> {
/// #[doc = r###"Constructor."###]
/// pub fn new(value:Value) -> Self {
/// let rc = Rc::new(RefCell::new(UniformData::new(value)));
/// Self {rc}
/// }
///
/// #[doc = r###"Checks whether the uniform was changed and not yet updated."###]
/// pub fn check_dirty(&self) -> bool {
/// self.rc.borrow.check_dirty()
/// }
///
/// #[doc = r###"Modifies the value stored by the uniform."###]
/// pub fn modify<F:FnOnce(&mut Value)>(&self, f:F) {
/// self.borrow_mut().modify(f)
/// }
/// }
/// ```
///
/// **Note**
/// Both the implementation as well as usage syntax of this macro will be nicer if it was
/// implemented as procedural macro. However, no IDE supports expansion of procedural macros
/// currently, so it was implemented as macro rules instead.
#[macro_export]
macro_rules! shared {
    // Normalize `<...>` generic brackets into `[...]` first, then hand the body off to
    // `shared_bracket` (via `shared_bracket_normalized`) for code generation.
    ($name:ident $($in:tt)*) => {
        $crate::angles_to_brackets_shallow! { shared_bracket [$name] $($in)* }
    }
}
/// Generates the two impl blocks for a normalized `shared!` definition: the original inherent
/// impl for the data struct (`$name_mut`), and a delegating impl for the `Rc<RefCell<_>>`
/// wrapper (`$name`) built method-by-method via [`shared_bracket_fn`].
#[macro_export]
macro_rules! shared_bracket_impl {
    ([impl [$($impl_params:tt)*] $name:ident $name_mut:ident $([$($params:tt)*])?] [
        $(
            $(#[$($meta:tt)*])*
            $acc:vis fn $fn_name:ident
            $([$($fn_params:tt)*])? ($($fn_args:tt)*) $(-> $fn_type:ty)? $(where $($wt1:ty : $wt2:path),* )? {
                $($fn_body:tt)*
            }
        )*
    ]) => {
        // The data struct keeps the methods verbatim.
        impl <$($impl_params)*> $name_mut $(<$($params)*>)? {
            $(
                $(#[$($meta)*])*
                $acc fn $fn_name $(<$($fn_params)*>)*
                ($($fn_args)*) $(-> $fn_type)? $(where $($wt1 : $wt2),* )? {$($fn_body)*}
            )*
        }
        // The wrapper gets a borrowing counterpart of each method.
        impl <$($impl_params)*> $name $(<$($params)*>)? {
            $($crate::shared_bracket_fn! {
                $name_mut :: $(#[$($meta)*])*
                $acc fn $fn_name [$($($fn_params)*)*] ($($fn_args)*) $(-> $fn_type)? $(where $($wt1 : $wt2),* )?
            })*
        }
    };
}
#[macro_export]
macro_rules! shared_bracket_fn {
( $base:ident :: $(#[$($meta:tt)*])* $acc:vis fn new $([$($params:tt)*])?
($($arg:ident : $arg_type:ty),*) $(-> $type:ty)? $(where $($wt1:ty : $wt2:path),* )? ) => {
$(#[$($meta)*])*
$acc fn new $(<$($params)*>)* ($($arg : $arg_type),*) $(-> $type)? $(where $($wt1 : $wt2),* )? {
Self { rc: Rc::new(RefCell::new($base::new($($arg),*))) }
}
};
( $base:ident :: $(#[$($meta:tt)*])* $acc:vis fn $name:ident $([$($params:tt)*])?
(&self $(,$($arg:ident : $arg_type:ty),+)?) $(-> $type:ty)? $(where $($wt1:ty : $wt2:path),* )? ) => {
$(#[$($meta)*])*
$acc fn $name $(<$($params)*>)* (&self $(,$($arg : $arg_type),*)?) $(-> $type)? $(where $($wt1 : $wt2),* )? {
self.rc.borrow().$name($($($arg),*)?)
}
};
( $base:ident :: $(#[$($meta:tt)*])* $acc:vis fn $name:ident $([$($params:tt)*])?
(&mut self $(,$($arg:ident : $arg_type:ty),+)?) $(-> $type:ty)? $(where $($wt1:ty : $wt2:path),* )? ) => {
$(#[$($meta)*])*
$acc fn $name $(<$($params)*>)* (&self $(,$($arg : $arg_type),*)?) $(-> $type)? $(where $($wt1 : $wt2),* )? {
self.rc.borrow_mut().$name($($($arg),*)?)
}
};
}
#[macro_export]
macro_rules! shared_bracket_normalized {
( [$name:ident] [
$(#[$($meta:tt)*])*
$(##[$($imeta:tt)*])*
pub struct $name_mut:ident $params:tt {
$($(#[$($field_meta:tt)*])* $field:ident : $field_type:ty),* $(,)?
}
$(impl $([$($impl_params:tt)*])? {$($impl_body:tt)*})*
]) => {
$crate::shared_struct! {
$(#[$($meta)*])*
$(##[$($imeta)*])*
pub struct $name $name_mut $params {
$($(#[$($field_meta)*])* $field : $field_type),*
}
}
$($crate::angles_to_brackets_shallow! {shared_bracket_impl
[impl [$($($impl_params)*)?] $name $name_mut $params] $($impl_body)*
})*
};
}
#[macro_export]
macro_rules! shared_struct {
(
$(#[$($meta:tt)*])*
$(##[$($imeta:tt)*])*
pub struct $name:ident $name_mut:ident [$($params:tt)*] {
$($(#[$($field_meta:tt)*])* $field:ident : $field_type:ty),* $(,)?
}
) => {
$(#[$($meta)*])*
#[derive(CloneRef)]
pub struct $name <$($params)*> { rc: Rc<RefCell<$name_mut<$($params)*>>> }
$(#[$($meta)*])*
$(#[$($imeta)*])*
pub struct $name_mut <$($params)*> { $($(#[$($field_meta)*])* $field : $field_type),* }
impl<$($params)*> Clone for $name <$($params)*> {
fn clone(&self) -> Self {
let rc = self.rc.clone();
Self {rc}
}
}
paste! {
$(#[$($meta)*])*
#[derive(CloneRef)]
pub struct [<Weak $name>] <$($params)*> { weak: Weak<RefCell<$name_mut<$($params)*>>> }
impl<$($params)*> Clone for [<Weak $name>] <$($params)*> {
fn clone(&self) -> Self {
let weak = self.weak.clone();
Self {weak}
}
}
impl<$($params)*> [<Weak $name>] <$($params)*> {
/// Attempts to upgrade the weak pointer to an rc, delaying dropping of the inner
/// value if successful.
pub fn upgrade(&self) -> Option<$name <$($params)*>> {
self.weak.upgrade().map(|rc| $name {rc})
}
}
impl<$($params)*> WeakElement for [<Weak $name>] <$($params)*> {
type Strong = $name <$($params)*> ;
fn new(view: &Self::Strong) -> Self {
view.downgrade()
}
fn view(&self) -> Option<Self::Strong> {
self.upgrade()
}
fn is_expired(&self) -> bool {
self.weak.is_expired()
}
fn clone(view: &Self::Strong) -> Self::Strong where Self: Sized {
view.clone()
}
}
impl<$($params)*> $name <$($params)*> {
/// Downgrade the reference to weak ref.
pub fn downgrade(&self) -> [<Weak $name>] <$($params)*> {
let weak = Rc::downgrade(&self.rc);
[<Weak $name>] {weak}
}
/// Call operation with borrowed data. Should be use in implementation of wrapper
/// only.
fn with_borrowed<F,R>(&self, operation:F) -> R
where F : FnOnce(&mut $name_mut<$($params)*>) -> R {
operation(&mut self.rc.borrow_mut())
}
/// Wraps given data object into a shared handle.
pub fn new_from_data(data:$name_mut<$($params)*>) -> Self {
Self {rc:Rc::new(RefCell::new(data))}
}
/// Check if the shared pointer points to the same struct as `other`.
pub fn identity_equals(&self, other:&Self) -> bool {
Rc::ptr_eq(&self.rc,&other.rc)
}
}
}
};
}
#[macro_export]
macro_rules! angles_to_brackets_shallow {
($f:ident $f_arg:tt $($in:tt)*) => {
$crate::_angles_to_brackets_shallow! { $f $f_arg [] [] [] $($in)* }
}
}
#[macro_export]
macro_rules! _angles_to_brackets_shallow {
( $f:ident $f_arg:tt [] [$($out:tt)*] [] ) => { $crate::$f! { $f_arg [$($out)*] } };
( $f:ident $f_arg:tt [] [$($out:tt)*] [$($cout:tt)*] ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* $($cout)*] [] } };
( $f:ident $f_arg:tt [] [$($out:tt)*] [$($cout:tt)*] < $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [.] [$($out)* $($cout)*] [] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] << $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. .] $out [$($cout)* <] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] <<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. . .] $out [$($cout)* <<] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] <<<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. . . .] $out [$($cout)* <<<] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] <<<<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. . . . .] $out [$($cout)* <<<<] $($rest)* } };
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] < $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* .] $out [$($cout)* <] $($rest)* } };
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] << $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* . .] $out [$($cout)* <<] $($rest)* } };
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] <<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* . . .] $out [$($cout)* <<<] $($rest)* } };
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] <<<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* . . . .] $out [$($cout)* <<<<] $($rest)* } };
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] <<<<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* . . . . .] $out [$($cout)* <<<<<] $($rest)* } };
( $f:ident $f_arg:tt [. $($depth:tt)*] $out:tt [$($cout:tt)*] -> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. $($depth)*] $out [$($cout)* ->] $($rest)* } };
( $f:ident $f_arg:tt [.] [$($out:tt)*] $cout:tt > $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* $cout] [] $($rest)* } };
( $f:ident $f_arg:tt [. .] [$($out:tt)*] [$($cout:tt)*] >> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* [$($cout)* >]] [] $($rest)* } };
( $f:ident $f_arg:tt [. . .] [$($out:tt)*] [$($cout:tt)*] >>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* [$($cout)* >>]] [] $($rest)* } };
( $f:ident $f_arg:tt [. . . .] [$($out:tt)*] [$($cout:tt)*] >>>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* [$($cout)* >>>]] [] $($rest)* } };
( $f:ident $f_arg:tt [. . . . .] [$($out:tt)*] [$($cout:tt)*] >>>>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* [$($cout)* >>>>]] [] $($rest)* } };
( $f:ident $f_arg:tt [. $($depth:tt)*] $out:tt [$($cout:tt)*] > $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >] $($rest)* } };
( $f:ident $f_arg:tt [. . $($depth:tt)*] $out:tt [$($cout:tt)*] >> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >>] $($rest)* } };
( $f:ident $f_arg:tt [. . . $($depth:tt)*] $out:tt [$($cout:tt)*] >>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >>>] $($rest)* } };
( $f:ident $f_arg:tt [. . . . $($depth:tt)*] $out:tt [$($cout:tt)*] >>>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >>>>] $($rest)* } };
( $f:ident $f_arg:tt [. . . . . $($depth:tt)*] $out:tt [$($cout:tt)*] >>>>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >>>>>] $($rest)* } };
// Function output handling
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt $t27:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 $t27 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt $t27:tt $t28:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 $t27 $t28 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt $t27:tt $t28:tt $t29:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 $t27 $t28 $t29 {$($b)*}] $($rest)* } };
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt $t27:tt $t28:tt $t29:tt $t30:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 $t27 $t28 $t29 $t30 {$($b)*}] $($rest)* } };
// Any token handling
( $f:ident $f_arg:tt $depth:tt $out:tt [$($cout:tt)*] $t:tt $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg $depth $out [$($cout)* $t] $($rest)* } };
}
#[macro_export]
macro_rules! shared_bracket {
([$name:ident] [$($in:tt)*]) => {
$crate::normalize_input! { shared_bracket_normalized [$name] $($in)* }
}
}
#[macro_export]
macro_rules! normalize_input {
($f:ident $f_args:tt $($in:tt)*) => {
$crate::_normalize_input! { $f $f_args [] $($in)* }
}
}
#[macro_export]
macro_rules! _normalize_input {
// Finish.
( $f:ident $f_args:tt $out:tt ) => {
$crate::$f! { $f_args $out }
};
// Structs.
( $f:ident $f_args:tt [$($out:tt)*]
$(#[$($meta:tt)*])*
pub struct $name:tt $([$($params:tt)*])? {$($body:tt)*}
$($rest:tt)*
) => {
$crate::_normalize_input! { $f $f_args
[$($out)*
$(#[$($meta)*])*
pub struct $name [$($($params)*)?] {$($body)*}
] $($rest)* }
};
// Any token.
( $f:ident $f_args:tt [$($out:tt)*] $in:tt $($rest:tt)* ) => {
$crate::_normalize_input! { $f $f_args [$($out)* $in] $($rest)* }
};
}
/// New version of [`shared`] - faster, understood by IntelliJ, but not yet covering all cases yet
/// (like where contexts in functions). These cases can be added in the future.
#[macro_export]
macro_rules! shared2 {
($name:ident
$(#$data_meta:tt)*
pub struct $data_name:ident $body:tt
impl {$(
$(#$fn_meta:tt)*
$fn_vis:vis fn $fn_name:ident $(<$($fn_param:ident : $fn_param_ty:ty),*>)?
($($fn_args:tt)*) $(-> $fn_out:ty)? { $($fn_body:tt)* }
)*}
) => {
$(#$data_meta)*
pub struct $data_name $body
#[derive(Clone, CloneRef)]
$(#$data_meta)*
pub struct $name {
rc: Rc<RefCell<$data_name>>
}
$(
$crate::shared_fn2!{ $name $data_name
[[$(#$fn_meta)*] $fn_vis [<$($($fn_param : ($fn_param_ty)),*)?>]] $fn_name
($($fn_args)*) ($($fn_args)*) [-> ($($fn_out)?)] { $($fn_body)* }
}
)*
};
}
#[macro_export]
macro_rules! shared_fn2 {
($name:ident $data_name:ident
$fn_sig:tt $fn_name:ident
(&self $(,$($fn_arg:ident : $fn_arg_ty:ty),*)?) $fn_all_args:tt $fn_out:tt $fn_body:tt
) => {
impl $data_name {
$crate::shared_fn_flatten2! { $fn_sig $fn_name $fn_all_args $fn_out $fn_body }
}
impl $name {
$crate::shared_fn_flatten2! {
$fn_sig $fn_name (&self $(,$($fn_arg : $fn_arg_ty),*)?) $fn_out {
self.rc.borrow().$fn_name($($($fn_arg),*)?)
}
}
}
};
($name:ident $data_name:ident
$fn_sig:tt $fn_name:ident
(&mut self $(,$($fn_arg:ident : $fn_arg_ty:ty),*)?) $fn_all_args:tt $fn_out:tt $fn_body:tt
) => {
impl $data_name {
$crate::shared_fn_flatten2! { $fn_sig $fn_name $fn_all_args $fn_out $fn_body }
}
impl $name {
$crate::shared_fn_flatten2! {
$fn_sig $fn_name (&self $(,$($fn_arg : $fn_arg_ty),*)?) $fn_out {
self.rc.borrow_mut().$fn_name($($($fn_arg),*)?)
}
}
}
};
($name:ident $data_name:ident
$fn_sig:tt new ($($fn_arg:ident : $fn_arg_ty:ty),*) $fn_all_args:tt $fn_out:tt $fn_body:tt
) => {
impl $data_name {
$crate::shared_fn_flatten2! { $fn_sig new $fn_all_args $fn_out $fn_body }
}
impl $name {
$crate::shared_fn_flatten2! { $fn_sig new $fn_all_args $fn_out {
Self { rc: Rc::new(RefCell::new($data_name::new ($($fn_arg),*))) }
} }
}
};
($name:ident $data_name:ident
$fn_sig:tt $fn_name:ident ($($fn_args2:tt)*) $fn_all_args:tt $fn_out:tt $fn_body:tt
) => {
impl $data_name {
$crate::shared_fn_flatten2! { $fn_sig $fn_name $fn_all_args $fn_out $fn_body }
}
};
}
#[macro_export]
macro_rules! shared_fn_flatten2 {
(
[ [$($fn_meta:tt)*] $fn_vis:vis [$($fn_params:tt)*] ]
$fn_name:ident ($($fn_args:tt)*) [-> $fn_out:tt] {
$($fn_body:tt)*
}
) => {
$($fn_meta)*
$fn_vis fn $fn_name $($fn_params)* ($($fn_args)*) -> $fn_out { $($fn_body)* }
};
}

View File

@ -1,175 +0,0 @@
//! This module defines helpers for defining singletons and associated enum types. A singleton is
//! a type with one possible value. It is used mainly for a type level programming purposes.
/// Defines singleton types. For the following input:
/// ```text
/// define_singletons!{
/// /// A Foo!
/// Foo,
/// /// A Bar!
/// Bar,
/// }
/// ```
///
/// It expands to:
///
/// ```
/// #[allow(missing_docs)]
/// #[derive(Copy, Clone, Debug, PartialEq, Eq)]
/// #[doc = r###"A Foo!"###]
/// pub struct Foo;
/// impl Default for Foo {
/// fn default() -> Self {
/// Self
/// }
/// }
/// #[allow(missing_docs)]
/// #[derive(Copy, Clone, Debug, PartialEq, Eq)]
/// #[doc = r###"A Bar!"###]
/// pub struct Bar;
/// impl Default for Bar {
/// fn default() -> Self {
/// Self
/// }
/// }
/// ```
#[macro_export]
macro_rules! define_singletons {
( $( $(#$meta:tt)* $name:ident ),* $(,)? ) => {$(
#[allow(missing_docs)]
#[derive(Copy,Clone,Debug,PartialEq,Eq)]
$(#$meta)*
pub struct $name;
impl Default for $name {
fn default() -> Self {
Self
}
}
)*}
}
/// Defines an associated enum type for predefined singletons.
///
/// For the following input:
/// ```text
/// define_singleton_enum!{
/// MyEnum {
/// /// A Foo!
/// Foo,
/// /// A Bar!
/// Bar,
/// }
/// }
/// ```
///
/// It expands to:
///
/// ```text
/// #[allow(missing_docs)]
/// #[derive(Copy, Clone, Debug, PartialEq, Eq)]
/// pub enum MyEnum {
/// #[doc = r###"A Foo!"###]
/// Foo,
/// #[doc = r###"A Bar!"###]
/// Bar,
/// }
/// impl From<Foo> for MyEnum {
/// fn from(_: Foo) -> Self {
/// Self::Foo
/// }
/// }
/// impl From<ZST<Foo>> for MyEnum {
/// fn from(_: ZST<Foo>) -> Self {
/// Self::Foo
/// }
/// }
/// impl From<Bar> for MyEnum {
/// fn from(_: Bar) -> Self {
/// Self::Bar
/// }
/// }
/// impl From<ZST<Bar>> for MyEnum {
/// fn from(_: ZST<Bar>) -> Self {
/// Self::Bar
/// }
/// }
/// ```
#[macro_export]
macro_rules! define_singleton_enum_from {
(
$(#$meta:tt)*
$name:ident {
$( $(#$field_meta:tt)* $field:ident ),* $(,)?
}
) => {
#[allow(missing_docs)]
#[derive(Copy,Clone,Debug,PartialEq,Eq)]
$(#$meta)*
pub enum $name {
$( $(#$field_meta)* $field ),*
}
$(
impl From<$field> for $name {
fn from(_:$field) -> Self {
Self::$field
}
}
impl From<ZST<$field>> for $name {
fn from(_:ZST<$field>) -> Self {
Self::$field
}
}
)*
}
}
/// Defines singletons and an associated enum type.
/// It expands to the same as `define_singletons` and `define_singleton_enum_from`.
#[macro_export]
macro_rules! define_singleton_enum {
(
$(#$meta:tt)*
$name:ident {
$(
$(#$variant_meta:tt)*
$variant:ident $(($($variant_field:tt)*))?
),* $(,)?
}
) => {
$(
$crate::define_singleton_enum_struct! {
$(#$variant_meta)*
$variant ($($($variant_field)*)?)
}
)*
$crate::define_singleton_enum_from! { $(#$meta)* $name {$($(#$variant_meta)* $variant),*}}
}
}
#[macro_export]
macro_rules! define_singleton_enum_struct {
( $(#$meta:tt)* $name:ident () ) => {
#[allow(missing_docs)]
#[derive(Copy,Clone,Debug,PartialEq,Eq)]
$(#$meta)*
pub struct $name;
impl Default for $name {
fn default() -> Self {
Self
}
}
};
( $(#$meta:tt)* $name:ident ($($args:tt)*) ) => {
#[allow(missing_docs)]
#[derive(Clone,Debug,PartialEq,Eq)]
$(#$meta)*
pub struct $name($($args)*);
};
}

View File

@ -1,64 +0,0 @@
[package]
name = "enso-web"
version = "0.1.0"
authors = ["Enso Team <contact@enso.org>"]
edition = "2021"
[lib]
[dependencies]
enso-debug-api = { path = "../debug-api" }
console_error_panic_hook = { workspace = true }
gloo-timers = { version = "0.2.1", features = ["futures"] }
js-sys = { workspace = true }
wasm-bindgen = { workspace = true }
derivative = { workspace = true }
enso-logging = { path = "../logging" }
enso-shapely = { path = "../shapely" }
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
async-std = { version = "1.5.0" }
[dependencies.web-sys]
version = "0.3.4"
features = [
'Blob',
'Document',
'Node',
'Element',
'HtmlElement',
'HtmlDivElement',
'HtmlHeadElement',
'HtmlCollection',
'CssStyleDeclaration',
'HtmlCanvasElement',
'WebGlBuffer',
'WebGlRenderingContext',
'WebGl2RenderingContext',
'CanvasRenderingContext2d',
'WebGlProgram',
'WebGlShader',
'WebGlQuery',
'Window',
'Navigator',
'console',
'Performance',
'Event',
'MouseEvent',
'EventTarget',
'Text',
'DomRect',
'DomRectReadOnly',
'Location',
'ReadableStream',
'AddEventListenerOptions',
'EventListenerOptions',
'KeyboardEvent',
'WheelEvent',
]
[dev-dependencies]
wasm-bindgen-test = { workspace = true }
[lints]
workspace = true

View File

@ -1,170 +0,0 @@
/// Implementation of clipboard management. For full documentation, see the rust module.
// =============
// === Impls ===
// =============
function fallbackWriteText(text) {
let successful = false
let textArea = document.createElement('textarea')
// *** This styling is an extra step which is likely not required. ***
//
// Why is it here? To ensure:
// 1. the element is able to have focus and selection.
// 2. if element was to flash render it has minimal visual impact.
// 3. less flakyness with selection and copying which **might** occur if
// the textarea element is not visible.
//
// The likelihood is the element won't even render, not even a
// flash, so some of these are just precautions. However in
// Internet Explorer the element is visible whilst the popup
// box asking the user for permission for the web page to
// copy to the clipboard.
// Place in top-left corner of screen regardless of scroll position.
textArea.style.position = 'fixed'
textArea.style.top = 0
textArea.style.left = 0
// Ensure it has a small width and height. Setting to 1px / 1em
// doesn't work as this gives a negative w/h on some browsers.
textArea.style.width = '2em'
textArea.style.height = '2em'
// We don't need padding, reducing the size if it does flash render.
textArea.style.padding = 0
// Clean up any borders.
textArea.style.border = 'none'
textArea.style.outline = 'none'
textArea.style.boxShadow = 'none'
// Avoid flash of white box if rendered for any reason.
textArea.style.background = 'transparent'
textArea.value = text
textArea.style.top = '0'
textArea.style.left = '0'
textArea.style.position = 'fixed'
document.body.appendChild(textArea)
textArea.focus()
textArea.select()
try {
successful = document.execCommand('copy') == 1
} catch (err) {}
document.body.removeChild(textArea)
if (!successful) {
console.error('Could not write to clipboard.')
}
}
export function writeText(text) {
if (!navigator.clipboard) {
fallbackWriteText(text)
} else {
navigator.clipboard.writeText(text).then(
() => {},
err => {
fallbackWriteText(text)
}
)
}
}
/// Write custom `data` payload to the clipboard. Data will be saved as a `Blob` with `mimeType`.
/// If `textData` is not empty, an additional clipboard item will be written with the `text/plain` type.
///
/// Unlike `writeText`, there are no special fallbacks in case of errors or the clipboard being unavailable.
/// If writing did not succeed, the function will simply log an error to the console.
export function writeCustom(mimeType, data, textData) {
if (!navigator.clipboard) {
console.error('Clipboard API not available.')
} else {
const blob = new Blob([data], { type: mimeType })
const payload = { [blob.type]: blob }
if (typeof textData === 'string' && textData !== '') {
payload['text/plain'] = new Blob([textData], { type: 'text/plain' })
}
navigator.clipboard.write([new ClipboardItem(payload)]).then(
() => {},
err => {
console.error('Could not write to clipboard.', err)
}
)
}
}
/// Firefox only supports reading the clipboard in browser extensions, so it will
/// only work with `cmd + v` shortcut. To learn more, see the
/// [MSDN compatibility note](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard/readText).
let lastTextPaste = ''
function init_firefox_fallback() {
// Checking whether the window is defined. It could not be defined if the program is run in
// node, for example to extract the shaders.
if (typeof window !== 'undefined') {
window.addEventListener('paste', event => {
lastTextPaste = (event.clipboardData || window.clipboardData).getData('text')
})
}
}
export function readText(callback) {
if (!navigator.clipboard) {
callback(lastTextPaste)
} else {
navigator.clipboard.readText().then(
function (text) {
callback(text)
},
function (err) {
callback(lastTextPaste)
}
)
}
}
/// Read a custom payload of `expectedMimeType` from the clipboard, passing it to `whenExpected` callback.
/// If there is no value of `expectedMimeType` in the payload, use `plainTextFallback` callback instead.
///
/// Unlike `readText`, there are no special fallbacks in case of errors or the clipboard being unavailable.
/// If reading did not succeed, the function will simply log an error to the console.
export function readCustom(expectedMimeType, whenExpected, plainTextFallback) {
if (!navigator.clipboard) {
console.error('Clipboard API not available.')
} else {
readCustomImpl(expectedMimeType, whenExpected, plainTextFallback)
}
}
/// Helper function for `readCustom`, see its documentation.
async function readCustomImpl(expectedMimeType, whenExpected, plainTextFallback) {
    try {
        const items = await navigator.clipboard.read()
        // First pass: prefer `expectedMimeType`, no matter the order of clipboard items.
        for (const item of items) {
            if (item.types.includes(expectedMimeType)) {
                const blob = await item.getType(expectedMimeType)
                const buffer = await blob.arrayBuffer()
                whenExpected(new Uint8Array(buffer))
                return
            }
        }
        // Second pass: fall back to plain text when the expected type was absent.
        for (const item of items) {
            if (item.types.includes('text/plain')) {
                const blob = await item.getType('text/plain')
                plainTextFallback(await blob.text())
                return
            }
        }
    } catch (error) {
        console.error('Error while reading clipboard.', error)
    }
}
// ======================
// === Initialization ===
// ======================

// Install the Firefox `paste` fallback as soon as this module is loaded.
init_firefox_fallback()

View File

@ -1,79 +0,0 @@
// The IntersectionObserver interface of the Intersection Observer API provides
// a way to asynchronously observe changes in the intersection of a target
// element with an ancestor element or with a top-level document's viewport.
// The ancestor element or viewport is referred to as the root.
//
// See also
// https://developer.mozilla.org/en-US/docs/Web/API/IntersectionObserver
// ==============
// === IxPool ===
// ==============
/// A pool of reusable integer indices. Indices are handed out sequentially, and dropped
/// indices are recycled before any new one is minted.
class IxPool {
    constructor() {
        this.next = 0
        this.free = []
    }
    /// Acquire an index, reusing a previously dropped one when available.
    reserve() {
        if (this.free.length === 0) {
            const fresh = this.next
            this.next += 1
            return fresh
        }
        return this.free.shift()
    }
    /// Return `ix` to the pool for future reuse.
    drop(ix) {
        this.free.unshift(ix)
    }
}
// ============
// === Pool ===
// ============
/// A sparse collection of objects created on demand by the `cons` constructor function and
/// addressed by indices obtained from an internal `IxPool`.
class Pool {
    constructor(cons) {
        this.cons = cons
        this.ixs = new IxPool()
    }
    /// Build a new object from `args`, store it, and return its index.
    reserve(...args) {
        const slot = this.ixs.reserve()
        this[slot] = this.cons(...args)
        return slot
    }
    /// Clear the slot at `ix` and recycle the index.
    drop(ix) {
        this[ix] = null
        this.ixs.drop(ix)
    }
}
// ============================
// === IntersectionObserver ===
// ============================
// Pool of `IntersectionObserver` instances, one per observed target.
let intersectionObserverPool = new Pool((...args) => new IntersectionObserver(...args))

/// Start observing `target`, invoking `f(x, y, width, height)` with the bounding client rect
/// of the first reported entry whenever the intersection changes. Returns an id accepted by
/// `intersection_unobserve`.
export function intersection_observe(target, f) {
    const id = intersectionObserverPool.reserve(intersection_observer_update(f))
    intersectionObserverPool[id].observe(target)
    return id
}

/// Stop the observer registered under `id` and recycle its pool slot.
export function intersection_unobserve(id) {
    intersectionObserverPool[id].disconnect()
    intersectionObserverPool.drop(id)
}

/// Wrap `f` into an `IntersectionObserver` callback reporting the first entry's rect.
function intersection_observer_update(f) {
    return entries => {
        const rect = entries[0].boundingClientRect
        f(rect.x, rect.y, rect.width, rect.height)
    }
}

View File

@ -1,73 +0,0 @@
// ==============
// === IxPool ===
// ==============
/// Allocator of reusable integer indices: recycled indices are preferred over fresh
/// counter values.
class IxPool {
    constructor() {
        this.next = 0
        this.free = []
    }
    /// Acquire an index, preferring a recycled one.
    reserve() {
        return this.free.length > 0 ? this.free.shift() : this.next++
    }
    /// Recycle `ix` for later reuse.
    drop(ix) {
        this.free.unshift(ix)
    }
}
// ============
// === Pool ===
// ============
/// Stores objects produced by the `cons` factory under integer keys managed by an `IxPool`.
class Pool {
    constructor(cons) {
        this.cons = cons
        this.ixs = new IxPool()
    }
    /// Construct an object from `args` and return the index it was stored under.
    reserve(...args) {
        const ix = this.ixs.reserve()
        const value = this.cons(...args)
        this[ix] = value
        return ix
    }
    /// Release the object at `ix`, making the index available again.
    drop(ix) {
        this.ixs.drop(ix)
        this[ix] = null
    }
}
// ======================
// === ResizeObserver ===
// ======================
// Pool of `ResizeObserver` instances, one per observed target.
let resizeObserverPool = new Pool((...args) => new ResizeObserver(...args))
/// Start observing `target`, invoking `f(width, height)` with the entry's content rect on
/// every resize. Returns an id accepted by `resize_unobserve`.
export function resize_observe(target, f) {
    let id = resizeObserverPool.reserve(resize_observer_update(f))
    // NOTE(review): the article below recommends the `device-pixel-content-box` box option for
    // pixel-perfect canvas sizing, but the code observes `content-box` (the spec default) —
    // confirm which behavior is intended:
    // https://webglfundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
    resizeObserverPool[id].observe(target, { box: 'content-box' })
    return id
}
/// Stop the observer registered under `id` and recycle its pool slot.
export function resize_unobserve(id) {
    resizeObserverPool[id].disconnect()
    resizeObserverPool.drop(id)
}
/// Wrap `f` into a `ResizeObserver` callback reporting the first entry's content size.
function resize_observer_update(f) {
    return entries => {
        let rect = entries[0].contentRect
        f(rect.width, rect.height)
    }
}

View File

@ -1,11 +0,0 @@
//! Parent module for web API bindings. It contains both native, WASM bindings and mock ones, which
//! allow compilation of the API to native code without throwing panics in order for it to be useful
//! in native tests.
// ==============
// === Export ===
// ==============
pub mod mock;
pub mod wasm;

View File

@ -1,878 +0,0 @@
//! Mocked bindings to the web-api allowing its compilation for the native target without throwing
//! panics.
// === Non-Standard Linter Configuration ===
#![allow(clippy::boxed_local)]
use crate::prelude::*;
use std::default::Default;
use std::marker::Unsize;
// ===================
// === MockDefault ===
// ===================
/// Default value provider. Similar to [`Default`] but with additional implementations.
#[allow(missing_docs)]
pub trait MockDefault {
    fn mock_default() -> Self;
}

/// [`MockDefault::mock_default`] accessor.
pub fn mock_default<T: MockDefault>() -> T {
    T::mock_default()
}

impl MockDefault for () {
    fn mock_default() -> Self {}
}

// Mocked optional values are always `Some`, so code probing for a value finds one.
impl<T: MockDefault> MockDefault for Option<T> {
    fn mock_default() -> Self {
        Some(mock_default())
    }
}

// Mocked fallible operations always succeed with a mocked `Ok` value.
impl<T: MockDefault, E> MockDefault for Result<T, E> {
    fn mock_default() -> Self {
        Ok(mock_default())
    }
}

// Mocked collections are empty.
impl<T> MockDefault for Vec<T> {
    fn mock_default() -> Self {
        vec![]
    }
}
/// Macro which generates [`MockDefault`] impls which redirect the call to [`Default::default`].
macro_rules! auto_impl_mock_default {
    ( $($tp:ident $(< $($arg:ident),* >)? ),* ) => {
        $(
            impl $(<$($arg),*>)? MockDefault for $tp $(<$($arg),*>)? {
                fn mock_default() -> Self {
                    Default::default()
                }
            }
        )*
    };
}

// Primitive types whose mock default is simply the regular default.
auto_impl_mock_default!(bool, i16, i32, u32, f64, String);
// ================
// === MockData ===
// ================
/// Every mock structure implements this trait.
pub trait MockData {}

/// Macro used to generate mock structures. See the expansion of generated structures to learn more.
///
/// The generated struct is zero-sized (it only holds `PhantomData` of its type parameters) and
/// receives `Debug`, `Default`, `Clone`, `CloneRef`, `MockDefault`, and `MockData` impls, plus
/// `Deref`/`AsRef`/`Into<JsValue>` impls generated by the helper macros invoked at the bottom,
/// controlled by the optional `[$opt]` flag and `=> $deref` suffix.
#[macro_export]
macro_rules! mock_struct {
    ( $([$opt:ident])?
        $name:ident $(<$( $param:ident $(: ?$param_tp:ident)? ),*>)? $(=> $deref:ident)?
    ) => {
        #[allow(missing_copy_implementations)]
        #[allow(non_snake_case)]
        #[allow(missing_docs)]
        pub struct $name $(<$($param $(:?$param_tp)?),*>)? {
            $($( $param : PhantomData<$param> ),*)?
        }

        /// # Safety
        /// The usage of [`std::mem::transmute`] is safe here as we transmute ZST types.
        #[allow(unsafe_code)]
        impl$(<$($param $(:?$param_tp)?),*>)?
        $name $(<$($param),*>)? {
            /// Const constructor.
            pub const fn const_new() -> Self {
                unsafe { std::mem::transmute(()) }
            }
        }

        // `Debug` prints just the type name — the mock carries no data.
        impl$(<$($param $(:?$param_tp)?),*>)?
        Debug for $name $(<$($param),*>)? {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, stringify!($name))
            }
        }

        #[allow(unsafe_code)]
        impl $(<$($param $(:?$param_tp)?),*>)?
        Default for $name $(<$($param),*>)? {
            fn default() -> Self {
                Self::const_new()
            }
        }

        impl $(<$($param $(:?$param_tp)?),*>)?
        MockDefault for $name $(<$($param),*>)? {
            fn mock_default() -> Self {
                std::default::Default::default()
            }
        }

        impl $(<$($param $(:?$param_tp)?),*>)?
        Clone for $name $(<$($param),*>)? {
            fn clone(&self) -> Self {
                std::default::Default::default()
            }
        }

        impl $(<$($param $(:?$param_tp)?),*>)?
        CloneRef for $name $(<$($param),*>)? {
            fn clone_ref(&self) -> Self {
                std::default::Default::default()
            }
        }

        impl $(<$($param $(:?$param_tp)?),*>)?
        MockData for $name $(<$($param),*>)? {}

        mock_struct_deref! {[$($deref)?] $name $(<$( $param $(:?$param_tp)?),*>)?}
        mock_struct_as_ref! {[$($opt)?] $name $(<$( $param $(:?$param_tp)?),*>)? $(=> $deref)?}
        mock_struct_into_js_ref! {$name $(<$( $param $(:?$param_tp)?),*>)? $(=> $deref)?}
    };
}
/// Helper of [`mock_struct`]. Generates a blanket `AsRef` to any other mock type, which lets
/// zero-sized mocks be freely reinterpreted as one another. Skipped when `[NO_AS_REF]` was
/// passed (types that need a single, hand-written `AsRef` impl instead).
#[macro_export]
macro_rules! mock_struct_as_ref {
    ([NO_AS_REF] $($ts:tt)*) => {};
    ([] $name:ident $(<$( $param:ident $(: ?$param_tp:ident)? ),*>)?
        $(=> $deref:ident)?
    ) => {
        /// # Safety
        /// The usage of [`std::mem::transmute`] is safe here as we transmute ZST types.
        #[allow(unsafe_code)]
        impl<__T__: MockData, $($($param $(:?$param_tp)? ),*)?>
        AsRef<__T__> for $name $(<$($param),*>)? {
            fn as_ref(&self) -> &__T__ {
                unsafe { std::mem::transmute(self) }
            }
        }
    };
}
/// Helper of [`mock_struct`]. Generates `From<$name> for JsValue`, except when the mock itself
/// is `JsValue` or already derefs to it (the first two arms), which would conflict with the
/// impl generated by [`mock_struct_deref`].
#[macro_export]
macro_rules! mock_struct_into_js_ref {
    (JsValue $(<$( $param:ident $(: ?$param_tp:ident)? ),*>)? $(=> $deref:ident)?) => {};
    ($name:ident $(<$( $param:ident $(: ?$param_tp:ident)? ),*>)? => JsValue) => {};
    ($name:ident $(<$( $param:ident $(: ?$param_tp:ident)? ),*>)? $(=> $deref:ident)?) => {
        impl $(<$($param $(:?$param_tp)?),*>)?
        From<$name $(<$($param),*>)?> for JsValue {
            fn from(_: $name $(<$($param),*>)?) -> Self {
                std::default::Default::default()
            }
        }
    };
}
/// Helper of [`mock_struct`]. When a `=> $deref` target was provided, generates a `Deref` impl
/// mimicking the JS prototype chain of the mocked type, plus the matching `From` conversion.
#[macro_export]
macro_rules! mock_struct_deref {
    ([] $($ts:tt)*) => {};
    ([$deref:ident] $name:ident $(<$( $param:ident $(: ?$param_tp:ident)? ),*>)?) => {
        impl $(<$($param $(:?$param_tp)?),*>)?
        Deref for $name $(<$($param),*>)? {
            type Target = $deref;
            fn deref(&self) -> &Self::Target {
                self.as_ref()
            }
        }
        impl $(<$($param $(:?$param_tp)?),*>)?
        From<$name $(<$($param),*>)?> for $deref {
            fn from(_: $name) -> Self {
                std::default::Default::default()
            }
        }
    };
}
// ===============
// === mock_fn ===
// ===============
/// Create a mock implementation of a non-public function. Read the docs of [`mock_fn_gen`] to learn
/// more.
#[macro_export(local_inner_macros)]
macro_rules! mock_fn {
    ( $($ts:tt)* ) => {
        mock_fn_gen! {[] $($ts)*}
    };
}

/// Create a mock implementation of a public function. Read the docs of [`mock_fn_gen`] to learn
/// more.
#[macro_export(local_inner_macros)]
macro_rules! mock_pub_fn {
    ( $($ts:tt)* ) => {
        mock_fn_gen! {[pub] $($ts)*}
    };
}

/// Macro used to generate mock methods. Methods look just like their provided signature with a body
/// returning `mock_default()`. There are two special cases: for functions returning `&Self`, and
/// `&mut Self`, which just pass `&self` and `&mut self` to the output, respectively.
#[macro_export(local_inner_macros)]
macro_rules! mock_fn_gen {
    ($viz:tt $name:ident $(<$($fn_tp:ident),*>)? (&self $($args:tt)*) -> &Self ) => {
        $crate::mock_fn_gen_print! {
            $viz $name $(<$($fn_tp),*>)? (&self $($args)*) -> &Self {self}
        }
    };
    ($viz:tt $name:ident $(<$($fn_tp:ident),*>)? (&mut self $($args:tt)*) -> &mut Self ) => {
        $crate::mock_fn_gen_print! {
            $viz $name $(<$($fn_tp),*>)? (&mut self $($args)*) -> &mut Self {self}
        }
    };
    // Borrowed non-`Self` returns are produced via the blanket `AsRef` between mock types.
    ($viz:tt $name:ident $(<$($fn_tp:ident),*>)? (&self $($args:tt)*) -> &$out:ty ) => {
        $crate::mock_fn_gen_print! {
            $viz $name $(<$($fn_tp),*>)? (&self $($args)*) -> &$out {self.as_ref()}
        }
    };
    // Catch-all: owned (or no) return values come from `mock_default()`.
    ($viz:tt $name:ident $(<$($fn_tp:ident),*>)? ($($args:tt)*) $(-> $out:ty)? ) => {
        $crate::mock_fn_gen_print! {
            $viz $name $(<$($fn_tp),*>)? ($($args)*) $(-> $out)? {mock_default()}
        }
    };
}

/// Macro used to print the final version of the function.
#[macro_export(local_inner_macros)]
macro_rules! mock_fn_gen_print {
    ([$($viz:ident)?] $name:ident $(<$($fn_tp:ident),*>)?
        ( $($args:tt)* ) $(-> $out:ty)? {$($body:tt)*} ) => {
        #[allow(unused_variables)]
        #[allow(clippy::too_many_arguments)]
        #[allow(clippy::should_implement_trait)]
        #[allow(missing_docs)]
        $($viz)? fn $name $(<$($fn_tp),*>)? ( $($args)* ) $(-> $out)? {
            $($body)*
        }
    };
}
/// Combination of [`mock_struct`] and [`mock_pub_fn`]: generates the zero-sized mock structure
/// and a public mock method for every listed signature.
#[macro_export(local_inner_macros)]
macro_rules! mock_data {
    ( $([$opt:ident])?
        $name:ident $(<$( $param:ident $(: ?$param_tp:ident)? ),*>)? $(=> $deref:ident)?
        $(
            fn $fn_name:ident $(<$($fn_tp:ident),*>)? ($($args:tt)*) $(-> $out:ty)?;
        )*
    ) => {
        mock_struct!{$([$opt])? $name $(<$($param $(:?$param_tp)?),*>)? $(=> $deref)?}
        impl $(<$($param $(:?$param_tp)?),*>)? $name $(<$($param),*>)? {
            $(
                mock_pub_fn!{$fn_name $(<$($fn_tp),*>)? ($($args)*) $(-> $out)?}
            )*
        }
    };
}
// ==============
// === JsCast ===
// ==============
/// Mock of [`JsCast`] is implemented for all mocked types.
impl<T: MockData + MockDefault + AsRef<JsValue> + Into<JsValue>> JsCast for T {}

/// Mock of [`wasm_bindgen::JsCast`]. Every type check returns `true` and every cast succeeds,
/// producing a mock-default value, so cast-heavy code runs natively without panicking.
#[allow(missing_docs)]
pub trait JsCast
where Self: MockData + MockDefault + AsRef<JsValue> + Into<JsValue> {
    fn has_type<T>(&self) -> bool {
        true
    }
    fn dyn_into<T>(self) -> Result<T, Self>
    where T: JsCast {
        Ok(self.unchecked_into())
    }
    fn dyn_ref<T>(&self) -> Option<&T>
    where T: JsCast {
        Some(self.unchecked_ref())
    }
    fn unchecked_into<T>(self) -> T
    where T: JsCast {
        T::unchecked_from_js(self.into())
    }
    fn unchecked_ref<T>(&self) -> &T
    where T: JsCast {
        T::unchecked_from_js_ref(self.as_ref())
    }
    fn is_instance_of<T>(&self) -> bool {
        true
    }
    fn instanceof(_val: &JsValue) -> bool {
        true
    }
    fn is_type_of(_val: &JsValue) -> bool {
        true
    }
    fn unchecked_from_js(_val: JsValue) -> Self {
        mock_default()
    }
    fn unchecked_from_js_ref(val: &JsValue) -> &Self {
        val.as_ref()
    }
}
// ===============
// === JsValue ===
// ===============
mock_data! { JsValue
    fn is_undefined(&self) -> bool;
    fn is_null(&self) -> bool;
    fn from_str(s: &str) -> JsValue;
    fn from_f64(n: f64) -> JsValue;
    fn as_f64(&self) -> Option<f64>;
}

impl JsValue {
    /// NULL value mock.
    pub const NULL: JsValue = JsValue {};
}

// Bridges from the real `wasm_bindgen`/`js_sys` types and from primitives, so code that is
// generic over `Into<JsValue>` compiles against the mock as well. All produce the ZST default.
impl AsRef<JsValue> for wasm_bindgen::JsValue {
    fn as_ref(&self) -> &JsValue {
        &JsValue::NULL
    }
}

impl From<wasm_bindgen::JsValue> for JsValue {
    fn from(_: wasm_bindgen::JsValue) -> Self {
        Default::default()
    }
}

impl From<js_sys::Uint8Array> for JsValue {
    fn from(_: js_sys::Uint8Array) -> Self {
        Default::default()
    }
}

impl From<f32> for JsValue {
    fn from(_: f32) -> Self {
        Default::default()
    }
}

impl From<&str> for JsValue {
    fn from(_: &str) -> Self {
        Default::default()
    }
}

impl From<&String> for JsValue {
    fn from(_: &String) -> Self {
        Default::default()
    }
}

impl From<String> for JsValue {
    fn from(_: String) -> Self {
        Default::default()
    }
}
// ===============
// === Closure ===
// ===============
// `[NO_AS_REF]` opts out of the blanket `AsRef<T>` impl; a single `AsRef<JsValue>` is written
// by hand below so type inference has exactly one impl to pick.
mock_data! { [NO_AS_REF] Closure<T: ?Sized>
    fn wrap(_data: Box<T>) -> Closure<T>;
    fn once<F>(_fn_once: F) -> Closure<F>;
}

#[allow(missing_docs)]
impl Closure<dyn FnOnce()> {
    pub fn once_into_js<F>(_fn_once: F) -> JsValue {
        Default::default()
    }
}

#[allow(missing_docs)]
impl<T: ?Sized> Closure<T> {
    pub fn new<F>(_t: F) -> Closure<T>
    where F: Unsize<T> + 'static {
        mock_default()
    }
}

/// The generated structure does not implement a generic [`AsRef`] impl, as the usages base on the
/// fact that there exist exactly one such an impl (provided below), so the type inferencer can
/// monomoprphise more free variables.
#[allow(unsafe_code)]
impl<T: ?Sized> AsRef<JsValue> for Closure<T> {
    fn as_ref(&self) -> &JsValue {
        // Safety: both sides are ZSTs, as with the other mock transmutes in this module.
        unsafe { std::mem::transmute(self) }
    }
}

/// This impl is provided to mimic the behavior of the [`wasm_bindgen::Closure`] type. It silences
/// clippy warnings.
impl<T: ?Sized> Drop for Closure<T> {
    fn drop(&mut self) {}
}
// ================
// === Js Prims ===
// ================
// === String ===

mock_data! { JsString => Object
    fn to_string(&self) -> String;
}

// Mocked JS strings stringify to a fixed placeholder.
impl From<JsString> for String {
    fn from(_: JsString) -> Self {
        "JsString".into()
    }
}

impl From<&JsString> for String {
    fn from(_: &JsString) -> Self {
        "JsString".into()
    }
}


// === Array ===

mock_data! { Array => Object
    fn length(&self) -> u32;
    fn get(&self, index: u32) -> JsValue;
    fn of2(a: &JsValue, b: &JsValue) -> Array;
    fn of3(a: &JsValue, b: &JsValue, c: &JsValue) -> Array;
    fn of4(a: &JsValue, b: &JsValue, c: &JsValue, d: &JsValue) -> Array;
    fn of5(a: &JsValue, b: &JsValue, c: &JsValue, d: &JsValue, e: &JsValue) -> Array;
    fn to_vec(&self) -> Vec<JsValue>;
}


// === Map ===

mock_data! { Map => Object
    fn new() -> Self;
    fn get(&self, key: &JsValue) -> JsValue;
    fn set(&self, key: &JsValue, value: &JsValue) -> Map;
    fn entries(&self) -> Iterator;
}


// === Error ===

mock_data! { Error => Object
    fn new(message: &str) -> Self;
}
// ====================
// === DOM Elements ===
// ====================
// === WebGl2RenderingContext ===

/// The [`WebGl2RenderingContext`] is not a mocked structure because it defines tons of
/// constants that we use heavily. Instead, the rendering engine runs context-less when
/// compiled to native tests.
pub use web_sys::WebGl2RenderingContext;


// === Object ===

mock_data! { Object => JsValue
    fn new() -> Self;
    fn value_of(&self) -> Object;
    fn keys(object: &Object) -> Array;
}


// === EventTarget ===

// Listener (de)registration is a no-op that reports success (`Ok` mock default).
mock_data! { EventTarget => Object
    fn remove_event_listener_with_callback
        (&self, tp: &str, f: &Function) -> Result<(), JsValue>;
    fn remove_event_listener_with_callback_and_event_listener_options
        (&self, tp: &str, f: &Function, opt: &EventListenerOptions) -> Result<(), JsValue>;
    fn add_event_listener_with_callback
        (&self, tp: &str, f: &Function) -> Result<(), JsValue>;
    fn add_event_listener_with_callback_and_bool
        (&self, tp: &str, f: &Function, opt: bool) -> Result<(), JsValue>;
    fn add_event_listener_with_callback_and_add_event_listener_options
        (&self, tp: &str, f: &Function, opt: &AddEventListenerOptions)
        -> Result<(), JsValue>;
}


// === Document ===

mock_data! { Document => EventTarget
    fn body(&self) -> Option<HtmlElement>;
    fn head(&self) -> Option<HtmlHeadElement>;
    fn fonts(&self) -> FontFaceSet;
    fn create_element(&self, local_name: &str) -> Result<Element, JsValue>;
    fn get_element_by_id(&self, element_id: &str) -> Option<Element>;
    fn create_text_node(&self, data: &str) -> Text;
    fn dispatch_event(&self, event: &Event) -> Result<bool, JsValue>;
}


// === Window ===

mock_data! { Window => EventTarget
    fn document(&self) -> Option<Document>;
    fn open_with_url_and_target(&self, url: &str, target: &str)
        -> Result<Option<Window>, JsValue>;
    fn request_animation_frame(&self, callback: &Function) -> Result<i32, JsValue>;
    fn cancel_animation_frame(&self, handle: i32) -> Result<(), JsValue>;
    fn performance(&self) -> Option<Performance>;
    fn device_pixel_ratio(&self) -> f64;
    fn set_timeout_with_callback_and_timeout_and_arguments_0
        (&self, handler: &Function, timeout: i32) -> Result<i32, JsValue>;
    fn set_interval_with_callback_and_timeout_and_arguments_0
        (&self, handler: &Function, timeout: i32) -> Result<i32, JsValue>;
    fn clear_timeout_with_handle(&self, handle: i32);
    fn clear_interval_with_handle(&self, handle: i32);
}
// === Function ===

mock_data! { Function
    fn call0(&self, context: &JsValue) -> Result<JsValue, JsValue>;
    fn call1(&self, context: &JsValue, arg1: &JsValue) -> Result<JsValue, JsValue>;
    fn call2(&self, context: &JsValue, arg1: &JsValue, arg2: &JsValue) -> Result<JsValue, JsValue>;
    fn call3(&self, context: &JsValue, arg1: &JsValue, arg2: &JsValue, arg3: &JsValue)
        -> Result<JsValue, JsValue>;
}


// === AddEventListenerOptions ===

mock_data! { AddEventListenerOptions
    fn new() -> Self;
}
// Builder-style setters returning `&mut Self`, mirroring the `web_sys` API shape.
impl AddEventListenerOptions {
    mock_pub_fn!(capture(&mut self, val:bool) -> &mut Self);
    mock_pub_fn!(passive(&mut self, val:bool) -> &mut Self);
    mock_pub_fn!(once(&mut self, val:bool) -> &mut Self);
}


// === EventListenerOptions ===

mock_data! { EventListenerOptions
    fn new() -> Self;
}
impl EventListenerOptions {
    mock_pub_fn!(capture(&mut self, val:bool) -> &mut Self);
}


// === Event ===

mock_data! { Event => Object
    fn new(type_: &str) -> Result<Event, JsValue>;
    fn prevent_default(&self);
    fn stop_propagation(&self);
    fn current_target(&self) -> Option<EventTarget>;
}


// === KeyboardEvent ===

mock_data! { KeyboardEvent => Event
    fn key(&self) -> String;
    fn code(&self) -> String;
    fn alt_key(&self) -> bool;
    fn ctrl_key(&self) -> bool;
    fn shift_key(&self) -> bool;
    fn meta_key(&self) -> bool;
}


// === MouseEvent ===

mock_data! { MouseEvent => Event
    fn button(&self) -> i16;
    fn alt_key(&self) -> bool;
    fn ctrl_key(&self) -> bool;
    fn client_x(&self) -> i32;
    fn client_y(&self) -> i32;
    fn offset_x(&self) -> i32;
    fn offset_y(&self) -> i32;
    fn screen_x(&self) -> i32;
    fn screen_y(&self) -> i32;
    fn movement_x(&self) -> i32;
    fn movement_y(&self) -> i32;
}


// === WheelEvent ===

mock_data! { WheelEvent => MouseEvent
    fn delta_x(&self) -> f64;
    fn delta_y(&self) -> f64;
}


// === HtmlCollection ===

mock_data! { HtmlCollection
    fn length(&self) -> u32;
    fn get_with_index(&self, index: u32) -> Option<Element>;
}


// === DomRect ===

mock_data! { DomRect
    fn width(&self) -> f64;
    fn height(&self) -> f64;
    fn left(&self) -> f64;
    fn right(&self) -> f64;
    fn top(&self) -> f64;
    fn bottom(&self) -> f64;
}
// === Element ===

mock_data! { Element => Node
    fn remove(&self);
    fn children(&self) -> HtmlCollection;
    fn get_bounding_client_rect(&self) -> DomRect;
    fn set_inner_html(&self, value: &str);
    fn set_class_name(&self, value: &str);
    fn set_id(&self, value: &str);
    fn set_attribute(&self, name: &str, value: &str) -> Result<(), JsValue>;
    fn set_scroll_top(&self, value: i32);
    fn prepend_with_node_0(&self) -> Result<(), JsValue>;
    fn prepend_with_node_1(&self, n1: &Node) -> Result<(), JsValue>;
    fn prepend_with_node_2(&self, n1: &Node, n2:&Node) -> Result<(), JsValue>;
    fn prepend_with_node_3(&self, n1: &Node, n2:&Node, n3:&Node) -> Result<(), JsValue>;
}


// === HtmlElement ===

mock_data! { HtmlElement => Element
    fn set_class_name(&self, n: &str);
    fn set_inner_text(&self, value: &str);
    fn inner_text(&self) -> String;
    fn get_elements_by_class_name(&self, class_names: &str) -> HtmlCollection;
    fn style(&self) -> CssStyleDeclaration;
    fn offset_top(&self) -> i32;
}
impl From<HtmlElement> for EventTarget {
    fn from(_: HtmlElement) -> Self {
        Default::default()
    }
}


// === HtmlHeadElement ===

mock_data! { HtmlHeadElement => HtmlElement }


// === Promise ===

mock_data! { Promise
    fn then(&self, cb: &Closure<dyn FnMut(JsValue)>) -> Promise;
    fn resolve(cb: &JsValue) -> Promise;
}


// === FontFaceSet ===

mock_data! { FontFaceSet
    fn ready(&self) -> Result<Promise, JsValue>;
}


// === HtmlDivElement ===

mock_data! { HtmlDivElement => HtmlElement }
impl From<HtmlDivElement> for EventTarget {
    fn from(_: HtmlDivElement) -> Self {
        Default::default()
    }
}
// All mocked divs are ZSTs, hence any two compare equal.
impl PartialEq<HtmlDivElement> for HtmlDivElement {
    fn eq(&self, _: &HtmlDivElement) -> bool {
        true
    }
}


// === Text ===

mock_data! { Text => CharacterData }


// === CharacterData ===

mock_data! { CharacterData => Node }


// === HtmlCanvasElement ===

mock_data! { HtmlCanvasElement => HtmlElement
    fn width(&self) -> u32;
    fn height(&self) -> u32;
    fn set_width(&self, value: u32);
    fn set_height(&self, value: u32);
    fn get_context(&self, context_id: &str) -> Result<Option<Object>, JsValue>;
    fn get_context_with_context_options(
        &self,
        context_id: &str,
        context_options: &JsValue
    ) -> Result<Option<Object>, JsValue>;
}
// === TextMetrics ===

mock_data! { TextMetrics
    fn actual_bounding_box_right(&self) -> u32;
    fn actual_bounding_box_left(&self) -> u32;
    fn width(&self) -> u32;
}


// === CanvasRenderingContext2d ===

mock_data! { CanvasRenderingContext2d
    fn save(&self);
    fn measure_text(&self, text: &str) -> Result<TextMetrics, JsValue>;
    fn restore(&self);
    fn begin_path(&self);
    fn stroke(&self);
    fn move_to(&self, x: f64, y: f64);
    fn line_to(&self, x: f64, y: f64);
    fn scale(&self, x: f64, y: f64) -> Result<(), JsValue>;
    fn set_fill_style(&self, value: &JsValue);
    fn set_stroke_style(&self, value: &JsValue);
    fn clear_rect(&self, x: f64, y: f64, w: f64, h: f64);
    fn set_line_width(&self, value: f64);
    fn translate(&self, x: f64, y: f64) -> Result<(), JsValue>;
    fn fill_rect(&self, x: f64, y: f64, w: f64, h: f64);
    fn set_font(&self, font: &str);
    fn set_text_align(&self, text_align: &str);
    fn fill_text(&self, text: &str, x: f64, y: f64) -> Result<(), JsValue>;
    fn draw_image_with_html_canvas_element_and_sw_and_sh_and_dx_and_dy_and_dw_and_dh(
        &self, image: &HtmlCanvasElement,
        sx: f64, sy: f64, sw: f64, sh: f64, dx: f64, dy: f64, dw: f64, dh: f64
    ) -> Result<(), JsValue>;
}


// === Node ===

mock_data! { Node => EventTarget
    fn parent_node(&self) -> Option<Node>;
    fn remove_child(&self, child: &Node) -> Result<Node, JsValue>;
    fn set_text_content(&self, value: Option<&str>);
    fn append_child(&self, node: &Node) -> Result<Node, JsValue>;
    fn first_child(&self) -> Option<Node>;
    fn last_child(&self) -> Option<Node>;
    fn insert_before(
        &self,
        node: &Node,
        child: Option<&Node>
    ) -> Result<Node, JsValue>;
}


// === WebGlQuery ===

mock_data! { WebGlQuery => Object }


// ===========
// === CSS ===
// ===========

// === CssStyleDeclaration ===

mock_data! { CssStyleDeclaration => Object
    fn set_property(&self, property: &str, value: &str) -> Result<(), JsValue>;
}
// =============
// === Other ===
// =============
// === Performance ===

mock_data! { Performance => EventTarget
    fn now(&self) -> f64;
    fn time_origin(&self) -> f64;
}


// ===============
// === Reflect ===
// ===============

mock_data! { Reflect
    fn get(target: &JsValue, key: &JsValue) -> Result<JsValue, JsValue>;
    fn set(
        target: &JsValue,
        property_key: &JsValue,
        value: &JsValue
    ) -> Result<bool, JsValue>;
}


// ===========================
// === Window and Document ===
// ===========================

// Globals mirroring the `window`/`document` statics of the WASM backend. The mocks are
// zero-sized, so plain statics are sufficient here.
#[allow(non_upper_case_globals)]
#[allow(missing_docs)]
pub static window: Window = Window {};

#[allow(non_upper_case_globals)]
#[allow(missing_docs)]
pub static document: Document = Document::const_new();
// ================
// === Iterator ===
// ================
/// Mock of a JS iterator handle; iterating it yields no items (see [`IntoIter::next`]).
#[derive(Default, Clone, Copy, Debug)]
#[allow(missing_docs)]
pub struct Iterator;

impl MockDefault for Iterator {
    fn mock_default() -> Self {
        Default::default()
    }
}

/// Mock iterator state produced by [`Iterator::into_iter`]; always exhausted.
#[derive(Default, Clone, Copy, Debug)]
#[allow(missing_docs)]
pub struct IntoIter;

impl MockDefault for IntoIter {
    fn mock_default() -> Self {
        Default::default()
    }
}

impl IntoIterator for Iterator {
    type Item = Result<JsValue, JsValue>;
    type IntoIter = IntoIter;
    fn into_iter(self) -> IntoIter {
        Default::default()
    }
}

impl std::iter::Iterator for IntoIter {
    type Item = Result<JsValue, JsValue>;
    // The mocked iteration ends immediately.
    fn next(&mut self) -> Option<Self::Item> {
        None
    }
}

View File

@ -1,143 +0,0 @@
//! Native bindings to the web-api.
use crate::prelude::*;
// ==============
// === Export ===
// ==============
pub use js_sys::Array;
pub use js_sys::Error;
pub use js_sys::Function;
pub use js_sys::JsString;
pub use js_sys::Map;
pub use js_sys::Object;
pub use js_sys::Promise;
pub use std::time::Duration;
pub use std::time::Instant;
pub use wasm_bindgen::prelude::Closure;
pub use wasm_bindgen::prelude::*;
pub use wasm_bindgen::JsCast;
pub use wasm_bindgen::JsValue;
pub use web_sys::console;
pub use web_sys::AddEventListenerOptions;
pub use web_sys::CanvasRenderingContext2d;
pub use web_sys::Document;
pub use web_sys::Element;
pub use web_sys::Event;
pub use web_sys::EventListenerOptions;
pub use web_sys::EventTarget;
pub use web_sys::HtmlCanvasElement;
pub use web_sys::HtmlCollection;
pub use web_sys::HtmlDivElement;
pub use web_sys::HtmlElement;
pub use web_sys::KeyboardEvent;
pub use web_sys::MouseEvent;
pub use web_sys::Node;
pub use web_sys::Performance;
pub use web_sys::WebGl2RenderingContext;
pub use web_sys::WebGlQuery;
pub use web_sys::WheelEvent;
pub use web_sys::Window;
// ================
// === Function ===
// ================

// The inline JS wraps the `Function` constructor; `#[wasm_bindgen(catch)]` below turns a JS
// exception thrown by it into an `Err(JsValue)`.
#[wasm_bindgen(inline_js = "
export function new_function_with_args(args, body) {
    return new Function(args, body)
}
")]
extern "C" {
    // See the docs of [`crate::FunctionOps`].
    #[allow(unsafe_code)]
    #[wasm_bindgen(catch)]
    pub fn new_function_with_args(args: &str, body: &str) -> Result<Function, JsValue>;
}
// ===============
// === Reflect ===
// ===============
/// [`wasm-bindgen`] defines [`Reflect`] as a module. This library needs to extend it with new
/// functions and thus it is mocked as this phantom structure.
#[derive(Copy, Clone, Debug)]
pub struct Reflect {}

#[allow(missing_docs)]
impl Reflect {
    /// Read `target[key]`; forwards to [`js_sys::Reflect::get`].
    pub fn get(target: &JsValue, key: &JsValue) -> Result<JsValue, JsValue> {
        js_sys::Reflect::get(target, key)
    }
    /// Write `target[key] = value`; forwards to [`js_sys::Reflect::set`].
    pub fn set(target: &JsValue, key: &JsValue, value: &JsValue) -> Result<bool, JsValue> {
        js_sys::Reflect::set(target, key, value)
    }
}
// ===========================
// === Window and Document ===
// ===========================
#[cfg(target_arch = "wasm32")]
/// Similar to [`lazy_static`], but does not require the type to be synced between threads (WASM32
/// target is single-threaded).
macro_rules! wasm_lazy_global {
    ($name:ident : $tp:ty = $expr:expr) => {
        #[allow(missing_docs)]
        pub mod $name {
            use super::*;
            // Lazily-initialized storage. `static mut` is tolerable only because the WASM32
            // target is single-threaded (see the macro docs above).
            pub static mut STORE: Option<$tp> = None;

            // [`Copy`] and [`Clone`] are not implemented on purpose, so when the value is cloned,
            // the operation will deref to it's target type.
            #[allow(missing_copy_implementations)]
            #[derive(Debug)]
            pub struct Ref {}
        }

        impl Deref for $name::Ref {
            type Target = $tp;
            #[allow(unsafe_code)]
            fn deref(&self) -> &Self::Target {
                // Initialize on first access, then return the cached value.
                unsafe {
                    $name::STORE.as_ref().unwrap_or_else(|| {
                        let val = $expr;
                        $name::STORE = Some(val);
                        $name::STORE.as_ref().unwrap()
                    })
                }
            }
        }

        #[allow(non_upper_case_globals)]
        #[allow(missing_docs)]
        pub const $name: $name::Ref = $name::Ref {};
    };
}
/// Get the global window object.
///
/// # Safety
/// We are using an unsafe cast here in order to make it working in node. Please note that node does
/// NOT expose a `window` global object. We are creating it there manually. This created object
/// exposes some `window` functions, such as `.performance.now()`. It is enough for us to run the
/// generated WASM there.
pub fn get_window() -> Window {
    js_sys::global().unchecked_into::<Window>()
}

// Lazily-initialized `window` / `document` globals (WASM target only).
#[cfg(target_arch = "wasm32")]
wasm_lazy_global! { window : Window = get_window() }

#[cfg(target_arch = "wasm32")]
wasm_lazy_global! { document : Document = window.document().unwrap() }

View File

@ -1,157 +0,0 @@
//! Clipboard management utilities.
//!
//! Please note:
//! - Every function here uses the [Clipboard API](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard_API)
//! under the hood.
//! - Every function is asynchronous. The delay for receiving or sending data may be caused for
//! example by waiting for permission from the user.
//! - Every function will probably display a permission prompt to the user for the first time it is
//! used.
//! - The website has to be served over HTTPS for these functions to work correctly.
//! - These functions needs to be called from within user-initiated event callbacks, like mouse or
//! key press. Otherwise it may not work.
//! - Web browsers do not support MIME types other than `text/plain`, `text/html`, and `image/png`
//! in general. However, using
//! [Clipboard pickling](https://github.com/w3c/editing/blob/gh-pages/docs/clipboard-pickling/explainer.md),
//! we can practically use any MIME type.
//!
//! To learn more, see this [StackOverflow question](https://stackoverflow.com/questions/400212/how-do-i-copy-to-the-clipboard-in-javascript).
use crate::prelude::*;
use js_sys::Uint8Array;
use wasm_bindgen::prelude::wasm_bindgen;
#[cfg(not(target_arch = "wasm32"))]
use wasm_bindgen::prelude::Closure;
// =============
// === Types ===
// =============

/// MIME type of the data.
pub type MimeType = String;

/// The data to be written to the clipboard.
pub type BinaryData<'a> = &'a [u8];

// JS callback receiving clipboard text.
type ReadTextClosure = Closure<dyn Fn(String)>;

// JS callback receiving binary clipboard content.
type ReadClosure = Closure<dyn Fn(Vec<u8>)>;
// ===================
// === JS Bindings ===
// ===================

// Bindings to `js/clipboard.js`; see that file for the browser-side implementations and their
// fallback behavior.
#[wasm_bindgen(module = "/js/clipboard.js")]
extern "C" {
    #[allow(unsafe_code)]
    fn writeText(text: String);

    #[allow(unsafe_code)]
    fn readText(closure: &ReadTextClosure);

    #[allow(unsafe_code)]
    fn writeCustom(mime_type: String, data: Uint8Array, text_data: String);

    #[allow(unsafe_code)]
    fn readCustom(
        expected_mime_type: String,
        when_expected: &ReadClosure,
        plain_text_fallback: &ReadTextClosure,
    );
}
/// Write `data` to the clipboard under the given MIME type. When `text_data` is present, it is
/// additionally stored under the `text/plain` MIME type.
///
/// See the module documentation for more details.
///
/// - Unlike `write_text`, there is no special fallback mechanism in case of failures or
///   unavailable clipboard. The JS side will simply report an error to the console.
pub fn write(data: BinaryData<'_>, mime_type: MimeType, text_data: Option<String>) {
    let bytes = Uint8Array::from(data);
    let fallback_text = text_data.unwrap_or_default();
    writeCustom(mime_type, bytes, fallback_text);
}
/// Read arbitrary binary data of `expected_mime_type` from the clipboard. When no value of that
/// type is present in the clipboard content, the `plain/text` MIME type is requested instead and
/// the result is passed to the `plain_text_fallback` callback.
///
/// See the module documentation for more details.
///
/// - Unlike `read_text`, there is no special fallback mechanism in case of failures or
///   unavailable clipboard. The JS side will simply report an error to the console.
pub fn read(
    expected_mime_type: MimeType,
    when_expected: impl Fn(Vec<u8>) + 'static,
    plain_text_fallback: impl Fn(String) + 'static,
) {
    let binary_handler = create_handler_binary(when_expected);
    let text_handler = create_handler_string(plain_text_fallback);
    let binary_guard = binary_handler.borrow();
    let text_guard = text_handler.borrow();
    readCustom(
        expected_mime_type,
        binary_guard.as_ref().unwrap(),
        text_guard.as_ref().unwrap(),
    );
}
/// Write the provided text to the clipboard.
///
/// See the module documentation for more details.
///
/// In case something fails, this function implements a fallback mechanism which tries
/// to create a hidden text field, fill it with the text and use the obsolete
/// [Document.execCommand](https://developer.mozilla.org/en-US/docs/Web/API/Document/execCommand)
/// function.
pub fn write_text(text: impl Into<String>) {
    writeText(text.into())
}
/// Read the text from the clipboard.
///
/// See the module documentation for more details.
///
/// This function works in a very strange way in Firefox.
/// [Firefox only supports reading the clipboard in browser extensions](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard/readText).
/// In such case this function falls back to the `paste` event: whenever it is triggered, its
/// value is remembered and passed to the callback. This means that in Firefox this function
/// will work correctly only when called as a direct action to the `cmd + v` shortcut.
pub fn read_text(callback: impl Fn(String) + 'static) {
    let text_handler = create_handler_string(callback);
    readText(text_handler.borrow().as_ref().unwrap());
}
// ===============
// === Helpers ===
// ===============
fn create_handler_string(
callback: impl Fn(String) + 'static,
) -> Rc<RefCell<Option<Closure<dyn Fn(String)>>>> {
let handler: Rc<RefCell<Option<ReadTextClosure>>> = Default::default();
let handler_clone = handler.clone_ref();
let closure: ReadTextClosure = Closure::new(move |result| {
*handler_clone.borrow_mut() = None;
callback(result);
});
*handler.borrow_mut() = Some(closure);
handler
}
fn create_handler_binary(
callback: impl Fn(Vec<u8>) + 'static,
) -> Rc<RefCell<Option<Closure<dyn Fn(Vec<u8>)>>>> {
let handler: Rc<RefCell<Option<ReadClosure>>> = Default::default();
let handler_clone = handler.clone_ref();
let closure: ReadClosure = Closure::new(move |result| {
*handler_clone.borrow_mut() = None;
callback(result);
});
*handler.borrow_mut() = Some(closure);
handler
}

View File

@ -1,8 +0,0 @@
//! Helper code for dealing with web_sys's `Closure`.
// ==============
// === Export ===
// ==============
pub mod storage;

View File

@ -1,92 +0,0 @@
// === Non-Standard Linter Configuration ===
#![allow(missing_docs)]
use crate::prelude::*;
use derivative::Derivative;
#[cfg(not(target_arch = "wasm32"))]
use js_sys::Function;
use wasm_bindgen::convert::FromWasmAbi;
use wasm_bindgen::JsCast;
// ==============
// === Export ===
// ==============
pub use wasm_bindgen::prelude::Closure;
// ======================
// === ClosureStorage ===
// ======================
// NOTE: trait aliases are a nightly-only feature (`trait_alias`).
/// Constraint for JS closure argument types
pub trait ClosureArg = FromWasmAbi + 'static;
/// Function that can be wrapped into a `Closure`.
pub trait ClosureFn<Arg> = FnMut(Arg) + 'static where Arg: ClosureArg;
/// Stores an optional closure.
/// The purpose is to reduce the boilerplate that repeats when setting JS callbacks.
#[derive(Debug, Derivative)]
#[derivative(Default(bound = ""))]
pub struct OptionalFmMutClosure<Arg> {
    /// The stored closure.
    // Wrapped in `Option` so it can be cleared; the `Closure` keeps the Rust
    // function alive for as long as JS may call it.
    pub closure: Option<Closure<dyn FnMut(Arg)>>,
}
impl<Arg> OptionalFmMutClosure<Arg> {
    /// An empty closure storage.
    pub fn new() -> OptionalFmMutClosure<Arg> {
        Default::default()
    }

    /// Stores the given closure and returns a JS reference to it (usable e.g. as a callback
    /// argument for an event handler).
    pub fn store(&mut self, closure: Closure<dyn FnMut(Arg)>) -> &Function {
        // `Option::insert` (stable since Rust 1.53) returns a reference to the freshly
        // stored value, resolving the old TODO about the `Some(...)` + `unwrap` round-trip.
        self.closure.insert(closure).as_ref().unchecked_ref()
    }

    /// Obtain JS reference to the closure (that can be passed e.g. as a callback
    /// to an event handler). Returns `None` if no closure is currently stored.
    pub fn js_ref(&self) -> Option<&Function> {
        self.closure.as_ref().map(|closure| closure.as_ref().unchecked_ref())
    }

    /// Wraps given function into a `Closure`, stores it, and returns the JS reference.
    pub fn wrap(&mut self, f: impl ClosureFn<Arg>) -> &Function {
        let boxed = Box::new(f);
        // Note: [mwu] Not sure exactly why, but compiler sometimes require this
        // explicit type below and sometimes does not.
        let wrapped: Closure<dyn FnMut(Arg)> = Closure::wrap(boxed);
        self.store(wrapped)
    }

    /// Clears the current closure.
    /// Note: if reference to it is still used by JS, it will throw an exception
    /// on calling attempt. Be careful of dangling references.
    pub fn clear(&mut self) {
        self.closure = None;
    }

    /// Register this closure as an event handler.
    /// No action is taken if there is no closure stored.
    pub fn add_listener<EventType: crate::event::Type>(&self, target: &EventType::Target) {
        if let Some(function) = self.js_ref() {
            EventType::add_listener(target, function)
        }
    }

    /// Unregister this closure as an event handler. The closure must be the same as when it was
    /// registered.
    pub fn remove_listener<EventType: crate::event::Type>(&self, target: &EventType::Target) {
        if let Some(function) = self.js_ref() {
            EventType::remove_listener(target, function)
        }
    }
}

View File

@ -1,58 +0,0 @@
//! Utilities for DOM events.
use js_sys::Function;
use wasm_bindgen::JsValue;
use web_sys::Event;
use web_sys::EventTarget;
// ==============
// === Export ===
// ==============
pub mod listener;
// =============
// === Event ===
// =============
/// This trait represents a type of event that may fire from some specific JS `EventTarget`.
///
/// For example, `WebSocket.close` is such an event, where `close` is event type and `WebSocket` is
/// the `EventTarget`.
///
/// The purpose is to increase type safety by grouping event type name, event target type and event
/// value type together.
///
/// Typically this trait is to be implemented for uncreatable types, created for the sole
/// purpose of denoting a particular event type within a context of an event target.
pub trait Type {
    /// The event value -- i.e. the Rust type of a value that will be passed as an argument
    /// to the listener.
    /// For example `web_sys::CloseEvent`.
    type Interface: AsRef<Event>;
    /// The type of the EventTarget object that fires this type of event, e.g. `web_sys::WebSocket`.
    type Target: AsRef<EventTarget> + AsRef<JsValue> + Clone + PartialEq;
    /// The type of the event as a string. For example `"close"`.
    const NAME: &'static str;
    /// Add a given function to the event's target as an event listener. It will be called each
    /// time event fires until listener is removed through `remove_listener`.
    fn add_listener(target: &Self::Target, listener: &Function) {
        // The unwrap here is safe, as the `addEventListener` never throws.
        EventTarget::add_event_listener_with_callback(target.as_ref(), Self::NAME, listener)
            .unwrap()
    }
    /// Remove the event listener. The `add_listener` method should have been called before with
    /// the very same function argument.
    fn remove_listener(target: &Self::Target, listener: &Function) {
        // The unwrap here is safe, as `removeEventListener` never throws.
        EventTarget::remove_event_listener_with_callback(target.as_ref(), Self::NAME, listener)
            .unwrap()
    }
}

View File

@ -1,117 +0,0 @@
// === Non-Standard Linter Configuration ===
#![allow(missing_docs)]
use crate::prelude::*;
use crate::closure::storage::ClosureFn;
use crate::closure::storage::OptionalFmMutClosure;
use derivative::Derivative;
// ============
// === Slot ===
// ============
/// A Slot stores a callback and manages its connection with JS `EventTarget`.
///
/// Both callback and the target can be set independently using `set_target` and `set_callback`.
/// Additionally, callback can be cleared at any point by `clear_callback`.
///
/// When both target and callback are set, slot ensures that the callback is registered as an
/// event listener in the target.
///
/// When changing target, `Slot` reattaches callback.
///
/// `Slot` owns callback and wraps it into JS closure. `Slot` also keeps reference to the target,
/// so it must not be leaked.
#[derive(Derivative)]
#[derivative(Debug(bound = "EventType::Interface: Debug"))]
pub struct Slot<EventType: crate::event::Type> {
    /// The JS object events are listened on; `None` when the slot has no target.
    // Debug is skipped because `EventType::Target` is not required to implement it.
    #[derivative(Debug = "ignore")]
    target: Option<EventType::Target>,
    /// The stored callback (if any), wrapped into a JS closure.
    js_closure: OptionalFmMutClosure<EventType::Interface>,
}
impl<EventType: crate::event::Type> Slot<EventType> {
    /// Create a new `Slot`. As the initial target is provided, the listener will register once it
    /// gets a callback (see [[set_callback]]).
    pub fn new(target: &EventType::Target) -> Self {
        Self { target: Some(target.clone()), js_closure: Default::default() }
    }

    /// Register the event listener if both target and callback are set.
    fn add_if_active(&mut self) {
        if let (Some(target), Some(function)) = (self.target.as_ref(), self.js_closure.js_ref()) {
            debug!("Attaching the callback.");
            EventType::add_listener(target, function)
        }
    }

    /// Unregister the event listener if both target and callback are set.
    fn remove_if_active(&mut self) {
        if let (Some(target), Some(function)) = (self.target.as_ref(), self.js_closure.js_ref()) {
            debug!("Detaching the callback.");
            EventType::remove_listener(target, function)
        }
    }

    /// Set a new target.
    ///
    /// If callback is set, it will be reattached as a listener to a newly set target.
    pub fn set_target(&mut self, target: &EventType::Target) {
        // Prevent spurious reattaching that could affect listeners order.
        if Some(target) != self.target.as_ref() {
            self.remove_if_active();
            self.target = Some(target.clone());
            self.add_if_active()
        }
    }

    /// Clear the event target, provided that `target` is the currently set one.
    ///
    /// If callback is set, it will be unregistered.
    pub fn clear_target(&mut self, target: &EventType::Target) {
        // BUG FIX: this previously used `!=`, which made clearing the currently registered
        // target a no-op, while an *unrelated* target would detach the listener instead.
        // Clearing must happen exactly when the passed target is the one we hold.
        if Some(target) == self.target.as_ref() {
            self.remove_if_active();
            self.target = None;
        }
    }

    /// Assign a new event callback closure and register it in the target.
    ///
    /// If the listener was registered with the previous closure, it will unregister first.
    ///
    /// Caveat: using this method will move the event listener to the end of the registered
    /// callbacks. This will affect the order of callback calls.
    pub fn set_callback(&mut self, f: impl ClosureFn<EventType::Interface>) {
        self.remove_if_active();
        self.js_closure.wrap(f);
        self.add_if_active()
    }

    /// Erase the callback.
    ///
    /// The stored closure will be dropped and event listener unregistered.
    pub fn clear_callback(&mut self) {
        self.remove_if_active();
        self.js_closure.clear();
    }

    /// Detach and attach the listener to the target.
    ///
    /// The purpose is to move this slot to the end of the listeners list.
    pub fn reattach(&mut self) {
        self.remove_if_active();
        self.add_if_active();
    }
}
/// Unregister listener on drop.
impl<EventType: crate::event::Type> Drop for Slot<EventType> {
    fn drop(&mut self) {
        // Detach from the JS target so the (about to be freed) closure cannot be invoked.
        self.remove_if_active();
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,187 +0,0 @@
//! This module provides helpers for platform specific logic.
// ================
// === Platform ===
// ================
/// This enumeration lists all the supported platforms.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[allow(missing_docs)]
pub enum Platform {
    Android,
    FreeBSD,
    IOS,
    Linux,
    MacOS,
    OpenBSD,
    Windows,
}
pub use Platform::*;

#[allow(missing_docs)]
impl Platform {
    pub fn is_android(self) -> bool {
        matches!(self, Android)
    }
    pub fn is_freebsd(self) -> bool {
        matches!(self, FreeBSD)
    }
    pub fn is_ios(self) -> bool {
        matches!(self, IOS)
    }
    pub fn is_linux(self) -> bool {
        matches!(self, Linux)
    }
    pub fn is_macos(self) -> bool {
        matches!(self, MacOS)
    }
    pub fn is_openbsd(self) -> bool {
        matches!(self, OpenBSD)
    }
    pub fn is_windows(self) -> bool {
        matches!(self, Windows)
    }
}

/// An error indicating that the platform was not recognized.
#[derive(Clone, Copy, Debug)]
pub struct UnknownPlatform;

impl TryFrom<&str> for Platform {
    type Error = UnknownPlatform;
    /// Best-effort platform detection from a platform or user-agent string.
    #[allow(clippy::if_same_then_else)]
    fn try_from(s: &str) -> Result<Self, Self::Error> {
        let name = s.to_lowercase();
        let has = |pat: &str| name.contains(pat);
        // The order of the checks below matters: "darwin" also contains "win",
        // so the macOS checks must run before the Windows check. Likewise,
        // "openbsd" contains "bsd" and must be tested before the generic "bsd".
        if has("darwin") || has("mac") {
            Ok(MacOS)
        } else if has("linux") {
            Ok(Linux)
        } else if has("win") {
            Ok(Windows)
        } else if has("ios") || has("iphone") || has("ipad") {
            Ok(IOS)
        } else if has("android") {
            Ok(Android)
        } else if has("freebsd") {
            Ok(FreeBSD)
        } else if has("openbsd") {
            Ok(OpenBSD)
        } else if has("bsd") {
            Ok(FreeBSD)
        } else {
            Err(UnknownPlatform)
        }
    }
}

impl TryFrom<String> for Platform {
    type Error = UnknownPlatform;
    /// Delegates to the `&str` implementation.
    fn try_from(s: String) -> Result<Self, Self::Error> {
        Platform::try_from(s.as_str())
    }
}
// ================================
// === Compile Time Redirection ===
// ================================
/// Queries which platform we are on.
#[cfg(target_arch = "wasm32")]
pub fn current() -> Option<Platform> {
    // On wasm the platform must be detected at runtime from browser-provided strings.
    current_wasm()
}
/// Queries which platform we are on.
#[cfg(not(target_arch = "wasm32"))]
pub fn current() -> Option<Platform> {
    // On native targets the platform is known at compile time (via `target_os`).
    current_native()
}
// ====================
// === Current WASM ===
// ====================
/// Queries which platform we are on, based on the browser's `navigator.platform` and
/// `navigator.userAgent` strings.
#[allow(clippy::if_same_then_else)]
#[cfg(target_arch = "wasm32")]
pub fn current_wasm() -> Option<Platform> {
    use super::window;
    let navigator = window.navigator();
    // Either string may be unavailable or empty, hence the fallback chain below.
    let platform = navigator.platform().unwrap_or_default().to_lowercase();
    let agent = navigator.user_agent().unwrap_or_default().to_lowercase();
    Platform::try_from(platform).or_else(|_| Platform::try_from(agent)).ok()
}
// ======================
// === Current Native ===
// ======================
// Compile-time platform detection for native targets: exactly one of the
// definitions below is compiled in, selected by `target_os`.
#[cfg(target_os = "android")]
fn current_native() -> Option<Platform> {
    Some(Android)
}
#[cfg(target_os = "ios")]
fn current_native() -> Option<Platform> {
    Some(IOS)
}
#[cfg(target_os = "linux")]
fn current_native() -> Option<Platform> {
    Some(Linux)
}
#[cfg(target_os = "macos")]
fn current_native() -> Option<Platform> {
    Some(MacOS)
}
#[cfg(target_os = "windows")]
fn current_native() -> Option<Platform> {
    Some(Windows)
}
// Fallback for any target not covered above: the platform is unknown.
#[cfg(not(any(
    target_arch = "wasm32",
    target_os = "android",
    target_os = "ios",
    target_os = "linux",
    target_os = "macos",
    target_os = "windows"
)))]
fn current_native() -> Option<Platform> {
    None
}
// =============
// === Tests ===
// =============
#[cfg(all(test, any(target_os = "linux", target_os = "windows", target_os = "macos")))]
mod test {
    use super::*;
    use wasm_bindgen_test::wasm_bindgen_test;
    use wasm_bindgen_test::wasm_bindgen_test_configure;
    wasm_bindgen_test_configure!(run_in_browser);
    // Sanity check: the runtime-detected platform agrees with the compile-time one.
    #[wasm_bindgen_test]
    fn platform() {
        assert_eq!(current(), current_native())
    }
}

View File

@ -1,72 +0,0 @@
//! Binding to the https://developer.mozilla.org/en-US/docs/Web/API/ResizeObserver.
use crate::prelude::*;
// =============
// === Types ===
// =============
/// Listener closure for the [`ResizeObserver`]. Called with the observed element's new
/// width and height (presumably in pixels — confirm against `js/resize_observer.js`).
pub type Listener = Closure<dyn FnMut(f32, f32)>;
// ===================
// === JS Bindings ===
// ===================
// Bindings to the helper functions implemented in `/js/resize_observer.js`.
#[cfg(target_arch = "wasm32")]
use wasm_bindgen::prelude::wasm_bindgen;
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen(module = "/js/resize_observer.js")]
extern "C" {
    // Start observing `target`; returns an id used to cancel the observation.
    #[allow(unsafe_code)]
    fn resize_observe(target: &JsValue, closure: &Listener) -> usize;
    // Stop the observation previously started by `resize_observe`.
    #[allow(unsafe_code)]
    fn resize_unobserve(id: usize);
}
// Native (non-wasm) stubs, so code using the observer still compiles; they do nothing.
#[cfg(not(target_arch = "wasm32"))]
fn resize_observe(_target: &JsValue, _closure: &Listener) -> usize {
    0
}
#[cfg(not(target_arch = "wasm32"))]
fn resize_unobserve(_id: usize) {}
// ======================
// === ResizeObserver ===
// ======================
/// The ResizeObserver interface reports changes to the dimensions of an DOM Element's content or
/// border box. ResizeObserver avoids infinite callback loops and cyclic dependencies that are often
/// created when resizing via a callback function. It does this by only processing elements deeper
/// in the DOM in subsequent frames.
///
/// See also https://developer.mozilla.org/en-US/docs/Web/API/ResizeObserver.
#[derive(Debug)]
#[allow(missing_docs)]
pub struct ResizeObserver {
    /// The observed JS node.
    pub target: JsValue,
    /// The Rust callback, kept alive for the lifetime of the observation.
    pub listener: Listener,
    /// Handle returned by the JS side; used to cancel the observation on drop.
    pub observer_id: usize,
}
impl ResizeObserver {
    /// Constructor. Starts observing `target` immediately; the returned instance owns the
    /// listener and cancels the observation when dropped.
    pub fn new(target: &JsValue, listener: Listener) -> Self {
        let observer_id = resize_observe(target, &listener);
        Self { target: target.clone(), listener, observer_id }
    }
}
impl Drop for ResizeObserver {
    fn drop(&mut self) {
        // Cancel the JS-side observation so the stored listener is no longer invoked.
        resize_unobserve(self.observer_id);
    }
}

View File

@ -1,65 +0,0 @@
//! Helpers for the Web Streaming API in Rust, mostly the missing bindings in the [`web_sys`] crate.
use wasm_bindgen::prelude::wasm_bindgen;
use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;
// ===================================
// === ReadableStreamDefaultReader ===
// ===================================
#[wasm_bindgen]
extern "C" {
    /// The wrapper for ReadableStreamDefaultReader js class.
    ///
    /// See https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultReader.
    pub type ReadableStreamDefaultReader;

    /// Returns a Promise providing access to the next chunk in the stream's internal queue.
    ///
    /// See https://developer.mozilla.org/en-US/docs/Web/API/ReadableStreamDefaultReader/read.
    #[allow(unsafe_code)]
    #[wasm_bindgen(method)]
    pub fn read(this: &ReadableStreamDefaultReader) -> js_sys::Promise;
}
// ===============
// === BlobExt ===
// ===============
/// The extension for [`js_sys::Blob`] API.
// TODO[ao] Those functions are part of the official API on newer web_sys versions, however the
//   version bump is tricky, see https://github.com/enso-org/ide/issues/1591.
pub trait BlobExt {
    /// Returns a ReadableStream which upon reading returns the data contained within the Blob.
    /// See https://developer.mozilla.org/en-US/docs/Web/API/Blob/stream.
    fn stream(&self) -> Result<web_sys::ReadableStream, JsValue>;

    /// Returns a Reader of the Blob data. It assumes that the reader is of
    /// [`ReadableStreamDefaultReader`] type. See also
    /// https://developer.mozilla.org/en-US/docs/Web/API/Blob/stream and
    /// https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/getReader
    fn stream_reader(&self) -> Result<ReadableStreamDefaultReader, JsValue>;
}
impl BlobExt for web_sys::Blob {
    /// Obtain the blob's `ReadableStream` by looking up and invoking the JS `stream` method
    /// dynamically through `Reflect`, since the bound `web_sys` version predates this API.
    #[allow(unused_qualifications)]
    fn stream(&self) -> Result<web_sys::ReadableStream, JsValue> {
        let this = self.as_ref();
        let method_as_value = js_sys::Reflect::get(this, &"stream".into())?;
        let method = method_as_value.dyn_into::<js_sys::Function>()?;
        method.call0(this)?.dyn_into()
    }

    /// Obtain a default reader of the blob's stream via the dynamically looked-up
    /// JS `getReader` method.
    #[allow(unused_qualifications)]
    fn stream_reader(&self) -> Result<ReadableStreamDefaultReader, JsValue> {
        // Propagate the error from `stream` with `?` instead of passing the raw `Result`
        // wrapper to `Reflect::get` (which does not type-check).
        let stream = self.stream()?;
        let method_as_value = js_sys::Reflect::get(&stream, &"getReader".into())?;
        let method = method_as_value.dyn_into::<js_sys::Function>()?;
        method.call0(&stream)?.dyn_into()
    }
}

View File

@ -17,9 +17,7 @@ crate-type = ["rlib"]
default = []
[dependencies]
bytemuck = { workspace = true }
serde = { workspace = true }
paste = { workspace = true }
[lints]
workspace = true

Some files were not shown because too many files have changed in this diff Show More