diff --git a/.npmrc b/.npmrc deleted file mode 100644 index 521a9f7c07..0000000000 --- a/.npmrc +++ /dev/null @@ -1 +0,0 @@ -legacy-peer-deps=true diff --git a/Cargo.lock b/Cargo.lock index d4d05eebdc..7eee44ad9c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -156,97 +156,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "async-executor" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17adb73da160dfb475c183343c8cccd80721ea5a605d3eb57125f0a7b7a92d0b" -dependencies = [ - "async-lock", - "async-task", - "concurrent-queue", - "fastrand", - "futures-lite", - "slab", -] - -[[package]] -name = "async-global-executor" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" -dependencies = [ - "async-channel", - "async-executor", - "async-io", - "async-lock", - "blocking", - "futures-lite", - "once_cell", -] - -[[package]] -name = "async-io" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c374dda1ed3e7d8f0d9ba58715f924862c63eae6849c92d3a18e7fbde9e2794" -dependencies = [ - "async-lock", - "autocfg", - "concurrent-queue", - "futures-lite", - "libc", - "log", - "parking", - "polling", - "slab", - "socket2", - "waker-fn", - "windows-sys 0.42.0", -] - -[[package]] -name = "async-lock" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685" -dependencies = [ - "event-listener", - "futures-lite", -] - -[[package]] -name = "async-std" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" -dependencies = [ - "async-channel", - "async-global-executor", - "async-io", - "async-lock", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - -[[package]] -name = "async-task" -version = "4.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" - [[package]] name = "async-trait" version = "0.1.78" @@ -258,12 +167,6 @@ dependencies = [ "syn 2.0.53", ] -[[package]] -name = "atomic-waker" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "debc29dde2e69f9e47506b525f639ed42300fc014a3e007832592448fa8e4599" - [[package]] name = "atty" version = "0.2.14" @@ -687,20 +590,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "blocking" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c67b173a56acffd6d2326fb7ab938ba0b00a71480e14902b2591c87bc5741e8" -dependencies = [ - "async-channel", - "async-lock", - "async-task", - "atomic-waker", - "fastrand", - "futures-lite", -] - [[package]] name = "boolinator" version = "2.4.0" @@ -752,16 +641,6 @@ dependencies = [ "syn 1.0.107", ] -[[package]] -name = "buf_redux" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f" -dependencies = [ - "memchr", - "safemem", -] - [[package]] name = "bumpalo" version = "3.12.2" @@ -801,26 +680,6 @@ dependencies = [ "syn 1.0.107", ] 
-[[package]] -name = "bytemuck" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea" -dependencies = [ - "bytemuck_derive", -] - -[[package]] -name = "bytemuck_derive" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdde5c9cd29ebd706ce1b35600920a33550e402fc998a2e53ad3b42c3c47a192" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.53", -] - [[package]] name = "byteorder" version = "1.4.3" @@ -1577,17 +1436,6 @@ dependencies = [ "serde", ] -[[package]] -name = "enso-debug-api" -version = "0.1.0" -dependencies = [ - "derivative", - "futures", - "js-sys", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "enso-doc-parser" version = "0.1.0" @@ -1596,7 +1444,6 @@ dependencies = [ "enso-metamodel-lexpr", "enso-parser", "enso-prelude", - "enso-profiler", "enso-reflect", "lexpr", "pretty_assertions", @@ -1656,6 +1503,17 @@ dependencies = [ "syn 1.0.107", ] +[[package]] +name = "enso-macros" +version = "0.2.7" +dependencies = [ + "Inflector", + "enso-macro-utils", + "proc-macro2", + "quote", + "syn 1.0.107", +] + [[package]] name = "enso-metamodel" version = "0.1.0" @@ -1689,7 +1547,7 @@ dependencies = [ "enso-parser-syntax-tree-visitor", "enso-prelude", "enso-reflect", - "enso-shapely-macros", + "paste", "rand 0.8.5", "rand_chacha 0.3.1", "rand_distr", @@ -1755,45 +1613,16 @@ dependencies = [ [[package]] name = "enso-prelude" -version = "0.2.6" +version = "0.2.7" dependencies = [ - "anyhow", "boolinator", "derivative", "derive_more", "enso-logging", + "enso-macros", "enso-reflect", - "enso-shapely", "enso-zst", - "failure", - "futures", - "itertools", - "paste", "serde", - "serde_json", - "smallvec", - "web-sys", -] - -[[package]] -name = "enso-profiler" -version = "0.1.0" -dependencies = [ - "enso-profiler-macros", - "enso-web", - "futures", - "serde", - "serde_json", -] - -[[package]] -name = "enso-profiler-macros" -version = "0.1.0" -dependencies = [ - "Inflector", - "proc-macro2", - "quote", - "syn 2.0.53", ] [[package]] @@ -1813,53 +1642,10 @@ dependencies = [ "syn 1.0.107", ] -[[package]] -name = "enso-shapely" -version = "0.2.0" -dependencies = [ - "enso-prelude", - "enso-shapely-macros", - "enso-zst", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "enso-shapely-macros" -version = "0.2.1" -dependencies = [ - "Inflector", - "boolinator", - "enso-macro-utils", - "itertools", - "proc-macro2", - "quote", - "syn 1.0.107", -] - -[[package]] -name = "enso-web" -version = "0.1.0" -dependencies = [ - "async-std", - "console_error_panic_hook", - "derivative", - "enso-debug-api", - "enso-logging", - "enso-shapely", - "gloo-timers", - "js-sys", - "wasm-bindgen", - "wasm-bindgen-test", - "web-sys", -] - [[package]] name = "enso-zst" version = "0.1.0" dependencies = [ - "bytemuck", - "paste", "serde", ] @@ -2197,18 +1983,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "goblin" version = "0.6.1" @@ -2523,7 +2297,6 @@ dependencies = [ "multimap", "new_mime_guess", "octocrab", - "paste", "path-absolutize", 
"path-slash", "pathdiff", @@ -2693,15 +2466,6 @@ dependencies = [ "simple_asn1", ] -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - [[package]] name = "language-tags" version = "0.3.2" @@ -2783,9 +2547,6 @@ name = "log" version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" -dependencies = [ - "value-bag", -] [[package]] name = "logstat" @@ -2894,24 +2655,6 @@ dependencies = [ "serde", ] -[[package]] -name = "multipart" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00dec633863867f29cb39df64a397cdf4a6354708ddd7759f70c7fb51c5f9182" -dependencies = [ - "buf_redux", - "httparse", - "log", - "mime", - "mime_guess", - "quick-error", - "rand 0.8.5", - "safemem", - "tempfile", - "twoway", -] - [[package]] name = "nanorand" version = "0.7.0" @@ -3352,20 +3095,6 @@ dependencies = [ "serde", ] -[[package]] -name = "polling" -version = "2.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6" -dependencies = [ - "autocfg", - "cfg-if", - "libc", - "log", - "wepoll-ffi", - "windows-sys 0.42.0", -] - [[package]] name = "port_check" version = "0.1.5" @@ -3483,12 +3212,6 @@ dependencies = [ "unicase", ] -[[package]] -name = "quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - [[package]] name = "quote" version = "1.0.35" @@ -3909,12 +3632,6 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" -[[package]] -name = "safemem" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" - [[package]] name = "same-file" version = "1.0.6" @@ -4104,17 +3821,6 @@ dependencies = [ "unsafe-libyaml", ] -[[package]] -name = "sha-1" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - [[package]] name = "sha1" version = "0.6.1" @@ -4631,18 +4337,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-tungstenite" -version = "0.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f714dd15bead90401d77e04243611caec13726c2408afd5b31901dfcdcb3b181" -dependencies = [ - "futures-util", - "log", - "tokio", - "tungstenite", -] - [[package]] name = "tokio-util" version = "0.7.4" @@ -4787,34 +4481,6 @@ dependencies = [ "unicode-width", ] -[[package]] -name = "tungstenite" -version = "0.17.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" -dependencies = [ - "base64 0.13.1", - "byteorder", - "bytes", - "http", - "httparse", - "log", - "rand 0.8.5", - "sha-1", - "thiserror", - "url", - "utf-8", -] - -[[package]] -name = "twoway" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"59b11b2b5241ba34be09c3cc85a36e56e48f9888862e19cedf23336d35316ed1" -dependencies = [ - "memchr", -] - [[package]] name = "typenum" version = "1.16.0" @@ -4911,12 +4577,6 @@ version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9" -[[package]] -name = "utf-8" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" - [[package]] name = "utf8-width" version = "0.1.6" @@ -4945,12 +4605,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" -[[package]] -name = "value-bag" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fec26a25bd6fca441cdd0f769fd7f891bae119f996de31f86a5eddccef54c1d" - [[package]] name = "vcpkg" version = "0.2.15" @@ -5026,7 +4680,6 @@ dependencies = [ "log", "mime", "mime_guess", - "multipart", "percent-encoding", "pin-project", "rustls-pemfile 0.2.1", @@ -5036,7 +4689,6 @@ dependencies = [ "serde_urlencoded", "tokio", "tokio-stream", - "tokio-tungstenite", "tokio-util", "tower-service", "tracing", @@ -5222,15 +4874,6 @@ dependencies = [ "websocket-codec", ] -[[package]] -name = "wepoll-ffi" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb" -dependencies = [ - "cc", -] - [[package]] name = "which" version = "4.4.0" diff --git a/Cargo.toml b/Cargo.toml index b42022c571..8e37263b4e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -113,7 +113,6 @@ derivative = { version = "2.2" } futures = { version = "0.3" } itertools = { version = "0.12.1" } lazy_static = { version = "1.4" } -paste = { version = "1.0" } serde_json = { version = "1.0", features = ["raw_value"] } smallvec = { version = "1.0.0" } js-sys = { version = "0.3" } @@ -150,4 +149,3 @@ quote = { version = "1.0.23" } semver = { version = "1.0.0", features = ["serde"] } strum = { version = "0.26.2", features = ["derive"] } thiserror = "1.0.40" -bytemuck = { version = "1.13.1", features = ["derive"] } diff --git a/build/ci_utils/Cargo.toml b/build/ci_utils/Cargo.toml index adbe06ada3..5ca71179f5 100644 --- a/build/ci_utils/Cargo.toml +++ b/build/ci_utils/Cargo.toml @@ -37,7 +37,6 @@ mime = "0.3.16" multimap = "0.8.3" new_mime_guess = "4.0.0" octocrab = { workspace = true } -paste = { workspace = true } path-absolutize = "3.0.11" pathdiff = "0.2.1" path-slash = "0.2.1" @@ -69,7 +68,7 @@ which = "4.2.2" zip = { version = "0.6.2", default-features = false, features = ["deflate"] } [dev-dependencies] -warp = "0.3.2" +warp = { version = "0.3.2", default-features = false } wiremock = "0.5.10" [lints] diff --git a/build/ci_utils/src/programs/sbt.rs b/build/ci_utils/src/programs/sbt.rs index 0b90ea025d..7d123e8174 100644 --- a/build/ci_utils/src/programs/sbt.rs +++ b/build/ci_utils/src/programs/sbt.rs @@ -21,26 +21,6 @@ impl Manipulator for ServerAutostart { } } -macro_rules! strong_string { - ($name:ident($inner_ty:ty)) => { - paste::paste! 
{ - #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash, PartialOrd, Ord)] - pub struct $name(pub <$inner_ty as ToOwned>::Owned); - - impl $name { - pub fn new(inner: impl Into<<$inner_ty as ToOwned>::Owned>) -> Self { - Self(inner.into()) - } - } - - #[derive(Debug, Serialize, PartialEq, Eq, Hash, PartialOrd, Ord)] - pub struct [<$name Ref>]<'a>(pub &'a $inner_ty); - } - }; -} - -strong_string!(Task(str)); - #[derive(Clone, Copy, Debug, Default)] pub struct Sbt; diff --git a/build/enso-formatter/src/lib.rs b/build/enso-formatter/src/lib.rs index 8ac4dca2a1..54deec2961 100644 --- a/build/enso-formatter/src/lib.rs +++ b/build/enso-formatter/src/lib.rs @@ -9,8 +9,6 @@ //! Possible extensions, not implemented yet: //! - Sections are automatically keeping spacing. -// === Features === -#![feature(exit_status_error)] // === Non-Standard Linter Configuration === #![allow(missing_docs)] #![deny(keyword_idents)] diff --git a/lib/rust/data-structures/src/im_list.rs b/lib/rust/data-structures/src/im_list.rs index 1ed37330da..47011859ae 100644 --- a/lib/rust/data-structures/src/im_list.rs +++ b/lib/rust/data-structures/src/im_list.rs @@ -82,7 +82,7 @@ impl NonEmpty { /// Convert this list to a vector. pub fn to_vec(&self) -> Vec<&T> { - let mut out = vec![&self.head]; + let mut out = vec![self.head()]; let mut list = self.tail(); loop { match list.head() { diff --git a/lib/rust/data-structures/src/lib.rs b/lib/rust/data-structures/src/lib.rs index d01ef70258..391ae14111 100644 --- a/lib/rust/data-structures/src/lib.rs +++ b/lib/rust/data-structures/src/lib.rs @@ -1,10 +1,5 @@ //! Library of general data structures. -// === Features === -#![feature(associated_type_bounds)] -#![feature(test)] -#![feature(trait_alias)] -#![feature(cell_update)] // === Non-Standard Linter Configuration === #![deny(unconditional_recursion)] #![warn(missing_docs)] diff --git a/lib/rust/debug-api/Cargo.toml b/lib/rust/debug-api/Cargo.toml deleted file mode 100644 index 5b69bc99d7..0000000000 --- a/lib/rust/debug-api/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "enso-debug-api" -version = "0.1.0" -authors = ["Enso Team "] -edition = "2021" - -[dependencies] -derivative = { workspace = true } -futures = { workspace = true } -js-sys = { workspace = true } -wasm-bindgen = { workspace = true } -web-sys = { version = "0.3.4", features = ["console"] } - -[lints] -workspace = true diff --git a/lib/rust/debug-api/src/lib.rs b/lib/rust/debug-api/src/lib.rs deleted file mode 100644 index dfb4be6028..0000000000 --- a/lib/rust/debug-api/src/lib.rs +++ /dev/null @@ -1,183 +0,0 @@ -//! Functionality for producing debug information. - -// === Features === -#![feature(extern_types)] - -use futures::prelude::*; - -use derivative::Derivative; -use std::sync::atomic::AtomicBool; -use std::sync::atomic::Ordering; - - - -// =========================== -// === LifecycleController === -// =========================== - -/// Handle to an API for managing application shutdown. -#[derive(Derivative)] -#[derivative(Debug)] -pub struct LifecycleController { - #[derivative(Debug = "ignore")] - #[cfg_attr(not(target_arg = "wasm32"), allow(unused))] - api: js::lifecycle::Lifecycle, -} - -impl LifecycleController { - /// Try to obtain a handle. Will succeed if running in Electron. - pub fn new() -> Option { - lifecycle_controller().map(|api| Self { api }) - } - - /// Initiate application shutdown. 
- pub fn quit(&self) { - #[cfg(target_arch = "wasm32")] - self.api.quit(); - #[cfg(not(target_arch = "wasm32"))] - unreachable!("Instance can only be acquired under wasm32."); - } -} - - - -// =========================== -// === Saving profile data === -// =========================== - -/// Emit profile data. -pub fn save_profile(profile: &str) { - static PROFILE_SAVED: AtomicBool = AtomicBool::new(false); - let already_saved = PROFILE_SAVED.swap(true, Ordering::Relaxed); - if !already_saved { - match profiling_data_api() { - Some(api) => api.save_profile(profile), - None => web_sys::console::log_1(&profile.into()), - } - } -} - -/// Get profile data loaded from files, if the Electron API is available. -pub fn load_profiles() -> Option>> { - let api = profiling_data_api()?; - let (sender, receiver) = futures::channel::oneshot::channel(); - let handler = wasm_bindgen::prelude::Closure::once(|profiles: Vec| { - let context = "Parsing profile file as UTF-8 String"; - let profiles: Vec = - profiles.into_iter().map(|value| value.as_string().expect(context)).collect(); - // This only fails if the receiver was dropped; in that case the data is no longer needed. - let _result = sender.send(profiles); - }); - api.load_profiles(&handler); - Some(async move { - let result = receiver.await; - drop(handler); - // The error case (Cancelled) cannot occur, because the handler owns the sender, and we - // ensure the handler isn't dropped until after we have received the data. - result.unwrap() - }) -} - - - -// ====================== -// === GPU Debug Info === -// ====================== - -/// Load a page displaying information used for debugging hardware-specific rendering issues. -pub fn open_gpu_debug_info() { - if let Some(api) = hardware_info_api() { - api.open_gpu_debug_info(); - } -} - - - -// =========== -// === FFI === -// =========== - -/// Javascript FFI -#[allow(clippy::empty_docs)] // https://github.com/rust-lang/rust-clippy/issues/12377 -pub mod js { - /// Enso Lifecycle API - pub mod lifecycle { - use wasm_bindgen::prelude::*; - - #[wasm_bindgen] - extern "C" { - pub type Lifecycle; - - #[wasm_bindgen(method, js_name = quit)] - #[allow(unsafe_code)] - pub fn quit(this: &Lifecycle); - } - } - - /// Enso Profiling Data API - pub mod profiling_data { - use wasm_bindgen::prelude::*; - - #[wasm_bindgen] - extern "C" { - pub type ProfilingData; - - #[wasm_bindgen(method, js_name = saveProfile)] - #[allow(unsafe_code)] - pub fn save_profile(this: &ProfilingData, data: &str); - - #[wasm_bindgen(method, js_name = loadProfiles)] - #[allow(unsafe_code)] - pub fn load_profiles(this: &ProfilingData, callback: &Closure)>); - } - } - - /// Enso Hardware Info API - pub mod hardware_info { - use wasm_bindgen::prelude::*; - - #[wasm_bindgen] - extern "C" { - pub type HardwareInfo; - - #[wasm_bindgen(method, js_name = openGpuDebugInfo)] - #[allow(unsafe_code)] - pub fn open_gpu_debug_info(this: &HardwareInfo); - } - } - - /// Enso Console API - pub mod console { - use wasm_bindgen::prelude::*; - - #[wasm_bindgen] - extern "C" { - pub type Console; - - #[wasm_bindgen(method, js_name = error)] - #[allow(unsafe_code)] - pub fn error(this: &Console, data: &str); - } - } -} - -macro_rules! window_prop_getter { - ($prop:expr; $fun:ident -> $ty:ty) => { - /// Return a property of `window`, cast to an expected type. 
- pub fn $fun() -> Option<$ty> { - use wasm_bindgen::JsCast; - let window = web_sys::window()?; - let prop = $prop; - let val = js_sys::Reflect::get(&window, &prop.into()).ok()?; - if val.is_undefined() { - return None; - } - Some(val.unchecked_into()) - } - }; -} - -window_prop_getter!("enso_console"; console -> js::console::Console); -window_prop_getter!("enso_lifecycle"; lifecycle_controller -> js::lifecycle::Lifecycle); -window_prop_getter!("enso_profiling_data"; profiling_data_api -> js::profiling_data::ProfilingData); -window_prop_getter!("enso_hardware_info"; hardware_info_api -> js::hardware_info::HardwareInfo); diff --git a/lib/rust/macro-utils/src/lib.rs b/lib/rust/macro-utils/src/lib.rs index 679b614c5a..f8162d5335 100644 --- a/lib/rust/macro-utils/src/lib.rs +++ b/lib/rust/macro-utils/src/lib.rs @@ -1,58 +1,16 @@ //! A number of helper functions meant to be used in the procedural enso-shapely-macros //! definitions. -// === Features === -#![feature(trait_alias)] // === Non-Standard Linter Configuration === #![warn(missing_docs)] -use proc_macro2::TokenStream; use proc_macro2::TokenTree; -use quote::quote; -use syn::visit::Visit; use syn::WhereClause; use syn::WherePredicate; use syn_1 as syn; -// ========================== -// === Token Stream Utils === -// ========================== - -/// Maps all the tokens in the stream using a given function. -pub fn map_tokens TokenTree>(input: TokenStream, f: F) -> TokenStream { - let ret_iter = input.into_iter().map(f); - ret_iter.collect() -} - -/// Rewrites stream replacing each token with a sequence of tokens returned by -/// the given function. The groups (e.g. token tree within braces) are unpacked, -/// rewritten and repacked into groups -- the function is applied recursively. -pub fn rewrite_stream TokenStream + Copy>( - input: TokenStream, - f: F, -) -> TokenStream { - let mut ret = TokenStream::new(); - for token in input.into_iter() { - match token { - TokenTree::Group(group) => { - let delim = group.delimiter(); - let span = group.span(); - let rewritten = rewrite_stream(group.stream(), f); - let mut new_group = proc_macro2::Group::new(delim, rewritten); - new_group.set_span(span); - let new_group = vec![TokenTree::from(new_group)]; - ret.extend(new_group.into_iter()) - } - _ => ret.extend(f(token)), - } - } - ret -} - - - // =================== // === Token Utils === // =================== @@ -71,36 +29,6 @@ pub fn matching_ident(token: &TokenTree, name: &str) -> bool { // === Repr === // ============ -/// Obtains text representation of given `ToTokens`-compatible input. -pub fn repr(t: &T) -> String { - quote!(#t).to_string() -} - - - -// =================== -// === Field Utils === -// =================== - -/// Collects all fields, named or not. -pub fn fields_list(fields: &syn::Fields) -> Vec<&syn::Field> { - match fields { - syn::Fields::Named(ref f) => f.named.iter().collect(), - syn::Fields::Unnamed(ref f) => f.unnamed.iter().collect(), - syn::Fields::Unit => Default::default(), - } -} - -/// Returns token that refers to the field. -/// -/// It is the field name for named field and field index for unnamed fields. -pub fn field_ident_token(field: &syn::Field, index: syn::Index) -> TokenStream { - match &field.ident { - Some(ident) => quote!(#ident), - None => quote!(#index), - } -} - /// Returns names of the named fields. 
pub fn field_names(fields: &syn::FieldsNamed) -> Vec<&syn::Ident> { fields @@ -141,103 +69,6 @@ pub fn identifier_sequence(len: usize) -> Vec { -// ======================= -// === Type Path Utils === -// ======================= - -/// Obtain list of generic arguments on the path's segment. -pub fn path_segment_generic_args(segment: &syn::PathSegment) -> Vec<&syn::GenericArgument> { - match segment.arguments { - syn::PathArguments::AngleBracketed(ref args) => args.args.iter().collect(), - _ => Vec::new(), - } -} - -/// Obtain list of generic arguments on the path's last segment. -/// -/// Empty, if path contains no segments. -pub fn ty_path_generic_args(ty_path: &syn::TypePath) -> Vec<&syn::GenericArgument> { - ty_path.path.segments.last().map_or(Vec::new(), path_segment_generic_args) -} - -/// Obtain list of type arguments on the path's last segment. -pub fn ty_path_type_args(ty_path: &syn::TypePath) -> Vec<&syn::Type> { - ty_path_generic_args(ty_path) - .iter() - .filter_map(|generic_arg| match generic_arg { - syn::GenericArgument::Type(t) => Some(t), - _ => None, - }) - .collect() -} - -/// Last type argument of the last segment on the type path. -pub fn last_type_arg(ty_path: &syn::TypePath) -> Option<&syn::GenericArgument> { - ty_path_generic_args(ty_path).last().copied() -} - - - -// ===================== -// === Collect Types === -// ===================== - -/// Visitor that accumulates all visited `syn::TypePath`. -#[derive(Debug, Default)] -pub struct TypeGatherer<'ast> { - /// Observed types accumulator. - pub types: Vec<&'ast syn::TypePath>, -} - -impl<'ast> Visit<'ast> for TypeGatherer<'ast> { - fn visit_type_path(&mut self, node: &'ast syn::TypePath) { - self.types.push(node); - syn::visit::visit_type_path(self, node); - } -} - -/// All `TypePath`s in the given's `Type` subtree. -pub fn gather_all_types(node: &syn::Type) -> Vec<&syn::TypePath> { - let mut type_gather = TypeGatherer::default(); - type_gather.visit_type(node); - type_gather.types -} - -/// All text representations of `TypePath`s in the given's `Type` subtree. -pub fn gather_all_type_reprs(node: &syn::Type) -> Vec { - gather_all_types(node).iter().map(repr).collect() -} - - - -// ======================= -// === Type Dependency === -// ======================= - -/// Naive type equality test by comparing its representation with a string. -pub fn type_matches_repr(ty: &syn::Type, target_repr: &str) -> bool { - repr(ty) == target_repr -} - -/// Naive type equality test by comparing their text representations. -pub fn type_matches(ty: &syn::Type, target_param: &syn::GenericParam) -> bool { - type_matches_repr(ty, &repr(target_param)) -} - -/// Does type depends on the given type parameter. -pub fn type_depends_on(ty: &syn::Type, target_param: &syn::GenericParam) -> bool { - let target_param = repr(target_param); - let relevant_types = gather_all_types(ty); - relevant_types.iter().any(|ty| repr(ty) == target_param) -} - -/// Does enum variant depend on the given type parameter. 
-pub fn variant_depends_on(var: &syn::Variant, target_param: &syn::GenericParam) -> bool { - var.fields.iter().any(|field| type_depends_on(&field.ty, target_param)) -} - - - // =================== // === WhereClause === // =================== @@ -247,90 +78,3 @@ pub fn new_where_clause(predicates: impl IntoIterator) -> let predicates = syn::punctuated::Punctuated::from_iter(predicates); WhereClause { where_token: Default::default(), predicates } } - - - -// ============= -// === Tests === -// ============= - -#[cfg(test)] -mod tests { - use super::*; - - fn parse(code: &str) -> T { - syn::parse_str(code).unwrap() - } - - #[test] - fn repr_round_trips() { - let program = "pub fn repr(t: &T) -> String {}"; - let tokens = parse::(program); - let quoted_program = repr(&tokens); - let tokens2 = parse::("ed_program); - // check only second round-trip, first is allowed to break whitespace - assert_eq!(repr(&tokens), repr(&tokens2)); - } - - #[test] - fn fields_list_test() { - let tuple_like = "struct Unnamed(i32, String, T);"; - let proper_struct = "struct Named{i: i32, s: String, t: T}"; - let expected_types = vec!["i32", "String", "T"]; - - fn assert_field_types(program: &str, expected_types: &[&str]) { - let tokens = parse::(program); - let fields = fields_list(&tokens.fields); - let types = fields.iter().map(|f| repr(&f.ty)); - assert_eq!(Vec::from_iter(types), expected_types); - } - - assert_field_types(tuple_like, &expected_types); - assert_field_types(proper_struct, &expected_types); - } - - #[test] - fn type_dependency() { - let param: syn::GenericParam = parse("T"); - let depends = |code| { - let ty: syn::Type = parse(code); - type_depends_on(&ty, ¶m) - }; - - // sample types that depend on `T` - let dependents = - vec!["T", "Option", "Pair", "Pair", "Pair", "&T", "&'t mut T"]; - // sample types that do not depend on `T` - let independents = - vec!["Tt", "Option", "Pair", "Pair", "Pair", "i32", "&str"]; - for dependent in dependents { - assert!(depends(dependent), "{} must depend on {}", repr(&dependent), repr(¶m)); - } - for independent in independents { - assert!( - !depends(independent), - "{} must not depend on {}", - repr(&independent), - repr(¶m) - ); - } - } - - #[test] - fn collecting_type_path_args() { - fn check(expected_type_args: Vec<&str>, ty_path: &str) { - let ty_path = parse(ty_path); - let args = ty_path_type_args(&ty_path); - assert_eq!(expected_type_args.len(), args.len()); - let zipped = expected_type_args.iter().zip(args.iter()); - for (expected, got) in zipped { - assert_eq!(expected, &repr(got)); - } - } - check(vec!["T"], "std::Option"); - check(vec!["U"], "std::Option"); - check(vec!["A", "B"], "Either"); - assert_eq!(last_type_arg(&parse("i32")), None); - assert_eq!(repr(&last_type_arg(&parse("Foo"))), "C"); - } -} diff --git a/lib/rust/metamodel/src/lib.rs b/lib/rust/metamodel/src/lib.rs index 8d10ebe4eb..0a0ee3b849 100644 --- a/lib/rust/metamodel/src/lib.rs +++ b/lib/rust/metamodel/src/lib.rs @@ -44,8 +44,6 @@ //! Java code after all computation is completed. 
// === Features === -#![feature(option_get_or_insert_default)] -#![feature(type_alias_impl_trait)] #![feature(impl_trait_in_assoc_type)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] diff --git a/lib/rust/parser/Cargo.toml b/lib/rust/parser/Cargo.toml index 7142707a8a..1c69716c15 100644 --- a/lib/rust/parser/Cargo.toml +++ b/lib/rust/parser/Cargo.toml @@ -13,8 +13,8 @@ license-file = "../../LICENSE" enso-prelude = { path = "../prelude" } enso-reflect = { path = "../reflect" } enso-data-structures = { path = "../data-structures" } -enso-shapely-macros = { path = "../shapely/macros" } enso-parser-syntax-tree-visitor = { path = "src/syntax/tree/visitor" } +paste = { version = "1.0" } serde = { workspace = true } serde_json = { workspace = true } uuid = { version = "1.1", features = ["serde"] } diff --git a/lib/rust/parser/doc-parser/Cargo.toml b/lib/rust/parser/doc-parser/Cargo.toml index 9bf7d96df4..feb49bd3af 100644 --- a/lib/rust/parser/doc-parser/Cargo.toml +++ b/lib/rust/parser/doc-parser/Cargo.toml @@ -12,7 +12,6 @@ license-file = "../../LICENSE" [dependencies] enso-parser = { path = ".." } enso-prelude = { path = "../../prelude" } -enso-profiler = { path = "../../profiler" } enso-reflect = { path = "../../reflect" } serde = { workspace = true } diff --git a/lib/rust/parser/doc-parser/src/doc_sections.rs b/lib/rust/parser/doc-parser/src/doc_sections.rs index 840b16ac2a..3378216655 100644 --- a/lib/rust/parser/doc-parser/src/doc_sections.rs +++ b/lib/rust/parser/doc-parser/src/doc_sections.rs @@ -41,7 +41,6 @@ impl DocParser { } /// Parse the documentation. - #[profile(Detail)] pub fn parse(&mut self, input: &str) -> Vec<DocSection> { for (line_number, line) in input.trim_start().lines().enumerate() { let location = Location::start_of_line(line_number); @@ -78,10 +77,9 @@ impl Argument { // We split by the first colon or space, whatever comes first. // Typically a colon must be used as a separator, but in some documentation snippets we // have there is no colon and the name of the argument is simply the first word. - let split = text.splitn(2, |c| c == ':' || c == ' '); - let (name, description) = split.collect_tuple().unwrap_or((text, "")); - let name = name.trim().to_string(); - let description = description.trim().to_string(); + let mut split = text.splitn(2, |c| c == ':' || c == ' '); + let name = split.next().unwrap_or(text).trim().to_string(); + let description = split.next().unwrap_or_default().trim().to_string(); Self { name, description } } } diff --git a/lib/rust/parser/doc-parser/src/lib.rs b/lib/rust/parser/doc-parser/src/lib.rs index 2cab247218..c245f2060d 100644 --- a/lib/rust/parser/doc-parser/src/lib.rs +++ b/lib/rust/parser/doc-parser/src/lib.rs @@ -29,11 +29,6 @@ pub use doc_sections::DocSection; -pub(crate) use enso_profiler as profiler; -pub(crate) use enso_profiler::profile; - - - // ============ // === Tags === // ============ diff --git a/lib/rust/parser/doc-parser/src/main.rs b/lib/rust/parser/doc-parser/src/main.rs index f52da335e8..d9f2c547ad 100644 --- a/lib/rust/parser/doc-parser/src/main.rs +++ b/lib/rust/parser/doc-parser/src/main.rs @@ -1,13 +1,5 @@ //! Prints a debug representation of Enso documentation found in the given Enso source file(s).
-#![recursion_limit = "256"] -// === Features === -#![feature(assert_matches)] -#![feature(allocator_api)] -#![feature(exact_size_is_empty)] -#![feature(test)] -#![feature(let_chains)] -#![feature(if_let_guard)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] #![allow(clippy::precedence)] @@ -28,7 +20,7 @@ use enso_parser::prelude::*; fn main() { let args = std::env::args().skip(1); - if args.is_empty() { + if args.len() == 0 { use std::io::Read; let mut input = String::new(); std::io::stdin().read_to_string(&mut input).unwrap(); diff --git a/lib/rust/parser/src/lexer.rs b/lib/rust/parser/src/lexer.rs index 9dee4288ed..578668f355 100644 --- a/lib/rust/parser/src/lexer.rs +++ b/lib/rust/parser/src/lexer.rs @@ -1502,7 +1502,7 @@ pub mod test { let is_operator = false; let left_offset = test_code(left_offset); let code = test_code(code); - token::ident_(left_offset, code, is_free, lift_level, is_uppercase, is_operator, false) + ident(left_offset, code, is_free, lift_level, is_uppercase, is_operator, false).into() } /// Constructor. @@ -1510,7 +1510,16 @@ pub mod test { let lift_level = code.chars().rev().take_while(|t| *t == '\'').count() as u32; let left_offset = test_code(left_offset); let code = test_code(code); - token::wildcard_(left_offset, code, lift_level) + wildcard(left_offset, code, lift_level).into() + } + + /// Constructor. + pub fn digits_(code: &str) -> Token<'_> { + digits(test_code(""), test_code(code), None).into() + } + /// Constructor. + pub fn newline_<'s>(left_offset: &'s str, code: &'s str) -> Token<'s> { + newline(test_code(left_offset), test_code(code)).into() } /// Constructor. @@ -1611,52 +1620,52 @@ mod tests { #[test] fn test_case_block() { - let newline = newline_(empty(), test_code("\n")); - test_lexer("\n", vec![newline_(empty(), test_code("\n"))]); + let newline = newline_("", "\n"); + test_lexer("\n", vec![newline_("", "\n")]); test_lexer("\n foo\n bar", vec![ - block_start_(empty(), empty()), + block_start(empty(), empty()).into(), newline.clone(), ident_(" ", "foo"), newline.clone(), ident_(" ", "bar"), - block_end_(empty(), empty()), + block_end(empty(), empty()).into(), ]); test_lexer("foo\n +", vec![ ident_("", "foo"), - block_start_(empty(), empty()), + block_start(empty(), empty()).into(), newline, operator_(" ", "+"), - block_end_(empty(), empty()), + block_end(empty(), empty()).into(), ]); } #[test] fn test_case_block_bad_indents() { - let newline = newline_(empty(), test_code("\n")); + let newline = newline_("", "\n"); #[rustfmt::skip] test_lexer(" foo\n bar\nbaz", vec![ - block_start_(empty(), empty()), - newline_(empty(), empty()), + block_start(empty(), empty()).into(), + newline_("", ""), ident_(" ", "foo"), newline.clone(), ident_(" ", "bar"), - block_end_(empty(), empty()), + block_end(empty(), empty()).into(), newline.clone(), ident_("", "baz"), ]); #[rustfmt::skip] test_lexer("\n foo\n bar\nbaz", vec![ - block_start_(empty(), empty()), + block_start(empty(), empty()).into(), newline.clone(), ident_(" ", "foo"), newline.clone(), ident_(" ", "bar"), - block_end_(empty(), empty()), + block_end(empty(), empty()).into(), newline.clone(), ident_("", "baz"), ]); #[rustfmt::skip] test_lexer("\n foo\n bar\n baz", vec![ - block_start_(empty(), empty()), + block_start(empty(), empty()).into(), newline.clone(), ident_(" ", "foo"), newline.clone(), ident_(" ", "bar"), newline, ident_(" ", "baz"), - block_end_(empty(), empty()), + block_end(empty(), empty()).into(), ]); } @@ -1664,8 +1673,8 @@ mod tests { fn 
test_case_whitespace_only_line() { test_lexer_many(vec![("foo\n \nbar", vec![ ident_("", "foo"), - newline_(empty(), test_code("\n")), - newline_(test_code(" "), test_code("\n")), + newline_("", "\n"), + newline_(" ", "\n"), ident_("", "bar"), ])]); } @@ -1690,7 +1699,7 @@ mod tests { #[test] fn test_numeric_literal() { - test_lexer("10", vec![digits_(empty(), test_code("10"), None)]); + test_lexer("10", vec![digits_("10")]); } #[test] diff --git a/lib/rust/parser/src/lib.rs b/lib/rust/parser/src/lib.rs index fda4d98be1..3af3ce96bf 100644 --- a/lib/rust/parser/src/lib.rs +++ b/lib/rust/parser/src/lib.rs @@ -110,10 +110,10 @@ pub mod syntax; /// Popular utilities, imported by most modules of this crate. pub mod prelude { - pub use enso_prelude::serde_reexports::*; pub use enso_prelude::*; pub use enso_reflect as reflect; pub use enso_reflect::Reflect; + pub(crate) use paste::paste; /// Return type for functions that will only fail in case of a bug in the implementation. #[derive(Debug, Default)] diff --git a/lib/rust/parser/src/macros/resolver.rs b/lib/rust/parser/src/macros/resolver.rs index 10a6b54ad1..b74a73f40f 100644 --- a/lib/rust/parser/src/macros/resolver.rs +++ b/lib/rust/parser/src/macros/resolver.rs @@ -349,7 +349,7 @@ impl<'s> Resolver<'s> { fn resolve_match(&mut self, macro_def: &macros::Definition, segments_start: usize) { let mut def_segments = macro_def.segments.to_vec().into_iter().rev(); let segments = self.segments.drain(segments_start..).rev(); - let segments: NonEmptyVec<_> = segments.collect_vec().try_into().unwrap(); + let segments: NonEmptyVec<_> = segments.collect::<Vec<_>>().try_into().unwrap(); let mut pattern_matched_segments = segments.mapped(|segment| { let count_must_match = "Internal error. Macro definition and match segments count mismatch."; diff --git a/lib/rust/parser/src/main.rs b/lib/rust/parser/src/main.rs index 7a4a774697..47c828df02 100644 --- a/lib/rust/parser/src/main.rs +++ b/lib/rust/parser/src/main.rs @@ -1,13 +1,5 @@ //! Tests for [`enso_parser`].
-#![recursion_limit = "256"] -// === Features === -#![feature(assert_matches)] -#![feature(allocator_api)] -#![feature(exact_size_is_empty)] -#![feature(test)] -#![feature(let_chains)] -#![feature(if_let_guard)] // === Non-Standard Linter Configuration === #![allow(clippy::option_map_unit_fn)] #![allow(clippy::precedence)] @@ -28,7 +20,7 @@ use enso_parser::prelude::*; fn main() { let args = std::env::args().skip(1); let mut parser = enso_parser::Parser::new(); - if args.is_empty() { + if args.len() == 0 { use std::io::Read; let mut input = String::new(); std::io::stdin().read_to_string(&mut input).unwrap(); diff --git a/lib/rust/parser/src/source/span.rs b/lib/rust/parser/src/source/span.rs index 8c2d5ef26a..410d4a2979 100644 --- a/lib/rust/parser/src/source/span.rs +++ b/lib/rust/parser/src/source/span.rs @@ -197,21 +197,10 @@ impl<'s> Span<'s> { pub fn length_including_whitespace(&self) -> code::Length { self.left_offset.code.length() + self.code_length } -} -impl<'s> AsRef<Span<'s>> for Span<'s> { - fn as_ref(&self) -> &Span<'s> { - self - } -} - -impl<'s, 'a, T> PartialSemigroup<T> for Span<'s> -where - T: Into<Span<'a>>, - 's: 'a, -{ #[inline(always)] - fn concat_mut(&mut self, other: T) { + fn concat<'a>(mut self, other: impl Into<Span<'a>>) -> Self + where 's: 'a { let other = other.into(); if self.code_length.is_zero() { self.left_offset += other.left_offset; @@ -224,6 +213,13 @@ where self.code_length += other.left_offset.code.length(); self.code_length += other.code_length; } + self + } +} + +impl<'s> AsRef<Span<'s>> for Span<'s> { + fn as_ref(&self) -> &Span<'s> { + self } } diff --git a/lib/rust/parser/src/syntax/item.rs b/lib/rust/parser/src/syntax/item.rs index a758003737..c1bf531a43 100644 --- a/lib/rust/parser/src/syntax/item.rs +++ b/lib/rust/parser/src/syntax/item.rs @@ -99,36 +99,3 @@ pub enum Ref<'s, 'a> { Token(token::Ref<'s, 'a>), Tree(&'a Tree<'s>), } - - -// ====================== -// === Variant Checks === -// ====================== - -/// For each token variant, generates a function checking if the token is of the given variant. For -/// example, the `is_ident` function checks if the token is an identifier. -macro_rules! generate_variant_checks { - ( - $(#$enum_meta:tt)* - pub enum $enum:ident { - $( - $(#$variant_meta:tt)* - $variant:ident $({ - $($(#$field_meta:tt)* pub $field:ident : $field_ty:ty),* $(,)? - })? - ),* $(,)? - } - ) => { paste!{ - impl<'s> Item<'s> { - $( - $(#[$($variant_meta)*])* - #[allow(missing_docs)] - pub fn [<is_ $variant:snake:lower>](&self) -> bool { - self.is_variant(token::variant::VariantMarker::$variant) - } - )* - } - }}; -} - -crate::with_token_definition!(generate_variant_checks()); diff --git a/lib/rust/parser/src/syntax/token.rs b/lib/rust/parser/src/syntax/token.rs index 4746bd5050..04bfc68a43 100644 --- a/lib/rust/parser/src/syntax/token.rs +++ b/lib/rust/parser/src/syntax/token.rs @@ -96,8 +96,6 @@ use crate::prelude::*; use crate::source::*; -use enso_shapely_macros::tagged_enum; - // ============= @@ -623,15 +621,6 @@ macro_rules! generate_token_aliases { Token(left_offset, code, variant::$variant($($($field),*)?)) } - /// Constructor. - pub fn [<$variant:snake:lower _>]<'s> ( - left_offset: impl Into<Offset<'s>>, - code: Code<'s>, - $($($field : $field_ty),*)?
- ) -> Token<'s> { - Token(left_offset, code, variant::$variant($($($field),*)?)).into() - } - impl<'s> From> for Token<'s, Variant> { fn from(token: Token<'s, variant::$variant>) -> Self { token.map_variant(|t| t.into()) diff --git a/lib/rust/parser/src/syntax/tree.rs b/lib/rust/parser/src/syntax/tree.rs index f2a100d23e..2e2e6c7479 100644 --- a/lib/rust/parser/src/syntax/tree.rs +++ b/lib/rust/parser/src/syntax/tree.rs @@ -7,7 +7,6 @@ use crate::syntax::*; use crate::span_builder; use enso_parser_syntax_tree_visitor::Visitor; -use enso_shapely_macros::tagged_enum; // ============== @@ -1078,16 +1077,13 @@ impl<'s> From> for Tree<'s> { /// as AST nodes ([`TreeVisitor`]), span information ([`SpanVisitor`]), and AST nodes or tokens /// altogether ([`ItemVisitor`]). A visitor is a struct that is modified when traversing the target /// elements. Visitors are also capable of tracking when they entered or exited a nested -/// [`Tree`] structure, and they can control how deep the traversal should be performed. To learn -/// more, see the [`RefCollectorVisitor`] implementation, which traverses [`Tree`] and collects -/// references to all [`Tree`] nodes in a vector. +/// [`Tree`] structure, and they can control how deep the traversal should be performed. /// /// # Visitable traits /// This macro also defines visitable traits, such as [`TreeVisitable`] or [`SpanVisitable`], which -/// provide [`Tree`] elements with such functions as [`visit`], [`visit_mut`], [`visit_span`], or -/// [`visit_span_mut`]. These functions let you run visitors. However, as defining a visitor is -/// relatively complex, a set of traversal functions are provided, such as [`map`], [`map_mut`], -/// [`map_span`], or [`map_span_mut`]. +/// provide [`Tree`] elements with such functions as [`visit`] or [`visit_span`]. These functions +/// let you run visitors. However, as defining a visitor is relatively complex, a traversal function +/// [`map`] is provided. /// /// # Generalization of the implementation /// The current implementation bases on a few non-generic traits. One might define a way better @@ -1113,24 +1109,12 @@ pub trait TreeVisitor<'s, 'a>: Visitor { fn visit(&mut self, ast: &'a Tree<'s>) -> bool; } -/// The visitor trait allowing for [`Tree`] nodes mutable traversal. -#[allow(missing_docs)] -pub trait TreeVisitorMut<'s>: Visitor { - fn visit_mut(&mut self, ast: &mut Tree<'s>) -> bool; -} - /// The visitor trait allowing for [`Span`] traversal. #[allow(missing_docs)] pub trait SpanVisitor<'s, 'a>: Visitor { fn visit(&mut self, ast: span::Ref<'s, 'a>) -> bool; } -/// The visitor trait allowing for [`Span`] mutable traversal. -#[allow(missing_docs)] -pub trait SpanVisitorMut<'s>: Visitor { - fn visit_mut(&mut self, ast: span::RefMut<'s, '_>) -> bool; -} - /// The visitor trait allowing for [`Item`] traversal. #[allow(missing_docs)] pub trait ItemVisitor<'s, 'a>: Visitor { @@ -1138,61 +1122,20 @@ pub trait ItemVisitor<'s, 'a>: Visitor { } macro_rules! define_visitor { - ($name:ident, $visit:ident) => { - define_visitor_no_mut! {$name, $visit} - define_visitor_mut! {$name, $visit} - }; -} - -macro_rules! define_visitor_no_mut { - ($name:ident, $visit:ident) => { - paste! { - define_visitor_internal! { - $name, - $visit, - [[<$name Visitor>]<'s, 'a>], - [<$name Visitable>], - } - } - }; -} - -macro_rules! define_visitor_mut { - ($name:ident, $visit:ident) => { - paste! { - define_visitor_internal! 
{ - [_mut mut] - $name, - [<$visit _mut>], - [[<$name VisitorMut>]<'s>], - [<$name VisitableMut>], - } - } - }; -} - -macro_rules! define_visitor_internal { ( - $([$pfx_mod:ident $mod:ident])? $name:ident, $visit:ident, - [$($visitor:tt)*], - $visitable:ident, - ) => { paste! { + $visitor:ident, + $visitable:ident + ) => { /// The visitable trait. See documentation of [`define_visitor`] to learn more. #[allow(missing_docs)] pub trait $visitable<'s, 'a> { - fn $visit(&'a $($mod)? self, _visitor: &mut V) {} - } - - impl<'s, 'a, T: $visitable<'s, 'a>> $visitable<'s, 'a> for Box { - fn $visit(&'a $($mod)? self, visitor: &mut V) { - $visitable::$visit(& $($mod)? **self, visitor) - } + fn $visit>(&'a self, _visitor: &mut V) {} } impl<'s, 'a, T: $visitable<'s, 'a>> $visitable<'s, 'a> for Option { - fn $visit(&'a $($mod)? self, visitor: &mut V) { + fn $visit>(&'a self, visitor: &mut V) { if let Some(elem) = self { $visitable::$visit(elem, visitor) } @@ -1202,7 +1145,7 @@ macro_rules! define_visitor_internal { impl<'s, 'a, T: $visitable<'s, 'a>, E: $visitable<'s, 'a>> $visitable<'s, 'a> for Result { - fn $visit(&'a $($mod)? self, visitor: &mut V) { + fn $visit>(&'a self, visitor: &mut V) { match self { Ok(elem) => $visitable::$visit(elem, visitor), Err(elem) => $visitable::$visit(elem, visitor), @@ -1211,20 +1154,17 @@ macro_rules! define_visitor_internal { } impl<'s, 'a, T: $visitable<'s, 'a>> $visitable<'s, 'a> for Vec { - fn $visit(&'a $($mod)? self, visitor: &mut V) { - self.[]().map(|t| $visitable::$visit(t, visitor)).for_each(drop); + fn $visit>(&'a self, visitor: &mut V) { + self.iter().map(|t| $visitable::$visit(t, visitor)).for_each(drop); } } impl<'s, 'a, T: $visitable<'s, 'a>> $visitable<'s, 'a> for NonEmptyVec { - fn $visit(&'a $($mod)? self, visitor: &mut V) { - self.[]().map(|t| $visitable::$visit(t, visitor)).for_each(drop); + fn $visit>(&'a self, visitor: &mut V) { + self.iter().map(|t| $visitable::$visit(t, visitor)).for_each(drop); } } - - impl<'s, 'a> $visitable<'s, 'a> for &str {} - impl<'s, 'a> $visitable<'s, 'a> for str {} - }}; + }; } macro_rules! define_visitor_for_tokens { @@ -1238,13 +1178,12 @@ macro_rules! define_visitor_for_tokens { } ) => { impl<'s, 'a> TreeVisitable<'s, 'a> for token::$kind {} - impl<'s, 'a> TreeVisitableMut<'s, 'a> for token::$kind {} }; } -define_visitor!(Tree, visit); -define_visitor!(Span, visit_span); -define_visitor_no_mut!(Item, visit_item); +define_visitor!(Tree, visit, TreeVisitor, TreeVisitable); +define_visitor!(Span, visit_span, SpanVisitor, SpanVisitable); +define_visitor!(Item, visit_item, ItemVisitor, ItemVisitable); crate::with_token_definition!(define_visitor_for_tokens()); @@ -1252,11 +1191,9 @@ crate::with_token_definition!(define_visitor_for_tokens()); // === Trait Implementations for Simple Leaf Types === macro_rules! spanless_leaf_impls { - ($ty:ident) => { + ($ty:ty) => { impl<'s, 'a> TreeVisitable<'s, 'a> for $ty {} - impl<'s, 'a> TreeVisitableMut<'s, 'a> for $ty {} impl<'a, 's> SpanVisitable<'s, 'a> for $ty {} - impl<'a, 's> SpanVisitableMut<'s, 'a> for $ty {} impl<'a, 's> ItemVisitable<'s, 'a> for $ty {} impl<'s> span::Builder<'s> for $ty { fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> { @@ -1269,6 +1206,7 @@ macro_rules! 
spanless_leaf_impls { spanless_leaf_impls!(u32); spanless_leaf_impls!(bool); spanless_leaf_impls!(VisibleOffset); +spanless_leaf_impls!(Cow<'static, str>); // === TreeVisitable special cases === @@ -1281,16 +1219,8 @@ impl<'s, 'a> TreeVisitable<'s, 'a> for Tree<'s> { } } -impl<'s, 'a> TreeVisitableMut<'s, 'a> for Tree<'s> { - fn visit_mut>(&'a mut self, visitor: &mut V) { - if visitor.visit_mut(self) { - self.variant.visit_mut(visitor) - } - } -} impl<'s, 'a, T> TreeVisitable<'s, 'a> for Token<'s, T> {} -impl<'s, 'a, T> TreeVisitableMut<'s, 'a> for Token<'s, T> {} // === SpanVisitable special cases === @@ -1306,16 +1236,6 @@ impl<'s, 'a> SpanVisitable<'s, 'a> for Tree<'s> { } } -impl<'s, 'a> SpanVisitableMut<'s, 'a> for Tree<'s> { - fn visit_span_mut>(&'a mut self, visitor: &mut V) { - if visitor.visit_mut(span::RefMut { - left_offset: &mut self.span.left_offset, - code_length: self.span.code_length, - }) { - self.variant.visit_span_mut(visitor) - } - } -} impl<'a, 's, T> SpanVisitable<'s, 'a> for Token<'s, T> { fn visit_span>(&'a self, visitor: &mut V) { @@ -1324,12 +1244,6 @@ impl<'a, 's, T> SpanVisitable<'s, 'a> for Token<'s, T> { } } -impl<'a, 's, T> SpanVisitableMut<'s, 'a> for Token<'s, T> { - fn visit_span_mut>(&'a mut self, visitor: &mut V) { - let code_length = self.code.length(); - visitor.visit_mut(span::RefMut { left_offset: &mut self.left_offset, code_length }); - } -} // === ItemVisitable special cases === @@ -1351,31 +1265,6 @@ where &'a Token<'s, T>: Into> } -// === String === - -impl<'s, 'a> TreeVisitable<'s, 'a> for String {} -impl<'s, 'a> TreeVisitableMut<'s, 'a> for String {} -impl<'a, 's> SpanVisitable<'s, 'a> for String {} -impl<'a, 's> SpanVisitableMut<'s, 'a> for String {} -impl<'a, 's> ItemVisitable<'s, 'a> for String {} -impl<'s> span::Builder<'s> for String { - fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> { - span - } -} - -impl<'s, 'a> TreeVisitable<'s, 'a> for Cow<'static, str> {} -impl<'s, 'a> TreeVisitableMut<'s, 'a> for Cow<'static, str> {} -impl<'a, 's> SpanVisitable<'s, 'a> for Cow<'static, str> {} -impl<'a, 's> SpanVisitableMut<'s, 'a> for Cow<'static, str> {} -impl<'a, 's> ItemVisitable<'s, 'a> for Cow<'static, str> {} -impl<'s> span::Builder<'s> for Cow<'static, str> { - fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> { - span - } -} - - // ========================== // === CodePrinterVisitor === @@ -1420,36 +1309,6 @@ impl<'s> Tree<'s> { -// =========================== -// === RefCollectorVisitor === -// =========================== - -/// A visitor collecting references to all [`Tree`] nodes. -#[derive(Debug, Default)] -#[allow(missing_docs)] -struct RefCollectorVisitor<'s, 'a> { - pub vec: Vec<&'a Tree<'s>>, -} - -impl<'s, 'a> Visitor for RefCollectorVisitor<'s, 'a> {} -impl<'s, 'a> TreeVisitor<'s, 'a> for RefCollectorVisitor<'s, 'a> { - fn visit(&mut self, ast: &'a Tree<'s>) -> bool { - self.vec.push(ast); - true - } -} - -impl<'s> Tree<'s> { - /// Collect references to all [`Tree`] nodes and return them in a vector. 
- pub fn collect_vec_ref(&self) -> Vec<&Tree<'s>> { - let mut visitor = RefCollectorVisitor::default(); - self.visit(&mut visitor); - visitor.vec - } -} - - - // ================= // === FnVisitor === // ================= @@ -1467,12 +1326,6 @@ impl<'s: 'a, 'a, T, F: Fn(&'a Tree<'s>) -> T> TreeVisitor<'s, 'a> for FnVisitor< } } -impl<'s, T, F: Fn(&mut Tree<'s>) -> T> TreeVisitorMut<'s> for FnVisitor { - fn visit_mut(&mut self, ast: &mut Tree<'s>) -> bool { - (self.0)(ast); - true - } -} impl<'s> Tree<'s> { /// Map the provided function over each [`Tree`] node. The function results will be discarded. @@ -1480,12 +1333,6 @@ impl<'s> Tree<'s> { let mut visitor = FnVisitor(f); self.visit(&mut visitor); } - - /// Map the provided function over each [`Tree`] node. The function results will be discarded. - pub fn map_mut(&mut self, f: impl Fn(&mut Tree<'s>) -> T) { - let mut visitor = FnVisitor(f); - self.visit_mut(&mut visitor); - } } @@ -1510,39 +1357,3 @@ impl<'s> Tree<'s> { self.variant.visit_item(&mut ItemFnVisitor { f }); } } - - - -// ================= -// === Traversal === -// ================= - -impl<'s> Tree<'s> { - /// Return an iterator over the operands of the given left-associative operator, in reverse - /// order. - pub fn left_assoc_rev<'t, 'o>(&'t self, operator: &'o str) -> LeftAssocRev<'o, 't, 's> { - let tree = Some(self); - LeftAssocRev { operator, tree } - } -} - -/// Iterator over the operands of a particular left-associative operator, in reverse order. -#[derive(Debug)] -pub struct LeftAssocRev<'o, 't, 's> { - operator: &'o str, - tree: Option<&'t Tree<'s>>, -} - -impl<'o, 't, 's> Iterator for LeftAssocRev<'o, 't, 's> { - type Item = &'t Tree<'s>; - fn next(&mut self) -> Option { - if let box Variant::OprApp(OprApp { lhs, opr: Ok(opr), rhs }) = &self.tree?.variant - && opr.code == self.operator - { - self.tree = lhs.into(); - rhs.into() - } else { - self.tree.take() - } - } -} diff --git a/lib/rust/parser/src/syntax/tree/visitor/src/lib.rs b/lib/rust/parser/src/syntax/tree/visitor/src/lib.rs index d94ecd8142..79e4f93717 100644 --- a/lib/rust/parser/src/syntax/tree/visitor/src/lib.rs +++ b/lib/rust/parser/src/syntax/tree/visitor/src/lib.rs @@ -43,9 +43,7 @@ pub fn derive_visitor(input: proc_macro::TokenStream) -> proc_macro::TokenStream let ident = &decl.ident; let (impl_generics, ty_generics, _inherent_where_clause_opt) = &decl.generics.split_for_impl(); let body = gen_body(quote!(TreeVisitable::visit), &decl.data, false); - let body_mut = gen_body(quote!(TreeVisitableMut::visit_mut), &decl.data, true); let body_span = gen_body(quote!(SpanVisitable::visit_span), &decl.data, false); - let body_span_mut = gen_body(quote!(SpanVisitableMut::visit_span_mut), &decl.data, true); let body_item = gen_body(quote!(ItemVisitable::visit_item), &decl.data, false); let impl_generics_vec: Vec<_> = impl_generics.to_token_stream().into_iter().collect(); @@ -71,14 +69,6 @@ pub fn derive_visitor(input: proc_macro::TokenStream) -> proc_macro::TokenStream } } - impl #impl_generics TreeVisitableMut #impl_generics for #ident #ty_generics { - fn visit_mut>(&'a mut self, visitor:&mut T) { - visitor.before_visiting_children(); - #body_mut - visitor.after_visiting_children(); - } - } - impl #impl_generics SpanVisitable #impl_generics for #ident #ty_generics { fn visit_span(&'a self, visitor:&mut T) { visitor.before_visiting_children(); @@ -87,14 +77,6 @@ pub fn derive_visitor(input: proc_macro::TokenStream) -> proc_macro::TokenStream } } - impl #impl_generics SpanVisitableMut #impl_generics for 
#ident #ty_generics { - fn visit_span_mut>(&'a mut self, visitor:&mut T) { - visitor.before_visiting_children(); - #body_span_mut - visitor.after_visiting_children(); - } - } - impl #impl_generics ItemVisitable #impl_generics for #ident #ty_generics { fn visit_item(&'a self, visitor:&mut T) { visitor.before_visiting_children(); @@ -104,11 +86,6 @@ pub fn derive_visitor(input: proc_macro::TokenStream) -> proc_macro::TokenStream } }; - // #[allow(missing_docs)] - // pub trait ItemVisitable<'s, 'a> { - // fn visit_item>(&'a self, _visitor: &mut V) {} - // } - output.into() } diff --git a/lib/rust/prelude/Cargo.toml b/lib/rust/prelude/Cargo.toml index 0807af31f0..208f743c49 100644 --- a/lib/rust/prelude/Cargo.toml +++ b/lib/rust/prelude/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "enso-prelude" -version = "0.2.6" +version = "0.2.7" authors = ["Enso Team "] edition = "2021" description = "An augmented standard library in the vein of Haskell's prelude." @@ -18,62 +18,12 @@ crate-type = ["rlib"] [dependencies] enso-logging = { path = "../logging" } enso-reflect = { path = "../reflect" } -enso-shapely = { path = "../shapely" } enso-zst = { path = "../zst" } -anyhow = { workspace = true } boolinator = { workspace = true } derivative = { workspace = true } derive_more = { workspace = true } -failure = { workspace = true } -futures = { workspace = true } -itertools = { workspace = true } -paste = { workspace = true } serde = { workspace = true } -serde_json = { workspace = true } -smallvec = { workspace = true } - -[dependencies.web-sys] -version = "0.3.4" -features = [ - "TextMetrics", - 'AddEventListenerOptions', - 'CanvasRenderingContext2d', - 'CssStyleDeclaration', - 'Document', - 'DomRect', - 'Element', - 'Event', - 'EventTarget', - 'EventTarget', - 'FontFaceSet', - 'HtmlCanvasElement', - 'HtmlCollection', - 'HtmlDivElement', - 'HtmlElement', - 'HtmlHeadElement', - 'HtmlImageElement', - 'KeyboardEvent', - 'Location', - 'MouseEvent', - 'Node', - 'Performance', - 'Url', - 'WebGl2RenderingContext', - 'WebGlBuffer', - 'WebGlFramebuffer', - 'WebGlProgram', - 'WebGlRenderingContext', - 'WebGlShader', - 'WebGlSync', - 'WebGlTexture', - 'WebGlUniformLocation', - 'WebGlUniformLocation', - 'WebGlVertexArrayObject', - 'WheelEvent', - 'Window', - 'console', - 'FontFace' -] +enso-macros = { path = "macros" } [lints] workspace = true diff --git a/lib/rust/shapely/macros/Cargo.toml b/lib/rust/prelude/macros/Cargo.toml similarity index 57% rename from lib/rust/shapely/macros/Cargo.toml rename to lib/rust/prelude/macros/Cargo.toml index c2dcc7c147..fff8cd0271 100644 --- a/lib/rust/shapely/macros/Cargo.toml +++ b/lib/rust/prelude/macros/Cargo.toml @@ -1,16 +1,12 @@ [package] -name = "enso-shapely-macros" -version = "0.2.1" +name = "enso-macros" +version = "0.2.7" authors = ["Enso Team "] edition = "2021" -description = "Automated typeclass derivation." -readme = "README.md" -homepage = "https://github.com/enso-org/enso/lib/rust/shapely/macros" +description = "Common macros used in Enso codebase." 
+homepage = "https://github.com/enso-org/enso" repository = "https://github.com/enso-org/enso" license-file = "../../../LICENSE" -keywords = ["typeclass", "deriving", "macro"] -categories = ["algorithms"] -publish = true [lib] proc-macro = true @@ -23,8 +19,6 @@ enso-macro-utils = { path = "../../macro-utils" } proc-macro2 = { workspace = true } quote = { workspace = true } Inflector = "0.11.4" -itertools = { workspace = true } -boolinator = { workspace = true } [dependencies.syn] version = "1.0" diff --git a/lib/rust/prelude/macros/src/lib.rs b/lib/rust/prelude/macros/src/lib.rs new file mode 100644 index 0000000000..0928869bb7 --- /dev/null +++ b/lib/rust/prelude/macros/src/lib.rs @@ -0,0 +1,23 @@ +//! This crate defines a custom attribute macro [`tagged_enum`]. + +// === Non-Standard Linter Configuration === +#![warn(missing_docs)] +#![warn(trivial_casts)] +#![warn(unused_qualifications)] + + + +extern crate proc_macro; + +mod tagged_enum; + +/// Transforms Rust enums into enums where each variant is a separate type. It also implements +/// several traits (such as conversions between variants and the enum type) and defines utility +/// functions, such as constructors. See [`tagged_enum::run`] to learn more. +#[proc_macro_attribute] +pub fn tagged_enum( + attr: proc_macro::TokenStream, + input: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + tagged_enum::run(attr, input) +} diff --git a/lib/rust/shapely/macros/src/tagged_enum.rs b/lib/rust/prelude/macros/src/tagged_enum.rs similarity index 99% rename from lib/rust/shapely/macros/src/tagged_enum.rs rename to lib/rust/prelude/macros/src/tagged_enum.rs index 3b94de5481..14d86b25f6 100644 --- a/lib/rust/shapely/macros/src/tagged_enum.rs +++ b/lib/rust/prelude/macros/src/tagged_enum.rs @@ -1,6 +1,5 @@ -use crate::prelude::*; - use inflector::cases::snakecase::to_snake_case; +use quote::quote; use syn::AttrStyle; use syn::Attribute; use syn::Data; diff --git a/lib/rust/prelude/src/data.rs b/lib/rust/prelude/src/data.rs index 0777608890..f7fb4dddaf 100644 --- a/lib/rust/prelude/src/data.rs +++ b/lib/rust/prelude/src/data.rs @@ -7,9 +7,5 @@ // ============== mod non_empty_vec; -pub mod semigroup; -pub mod vec_indexed_by; pub use non_empty_vec::NonEmptyVec; -pub use semigroup::*; -pub use vec_indexed_by::VecIndexedBy; diff --git a/lib/rust/prelude/src/data/non_empty_vec.rs b/lib/rust/prelude/src/data/non_empty_vec.rs index 12f4f5ee35..d69e26eca7 100644 --- a/lib/rust/prelude/src/data/non_empty_vec.rs +++ b/lib/rust/prelude/src/data/non_empty_vec.rs @@ -12,17 +12,14 @@ use std::vec::Drain; // =================== /// A version of [`std::vec::Vec`] that can't be empty. -#[allow(missing_docs)] -#[derive(Clone, Debug, Eq, PartialEq, Deref, DerefMut, Reflect)] +#[derive(Clone, Debug, Eq, PartialEq, Deref, DerefMut, Reflect, Serialize, Deserialize)] #[reflect(transparent)] -#[derive(crate::serde_reexports::Serialize)] -#[derive(crate::serde_reexports::Deserialize)] -pub struct NonEmptyVec { - #[reflect(as = "Vec")] - pub elems: VecIndexedBy, +pub struct NonEmptyVec { + /// An internal vector that contains at least one element at all times. + pub elems: Vec, } -impl NonEmptyVec { +impl NonEmptyVec { /// Construct a new non-empty vector. 
/// /// # Examples @@ -32,39 +29,19 @@ impl NonEmptyVec { /// use enso_prelude::NonEmptyVec; /// let mut vec: NonEmptyVec = NonEmptyVec::new(0, vec![]); /// ``` - pub fn new(first: T, rest: Vec) -> NonEmptyVec { - let mut elems = VecIndexedBy::with_capacity(1 + rest.len()); + pub fn new(first: T, rest: Vec) -> NonEmptyVec { + let mut elems = Vec::with_capacity(1 + rest.len()); elems.push(first); elems.extend(rest); NonEmptyVec { elems } } - /// Construct a new non-empty vector. - /// - /// # Examples - /// - /// ``` - /// #![allow(unused_mut)] - /// use enso_prelude::NonEmptyVec; - /// let mut vec: NonEmptyVec = NonEmptyVec::new_with_last(vec![], 0); - /// ``` - pub fn new_with_last(mut elems: Vec, last: T) -> NonEmptyVec { - elems.push(last); - NonEmptyVec { elems: elems.into() } - } - /// Length of the vector. #[allow(clippy::len_without_is_empty)] pub fn len(&self) -> usize { self.elems.len() } - /// Return the last valid index. - pub fn last_valid_index(&self) -> I - where I: From { - (self.len() - 1).into() - } - /// Construct a `NonEmptyVec` containing a single element. /// /// # Examples @@ -75,12 +52,12 @@ impl NonEmptyVec { /// assert_eq!(vec.get(0), Some(&0)); /// assert_eq!(vec.len(), 1); /// ``` - pub fn singleton(first: T) -> NonEmptyVec { + pub fn singleton(first: T) -> NonEmptyVec { let elems = vec![first]; - Self { elems: elems.into() } + Self { elems } } - /// Construct a new, `NonEmptyVec` containing the provided element and with the + /// Construct a new, `NonEmptyVec` containing the provided element and with the /// provided `capacity`. /// /// If `capacity` is 0, then the vector will be allocated with capacity for the provided `first` @@ -97,7 +74,7 @@ impl NonEmptyVec { /// /// ``` /// use enso_prelude::NonEmptyVec; - /// let mut vec = NonEmptyVec::<_, usize>::with_capacity(0, 10); + /// let mut vec = NonEmptyVec::<_>::with_capacity(0, 10); /// /// // The vector contains one item, even though it has capacity for more /// assert_eq!(vec.len(), 1); @@ -110,9 +87,9 @@ impl NonEmptyVec { /// // ...but this may make the vector reallocate /// vec.push(11); /// ``` - pub fn with_capacity(first: T, capacity: usize) -> NonEmptyVec { + pub fn with_capacity(first: T, capacity: usize) -> NonEmptyVec { debug_assert_ne!(capacity, 0, "Capacity must be greater than zero for a NonEmptyVec."); - let mut elems = VecIndexedBy::with_capacity(capacity); + let mut elems = Vec::with_capacity(capacity); elems.push(first); NonEmptyVec { elems } } @@ -132,7 +109,7 @@ impl NonEmptyVec { /// /// ``` /// use enso_prelude::NonEmptyVec; - /// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![]); + /// let mut vec = NonEmptyVec::<_>::new(0, vec![]); /// vec.reserve(10); /// assert!(vec.capacity() >= 11); /// ``` @@ -149,7 +126,7 @@ impl NonEmptyVec { /// /// ``` /// use enso_prelude::NonEmptyVec; - /// let mut vec = NonEmptyVec::<_, usize>::with_capacity(0, 10); + /// let mut vec = NonEmptyVec::<_>::with_capacity(0, 10); /// assert_eq!(vec.capacity(), 10); /// vec.shrink_to_fit(); /// assert!(vec.capacity() < 10); @@ -168,7 +145,7 @@ impl NonEmptyVec { /// /// ``` /// use enso_prelude::NonEmptyVec; - /// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]); + /// let mut vec = NonEmptyVec::<_>::new(0, vec![1, 2]); /// vec.push(3); /// assert_eq!(vec.len(), 4); /// ``` @@ -176,24 +153,9 @@ impl NonEmptyVec { self.elems.push(value) } - /// Remove an element from the back of the collection, returning it. 
- /// - /// # Examples - /// - /// ``` - /// use enso_prelude::NonEmptyVec; - /// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![1]); - /// assert!(vec.pop_if_has_more_than_1_elem().is_some()); - /// assert!(vec.pop_if_has_more_than_1_elem().is_none()); - /// assert_eq!(vec.len(), 1); - /// ``` - pub fn pop_if_has_more_than_1_elem(&mut self) -> Option { - (self.len() > 1).and_option_from(|| self.elems.pop()) - } - /// Remove an element from the back of the collection, returning it and a new possibly empty /// vector. - pub fn pop(mut self) -> (T, VecIndexedBy) { + pub fn pop(mut self) -> (T, Vec) { let first = self.elems.pop().unwrap(); (first, self.elems) } @@ -204,7 +166,7 @@ impl NonEmptyVec { /// /// ``` /// use enso_prelude::NonEmptyVec; - /// let vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]); + /// let vec = NonEmptyVec::<_>::new(0, vec![1, 2]); /// assert_eq!(*vec.first(), 0); /// ``` pub fn first(&self) -> &T { @@ -217,7 +179,7 @@ impl NonEmptyVec { /// /// ``` /// use enso_prelude::NonEmptyVec; - /// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]); + /// let mut vec = NonEmptyVec::<_>::new(0, vec![1, 2]); /// assert_eq!(*vec.first_mut(), 0); /// ``` pub fn first_mut(&mut self) -> &mut T { @@ -230,7 +192,7 @@ impl NonEmptyVec { /// /// ``` /// use enso_prelude::NonEmptyVec; - /// let vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]); + /// let vec = NonEmptyVec::<_>::new(0, vec![1, 2]); /// assert_eq!(*vec.last(), 2) /// ``` pub fn last(&self) -> &T { @@ -243,7 +205,7 @@ impl NonEmptyVec { /// /// ``` /// use enso_prelude::NonEmptyVec; - /// let mut vec = NonEmptyVec::<_, usize>::new(0, vec![1, 2]); + /// let mut vec = NonEmptyVec::<_>::new(0, vec![1, 2]); /// assert_eq!(*vec.last_mut(), 2) /// ``` pub fn last_mut(&mut self) -> &mut T { @@ -252,7 +214,7 @@ impl NonEmptyVec { /// Convert this non-empty vector to vector. pub fn into_vec(self) -> Vec { - self.elems.into() + self.elems } /// Consume this non-empty vector and return it's first element. The rest will be dropped. @@ -265,11 +227,7 @@ impl NonEmptyVec { let elems = self.elems.into_iter().map(f).collect(); NonEmptyVec { elems } } -} -impl NonEmptyVec -where I: vec_indexed_by::Index -{ /// Obtain a mutable reference to the element in the vector at the specified `index`. /// /// # Examples @@ -281,20 +239,18 @@ where I: vec_indexed_by::Index /// assert!(reference.is_some()); /// assert_eq!(*reference.unwrap(), 0); /// ``` - pub fn get_mut(&mut self, index: I) -> Option<&mut T> { + pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { self.elems.get_mut(index) } /// Get the tail reference. - pub fn tail(&self) -> &[T] - where I: From { - &self.elems[I::from(1_u8)..] + pub fn tail(&self) -> &[T] { + &self.elems[1..] } /// Get the mutable tail reference. - pub fn tail_mut(&mut self) -> &mut [T] - where I: From { - &mut self.elems[I::from(1_u8)..] + pub fn tail_mut(&mut self) -> &mut [T] { + &mut self.elems[1..] 
} /// Create a draining iterator that removes the specified range in the vector and yields the @@ -316,68 +272,47 @@ where I: vec_indexed_by::Index /// assert_eq!(drained, [1, 2, 3, 4, 5]) /// ``` pub fn drain(&mut self, range: R) -> Drain - where - R: RangeBounds, - I: PartialOrd + Copy + From, { - if range.contains(&I::from(0_u8)) { + where R: RangeBounds { + if range.contains(&0) { match range.end_bound() { - Bound::Included(n) => self.elems.drain(I::from(1_u8)..=*n), - Bound::Excluded(n) => self.elems.drain(I::from(1_u8)..*n), - Bound::Unbounded => self.elems.drain(I::from(1_u8)..), + Bound::Included(n) => self.elems.drain(1..=*n), + Bound::Excluded(n) => self.elems.drain(1..*n), + Bound::Unbounded => self.elems.drain(1..), } } else { self.elems.drain(range) } } - - /// Insert the contents of an iterator at a specified index in the collection. - /// - /// This is optimal if: - /// - The specified index is equal to the length of the vector, - /// - or the lower bound of the iterator's `size_hint()` is exact. - /// - /// Otherwise, a temporary vector is allocated and the tail is moved twice. - /// - /// # Panics - /// - /// Panics if the given index is greater than the length of the vector. - /// - /// # Examples - /// - /// ``` - /// use enso_prelude::NonEmptyVec; - /// let mut vec = NonEmptyVec::new(0, vec![1, 4, 5]); - /// vec.extend_at(2, vec![2, 3]); - /// assert_eq!(&vec.as_slice(), &[0, 1, 2, 3, 4, 5]) - /// ``` - pub fn extend_at(&mut self, index: I, elems: impl IntoIterator) { - self.splice(index..index, elems); - } } // === Trait Impls === -impl Default for NonEmptyVec { +impl Default for NonEmptyVec { fn default() -> Self { Self::singleton(default()) } } -impl TryFrom> for NonEmptyVec { +impl TryFrom> for NonEmptyVec { type Error = (); fn try_from(elems: Vec) -> Result { - (!elems.is_empty()).as_result_from(|| NonEmptyVec { elems: elems.into() }, || ()) + if elems.is_empty() { + Err(()) + } else { + Ok(NonEmptyVec { elems }) + } } } -impl From> for Vec { - fn from(v: NonEmptyVec) -> Self { - v.elems.into() +impl From> for Vec { + fn from(v: NonEmptyVec) -> Self { + v.elems } } -impl IntoIterator for NonEmptyVec { + +impl IntoIterator for NonEmptyVec { type Item = T; type IntoIter = std::vec::IntoIter; fn into_iter(self) -> Self::IntoIter { @@ -385,7 +320,7 @@ impl IntoIterator for NonEmptyVec { } } -impl<'a, T, I> IntoIterator for &'a NonEmptyVec { +impl<'a, T> IntoIterator for &'a NonEmptyVec { type Item = &'a T; type IntoIter = slice::Iter<'a, T>; fn into_iter(self) -> Self::IntoIter { @@ -393,7 +328,7 @@ impl<'a, T, I> IntoIterator for &'a NonEmptyVec { } } -impl<'a, T, I> IntoIterator for &'a mut NonEmptyVec { +impl<'a, T> IntoIterator for &'a mut NonEmptyVec { type Item = &'a mut T; type IntoIter = slice::IterMut<'a, T>; fn into_iter(self) -> Self::IntoIter { diff --git a/lib/rust/prelude/src/data/semigroup.rs b/lib/rust/prelude/src/data/semigroup.rs deleted file mode 100644 index 4b0bb69846..0000000000 --- a/lib/rust/prelude/src/data/semigroup.rs +++ /dev/null @@ -1,145 +0,0 @@ -//! In mathematics, a semigroup is an algebraic structure consisting of a set together with an -//! associative binary operation. A semigroup generalizes a monoid in that there might not exist an -//! identity element. It also (originally) generalized a group (a monoid with all inverses) to a -//! type where every element did not have to have an inverse, thus the name semigroup. 
- -use std::collections::HashMap; -use std::hash::BuildHasher; -use std::hash::Hash; - - - -// ================= -// === Semigroup === -// ================= - -/// Mutable Semigroup definition. Impls should satisfy the associativity law: -/// `x.concat(y.concat(z)) = x.concat(y).concat(z)`, in symbolic form: -/// `x <> (y <> z) = (x <> y) <> z` -pub trait PartialSemigroup: Clone { - /// An associative operation. - fn concat_mut(&mut self, other: T); - - /// An associative operation. - fn concat_ref(&self, other: T) -> Self - where Self: Clone { - self.clone().concat(other) - } - - /// An associative operation. - fn concat(mut self, other: T) -> Self { - self.concat_mut(other); - self - } -} - -impl Semigroup for T where T: PartialSemigroup + for<'t> PartialSemigroup<&'t T> {} -pub trait Semigroup: PartialSemigroup + for<'t> PartialSemigroup<&'t Self> { - fn partial_times_mut(&mut self, n: usize) { - let val = self.clone(); - for _ in 0..n - 1 { - self.concat_mut(&val) - } - } - - fn partial_times(mut self, n: usize) -> Self { - self.partial_times_mut(n); - self - } -} - - - -// ==================== -// === Stdlib Impls === -// ==================== - -// === Option === - -impl PartialSemigroup<&Option> for Option { - fn concat_mut(&mut self, other: &Self) { - if let Some(r) = other { - match self { - None => *self = Some(r.clone()), - Some(l) => l.concat_mut(r), - } - } - } -} - -impl PartialSemigroup> for Option { - fn concat_mut(&mut self, other: Self) { - if let Some(r) = other { - match self { - None => *self = Some(r), - Some(l) => l.concat_mut(r), - } - } - } -} - - -// === HashMap === - -impl PartialSemigroup<&HashMap> for HashMap -where - K: Eq + Hash + Clone, - V: Semigroup, - S: Clone + BuildHasher, -{ - fn concat_mut(&mut self, other: &Self) { - for (key, new_val) in other { - let key = key.clone(); - self.entry(key) - .and_modify(|val| val.concat_mut(new_val)) - .or_insert_with(|| new_val.clone()); - } - } -} - -impl PartialSemigroup> for HashMap -where - K: Eq + Hash + Clone, - V: Semigroup, - S: Clone + BuildHasher, -{ - fn concat_mut(&mut self, other: Self) { - for (key, new_val) in other { - self.entry(key).and_modify(|val| val.concat_mut(&new_val)).or_insert(new_val); - } - } -} - - -// === Vec === - -impl PartialSemigroup<&Vec> for Vec { - fn concat_mut(&mut self, other: &Self) { - self.extend(other.iter().cloned()) - } -} - -impl PartialSemigroup> for Vec { - fn concat_mut(&mut self, other: Self) { - self.extend(other) - } -} - - - -// ============= -// === Tests === -// ============= - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn option() { - assert_eq!(None::>.concat(&None), None); - assert_eq!(Some(vec![1]).concat(&None), Some(vec![1])); - assert_eq!(None.concat(&Some(vec![1])), Some(vec![1])); - assert_eq!(Some(vec![1]).concat(&Some(vec![2])), Some(vec![1, 2])); - } -} diff --git a/lib/rust/prelude/src/data/vec_indexed_by.rs b/lib/rust/prelude/src/data/vec_indexed_by.rs deleted file mode 100644 index a519277e5f..0000000000 --- a/lib/rust/prelude/src/data/vec_indexed_by.rs +++ /dev/null @@ -1,287 +0,0 @@ -use crate::*; - -use std::alloc::Allocator; -use std::ops::Bound; - - - -// ===================== -// === Helper macros === -// ===================== - -macro_rules! ranged_fn { - ($name:ident $([$($parm:tt)*])? ($($arg:ident : $arg_tp:ty),* ) -> $out:ty ) => { - pub fn $name $(<$($parm)*>)? 
- (&mut self, range: impl RangeBounds $(,$arg:$arg_tp)* ) -> $out { - let map_bound = |bound| match bound { - Bound::<&I>::Included(t) => Bound::Included((*t).into()), - Bound::<&I>::Excluded(t) => Bound::Excluded((*t).into()), - Bound::<&I>::Unbounded => Bound::Unbounded, - }; - let start = map_bound(range.start_bound()); - let end = map_bound(range.end_bound()); - self.vec.$name((start, end) $(,$arg)*) - } - }; -} - - - -// ==================== -// === VecIndexedBy === -// ==================== - -pub trait Index = Copy + From + Into; - -#[derive(crate::serde_reexports::Serialize)] -#[derive(crate::serde_reexports::Deserialize)] -#[derive(Derivative, Deref, DerefMut, From, Into)] -#[derivative(Clone(bound = "T: Clone, A: Allocator + Clone"))] -#[derivative(Debug(bound = "T: Debug, A: Allocator"))] -#[derivative(Default(bound = "A: Allocator, Vec: Default"))] -#[derivative(PartialEq(bound = "Vec: PartialEq"))] -#[derivative(Eq(bound = "Vec: PartialEq"))] -pub struct VecIndexedBy { - #[serde(bound( - serialize = "Vec: crate::serde_reexports::Serialize", - deserialize = "Vec: crate::serde_reexports::Deserialize<'de>" - ))] - #[deref] - #[deref_mut] - vec: Vec, - key: ZST, -} - -impl VecIndexedBy { - pub fn with_capacity(capacity: usize) -> Self { - Vec::with_capacity(capacity).into() - } -} - -impl VecIndexedBy -where A: Allocator -{ - /// Return the last valid index, if any. - pub fn last_valid_index(&self) -> Option - where I: From { - if self.vec.is_empty() { - None - } else { - Some((self.len() - 1).into()) - } - } -} - - - -// ============== -// === Traits === -// ============== - -define_not_same_trait!(); - -impl VecIndexedBy -where - A: Allocator, - I: Index, -{ - pub fn get_mut(&mut self, index: I) -> Option<&mut T> { - self.vec.get_mut(index.into()) - } - - pub fn get(&self, index: I) -> Option<&T> { - self.vec.get(index.into()) - } - - pub fn insert(&mut self, index: I, element: T) { - self.vec.insert(index.into(), element) - } - - pub fn remove(&mut self, index: I) -> T { - self.vec.remove(index.into()) - } - - ranged_fn! {drain() -> std::vec::Drain<'_, T, A>} - ranged_fn! {splice[Iter: IntoIterator](replace_with: Iter) -> std::vec::Splice<'_, Iter::IntoIter, A>} -} - -impl From> for VecIndexedBy -where A: Allocator -{ - fn from(vec: Vec) -> Self { - Self { vec, key: default() } - } -} - -impl From> for Vec -where A: Allocator -{ - fn from(vec: VecIndexedBy) -> Self { - vec.vec - } -} - -impl From<&Vec> for VecIndexedBy -where - T: Clone, - A: Allocator + Clone, -{ - fn from(vec: &Vec) -> Self { - Self { vec: vec.clone(), key: default() } - } -} - -impl From<&VecIndexedBy> for VecIndexedBy -where - T: Clone, - A: Allocator + Clone, -{ - fn from(vec: &VecIndexedBy) -> Self { - vec.clone() - } -} - - -impl std::ops::Index for VecIndexedBy -where - I: Index, - A: Allocator, -{ - type Output = T; - fn index(&self, index: I) -> &Self::Output { - &self.vec[index.into()] - } -} - -impl std::ops::Index> for VecIndexedBy -where - I: Index, - A: Allocator, -{ - type Output = [T]; - fn index(&self, range: Range) -> &Self::Output { - &self.vec[range.start.into()..range.end.into()] - } -} - -impl std::ops::Index> for VecIndexedBy -where - I: Index, - A: Allocator, -{ - type Output = [T]; - fn index(&self, range: RangeFrom) -> &Self::Output { - &self.vec[range.start.into()..] 
- } -} - -impl std::ops::Index> for VecIndexedBy -where - I: Index, - A: Allocator, -{ - type Output = [T]; - fn index(&self, range: RangeTo) -> &Self::Output { - &self.vec[..range.end.into()] - } -} - -impl std::ops::Index for VecIndexedBy -where - I: Index, - A: Allocator, - (RangeFull, I): NotSame, -{ - type Output = [T]; - fn index(&self, _range: RangeFull) -> &Self::Output { - &self.vec[..] - } -} - -impl IndexMut for VecIndexedBy -where - I: Index, - A: Allocator, -{ - fn index_mut(&mut self, index: I) -> &mut Self::Output { - &mut self.vec[index.into()] - } -} - -impl IndexMut> for VecIndexedBy -where - I: Index, - A: Allocator, -{ - fn index_mut(&mut self, range: Range) -> &mut Self::Output { - &mut self.vec[range.start.into()..range.end.into()] - } -} - -impl IndexMut> for VecIndexedBy -where - I: Index, - A: Allocator, -{ - fn index_mut(&mut self, range: RangeFrom) -> &mut Self::Output { - &mut self.vec[range.start.into()..] - } -} - -impl IndexMut> for VecIndexedBy -where - I: Index, - A: Allocator, -{ - fn index_mut(&mut self, range: RangeTo) -> &mut Self::Output { - &mut self.vec[..range.end.into()] - } -} - -impl IndexMut for VecIndexedBy -where - I: Index, - A: Allocator, - (RangeFull, I): NotSame, -{ - fn index_mut(&mut self, _range: RangeFull) -> &mut Self::Output { - &mut self.vec[..] - } -} - -impl IntoIterator for VecIndexedBy -where A: Allocator -{ - type Item = T; - type IntoIter = std::vec::IntoIter; - fn into_iter(self) -> Self::IntoIter { - self.vec.into_iter() - } -} - -impl<'a, T, I, A> IntoIterator for &'a VecIndexedBy -where A: Allocator -{ - type Item = &'a T; - type IntoIter = slice::Iter<'a, T>; - fn into_iter(self) -> Self::IntoIter { - self.vec.iter() - } -} - -impl<'a, T, I, A> IntoIterator for &'a mut VecIndexedBy -where A: Allocator -{ - type Item = &'a mut T; - type IntoIter = slice::IterMut<'a, T>; - fn into_iter(self) -> Self::IntoIter { - self.vec.iter_mut() - } -} - -impl FromIterator for VecIndexedBy { - fn from_iter>(iter: Iter) -> VecIndexedBy { - let vec = Vec::from_iter(iter); - Self { vec, key: default() } - } -} diff --git a/lib/rust/prelude/src/lib.rs b/lib/rust/prelude/src/lib.rs index 6b35949250..4aa0fc9636 100644 --- a/lib/rust/prelude/src/lib.rs +++ b/lib/rust/prelude/src/lib.rs @@ -3,55 +3,27 @@ //! defines several aliases and utils which may find their place in new //! libraries in the future. -// === Features === -#![feature(trait_alias)] -#![feature(allocator_api)] -#![feature(auto_traits)] -#![feature(negative_impls)] -#![feature(pattern)] - mod data; -mod macros; -mod not_same; -mod option; -mod serde; -mod smallvec; mod std_reexports; -mod string; mod vec; -pub use crate::serde::*; -pub use crate::smallvec::*; +pub use enso_macros::*; pub use enso_zst::*; -pub use anyhow; pub use data::*; -pub use macros::*; -pub use option::*; pub use std_reexports::*; -pub use string::*; pub use vec::*; pub use boolinator::Boolinator; pub use derivative::Derivative; pub use derive_more::*; pub use enso_reflect::prelude::*; -pub use itertools::Itertools; -pub use paste::paste; -pub use std::ops::AddAssign; +pub use serde::Deserialize; +pub use serde::Serialize; -/// Serde reexports for the code generated by declarative macros. -/// -/// They cannot be directly reexported from prelude, as the methods `serialize` and `deserialize` -/// that would be brought into scope by this, would collide with the other IDE-defined traits. 
-pub mod serde_reexports { - pub use serde::Deserialize; - pub use serde::Serialize; -} - // =============== // === Logging === diff --git a/lib/rust/prelude/src/macros.rs b/lib/rust/prelude/src/macros.rs deleted file mode 100644 index 7656078d99..0000000000 --- a/lib/rust/prelude/src/macros.rs +++ /dev/null @@ -1,326 +0,0 @@ -//! This module defines set of common macros which are useful across different projects. - - -// ============== -// === Export === -// ============== - -pub use enso_shapely::ForEachVariant; - - - -/// Allows for nicer definition of impls, similar to what Haskell or Scala does. Reduces the needed -/// boilerplate. For example, the following usage: -/// -/// ```text -/// struct A { name:String }; -/// impls! { From for String { |t| t.name.clone() } } -/// ``` -/// -/// compiles to: -/// ``` -/// struct A { -/// name: String, -/// }; -/// impl From for String { -/// fn from(t: A) -> Self { -/// t.name.clone() -/// } -/// } -/// ``` -/// -/// This macro is meant to support many standard traits (like From) and should grow in the future. -/// Currently supported ones are: -/// * From<…> -/// * From + &From<…> -/// * Into + &Into<…> -/// * PhantomFrom<…> -#[macro_export] -macro_rules! impls { - ($([$($impl_params:tt)*])? From<$ty:ty> for $target:ty $(where [$($bounds:tt)*])? { - |$arg:tt| $($result:tt)* - } ) => { - #[allow(clippy::redundant_closure_call)] - impl <$($($impl_params)*)?> From <$ty> for $target $(where $($bounds)*)? { - fn from (arg:$ty) -> Self { - (|$arg:$ty| $($result)*)(arg) - } - } - }; - - ($([$($impl_params:tt)*])? From + &From <$ty:ty> for $target:ty $(where [$($bounds:tt)*])? { - |$arg:tt| $($result:tt)* - } ) => { - #[allow(clippy::redundant_closure_call)] - #[allow(clippy::identity_conversion)] - impl <$($($impl_params)*)?> From <$ty> for $target $(where $($bounds)*)? { - fn from (arg:$ty) -> Self { - (|$arg:$ty| $($result)*)(arg) - } - } - - #[allow(clippy::redundant_closure_call)] - #[allow(clippy::identity_conversion)] - impl <$($($impl_params)*)?> From <&$ty> for $target $(where $($bounds)*)? { - fn from (arg:&$ty) -> Self { - (|$arg:&$ty| $($result)*)(arg) - } - } - }; - - ($([$($impl_params:tt)*])? Into + &Into <$ty:ty> for $target:ty $(where [$($bounds:tt)*])? { - |$arg:tt| $($result:tt)* - } ) => { - #[allow(clippy::redundant_closure_call)] - #[allow(clippy::identity_conversion)] - impl <$($($impl_params)*)?> Into <$ty> for $target $(where $($bounds)*)? { - fn into(self) -> $ty { - (|$arg:Self| $($result)*)(self) - } - } - - #[allow(clippy::redundant_closure_call)] - #[allow(clippy::identity_conversion)] - impl <$($($impl_params)*)?> Into <$ty> for &$target $(where $($bounds)*)? { - fn into(self) -> $ty { - (|$arg:Self| $($result)*)(self) - } - } - }; - - ($([$($impl_params:tt)*])? PhantomFrom<$ty:ty> for $target:ty { - $($result:tt)* - } ) => { - impl <$($($impl_params)*)?> From > for $target { - fn from (_:ZST<$ty>) -> Self { - $($result)* - } - } - }; -} - -#[macro_export] -macro_rules! alias { - ($( $(#$meta:tt)* $name:ident = {$($tok:tt)*} )*) => {$( - $(#$meta)* - pub trait $name: $($tok)* {} - impl $name for T {} - )*}; - - (no_docs $( $(#$meta:tt)* $name:ident = {$($tok:tt)*} )*) => {$( - $(#$meta)* - #[allow(missing_docs)] - pub trait $name: $($tok)* {} - impl $name for T {} - )*}; -} - - - -// ============== -// === Lambda === -// ============== - -/// Clones all arguments from the first argument list by using `CloneRef` and defines lambda with -/// arguments from the second argument list (if present). 
For example, the following usage -/// -/// ```text -/// f! { (a,b)(c) a + b + c } -/// ``` -/// -/// is equivalent to: -/// -/// ```text -/// { -/// let a = a.clone_ref(); -/// let b = b.clone_ref(); -/// move |c| { a + b + c } -/// } -/// ``` -#[macro_export] -macro_rules! f { - ([$($name:ident),*] ($($args:tt)*) $($expr:tt)*) => { - { - $(let $name = $name.clone_ref();)* - move |$($args)*| { $($expr)* } - } - }; - - ([$($name:ident),*] $($expr:tt)*) => { - { - $(let $name = $name.clone_ref();)* - move || { $($expr)* } - } - }; - - (($($args:tt)*) $name:ident . $($toks:tt)*) => { - f! { [$name] ($($args)*) $name . $($toks)* } - }; - - (($($args:tt)*) { $name:ident . $($toks:tt)* }) => { - f! { [$name] ($($args)*) { $name . $($toks)* } } - }; - - ($name:ident . $($toks:tt)*) => { - f! { [$name] $name . $($toks)* } - }; -} - -/// Variant of the `f` macro producing a lambda which drops its first argument. -#[macro_export] -macro_rules! f_ { - ([$($name:ident),*] $($expr:tt)*) => { - f! { [$($name),*] (_) $($expr)* } - }; - - ($name:ident . $($toks:tt)*) => { - f_! { [$name] $name . $($toks)* } - }; - - ( { $name:ident . $($toks:tt)* } ) => { - f_! { [$name] { $name . $($toks)* } } - }; -} - -/// Variant of the `f` macro producing a lambda which drops its first and second arguments. -#[macro_export] -macro_rules! f__ { - ([$($name:ident),*] $($expr:tt)*) => { - f! { [$($name),*] (_,_) $($expr)* } - }; - - ($name:ident . $($toks:tt)*) => { - f__! { [$name] $name . $($toks)* } - }; - - ( { $name:ident . $($toks:tt)* } ) => { - f__! { [$name] { $name . $($toks)* } } - }; -} - - - -// =================== -// === Unreachable === -// =================== - -/// A macro for use in situations where the code is unreachable. -/// -/// This macro will panic in debug builds, but in release builds it expands to -/// the unsafe [`std::hint::unreachable_unchecked()`] function, which allows the -/// compiler to optimise more. -#[macro_export] -macro_rules! unreachable_panic { - () => { - unreachable_panic!("This code was marked as unreachable.") - }; - ($msg:tt) => { - if cfg!(debug_assertions) { - panic!($msg) - } else { - use std::hint::unreachable_unchecked; - #[allow(unsafe_code)] - unsafe { - unreachable_unchecked() - } - } - }; -} - - - -// ==================== -// === ReflectMatch === -// ==================== - -/// Used to match a value against a set of candidates, while keeping track of the candidates. -/// -/// This achieves the same function as using a `HashMap` to dispatch between a set of handlers, but -/// does not require reifying the handlers, which can be inconvenient (e.g. if they contain -/// `.await`, of if they need conflicting captures from the environment). -/// -/// # Example -/// -/// ``` -/// use enso_prelude::*; -/// -/// let selected = "foo"; -/// let out = reflect_match!(match selected as options { -/// "bar" => Ok(1), -/// "baz" => Ok(2), -/// _ => Err(format!("Unexpected choice: {selected}. Must be one of: {options:?}.")), -/// }); -/// ``` -/// -/// This is functionally equivalent to: -/// -/// ``` -/// # use std::collections::HashMap; -/// -/// let selected = "foo"; -/// let mut dispatch = HashMap::new(); -/// dispatch.insert("bar", 1); -/// dispatch.insert("baz", 2); -/// let options = dispatch.keys(); -/// let error = format!("Unexpected choice: {selected}. Must be one of: {options:?}."); -/// let out = dispatch.get(selected).ok_or(error); -/// ``` -#[macro_export] -macro_rules! 
reflect_match { - (@acc ($dispatch:ident, $value:expr, $candidates:ident, { - _ => $fallback:expr $(,)? - }) -> {$( $branches:tt )*}) => {{ - let mut $dispatch = $crate::ReflectMatch::new($value); - match () { - $( $branches )* - _ => { - let $candidates = $dispatch.into_candidates(); - $fallback - } - } - }}; - (@acc ($dispatch:ident, $value:expr, $candidates:ident, { - $candidate:literal => $branch:expr, - $( $rest:tt )* - }) -> {$( $branches:tt )*}) => { - reflect_match!(@acc ($dispatch, $value, $candidates, { $( $rest )* }) -> { - $( $branches )* - _ if $dispatch.matches($candidate) => $branch, - }) - }; - (match $value:tt as $candidates:tt { $( $branches:tt )* }) => { - reflect_match!(@acc (dispatch, $value, $candidates, { $( $branches )* }) -> {}) - }; -} - - -// === ReflectMatch Runtime Support === - -/// Match a value against a set of candidates; if no match is found, the list of candidates is -/// available. See [`reflect_match!`] for motivation and usage examples. -#[derive(Debug)] -pub struct ReflectMatch { - value: T, - candidates: Vec, -} - -impl ReflectMatch { - /// Create a new dispatcher, for a given value. - pub fn new(value: T) -> Self { - let candidates = Default::default(); - Self { value, candidates } - } - - /// Test the value against a candidate. Return whether it's a match. - pub fn matches(&mut self, key: U) -> bool - where T: PartialEq { - let matches = self.value == key; - self.candidates.push(key); - matches - } - - /// Return the candidates the match was tested against. - pub fn into_candidates(self) -> Vec { - self.candidates - } -} diff --git a/lib/rust/prelude/src/not_same.rs b/lib/rust/prelude/src/not_same.rs deleted file mode 100644 index b164fe60bc..0000000000 --- a/lib/rust/prelude/src/not_same.rs +++ /dev/null @@ -1,18 +0,0 @@ -//! [`NotSame`] trait definition. - - - -/// Defines the [`NotSame`] trait. It can be used to disambiguate conflicting trait implementations. -/// For example, it is not allowed to implement `impl From> for MyType`, because -/// Rust standard library defines `impl From for T`. This trait allows to disambiguate such -/// cases by writing `impl From> for MyType where (U, T) : NotSame`. However, -/// because of some strange reasons, it does not work if it is defined in another crate and has to -/// be defined locally, on-demand. As soon as it will be possible to define it in prelude, it should -/// be refactored. See its usages to learn more. -#[macro_export] -macro_rules! define_not_same_trait { - () => { - auto trait NotSame {} - impl !NotSame for (T, T) {} - }; -} diff --git a/lib/rust/prelude/src/option.rs b/lib/rust/prelude/src/option.rs deleted file mode 100644 index a2f75b29b1..0000000000 --- a/lib/rust/prelude/src/option.rs +++ /dev/null @@ -1,90 +0,0 @@ -//! This module defines utilities for working with the [`std::option::Option`] type. - - - -/// Adds mapping methods to the `Option` type. 
-pub trait OptionOps { - type Item; - fn map_none(self, f: F) -> Self - where F: FnOnce(); - fn map_ref<'a, U, F>(&'a self, f: F) -> Option - where F: FnOnce(&'a Self::Item) -> U; - fn map_or_default(self, f: F) -> U - where - U: Default, - F: FnOnce(Self::Item) -> U; - fn if_some_or_default(self, f: F) -> U - where - U: Default, - F: FnOnce() -> U; - fn map_ref_or_default<'a, U, F>(&'a self, f: F) -> U - where - U: Default, - F: FnOnce(&'a Self::Item) -> U; - fn for_each(self, f: F) - where F: FnOnce(Self::Item) -> U; - fn for_each_ref<'a, U, F>(&'a self, f: F) - where F: FnOnce(&'a Self::Item) -> U; - /// Returns true if option contains Some with value matching given predicate. - fn contains_if<'a, F>(&'a self, f: F) -> bool - where F: FnOnce(&'a Self::Item) -> bool; -} - -impl OptionOps for Option { - type Item = T; - - fn map_none(self, f: F) -> Self - where - F: FnOnce(), - T: Sized, { - if self.is_none() { - f() - } - self - } - - fn map_ref<'a, U, F>(&'a self, f: F) -> Option - where F: FnOnce(&'a Self::Item) -> U { - self.as_ref().map(f) - } - - fn map_or_default(self, f: F) -> U - where - U: Default, - F: FnOnce(Self::Item) -> U, { - self.map_or_else(U::default, f) - } - - fn if_some_or_default(self, f: F) -> U - where - U: Default, - F: FnOnce() -> U, { - self.map_or_else(U::default, |_| f()) - } - - fn map_ref_or_default<'a, U, F>(&'a self, f: F) -> U - where - U: Default, - F: FnOnce(&'a Self::Item) -> U, { - self.as_ref().map_or_default(f) - } - - fn for_each(self, f: F) - where F: FnOnce(Self::Item) -> U { - if let Some(x) = self { - f(x); - } - } - - fn for_each_ref<'a, U, F>(&'a self, f: F) - where F: FnOnce(&'a Self::Item) -> U { - if let Some(x) = self { - f(x); - } - } - - fn contains_if<'a, F>(&'a self, f: F) -> bool - where F: FnOnce(&'a Self::Item) -> bool { - self.as_ref().map_or(false, f) - } -} diff --git a/lib/rust/prelude/src/serde.rs b/lib/rust/prelude/src/serde.rs deleted file mode 100644 index b8912b6bca..0000000000 --- a/lib/rust/prelude/src/serde.rs +++ /dev/null @@ -1,96 +0,0 @@ -//! Module for utilities related to serialization/deserialization using the `serde` library. - -use serde::Deserialize; - - - -/// Try to deserialize value of type `Ret`. In case of any error, it is ignored and the default -/// value is returned instead. -pub fn deserialize_or_default<'d, Ret, D>(d: D) -> Result -where - for<'e> Ret: Default + Deserialize<'e>, - D: serde::Deserializer<'d>, { - // We first parse as generic JSON value. This is necessary to consume parser input. - // If we just tried parsing the desired type directly and ignored error, we would end up with - // `trailing characters` error in non-trivial cases. - let raw_json = <&serde_json::value::RawValue>::deserialize(d)?; - serde_json::from_str(raw_json.get()).or_else(|_error| Ok(Ret::default())) -} - -/// Deserialize a JSON value that is either of `Ret` type or equals `null`. A `null` is converted -/// to a default value of `Ret` type. 
-/// -/// Example usage: -/// ``` -/// # use serde::Deserialize; -/// # use enso_prelude::deserialize_null_as_default; -/// #[derive(Debug, Deserialize, PartialEq)] -/// struct Foo { -/// #[serde(default, deserialize_with = "deserialize_null_as_default")] -/// blah: Vec, -/// } -/// fn check_deserialized_eq(code: &str, expected_deserialized: &Foo) { -/// let deserialized = serde_json::from_str::(code).unwrap(); -/// assert_eq!(&deserialized, expected_deserialized); -/// } -/// let empty_foo = Foo { blah: vec![] }; -/// check_deserialized_eq(r#"{"blah" : null }"#, &empty_foo); -/// check_deserialized_eq(r#"{}"#, &empty_foo); -/// check_deserialized_eq(r#"{"blah" : [] }"#, &empty_foo); -/// check_deserialized_eq(r#"{"blah" : [1,2,3] }"#, &Foo { blah: vec![1, 2, 3] }); -/// ``` -pub fn deserialize_null_as_default<'d, Ret, D>(d: D) -> Result -where - for<'e> Ret: Default + Deserialize<'e>, - D: serde::Deserializer<'d>, { - let option_value = Option::deserialize(d)?; - Ok(option_value.unwrap_or_default()) -} - - -#[cfg(test)] -mod tests { - use super::*; - - use serde::Serialize; - - #[test] - fn deserialize_or_default_attribute_test() { - // Two structures - same except for `deserialize_or_default` atribute. - // One fails to deserialize, second one goes through. - #[derive(Debug, Deserialize, PartialEq, Eq, Serialize)] - struct Foo { - blah: String, - boom: Vec, - } - #[derive(Debug, Deserialize, Eq, PartialEq, Serialize)] - struct Bar { - #[serde(deserialize_with = "deserialize_or_default")] - blah: String, - boom: Vec, - } - let code = r#"{"blah" : {}, "boom" : [1,2,3] }"#; - let result = serde_json::from_str::(code); - assert!(result.is_err()); - - let deserialized = serde_json::from_str::(code).unwrap(); - assert_eq!(deserialized, Bar { blah: "".into(), boom: vec![1, 2, 3] }); - } - - #[test] - fn deserialize_or_default_attribute_for_optional_field() { - #[derive(Debug, Deserialize, Eq, PartialEq, Serialize)] - struct Foo { - #[serde(default, deserialize_with = "deserialize_or_default")] - blah: Option, - boom: Vec, - } - let code = r#"{"blah" : "blah", "boom" : [1,2,3] }"#; - let deserialized = serde_json::from_str::(code).unwrap(); - assert_eq!(deserialized, Foo { blah: Some("blah".to_owned()), boom: vec![1, 2, 3] }); - - let code = r#"{"boom" : [1,2,3] }"#; - let deserialized = serde_json::from_str::(code).unwrap(); - assert_eq!(deserialized, Foo { blah: None, boom: vec![1, 2, 3] }); - } -} diff --git a/lib/rust/prelude/src/smallvec.rs b/lib/rust/prelude/src/smallvec.rs deleted file mode 100644 index c57939a9bc..0000000000 --- a/lib/rust/prelude/src/smallvec.rs +++ /dev/null @@ -1,120 +0,0 @@ -//! This module defines utilities for working with the [`SmallVec`] type. - -use std::cmp::Ordering; - - -// ============== -// === Export === -// ============== - -pub use smallvec::SmallVec; - - - -/// Adds methods to the `SmallVec` type. -pub trait SmallVecOps { - type Item; - - /// Binary searches this sorted slice with a comparator function. - /// - /// The comparator function should implement an order consistent - /// with the sort order of the underlying slice, returning an - /// order code that indicates whether its argument is `Less`, - /// `Equal` or `Greater` the desired target. - /// - /// If the value is found then [`Result::Ok`] is returned, containing the - /// index of the matching element. If there are multiple matches, then any - /// one of the matches could be returned. 
If the value is not found then - /// [`Result::Err`] is returned, containing the index where a matching - /// element could be inserted while maintaining sorted order. - /// - /// # Implementation Details - /// Please note that the following implementation is a copy-paste from - /// [`Vec::binary_search_by`]. - fn binary_search_by(&self, f: F) -> Result - where F: FnMut(&Self::Item) -> Ordering; - - /// Binary searches this sorted slice for a given element. - /// - /// If the value is found then [`Result::Ok`] is returned, containing the - /// index of the matching element. If there are multiple matches, then any - /// one of the matches could be returned. If the value is not found then - /// [`Result::Err`] is returned, containing the index where a matching - /// element could be inserted while maintaining sorted order. - /// - /// # Implementation Details - /// Please note that the following implementation is a copy-paste from - /// [`Vec::binary_search`]. - fn binary_search(&self, t: &Self::Item) -> Result - where Self::Item: Ord; -} - -impl SmallVecOps for SmallVec { - type Item = ::Item; - - #[allow(unsafe_code)] - fn binary_search_by(&self, mut f: F) -> Result - where F: FnMut(&Self::Item) -> Ordering { - let s = self; - let mut size = s.len(); - if size == 0 { - return Err(0); - } - let mut base = 0usize; - while size > 1 { - let half = size / 2; - let mid = base + half; - // SAFETY: the call is made safe by the following inconstants: - // - `mid >= 0`: by definition - // - `mid < size`: `mid = size / 2 + size / 4 + size / 8 ...` - let cmp = f(unsafe { s.get_unchecked(mid) }); - base = if cmp == Ordering::Greater { base } else { mid }; - size -= half; - } - // SAFETY: base is always in [0, size) because base <= mid. - let cmp = f(unsafe { s.get_unchecked(base) }); - if cmp == Ordering::Equal { - Ok(base) - } else { - Err(base + (cmp == Ordering::Less) as usize) - } - } - - fn binary_search(&self, t: &Self::Item) -> Result - where Self::Item: Ord { - self.binary_search_by(|p| p.cmp(t)) - } -} - - - -// ============= -// === Tests === -// ============= - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_binary_search_by() { - let v = SmallVec::<[usize; 4]>::from_iter([5, 10, 20, 40].iter().copied()); - assert_eq!(v.binary_search_by(|probe| probe.cmp(&0)), Err(0)); - assert_eq!(v.binary_search_by(|probe| probe.cmp(&5)), Ok(0)); - assert_eq!(v.binary_search_by(|probe| probe.cmp(&6)), Err(1)); - assert_eq!(v.binary_search_by(|probe| probe.cmp(&9)), Err(1)); - assert_eq!(v.binary_search_by(|probe| probe.cmp(&10)), Ok(1)); - assert_eq!(v.binary_search_by(|probe| probe.cmp(&11)), Err(2)); - } - - #[test] - fn test_binary_search() { - let v = SmallVec::<[usize; 4]>::from_iter([5, 10, 20, 40].iter().copied()); - assert_eq!(v.binary_search(&0), Err(0)); - assert_eq!(v.binary_search(&5), Ok(0)); - assert_eq!(v.binary_search(&6), Err(1)); - assert_eq!(v.binary_search(&9), Err(1)); - assert_eq!(v.binary_search(&10), Ok(1)); - assert_eq!(v.binary_search(&11), Err(2)); - } -} diff --git a/lib/rust/prelude/src/std_reexports.rs b/lib/rust/prelude/src/std_reexports.rs index 2faa8a826e..b5edf44e1d 100644 --- a/lib/rust/prelude/src/std_reexports.rs +++ b/lib/rust/prelude/src/std_reexports.rs @@ -24,6 +24,7 @@ pub use std::iter::FromIterator; pub use std::marker::PhantomData; pub use std::mem; pub use std::ops::Add; +pub use std::ops::AddAssign; pub use std::ops::Deref; pub use std::ops::DerefMut; pub use std::ops::Div; diff --git a/lib/rust/prelude/src/string.rs 
b/lib/rust/prelude/src/string.rs deleted file mode 100644 index 1343b18509..0000000000 --- a/lib/rust/prelude/src/string.rs +++ /dev/null @@ -1,551 +0,0 @@ -//! This module defines several useful string variants, including copy-on-write and immutable -//! implementations. - -use derive_more::*; -use enso_shapely::clone_ref::*; -use itertools::*; - -use crate::impls; - -use serde::Deserialize; -use serde::Serialize; -use std::borrow::Borrow; -use std::borrow::Cow; -use std::ops::Deref; -use std::rc::Rc; -use std::str::pattern; - - - -// ================= -// === StringOps === -// ================= - -pub trait StringOps { - fn is_enclosed(&self, first_char: char, last_char: char) -> bool; - - /// Splits `self` twice. Once at the first occurrence of `start_marker` and once at the first - /// occurence of `end_marker`. Returns a triple containing the split `self` as a prefix, middle, - /// and suffix. If `self` could not be split twice, returns [`None`]. - /// - /// [`None`]: ::std::option::Option::None - fn split_twice<'a, P>( - &'a self, - start_marker: P, - end_marker: P, - ) -> Option<(&'a str, &'a str, &'a str)> - where - P: pattern::Pattern<'a>; - - /// Converts the camel case string to snake case. For example, converts `FooBar` to `foo_bar`. - fn camel_case_to_snake_case(&self) -> String; - - /// Converts the first letter of the string to uppercase. For example, converts `foo` to `Foo`. - fn capitalize_first_letter(&self) -> String; -} - -impl> StringOps for T { - /// Check if given string starts and ends with given characters. - /// - /// Optimized to be O(1) if both characters are within ASCII range. - fn is_enclosed(&self, first_char: char, last_char: char) -> bool { - let text = self.as_ref(); - if first_char.is_ascii() && last_char.is_ascii() { - let bytes = text.as_bytes(); - bytes.first() == Some(&(first_char as u8)) && bytes.last() == Some(&(last_char as u8)) - } else { - let mut chars = text.chars(); - let first = chars.next(); - let last = chars.last().or(first); - first == Some(first_char) && last == Some(last_char) - } - } - - fn split_twice<'a, P>( - &'a self, - start_marker: P, - end_marker: P, - ) -> Option<(&'a str, &'a str, &'a str)> - where - P: pattern::Pattern<'a>, - { - let text = self.as_ref(); - let (prefix, rest) = text.split_once(start_marker)?; - let (mid, suffix) = rest.split_once(end_marker)?; - Some((prefix, mid, suffix)) - } - - fn camel_case_to_snake_case(&self) -> String { - let mut result = String::new(); - let mut chars = self.as_ref().chars(); - if let Some(first) = chars.next() { - result.push(first.to_ascii_lowercase()); - } - for c in chars { - if c.is_uppercase() { - result.push('_'); - result.push(c.to_ascii_lowercase()); - } else { - result.push(c); - } - } - result - } - - fn capitalize_first_letter(&self) -> String { - let mut chars = self.as_ref().chars(); - if let Some(first) = chars.next() { - first.to_uppercase().to_string() + chars.as_str() - } else { - String::new() - } - } -} - -// =========== -// === Str === -// =========== - -/// Abstraction for any kind of string as an argument. Functions defined as -/// `fn test(s: Str) { ... }` can be called with `String`, `&String`, and `&str` without -/// requiring caller to know the implementation details. Moreover, the definition can decide if it -/// needs allocation or not. Calling `s.as_ref()` will never allocate, while `s.into()` will -/// allocate only when necessary. 
-pub trait Str = Into + AsRef; - -// ================= -// === CowString === -// ================= - -// === Definition === - -/// A copy-on-write String implementation. It is a newtype wrapper for `Cow<'static,str>` and -/// provides many useful impls for efficient workflow. Use it whenever you want to store a string -/// but you are not sure if the string will be allocated or not. This way you can store a static -/// slice as long as you can and switch to allocated String on demand. -#[derive(Clone, Debug, Default, Display)] -pub struct CowString(Cow<'static, str>); - -// === Conversions From CowString === - -impls! { From <&CowString> for String { |t| t.clone().into() } } -impls! { From for String { |t| t.0.into() } } - -// === Conversions To CowString === - -impls! { From > for CowString { |t| Self(t) } } -impls! { From <&Cow<'static,str>> for CowString { |t| Self(t.clone()) } } -impls! { From <&'static str> for CowString { |t| Self(t.into()) } } -impls! { From for CowString { |t| Self(t.into()) } } -impls! { From <&String> for CowString { |t| t.to_string().into() } } -impls! { From <&CowString> for CowString { |t| t.clone() } } - -// === Instances === - -impl Deref for CowString { - type Target = str; - fn deref(&self) -> &str { - self.0.deref() - } -} - -impl AsRef for CowString { - fn as_ref(&self) -> &str { - self.deref() - } -} - -// ================ -// === ImString === -// ================ - -/// Immutable string implementation with a fast clone implementation. -#[derive(Clone, CloneRef, Eq, Hash, PartialEq, Ord, PartialOrd)] -#[derive(Deserialize, Serialize)] -pub struct ImString { - content: Rc, -} - -impl ImString { - /// Constructor. - pub fn new(content: impl Into>) -> Self { - Self { content: content.into() } - } - - /// Extract a string slice containing the entire string. 
- pub fn as_str(&self) -> &str { - &self.content - } -} - -impl Default for ImString { - fn default() -> Self { - "".into() - } -} - -impl std::fmt::Display for ImString { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(&self.content, f) - } -} - -impl std::fmt::Debug for ImString { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Debug::fmt(&self.content, f) - } -} - -impl Deref for ImString { - type Target = str; - fn deref(&self) -> &Self::Target { - &self.content - } -} - -impl AsRef for ImString { - fn as_ref(&self) -> &ImString { - self - } -} - -impl AsRef for ImString { - fn as_ref(&self) -> &str { - self.content.as_ref() - } -} - -impl Borrow for ImString { - fn borrow(&self) -> &str { - &self.content - } -} - -impl From> for ImString { - fn from(content: Rc) -> Self { - Self { content } - } -} - -impl From for ImString { - fn from(t: String) -> Self { - Self::new(t) - } -} - -impl From<&String> for ImString { - fn from(t: &String) -> Self { - Self::new(t.as_str()) - } -} - -impl From<&&String> for ImString { - fn from(t: &&String) -> Self { - Self::new(t.as_str()) - } -} - -impl From<&str> for ImString { - fn from(t: &str) -> Self { - Self::new(t) - } -} - -impl From<&&str> for ImString { - fn from(t: &&str) -> Self { - Self::new(*t) - } -} - -impl From> for ImString { - fn from(t: Cow) -> Self { - t.into_owned().into() - } -} - -impl From for Rc { - fn from(t: ImString) -> Self { - t.content - } -} - -impl From for String { - fn from(value: ImString) -> Self { - value.as_str().into() - } -} - -impl PartialEq<&str> for ImString { - fn eq(&self, other: &&str) -> bool { - self.content.as_ref().eq(*other) - } -} - -impl PartialEq for ImString { - fn eq(&self, other: &str) -> bool { - self.content.as_ref().eq(other) - } -} - -impl PartialEq for &str { - fn eq(&self, other: &ImString) -> bool { - self.eq(&other.content.as_ref()) - } -} - -impl PartialEq for str { - fn eq(&self, other: &ImString) -> bool { - self.eq(other.content.as_ref()) - } -} - - -impl PartialEq for ImString { - fn eq(&self, other: &String) -> bool { - self.content.as_ref().eq(other) - } -} - -impl PartialEq for String { - fn eq(&self, other: &ImString) -> bool { - self.eq(other.content.as_ref()) - } -} - - -// ================== -// === ToImString === -// ================== - -/// Conversion of a value to [`ImString`]. -#[allow(missing_docs)] -pub trait ToImString { - fn to_im_string(&self) -> ImString; -} - -impl ToImString for ImString { - fn to_im_string(&self) -> ImString { - self.clone() - } -} - -impl ToImString for String { - fn to_im_string(&self) -> ImString { - self.into() - } -} - -impl ToImString for &String { - fn to_im_string(&self) -> ImString { - self.into() - } -} - -impl ToImString for str { - fn to_im_string(&self) -> ImString { - self.into() - } -} - -impl ToImString for &str { - fn to_im_string(&self) -> ImString { - self.into() - } -} - - -// === Macros === - -/// Defines a newtype for `ImString`. -#[macro_export] -macro_rules! im_string_newtype { - ($($(#$meta:tt)* $name:ident),* $(,)?) => { - im_string_newtype_without_serde!{ $( - #[derive($crate::serde_reexports::Serialize,$crate::serde_reexports::Deserialize)] - $(#$meta)* $name - ),* } - }; -} - -#[macro_export] -macro_rules! im_string_newtype_without_serde { - ($($(#$meta:tt)* $name:ident),* $(,)?) 
=> {$( - $(#$meta)* - #[derive(Clone,CloneRef,Debug,Default,Eq,Hash,PartialEq)] - - pub struct $name { - content : ImString - } - - impl $name { - /// Constructor. - pub fn new(content:impl Into) -> Self { - let content = content.into(); - Self {content} - } - } - - impl Deref for $name { - type Target = str; - fn deref(&self) -> &Self::Target { - &self.content - } - } - - impl AsRef<$name> for $name { - fn as_ref(&self) -> &$name { - self - } - } - - impl AsRef for $name { - fn as_ref(&self) -> &ImString { - self.content.as_ref() - } - } - - impl AsRef for $name { - fn as_ref(&self) -> &str { - self.content.as_ref() - } - } - - impl From for $name { - fn from(t:String) -> Self { - Self::new(t) - } - } - - impl From<&String> for $name { - fn from(t:&String) -> Self { - Self::new(t) - } - } - - impl From<&&String> for $name { - fn from(t:&&String) -> Self { - Self::new(t) - } - } - - impl From for $name { - fn from(t:ImString) -> Self { - Self::new(t) - } - } - - impl From<&str> for $name { - fn from(t:&str) -> Self { - Self::new(t) - } - } - - impl From<&&str> for $name { - fn from(t:&&str) -> Self { - Self::new(t) - } - } - - impl From<&$name> for String { - fn from(t:&$name) -> Self { - t.content.to_string() - } - } - )*}; -} - -// =============================== -// === Common Pre- and Postfix === -// =============================== - -/// Return the length of the longest common prefix of the two strings. If they are completely -/// different this will be zero. -/// -/// Example: -/// ``` -/// # use enso_prelude::*; -/// let a = "🐁hospital"; -/// let b = "🐁host"; -/// let c = "🐇bunny🐇"; -/// -/// assert_eq!(common_prefix_length(a, b), 4); -/// assert_eq!(common_prefix_length(a, c), 0); -/// assert_eq!(common_prefix_length(a, a), 9); -/// ``` -pub fn common_prefix_length(source_a: &str, source_b: &str) -> usize { - let shortest = source_a.chars().count().min(source_b.chars().count()); - let chars_a = source_a.chars(); - let chars_b = source_b.chars(); - let mut zipped = chars_a.zip(chars_b); - let mismatch = zipped.find_position(|(a, b)| *a != *b); - mismatch.map(|(ix, _)| ix).unwrap_or(shortest) -} - -/// Return the length of the longest common postfix of the two strings. If they are completely -/// different this will be zero. 
-/// -/// Example: -/// ``` -/// # use enso_prelude::*; -/// let a = "sunny🐇yard"; -/// let b = "🐇yard"; -/// let c = "🐇"; -/// -/// assert_eq!(common_postfix_length(a, b), 5); -/// assert_eq!(common_postfix_length(a, c), 0); -/// assert_eq!(common_postfix_length(a, a), 10); -/// ``` -pub fn common_postfix_length(source_a: &str, source_b: &str) -> usize { - let shortest = source_a.chars().count().min(source_b.chars().count()); - let chars_a = source_a.chars().rev(); - let chars_b = source_b.chars().rev(); - let mut zipped = chars_a.zip(chars_b); - let mismatch = zipped.find_position(|(a, b)| *a != *b); - mismatch.map(|(ix, _)| ix).unwrap_or(shortest) -} - - -// ============= -// === Tests === -// ============= - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_string_ops() { - // === Matching against ascii === - assert!("{}".is_enclosed('{', '}')); - assert!("{ }".is_enclosed('{', '}')); - assert!(!"{".is_enclosed('{', '}')); - assert!(!"{a".is_enclosed('{', '}')); - assert!(!"a}".is_enclosed('{', '}')); - assert!(!"}".is_enclosed('{', '}')); - assert!(!"".is_enclosed('{', '}')); - assert!("{a}".is_enclosed('{', '}')); - assert!("{字}".is_enclosed('{', '}')); - assert!(!"{".is_enclosed('{', '}')); - assert!(!"{字".is_enclosed('{', '}')); - assert!(!"字}".is_enclosed('{', '}')); - assert!(!"}".is_enclosed('{', '}')); - assert!(!"".is_enclosed('{', '}')); - - // === Matching against non-ascii === - assert!("【】".is_enclosed('【', '】')); - assert!("【 】".is_enclosed('【', '】')); - assert!("【 a】".is_enclosed('【', '】')); - assert!(!"【".is_enclosed('【', '】')); - assert!(!"【a".is_enclosed('【', '】')); - assert!(!"a】".is_enclosed('【', '】')); - assert!(!"】".is_enclosed('【', '】')); - assert!(!"".is_enclosed('【', '】')); - - // === Edge case of matching single char string === - assert!("{".is_enclosed('{', '{')); - assert!("【".is_enclosed('【', '【')); - - // === Splitting a string twice === - assert!("a.b.c,d,e".split_twice('.', ',').unwrap() == ("a", "b.c", "d,e")); - } -} diff --git a/lib/rust/prelude/src/vec.rs b/lib/rust/prelude/src/vec.rs index 7e7b5ca5de..271ca191f9 100644 --- a/lib/rust/prelude/src/vec.rs +++ b/lib/rust/prelude/src/vec.rs @@ -1,8 +1,5 @@ //! This module defines utilities for working with the [`std::vec::Vec`] type. -use derivative::Derivative; -use failure::_core::hint::unreachable_unchecked; - // ============== @@ -10,78 +7,6 @@ use failure::_core::hint::unreachable_unchecked; // ============== pub trait VecOps: AsMut> + Sized { - /// Pushes the provided `item` onto the [`std::vec::Vec`], and then returns an immutable - /// reference to the item. - fn push_and_get(&mut self, item: T) -> &T { - let vec = self.as_mut(); - vec.push(item); - let item_ix = vec.len() - 1; - #[allow(unsafe_code)] - unsafe { - vec.get(item_ix).unwrap_or_else(|| unreachable_unchecked()) - } - } - - /// Pushes the provided `item` onto the [`std::vec::Vec`], and then returns a mutable reference - /// to the item. - fn push_and_get_mut(&mut self, item: T) -> &mut T { - let vec = self.as_mut(); - vec.push(item); - let item_ix = vec.len() - 1; - #[allow(unsafe_code)] - unsafe { - vec.get_mut(item_ix).unwrap_or_else(|| unreachable_unchecked()) - } - } - - /// Extend the vector with the provided `iter`. - fn extended>(mut self, iter: I) -> Self { - self.as_mut().extend(iter); - self - } - - /// Push element to the vector. - fn pushed(mut self, item: T) -> Self { - self.as_mut().push(item); - self - } - - /// Self but reversed. 
-    fn reversed(mut self) -> Self {
-        self.as_mut().reverse();
-        self
-    }
-
-    /// Remove first element equal to `item` and returns it if any.
-    fn remove_item(&mut self, item: &T) -> Option<T>
-    where T: PartialEq {
-        let vec = self.as_mut();
-        let index = vec.iter().position(|x| *x == *item);
-        index.map(|i| vec.remove(i))
-    }
-
-    /// Attempts to remove `T` if its `index` is valid. If not, it returns `None`.
-    fn try_remove(&mut self, index: usize) -> Option<T> {
-        let vec = self.as_mut();
-        if index < vec.len() {
-            Some(vec.remove(index))
-        } else {
-            None
-        }
-    }
-
-    /// Attempts to remove the first element of `Vec`, returns `None` if its length is zero.
-    fn pop_front(&mut self) -> Option<T> {
-        self.try_remove(0)
-    }
-
-    /// Removes the last `n` elements from the vector. Returns true if the elements were removed.
-    fn remove_last_n(&mut self, n: usize) -> bool {
-        let vec = self.as_mut();
-        let new_size = vec.len().checked_sub(n);
-        new_size.map(|new_size| vec.truncate(new_size)).is_some()
-    }
-
     /// Pop and return the last element, if the vector is non-empty and the given predicate returns
     /// true when applied to the last element.
     fn pop_if<F>(&mut self, f: F) -> Option<T>
@@ -94,23 +19,6 @@ pub trait VecOps<T>: AsMut<Vec<T>> + Sized {
         }
         None
     }
-
-    /// Index the vector. If it is too short, extend it with default value.
-    fn index_or_resize_mut(&mut self, index: usize) -> &mut T
-    where T: Clone + Default {
-        self.index_or_resize_with_mut(index, || Default::default())
-    }
-
-    /// Index the vector. If it is too short, extend it with the provided default value.
-    #[allow(unsafe_code)]
-    fn index_or_resize_with_mut(&mut self, index: usize, cons: impl Fn() -> T) -> &mut T
-    where T: Clone {
-        let vec = self.as_mut();
-        if index >= vec.len() {
-            vec.resize(index + 1, cons());
-        }
-        unsafe { vec.get_mut(index).unwrap_or_else(|| unreachable_unchecked()) }
-    }
 }
 
 impl<T> VecOps<T> for Vec<T> {}
@@ -164,12 +72,17 @@ impl<T> VecOps<T> for Vec<T> {}
 /// }
 /// }
 /// ```
-#[derive(Clone, Debug, Derivative, Eq, PartialEq)]
-#[derivative(Default(bound = ""))]
+#[derive(Clone, Debug, Eq, PartialEq)]
 pub struct VecAllocation<T> {
     data: Vec<T>,
 }
 
+impl<T> Default for VecAllocation<T> {
+    fn default() -> Self {
+        Self { data: Vec::new() }
+    }
+}
+
 impl<T> VecAllocation<T> {
     /// Create a new, empty allocation.
pub fn new() -> Self { @@ -190,35 +103,3 @@ impl VecAllocation { std::mem::take(&mut self.data) } } - - - -// ============= -// === Tests === -// ============= - -#[cfg(test)] -mod tests { - use super::*; - - struct Test { - pub item: usize, - } - - #[test] - fn test_push_and_get() { - let mut vec = Vec::new(); - let item = Test { item: 10 }; - let item_in_vec = vec.push_and_get(item); - assert_eq!(item_in_vec.item, 10) - } - - #[test] - fn test_push_and_get_mut() { - let mut vec = Vec::new(); - let item = Test { item: 10 }; - let item_in_vec = vec.push_and_get_mut(item); - item_in_vec.item = 20; - assert_eq!(item_in_vec.item, 20); - } -} diff --git a/lib/rust/profiler/Cargo.toml b/lib/rust/profiler/Cargo.toml deleted file mode 100644 index 27af18ccb8..0000000000 --- a/lib/rust/profiler/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "enso-profiler" -version = "0.1.0" -edition = "2021" -authors = ["Enso Team "] - -[dependencies] -futures = { workspace = true } -serde = { workspace = true } -serde_json = { version = "1.0.59", features = ["raw_value"] } -enso-profiler-macros = { path = "macros" } -enso-web = { path = "../web" } - -[lints] -workspace = true diff --git a/lib/rust/profiler/data/Cargo.toml b/lib/rust/profiler/data/Cargo.toml deleted file mode 100644 index de5807c1b1..0000000000 --- a/lib/rust/profiler/data/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "enso-profiler-data" -version = "0.1.0" -edition = "2021" -authors = ["Enso Team "] - -[dependencies] -derivative = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -enso-prelude = { path = "../../prelude" } -enso-profiler = { path = "../" } - -[dev-dependencies] -futures = { workspace = true } - -[lints] -workspace = true diff --git a/lib/rust/profiler/data/src/aggregate.rs b/lib/rust/profiler/data/src/aggregate.rs deleted file mode 100644 index 596db80d3f..0000000000 --- a/lib/rust/profiler/data/src/aggregate.rs +++ /dev/null @@ -1,129 +0,0 @@ -//! Supports aggregating interval data by profiler to analyze total time spent, abstracting away -//! *when* intervals occurred. - -use enso_prelude::*; - -use crate::Class; - -use std::collections::HashMap; - - - -// ===================== -// === Configuration === -// ===================== - -/// Frames shorter than this duration, and all their children, will be excluded from interval -/// reports. -/// -/// Some operations are not very expensive, but are repeated in many frames. These operations add -/// noise to the analysis: Their total duration can be high even if they have no actual performance -/// impact, and their total duration will vary depending on how long the profile is recorded. -/// Filtering them out makes profiling results more consistent, and more focused on the costs that -/// matter. -// This could logically be a configuration option, but in practice we'll probably never want to turn -// it off or change it. -const SKIP_FRAMES_BELOW_MS: f64 = 16.6; - - -// ================== -// === Aggregator === -// ================== - -/// Aggregate time spent in different functions. -#[derive(Default, Debug)] -pub struct Aggregator { - stack: Vec, - root: Frame, -} - -impl Aggregator { - /// Add data from a profile to the tree. 
-    pub fn add_profile<Metadata>(&mut self, profile: &crate::Profile<Metadata>) {
-        let not_short_frame = |&&child: &&crate::IntervalId| {
-            let interval = &profile[child];
-            let measurement = &profile[interval.measurement];
-            match measurement.classify() {
-                Class::OnFrame => interval
-                    .interval
-                    .duration_ms()
-                    .map_or(true, |duration| duration >= SKIP_FRAMES_BELOW_MS),
-                _ => true,
-            }
-        };
-        for &child in profile.root_interval().children.iter().filter(not_short_frame) {
-            self.visit_interval(profile, child);
-        }
-    }
-
-    /// Add the interval to a [`Frame`]; recurse into children.
-    fn visit_interval<Metadata>(
-        &mut self,
-        profile: &crate::Profile<Metadata>,
-        active: crate::IntervalId,
-    ) {
-        let active = &profile[active];
-        let label = profile[active.measurement].label.to_string().into();
-        self.stack.push(label);
-        match active.interval.duration_ms() {
-            Some(duration) if duration > 0.0 => {
-                self.log_interval(duration);
-                for child in &active.children {
-                    self.visit_interval(profile, *child);
-                }
-            }
-            _ => (),
-        };
-        self.stack.pop();
-    }
-
-    /// Add the interval to the total for the current stack.
-    fn log_interval(&mut self, duration: f64) {
-        let stack = &self.stack;
-        let mut frame = &mut self.root;
-        for id in stack {
-            frame = frame.children.entry(id.clone()).or_default();
-        }
-        frame.duration += duration;
-        frame.intervals += 1;
-    }
-}
-
-impl From<Aggregator> for Frame {
-    fn from(Aggregator { root, .. }: Aggregator) -> Self {
-        root
-    }
-}
-
-
-// =============
-// === Frame ===
-// =============
-
-/// Aggregated info about all occurrences of a particular stack of profilers.
-#[derive(Default, Debug)]
-pub struct Frame {
-    duration: f64,
-    /// Aggregated intervals that ran as children of this profiler.
-    pub children: HashMap<ImString, Frame>,
-    intervals: usize,
-}
-
-impl Frame {
-    /// Return the duration spent in this profiler's intervals, exclusive of time in child
-    /// intervals.
-    pub fn self_duration(&self) -> f64 {
-        let children_duration: f64 = self.children.values().map(Frame::total_duration).sum();
-        self.duration - children_duration
-    }
-
-    /// Return the duration spent in this profiler's intervals.
-    pub fn total_duration(&self) -> f64 {
-        self.duration
-    }
-
-    /// Return the number of intervals this aggregate represents.
-    pub fn interval_count(&self) -> usize {
-        self.intervals
-    }
-}
diff --git a/lib/rust/profiler/data/src/bin/devtools.rs b/lib/rust/profiler/data/src/bin/devtools.rs
deleted file mode 100644
index c478c20a08..0000000000
--- a/lib/rust/profiler/data/src/bin/devtools.rs
+++ /dev/null
@@ -1,132 +0,0 @@
-//! Tool that generates Chrome DevTools-compatible files from profiling interval data.
-//!
-//! The Chrome DevTools profile format has no official publicly available documentation.
-//! Someone's description of it is available here:
-//! https://docs.google.com/document/d/1lieZBBXZiEKOVk5vLCGmMT99_O-5lv9cGXoKnhqlY4g/preview
-//!
-//! # Usage
-//!
-//! The tool reads a
-//! [JSON-formatted event log](https://github.com/enso-org/design/blob/main/epics/profiling/implementation.md#file-format)
-//! from stdin, and writes a report to stdout.
-//!
-//! For example:
-//!
-//! ```console
-//! ~/git/enso/data $ cargo run --bin intervals < profile.json > devtools.json
-//! ```
-
-// === Features ===
-#![feature(test)]
-
-// === Non-Standard Linter Configuration ===
-#![deny(unconditional_recursion)]
-#![warn(missing_docs)]
-#![warn(trivial_casts)]
-
-use enso_profiler::format::AnyMetadata;
-use enso_profiler_data as data;
-
-
-
-/// Support for the Chrome DevTools profile format.
-mod devtools { - // ============= - // === Event === - // ============= - - /// DevTools-profile interval. - #[derive(serde::Serialize)] - pub struct Event { - pub name: String, - #[serde(rename = "cat")] - pub category: String, - #[serde(rename = "ph")] - pub event_type: EventType, - #[serde(rename = "ts")] - pub timestamp_us: u64, - #[serde(rename = "dur")] - pub duration_us: u64, - #[serde(rename = "pid")] - pub process_id: u32, - #[serde(rename = "tid")] - pub thread_id: u32, - // Actually a type of map, but we don't need to write anything there. - pub args: Option<()>, - } - - /// Information about type of event in DevTools profiling interval. - #[derive(Clone, Copy, Eq, PartialEq, serde::Serialize)] - pub enum EventType { - #[serde(rename = "X")] - Complete, - } -} - - - -// ============ -// === main === -// ============ - -fn main() { - use std::io::Read; - let mut log = String::new(); - std::io::stdin().read_to_string(&mut log).unwrap(); - let profile: data::Profile = log.parse().unwrap(); - let events = IntervalTranslator::run(&profile); - serde_json::to_writer(std::io::stdout(), &events).unwrap(); -} - - - -// ========================== -// === IntervalTranslator === -// ========================== - -/// Translates `profiler` data to the Chrome DevTools format. -struct IntervalTranslator<'p, Metadata> { - profile: &'p data::Profile, - events: Vec, -} - -impl<'p, Metadata> IntervalTranslator<'p, Metadata> { - /// Translate `profiler` data to the Chrome DevTools format. - fn run(profile: &'p data::Profile) -> Vec { - let events = Default::default(); - let mut builder = Self { profile, events }; - // We skip the root node APP_LIFETIME, which is not a real measurement. - for child in &profile.root_interval().children { - builder.visit_interval(*child); - } - let Self { events, .. } = builder; - events - } -} - -impl<'p, Metadata> IntervalTranslator<'p, Metadata> { - /// Translate an interval, and its children. - fn visit_interval(&mut self, active: data::IntervalId) { - let active = &self.profile[active]; - let measurement = &self.profile[active.measurement]; - let start = active.interval.start.into_ms(); - // DevTools ignores open intervals. - if let Some(duration_ms) = active.interval.duration_ms() { - let duration_us = (duration_ms * 1000.0) as u64; - let event = devtools::Event { - name: measurement.label.to_string(), - event_type: devtools::EventType::Complete, - category: "interval".to_owned(), - duration_us, - timestamp_us: (start * 1000.0) as u64, - process_id: 1, - thread_id: 1, - args: None, - }; - self.events.push(event); - } - for child in &active.children { - self.visit_interval(*child); - } - } -} diff --git a/lib/rust/profiler/data/src/bin/intervals.rs b/lib/rust/profiler/data/src/bin/intervals.rs deleted file mode 100644 index 63d6f2abef..0000000000 --- a/lib/rust/profiler/data/src/bin/intervals.rs +++ /dev/null @@ -1,135 +0,0 @@ -//! Tool that generates interval reports from profiling data. -//! -//! # Usage -//! -//! The tool reads a -//! [JSON-formatted event log](https://github.com/enso-org/design/blob/main/epics/profiling/implementation.md#file-format) -//! from stdin, and writes a report to stdout. -//! -//! For example: -//! -//! ```console -//! ~/git/enso/data $ cargo run --bin intervals < profile.json | less -//! 
``` - -// === Features === -#![feature(test)] -#![feature(let_chains)] - -// === Non-Standard Linter Configuration === -#![deny(unconditional_recursion)] -#![warn(missing_docs)] -#![warn(trivial_casts)] - -use enso_prelude::*; - -use std::collections::HashMap; -use enso_profiler::format::AnyMetadata; -use enso_profiler_data as data; - - - -// ===================== -// === Configuration === -// ===================== - -/// Set this to filter the output to matching profilers and their children. -const INCLUDE_ONLY_SUBTREES_MATCHING_PREFIX: Option<&str> = None; - - -// ============ -// === main === -// ============ - -fn main() { - use std::io::Read; - - let mut log = String::new(); - std::io::stdin().read_to_string(&mut log).unwrap(); - let profile: data::Profile = log.parse().unwrap(); - let mut aggregator = data::aggregate::Aggregator::default(); - aggregator.add_profile(&profile); - let root = data::aggregate::Frame::from(aggregator); - let funcs = FuncCollector::run(&root); - let kv_to_func = |(label, timings)| Func { label, timings }; - let mut funcs: Vec<_> = funcs.into_iter().map(kv_to_func).collect(); - funcs.sort_unstable_by(|a, b| a.timings.self_duration.total_cmp(&b.timings.self_duration)); - println!("self_duration,total_duration,count,profiler"); - for Func { label, timings } in funcs.iter().rev() { - let FuncTimings { total_duration, self_duration, count } = timings; - println!("{self_duration:>6.1},{total_duration:>6.1},{count},{label}"); - } - let mut total_duration = 0.0; - for Func { timings, .. } in funcs.iter() { - total_duration += timings.self_duration; - } - println!("0.0,{total_duration:>6.1},1,(total_self_duration)"); -} - - -// ===================== -// === FuncCollector === -// ===================== - -/// Aggregates all intervals created by a particular profiler, abstracting away where in the stack -/// it occurs. -#[derive(Default)] -struct FuncCollector { - funcs: HashMap, -} - -impl FuncCollector { - /// Aggregate all intervals created by a particular profiler. - fn run(root: &data::aggregate::Frame) -> HashMap { - let mut collector = FuncCollector::default(); - for (label, frame) in &root.children { - collector.visit(label, frame, default()); - } - let FuncCollector { funcs, .. } = collector; - funcs - } -} - -impl FuncCollector { - /// Add time spent in an interval to the running sums; recurse into children. - fn visit(&mut self, label: &Label, frame: &data::aggregate::Frame, enable: bool) { - let enable = enable - || INCLUDE_ONLY_SUBTREES_MATCHING_PREFIX - .map_or(true, |prefix| label.starts_with(prefix)); - if enable { - let func = self.funcs.entry(label.clone()).or_default(); - func.self_duration += frame.self_duration(); - func.total_duration += frame.total_duration(); - func.count += frame.interval_count(); - } - for (label, frame) in &frame.children { - self.visit(label, frame, enable); - } - } -} - -type Label = ImString; - - -// =================== -// === FuncTimings === -// =================== - -/// Aggregate of all time spent in a particular profiler's intervals. -#[derive(Default)] -struct FuncTimings { - total_duration: f64, - self_duration: f64, - count: usize, -} - - -// ============ -// === Func === -// ============ - -/// Identifies a profiler, and contains information about the time spent in its intervals. 
-struct Func { - label: Label, - timings: FuncTimings, -} diff --git a/lib/rust/profiler/data/src/bin/measurements.rs b/lib/rust/profiler/data/src/bin/measurements.rs deleted file mode 100644 index 507b93b6da..0000000000 --- a/lib/rust/profiler/data/src/bin/measurements.rs +++ /dev/null @@ -1,86 +0,0 @@ -//! Tool that generates measurement hierarchy reports from profiling data. -//! -//! # Usage -//! -//! The tool reads a JSON-formatted event log from stdin, and writes a report to stdout. -//! -//! For example: -//! -//! ```console -//! ~/git/enso/data $ cargo run --bin measurements < profile.json | less -//! ``` - -// === Features === -#![feature(test)] - -// === Non-Standard Linter Configuration === -#![deny(unconditional_recursion)] -#![warn(missing_docs)] -#![warn(trivial_casts)] - -use enso_profiler::format::AnyMetadata; -use enso_profiler_data as profiler_data; - - - -// ========================= -// === print_measurement === -// ========================= - -/// Pretty-print a [`profiler_data::Measurement`], including all children, in a way that illustrates -/// the hierarchy of the data. Results will be written to stdout. -fn print_measurement( - profile: &profiler_data::Profile, - measurement: profiler_data::MeasurementId, - i: usize, -) { - let measurement = &profile[measurement]; - let mut indent = String::new(); - for _ in 0..i { - indent.push_str(" "); - } - println!("{}{}", indent, measurement.label); - print!("{indent}"); - print!(" {:.1}", measurement.created.into_ms()); - for active in &measurement.intervals { - let interval = &profile[*active]; - print!(" {}", fmt_interval(interval.interval)); - } - println!(); - for active in &measurement.intervals { - let interval = &profile[*active]; - for metadata in &interval.metadata { - println!("{} {}", indent, metadata.data); - } - } - for child in &measurement.children { - print_measurement(profile, *child, i + 1); - } -} - - -// === formatting === - -/// Format a [`profiler_data::Interval`] in an easy-to-read way. -fn fmt_interval(interval: profiler_data::Interval) -> String { - let start = interval.start.into_ms(); - let end = interval.end.map(|x| format!("{:.1}", x.into_ms())).unwrap_or_default(); - format!("{start:.1}-{end}") -} - - - -// ============ -// === main === -// ============ - -fn main() { - use std::io::Read; - - let mut log = String::new(); - std::io::stdin().read_to_string(&mut log).unwrap(); - let profile: profiler_data::Profile = log.parse().unwrap(); - for root in &profile.root_measurement().children { - print_measurement(&profile, *root, 0); - } -} diff --git a/lib/rust/profiler/data/src/bin/processes.rs b/lib/rust/profiler/data/src/bin/processes.rs deleted file mode 100644 index 856d883e61..0000000000 --- a/lib/rust/profiler/data/src/bin/processes.rs +++ /dev/null @@ -1,259 +0,0 @@ -//! Tool for comparing the latencies of different processes in reacting to an event. -//! -//! # Usage -//! -//! The tool reads a JSON-formatted event log from stdin, and writes CSV data to stdout. -//! -//! For example: -//! -//! ```console -//! ~/git/enso/data $ cargo run --bin processes compile_new_shaders,backend_execution < profile.json -//! 
``` - -// === Non-Standard Linter Configuration === -#![deny(unconditional_recursion)] -#![warn(missing_docs)] -#![warn(trivial_casts)] - -use enso_profiler_data as profiler_data; -use profiler_data::Class; -use profiler_data::MeasurementId; -use profiler_data::OpaqueMetadata; -use profiler_data::Profile; -use profiler_data::Timestamp; -use std::collections::HashMap; -use std::default::Default; -use std::path::Path; -use std::str::FromStr; - - - -// =============== -// === Process === -// =============== - -/// Used to classify work into sets that are executed in parallel with each other. -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub struct Process(usize); - - -// === Processes === - -/// A profile's [`Process`]es. -#[derive(Debug)] -pub struct Processes { - in_order: Vec, - names: HashMap, - by_label: HashMap, -} - -impl FromStr for Processes { - type Err = (); - fn from_str(labels: &str) -> Result { - let process_labels = labels.split(',').map(|s| s.to_owned()); - let mut in_order = Vec::new(); - let mut names = HashMap::new(); - let mut by_label = HashMap::new(); - for (i, label) in process_labels.enumerate() { - let p = Process(i); - in_order.push(p); - names.insert(p, label.clone()); - by_label.insert(label, p); - } - Ok(Self { in_order, names, by_label }) - } -} - - - -// ================================= -// === Categorizing measurements === -// ================================= - -/// Categorize the given profile's measurements by process. -pub fn categorize_measurements( - profile: &Profile, - process_by_label: &HashMap, -) -> HashMap { - let root = profile.root_measurement_id(); - let mut measurement_process = Default::default(); - let current = Default::default(); - categorize_subtree(&mut measurement_process, profile, process_by_label, root, current); - measurement_process -} - -fn categorize_subtree( - measurement_process: &mut HashMap, - profile: &Profile, - process_by_label: &HashMap, - measurement_id: MeasurementId, - current: Option, -) { - let measurement = &profile[measurement_id]; - let new = process_by_label.get(&measurement.label.name).cloned(); - if let Some(process) = new { - measurement_process.insert(measurement_id, process); - } - let current = new.or(current); - for &child in &measurement.children { - categorize_subtree(measurement_process, profile, process_by_label, child, current); - } -} - - - -// ========================= -// === Process end times === -// ========================= - -/// Find the end of each process, i.e. when the last work attributed to it is completed. 
-pub fn process_ends( - profile: &Profile, - measurement_process: &HashMap, - root: MeasurementId, -) -> Vec<(Process, f64)> { - let mut ends = Default::default(); - for &child in &profile[root].children { - gather_ends(&mut ends, profile, measurement_process, child); - } - let root_start = profile[root].created; - ends.into_iter() - .map(|(process, end)| { - let end = end.into_ms() - root_start.into_ms(); - (process, end) - }) - .collect() -} - -fn gather_ends( - ends: &mut HashMap, - profile: &Profile, - measurement_process: &HashMap, - measurement_id: MeasurementId, -) { - let measurement = &profile[measurement_id]; - if let Some(process) = measurement_process.get(&measurement_id) { - let last_interval = measurement.intervals.last(); - let end = last_interval.and_then(|&i| profile[i].interval.end); - if let Some(new_end) = end { - let end = ends.entry(*process).or_default(); - if new_end > *end { - *end = new_end; - } - } - } - for &child in &measurement.children { - gather_ends(ends, profile, measurement_process, child); - } -} - - - -// ==================== -// === Working time === -// ==================== - -/// Sum the time any profiler not attributable to a foreign process is active during the given -/// interval. -pub fn working_time_in_interval( - profile: &Profile, - measurement_process: &HashMap, - interval_start: Timestamp, - interval_end: Timestamp, -) -> f64 { - let mut total = 0.0; - for &i in &profile.root_interval().children { - let interval = &profile[i]; - if measurement_process.contains_key(&interval.measurement) { - continue; - } - let interval = interval.interval; - let start = interval.start; - if let Some(end) = interval.end { - let start = std::cmp::max(start, interval_start).into_ms(); - let end = std::cmp::min(end, interval_end).into_ms(); - let duration = end - start; - if duration.is_sign_positive() { - total += duration; - } - } - } - total -} - - - -// =========================== -// === Highlighted regions === -// =========================== - -/// Get the region of interest in the profile, identified by a special profiler that must be present -/// in the data. 
-pub fn get_highlighted_region( - profile: &Profile, -) -> (MeasurementId, Timestamp, Timestamp) { - let is_highlight = |&m: &MeasurementId| profile[m].classify() == Class::Highlight; - let mut highlights: Vec<_> = profile.measurement_ids().filter(is_highlight).collect(); - let mut highlights = highlights.drain(..); - let head = highlights.next(); - let rest = highlights.len(); - let m_id = match (head, rest) { - (Some(first), 0) => first, - _ => { - let clause1 = "This tool currently only supports profiles of batch-mode workflows"; - let clause2 = "which should all have exactly one highlighted region"; - unimplemented!("{}, {}.", clause1, clause2); - } - }; - let measurement = &profile[m_id]; - let start = measurement.created; - let non_empty_highlight_required = "Incomplete profile: Highlighted region contains no data."; - let last_interval = measurement.intervals.last().expect(non_empty_highlight_required); - let end = profile[*last_interval].interval.end; - let complete_profile_required = "Incomplete profile: Highlighted region was not ended."; - let end = end.expect(complete_profile_required); - (m_id, start, end) -} - - - -// ============ -// === Main === -// ============ - -fn main() { - let mut args = std::env::args(); - let argv0 = args.next().unwrap(); - let labels = "foreign_process_label1,foreign_process_label2,..."; - let profiles = "profile1.json profile2.json ..."; - let usage = &format!("Usage: {argv0} {labels} {profiles}"); - let processes = Processes::from_str(&args.next().expect(usage)).expect(usage); - let mut cols = vec!["profile".into(), "main".into()]; - cols.extend(processes.in_order.iter().map(|p| processes.names[p].clone())); - println!("{}", cols.join(",")); - for path in args { - let path = Path::new(&path); - let results = analyze_file(path, &processes); - let results: Vec<_> = results.iter().map(|x| x.to_string()).collect(); - let file = path.file_stem().unwrap().to_str().unwrap(); - println!("{},{}", file, results.join(",")); - } -} - -fn analyze_file(path: &Path, processes: &Processes) -> Vec { - let log = std::fs::read_to_string(path).unwrap(); - let profile: Profile = log.parse().unwrap(); - let measurement_process = categorize_measurements(&profile, &processes.by_label); - let (root, root_start, root_end) = get_highlighted_region(&profile); - let other_process_latencies: HashMap<_, _> = process_ends(&profile, &measurement_process, root) - .into_iter() - .map(|(p, end)| (p, end)) - .collect(); - let main_process_time = - working_time_in_interval(&profile, &measurement_process, root_start, root_end); - let process_latency = |p| other_process_latencies.get(p).cloned().unwrap_or_default(); - - let mut results = vec![main_process_time]; - results.extend(processes.in_order.iter().map(process_latency)); - results -} diff --git a/lib/rust/profiler/data/src/lib.rs b/lib/rust/profiler/data/src/lib.rs deleted file mode 100644 index 65ddc5213a..0000000000 --- a/lib/rust/profiler/data/src/lib.rs +++ /dev/null @@ -1,660 +0,0 @@ -//! Interface to profile data. -//! -//! # Overview -//! -//! Usage of this API starts with applying [`str::parse`] to JSON profiling data, returning a -//! [`Measurement`] which is the root of the hierarchy of profiler outputs. -//! -//! Parsing is robust to changes in the definitions of metadata types; if deserialization of some -//! metadata entries fails, the resulting error type provides access to the result of deserializing -//! all the data that succeeded (see [`Error::RecoverableFormatError`]). -//! -//! 
# Usage example: storing and retrieving metadata -//! -//! ``` -//! use enso_profiler as profiler; -//! use enso_profiler_data as profiler_data; -//! use profiler::profile; -//! -//! // Some metadata types. -//! #[derive(serde::Deserialize, PartialEq, Eq, Debug)] -//! struct MyDataA(u32); -//! profiler::metadata_logger!("MyDataA", log_data_a(u32)); -//! -//! #[derive(serde::Deserialize, PartialEq, Eq, Debug)] -//! struct MyDataB(String); -//! profiler::metadata_logger!("MyDataB", log_data_b(String)); -//! -//! #[profile(Objective)] -//! fn action_producing_metadata() { -//! log_data_a(23); -//! log_data_b("5".into()); -//! } -//! -//! fn store_and_retrieve_metadata() { -//! action_producing_metadata(); -//! -//! // To deserialize, we define a metadata type as an enum. -//! // -//! // Each variant has a name and type that match the string-argument and type-parameter that -//! // match the `profiler::metadata_logger!` definition. If the type is a newtype, the -//! // metadata logger may accept the wrapped type for convenience; a newtype and its contents -//! // have the same serialized form. -//! #[derive(serde::Deserialize, PartialEq, Eq, Debug)] -//! enum MyMetadata { -//! MyDataA(MyDataA), -//! MyDataB(MyDataB), -//! // In this case we've handled everything. -//! // If we intended to handle some metadata and silently ignore anything else, we could -//! // include a catch-all variant like: -//! // `#[serde(other)] Other` -//! // On the other hand, if we intend to handle every type of metadata, we can omit the -//! // catch-all variant; unknown metadata will produce an -//! // [`Error::RecoverableFormatError`], which we can use to emit a warning and continue. -//! } -//! -//! // Obtain log data directly; it could also be deserialized from a file. -//! let log = profiler::internal::get_log(); -//! // Parse the log. Interpret metadata according to the enum defined above. -//! let profile: profiler_data::Profile = log.parse().unwrap(); -//! // Verify the MyData objects are present and attached to the right interval. -//! let interval = &profile[profile.root_interval().children[0]]; -//! let action = &profile[interval.measurement]; -//! assert_eq!(&action.label.name, "action_producing_metadata"); -//! assert_eq!(interval.metadata[0].data, MyMetadata::MyDataA(MyDataA(23))); -//! assert_eq!(interval.metadata[1].data, MyMetadata::MyDataB(MyDataB("5".into()))); -//! // Timestamps can be used to compare the order of events. -//! assert!(interval.metadata[0].time < interval.metadata[1].time); -//! } -//! -//! store_and_retrieve_metadata(); -//! ``` - -// === Features === -#![feature(test)] - -// === Non-Standard Linter Configuration === -#![deny(unconditional_recursion)] -#![warn(missing_docs)] -#![warn(trivial_casts)] - -use enso_profiler as profiler; -use profiler::format; -use std::error; -use std::fmt; -use std::rc::Rc; - - -// ============== -// === Export === -// ============== - -pub mod aggregate; -pub mod parse; - - - -// ============= -// === Error === -// ============= - -/// Describes an error and where it occurred. -pub enum Error { - /// Failed to deserialize the event log at all. The file is corrupt, or in a completely - /// incompatible format. - FormatError(serde_json::Error), - /// Failed to deserialize some events; if this is caused by a change to a metadata type, the - /// core data and metadata of unaffected types will still be available. - /// - /// For an example of handling a recoverable failure, see `tests::skip_failed_metadata`. 
-    RecoverableFormatError {
-        /// Deserialization errors for each metadata Event that failed to parse.
-        errors: Vec<EventError<serde_json::Error>>,
-        /// A profile with metadata of one or more types excluded due to format incompatibility.
-        /// There is one missing metadata object for each value in `errors`.
-        with_missing_data: Profile<M>,
-    },
-    /// Failed to interpret the event log data.
-    DataError(EventError<parse::DataError>),
-}
-
-impl<M> fmt::Display for Error<M> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{self:?}")
-    }
-}
-
-// This cannot be derived because: https://github.com/rust-lang/rust/issues/26925
-// Also, the debug output doesn't need to include the entire with_missing_data.
-impl<M> fmt::Debug for Error<M> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            Error::FormatError(e) => e.fmt(f),
-            Error::RecoverableFormatError { errors, .. } => errors.fmt(f),
-            Error::DataError(e) => e.fmt(f),
-        }
-    }
-}
-
-impl<M> error::Error for Error<M> {
-    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
-        Some(match self {
-            Error::FormatError(e) => e,
-            Error::RecoverableFormatError { errors, .. } => &errors[0],
-            Error::DataError(e) => e,
-        })
-    }
-}
-
-/// An error associated with a particular event in the log.
-#[derive(Debug)]
-pub struct EventError<E> {
-    #[allow(unused)] // displayed by Debug
-    /// The event's index in the log.
-    log_pos: usize,
-    #[allow(unused)] // displayed by Debug
-    /// The error.
-    error: E,
-}
-
-impl<E: fmt::Debug> fmt::Display for EventError<E> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{self:?}")
-    }
-}
-
-impl<E: error::Error> error::Error for EventError<E> {
-    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
-        self.error.source()
-    }
-}
-
-
-
-// ==============================
-// === Multi-process profiles ===
-// ==============================
-
-/// Parse data representing profiling information collected by multiple processes.
-pub fn parse_multiprocess_profile<M: serde::de::DeserializeOwned>(
-    data: &str,
-) -> impl Iterator<Item = Result<Profile<M>, Error<M>>> + '_ {
-    serde_json::Deserializer::from_str(data).into_iter::<Box<serde_json::value::RawValue>>().map(
-        |profile| {
-            let raw_parse_error = "Cannot parse input as sequence of JSON values!";
-            profile.expect(raw_parse_error).get().parse()
-        },
-    )
-}
-
-
-
-// ===============
-// === Profile ===
-// ===============
-
-/// All the profiling information captured by one process during one run of the application.
-///
-/// This is parameterized by a type that determines how metadata is interpreted. The type must be
-/// an enum, with a variant for each type of metadata that is handled. Each variant's name and type
-/// should correspond to the parameters supplied to [`profiler::metadata_logger`]. For an example,
-/// see the docs for the [`crate`].
-#[derive(Clone, Debug)]
-pub struct Profile<M> {
-    /// The hierarchy of profilers. A parent-child relationship indicates that the child was
-    /// started while the parent was running.
-    pub measurements: Vec<Measurement>,
-    /// The hierarchy of intervals. A parent-child relationship indicates that the child is
-    /// contained within the parent.
-    pub intervals: Vec<ActiveInterval<M>>,
-    /// Optional information about this profile.
-    pub headers: Headers,
-}
-
-impl<M> Profile<M> {
-    /// A virtual measurement containing the top-level measurements as children.
-    pub fn root_measurement(&self) -> &Measurement {
-        self.measurements.last().unwrap()
-    }
-
-    /// A virtual interval containing the top-level intervals as children.
-    pub fn root_interval(&self) -> &ActiveInterval<M> {
-        self.intervals.last().unwrap()
-    }
-
-    /// Id of a virtual measurement containing the top-level measurements as children.
-    pub fn root_measurement_id(&self) -> MeasurementId {
-        MeasurementId(self.measurements.len() - 1)
-    }
-
-    /// Id of a virtual interval containing the top-level intervals as children.
-    pub fn root_interval_id(&self) -> IntervalId {
-        IntervalId(self.intervals.len() - 1)
-    }
-
-    /// Iterate over all metadata in the profile.
-    pub fn metadata(&self) -> impl Iterator<Item = &Timestamped<M>> {
-        self.intervals.iter().flat_map(|interval| interval.metadata.iter())
-    }
-
-    /// Iterate over the IDs of all measurements.
-    pub fn measurement_ids(&self) -> impl Iterator<Item = MeasurementId> {
-        (0..self.measurements.len()).map(MeasurementId)
-    }
-}
-
-
-// === Headers ===
-
-/// Information about the profile.
-#[derive(Clone, Debug, Default)]
-pub struct Headers {
-    /// A value that can be used to translate a timestamp to system time.
-    pub time_offset: Option<format::Timestamp>,
-    /// An application-specific identifier used to distinguish logs from different processes.
-    pub process: Option<String>,
-}
-
-
-// === IDs and indexing ===
-
-/// Identifies a measurement in a particular profile.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-pub struct MeasurementId(pub(crate) usize);
-
-/// Identifies an interval in a particular profile.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-pub struct IntervalId(pub(crate) usize);
-
-impl<M> std::ops::Index<MeasurementId> for Profile<M> {
-    type Output = Measurement;
-    fn index(&self, MeasurementId(index): MeasurementId) -> &Self::Output {
-        &self.measurements[index]
-    }
-}
-
-impl<M> std::ops::Index<IntervalId> for Profile<M> {
-    type Output = ActiveInterval<M>;
-    fn index(&self, IntervalId(index): IntervalId) -> &Self::Output {
-        &self.intervals[index]
-    }
-}
-
-
-
-// ===================
-// === Measurement ===
-// ===================
-
-/// All the information produced by a profiler.
-#[derive(Clone, Debug)]
-pub struct Measurement {
-    /// Identifies the profiler's source and scope to the user.
-    pub label: Rc