Mirror of https://github.com/enso-org/enso.git (synced 2024-12-28 14:42:25 +03:00)

Add IDE Code From enso Repository (https://github.com/enso-org/ide/pull/128)
As per https://github.com/luna/enso/issues/431
Original commit: 8f993cc1d2
Parent: ebfc075938
Commit: db977692f7
@@ -2,16 +2,24 @@
members = [
    "build-utilities",
    "lib/code-builder",
    "lib/core",
    "lib/core/embedded-fonts",
    "lib/core/msdf-sys",
    "lib/data",
    "lib/eval-tt",
    "lib/ide/ast/impl",
    "lib/ide/ast/macros",
    "lib/ide/file-manager",
    "lib/ide/json-rpc",
    "lib/ide/parser",
    "lib/ide/utils",
    "lib/logger",
    "lib/macro-utils",
    "lib/optics",
    "lib/prelude",
    "lib/code-builder",
    "lib/shapely/impl",
    "lib/shapely/macros",
    "lib/system/web",
]
@@ -6,3 +6,4 @@ edition = "2018"

[dependencies]
download-lp = "0.2.0"
path-clean  = "0.1.0"
@@ -1,20 +1,24 @@
#![feature(option_unwrap_none)]
#![feature(trait_alias)]

use std::{fs, path};
use std::io::ErrorKind;

/// Types that can yield a reference to std::path::Path.
pub trait PathRef = AsRef<path::Path>;

/// A structure describing a concrete release package on github.
pub struct GithubRelease<Str:AsRef<str>> {
pub struct GithubRelease<Str> {
    pub project_url : Str,
    pub version     : Str,
    pub filename    : Str,
}

impl<Str:AsRef<str>> GithubRelease<Str> {
impl<Str> GithubRelease<Str> {
    /// Download the release package from github
    ///
    /// The project_url should be a project's main page on github.
    pub fn download(&self, destination_dir:&path::Path) {
    pub fn download(&self, destination_dir:&path::Path) where Str:AsRef<str> {
        let url = format!(
            "{project}/releases/download/{version}/{filename}",
            project  = self.project_url.as_ref(),
@@ -34,3 +38,31 @@ impl<Str:AsRef<str>> GithubRelease<Str> {
        fatal_error.unwrap_none();
    }
}

/// Converts path to an absolute form.
pub fn absolute_path(path: impl PathRef) -> std::io::Result<path::PathBuf> {
    use path_clean::PathClean;
    let path = path.as_ref();
    if path.is_absolute() {
        Ok(path.to_path_buf().clean())
    } else {
        Ok(std::env::current_dir()?.join(path).clean())
    }
}

/// Get the environment variable or panic if not available.
pub fn env_var_or_panic(var_name:&str) -> String {
    match std::env::var(var_name) {
        Ok(var) => var,
        Err(e)  =>
            panic!("failed to read environment variable {}: {}", var_name, e),
    }
}

/// Checks if the current build is targeting wasm32.
///
/// Relies on `TARGET` environment variable set by cargo for build scripts.
pub fn targeting_wasm() -> bool {
    let target = env_var_or_panic("TARGET");
    target.contains("wasm32")
}
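For orientation, a minimal, hypothetical `build.rs` sketch showing how the helpers above fit together. The crate name matches the `basegl-build-utilities` dependency entries later in this diff; the GitHub project URL, version, and filename are placeholder values, not a real release:

```rust
// Hypothetical build.rs sketch; the release coordinates below are placeholders.
fn main() {
    // Only fetch assets when cargo is building for the wasm32 target.
    if basegl_build_utilities::targeting_wasm() {
        let release = basegl_build_utilities::GithubRelease {
            project_url : "https://github.com/example-org/example-assets",
            version     : "v0.1.0",
            filename    : "assets.zip",
        };
        // OUT_DIR is set by cargo for build scripts.
        let out_dir = basegl_build_utilities::env_var_or_panic("OUT_DIR");
        release.download(std::path::Path::new(&out_dir));
    }
}
```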
@@ -9,4 +9,4 @@ edition = "2018"
[features]

[dependencies]
basegl-prelude = { version = "0.1.0", path = "../prelude" }
enso-prelude   = { version = "0.1.0", path = "../prelude" }
@@ -8,7 +8,7 @@
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]

use basegl_prelude::*;
use enso_prelude::*;
use std::fmt::Write;
@@ -15,7 +15,7 @@ no_unboxed_callbacks = []
[dependencies]
basegl-core-embedded-fonts = { version = "0.1.0" , path = "embedded-fonts" }
basegl-core-msdf-sys       = { version = "0.1.0" , path = "msdf-sys" }
basegl-prelude             = { version = "0.1.0" , path = "../prelude" }
enso-prelude               = { version = "0.1.0" , path = "../prelude" }
basegl-system-web          = { version = "0.1.0" , path = "../system/web" }
code-builder               = { version = "0.1.0" , path = "../code-builder" }
data                       = { version = "0.1.0" , path = "../data" }
@@ -10,7 +10,7 @@ crate-type = ["cdylib", "rlib"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
basegl-prelude = { version = "0.1.0", path="../../prelude"}
enso-prelude   = { version = "0.1.0", path="../../prelude"}

[build-dependencies]
basegl-build-utilities = { version = "0.1.0", path="../../../build-utilities" }
@@ -3,8 +3,8 @@
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]

use basegl_prelude::*;
use basegl_prelude::fmt::{Formatter, Error};
use enso_prelude::*;
use enso_prelude::fmt::{Formatter, Error};

/// A base of built-in fonts in application
///
@@ -11,7 +11,7 @@ crate-type = ["cdylib", "rlib"]
wasm-bindgen = "0.2.53"
js-sys       = "0.3.30"
nalgebra     = "0.19.0"
basegl-prelude = { version = "0.1.0", path="../../prelude" }
enso-prelude   = { version = "0.1.0", path="../../prelude" }

[dev-dependencies]
wasm-bindgen-test = "0.3.3"
@@ -7,7 +7,7 @@ mod internal;
pub mod emscripten_data;
pub mod test_utils;

pub use basegl_prelude as prelude;
pub use enso_prelude as prelude;
use internal::*;

use emscripten_data::ArrayMemoryView;
@@ -55,7 +55,7 @@ pub mod system;

/// Prelude - commonly used utilities.
pub mod prelude {
    pub use basegl_prelude::*;
    pub use enso_prelude::*;
    pub use logger::*;
    pub use shapely::newtype_copy;
    pub use shapely::shared;
@@ -3,7 +3,7 @@
#[warn(missing_docs)]
pub mod glsl;

use basegl_prelude::*;
use enso_prelude::*;

use js_sys::Float32Array;
use web_sys::WebGlBuffer;
@@ -8,4 +8,4 @@ edition = "2018"
crate-type = ["rlib", "cdylib"]

[dependencies]
basegl-prelude = { version = "0.1.0" , path = "../prelude" }
enso-prelude   = { version = "0.1.0" , path = "../prelude" }
@@ -4,4 +4,4 @@

pub mod opt_vec;

pub use basegl_prelude as prelude;
pub use enso_prelude as prelude;
gui/lib/ide/ast/impl/Cargo.toml (new file, 20 lines)
@@ -0,0 +1,20 @@
[package]
name    = "ast"
version = "0.1.0"
authors = ["Enso Team <contact@luna-lang.org>"]
edition = "2018"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
derive_more  = { version = "0.15.0" }
failure      = { version = "0.1.5" }
serde        = { version = "1.0", features = ["derive", "rc"] }
serde_json   = { version = "1.0" }
shrinkwraprs = { version = "0.2.1" }
uuid         = { version = "0.8.1", features = ["serde", "v4"] }

ast-macros   = { version = "0.1.0", path = "../macros" }
enso-prelude = { version = "0.1.0", path = "../../../prelude" }
shapely      = { version = "0.1.0", path = "../../../shapely/impl" }
gui/lib/ide/ast/impl/src/internal.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
/// Iterate recursively over tree-like structure implementing `IntoIterator`.
pub fn iterate_subtree<T>(ast:T) -> impl Iterator<Item=T::Item>
where T: IntoIterator<Item=T> + Copy {
    let generator = move || {
        let mut nodes:Vec<T> = vec![ast];
        while !nodes.is_empty() {
            let ast = nodes.pop().unwrap();
            nodes.extend(ast.into_iter());
            yield ast;
        }
    };

    shapely::GeneratingIterator(generator)
}
gui/lib/ide/ast/impl/src/lib.rs (new file, 1081 lines)
File diff suppressed because it is too large.

gui/lib/ide/ast/impl/src/repr.rs (new file, 551 lines)
@@ -0,0 +1,551 @@
use crate::*;

// ======================
// === Token Literals ===
// ======================

/// Token representing blank.
pub const BLANK_TOKEN:char = '_';

/// Symbol appearing after base of the number literal.
pub const NUMBER_BASE_SEPARATOR:char = '_';

/// Suffix to make a modifier from an operator.
pub const MOD_SUFFIX:char = '=';

/// Symbol enclosing formatted Text line.
pub const FMT_QUOTE:char = '\'';

/// Symbol enclosing raw Text line.
pub const RAW_QUOTE:char = '"';

/// Symbol used to break lines in Text block.
pub const NEWLINE:char = '\n';

/// Symbol introducing escape segment in the Text.
pub const BACKSLASH:char = '\\';

/// Symbol enclosing expression segment in the formatted Text.
pub const EXPR_QUOTE:char = '`';

/// Symbol that introduces UTF-16 code in the formatted Text segment.
pub const UNICODE16_INTRODUCER:char = 'u';

/// String that opens "UTF-21" code in the formatted Text segment.
pub const UNICODE21_OPENER:&str = "u{";

/// String that closes "UTF-21" code in the formatted Text segment.
pub const UNICODE21_CLOSER:&str = "}";

/// Symbol that introduces UTF-32 code in the formatted Text segment.
pub const UNICODE32_INTRODUCER:char = 'U';

/// Quotes opening block of the raw text.
pub const RAW_BLOCK_QUOTES:&str = "\"\"\"";

/// Quotes opening block of the formatted text.
pub const FMT_BLOCK_QUOTES:&str = "'''";


// ===============
// === Builder ===
// ===============

make_repr_span!(Empty);
make_repr_span!(Letter, self.char);
make_repr_span!(Space , self);
make_repr_span!(Text  , self.str);
make_repr_span!(Seq   , self.first, self.second);


// =====================
// === TextBlockLine ===
// =====================

/// Not an instance of `HasSpan`, as it needs to know parent block's offset.
impl<T: HasSpan> TextBlockLine<T> {
    fn span(&self, block_offset:usize) -> usize {
        let line_count              = self.empty_lines.len() + 1;
        let empty_lines_space:usize = self.empty_lines.iter().sum();
        let line_breaks             = line_count * NEWLINE.span();
        empty_lines_space + line_breaks + block_offset + self.text.span()
    }
}

impl<T: HasRepr> TextBlockLine<T> {
    fn write_repr(&self, target:&mut String, block_offset:usize) {
        for empty_line_spaces in &self.empty_lines {
            (NEWLINE,empty_line_spaces).write_repr(target);
        }
        (NEWLINE,block_offset,&self.text).write_repr(target);
    }
}


// =====================
// === Text Segments ===
// =====================

make_repr_span!(SegmentPlain    , self.value);
make_repr_span!(SegmentRawEscape, BACKSLASH, self.code );
make_repr_span!(SegmentExpr<T>  , EXPR_QUOTE, self.value, EXPR_QUOTE);
make_repr_span!(SegmentEscape   , BACKSLASH, self.code );


// =================
// === RawEscape ===
// =================

make_repr_span!(Unfinished);
make_repr_span!(Invalid , self.str );
make_repr_span!(Slash   , BACKSLASH);
make_repr_span!(Quote   , FMT_QUOTE);
make_repr_span!(RawQuote, RAW_QUOTE);


// ==============
// === Escape ===
// ==============

make_repr_span!(EscapeCharacter , self.c     );
make_repr_span!(EscapeControl   , self.name  );
make_repr_span!(EscapeNumber    , self.digits);
make_repr_span!(EscapeUnicode16 , UNICODE16_INTRODUCER, self.digits);
make_repr_span!(EscapeUnicode21 , UNICODE21_OPENER    , self.digits
                                , UNICODE21_CLOSER);
make_repr_span!(EscapeUnicode32 , UNICODE32_INTRODUCER, self.digits);


// =============
// === Block ===
// =============

make_repr_span!(BlockLine<T>, self.elem, self.off);


// =============
// === Macro ===
// =============

// === Macro Segments ==

make_repr_span!(MacroMatchSegment<T> , self.head, self.body);
make_repr_span!(MacroAmbiguousSegment, self.head, self.body);

// === MacroPatternMatch subtypes ===

make_repr_span!(MacroPatternMatchRawBegin  );
make_repr_span!(MacroPatternMatchRawEnd    );
make_repr_span!(MacroPatternMatchRawNothing);
make_repr_span!(MacroPatternMatchRawSeq    <T>, self.elem);
make_repr_span!(MacroPatternMatchRawOr     <T>, self.elem);
make_repr_span!(MacroPatternMatchRawMany   <T>, self.elem);
make_repr_span!(MacroPatternMatchRawExcept <T>, self.elem);
make_repr_span!(MacroPatternMatchRawBuild  <T>, self.elem);
make_repr_span!(MacroPatternMatchRawErr    <T>, self.elem);
make_repr_span!(MacroPatternMatchRawTag    <T>, self.elem);
make_repr_span!(MacroPatternMatchRawCls    <T>, self.elem);
make_repr_span!(MacroPatternMatchRawTok    <T>, self.elem);
make_repr_span!(MacroPatternMatchRawBlank  <T>, self.elem);
make_repr_span!(MacroPatternMatchRawVar    <T>, self.elem);
make_repr_span!(MacroPatternMatchRawCons   <T>, self.elem);
make_repr_span!(MacroPatternMatchRawOpr    <T>, self.elem);
make_repr_span!(MacroPatternMatchRawMod    <T>, self.elem);
make_repr_span!(MacroPatternMatchRawNum    <T>, self.elem);
make_repr_span!(MacroPatternMatchRawText   <T>, self.elem);
make_repr_span!(MacroPatternMatchRawBlock  <T>, self.elem);
make_repr_span!(MacroPatternMatchRawMacro  <T>, self.elem);
make_repr_span!(MacroPatternMatchRawInvalid<T>, self.elem);

// === Switch ===

make_repr_span!(Switch<T>, self.get());

// === Shifted ===

make_repr_span!(Shifted<T>, self.off, self.wrapped);
make_repr_span!(ShiftedVec1<T>, self.head, self.tail);


// =============================================================================
// === Shape ===================================================================
// =============================================================================

// ===============
// === Invalid ===
// ===============

make_repr_span!(Unrecognized, self.str  );
make_repr_span!(InvalidQuote, self.quote);
make_repr_span!(InlineBlock , self.quote);


// ===================
// === Identifiers ===
// ===================

make_repr_span!(Blank           , BLANK_TOKEN);
make_repr_span!(Var             , self.name  );
make_repr_span!(Cons            , self.name  );
make_repr_span!(Opr             , self.name  );
make_repr_span!(Mod             , self.name, MOD_SUFFIX );
make_repr_span!(InvalidSuffix<T>, self.elem, self.suffix);


// ==============
// === Number ===
// ==============

/// Helper to represent that optional number base has additional character.
struct NumberBase<T>(T);
make_repr_span!(NumberBase<T>, self.0, NUMBER_BASE_SEPARATOR);
make_repr_span!(Number       , self.base.as_ref().map(NumberBase)
                             , self.int);
make_repr_span!(DanglingBase , self.base, NUMBER_BASE_SEPARATOR);


// ============
// === Text ===
// ============

// === Indented ===

/// Helper to represent line with additional spacing prepended.
struct Indented<T>(usize,T);
make_repr_span!(Indented<T>, self.0, self.1);
impl<T> Block<T> {
    fn indented<'t, U>(&self, t:&'t U) -> Indented<&'t U> {
        Indented(self.indent,t)
    }
}

// === Lines ===

make_repr_span!(TextLineRaw    , RAW_QUOTE, self.text, RAW_QUOTE);
make_repr_span!(TextLineFmt<T> , FMT_QUOTE, self.text, FMT_QUOTE);

// === TextBlockRaw ==

impl HasSpan for TextBlockRaw {
    fn span(&self) -> usize {
        let mut acc = (RAW_BLOCK_QUOTES,self.spaces).span();
        for line in self.text.iter() {
            acc += line.span(self.offset);
        }
        acc
    }
}

impl HasRepr for TextBlockRaw {
    fn write_repr(&self, target:&mut String) {
        (RAW_BLOCK_QUOTES, self.spaces).write_repr(target);
        for line in self.text.iter() {
            line.write_repr(target, self.offset);
        }
    }
}

// === TextBlockFmt ==

impl<T: HasSpan> HasSpan for TextBlockFmt<T> {
    fn span(&self) -> usize {
        let lines            = self.text.iter();
        let line_spans       = lines.map(|line| line.span(self.offset));
        let lines_span:usize = line_spans.sum();
        FMT_BLOCK_QUOTES.span() + self.spaces + lines_span
    }
}

impl<T: HasRepr> HasRepr for TextBlockFmt<T> {
    fn write_repr(&self, target:&mut String) {
        (FMT_BLOCK_QUOTES,self.spaces).write_repr(target);
        for line in self.text.iter() {
            line.write_repr(target,self.offset);
        };
    }
}

// === TextUnclosed ==

// TODO: [mwu] `TextUnclosed<T>` as it needs to cut off closing quote from the
//             stored text line. Likely this type should be stored like this.
impl<T: HasSpan> HasSpan for TextUnclosed<T> {
    fn span(&self) -> usize {
        self.line.span() - 1 // remove missing quote
    }
}

impl<T: HasRepr> HasRepr for TextUnclosed<T> {
    fn write_repr(&self, target:&mut String) {
        self.line.write_repr(target);
        target.pop(); // remove missing quote
    }
}


// ====================
// === Applications ===
// ====================

make_repr_span!(Prefix      <T>, self.func, self.off, self.arg);
make_repr_span!(Infix       <T>, self.larg, self.loff, self.opr, self.roff
                               , self.rarg);
make_repr_span!(SectionLeft <T>, self.arg, self.off, self.opr);
make_repr_span!(SectionRight<T>, self.opr, self.off, self.arg);
make_repr_span!(SectionSides<T>, self.opr);


// ==============
// === Module ===
// ==============

// === Module ==

impl<T: HasSpan> HasSpan for Module<T> {
    fn span(&self) -> usize {
        assert!(!self.lines.is_empty());
        let break_count = self.lines.len() - 1;
        let breaks_span = break_count * NEWLINE.span();
        let lines_span  = self.lines.span();
        lines_span + breaks_span
    }
}

impl<T: HasRepr> HasRepr for Module<T> {
    fn write_repr(&self, target:&mut String) {
        let mut iter = self.lines.iter();
        if let Some(first_line) = iter.next() {
            first_line.write_repr(target)
        }
        for line in iter {
            (NEWLINE,line).write_repr(target)
        }
    }
}

// === Block ==

impl<T: HasSpan> HasSpan for Block<T> {
    fn span(&self) -> usize {
        let line_span = |line:&BlockLine<Option<T>>| {
            let indent = line.elem.as_ref().map_or(0, |_| self.indent);
            NEWLINE.span() + indent + line.span()
        };
        let head_span   = if self.is_orphan { 0 } else { 1 };
        let empty_lines = self.empty_lines.span() + self.empty_lines.len();
        let first_line  = self.indent + self.first_line.span();
        let lines       = self.lines.iter().map(line_span).sum::<usize>();
        head_span + empty_lines + first_line + lines
    }
}

impl<T: HasRepr> HasRepr for Block<T> {
    fn write_repr(&self, target:&mut String) {
        (!self.is_orphan).as_some(NEWLINE).write_repr(target);
        for empty_line_space in &self.empty_lines {
            (empty_line_space,NEWLINE).write_repr(target);
        }
        self.indented(&self.first_line).write_repr(target);
        for line in &self.lines {
            (NEWLINE,self.indented(line)).write_repr(target);
        }
    }
}


// ==============
// === Macros ===
// ==============

// === Match ==

make_span!(Match<T>, self.pfx, self.segs);

impl<T: HasRepr> HasRepr for Match<T> {
    fn write_repr(&self, target:&mut String) {
        for pat_match in &self.pfx {
            for sast in pat_match.iter() {
                // reverse the order for prefix: ast before spacing
                (&sast.wrapped,&sast.off).write_repr(target);
            }
        }
        self.segs.write_repr(target);
    }
}

// === Ambiguous ===

make_repr_span!(Ambiguous, self.segs);


// =====================
// === Spaceless AST ===
// =====================

not_supported_repr!(Comment);
not_supported_repr!(Import<T>);
not_supported_repr!(Mixfix<T>);
not_supported_repr!(Group<T>);
not_supported_repr!(Def<T>);
not_supported_repr!(Foreign);


// =============
// === Tests ===
// =============

/// Tests for spaceless AST. Other AST is covered by parsing tests that verify
/// that correct spans and text representation are generated. Only spaceless AST
/// is not returned by the parser and can't be covered in this way.
#[cfg(test)]
mod tests {
    use super::*;

    // === Comment ===

    fn make_comment() -> Shape<Ast> {
        Comment {lines:vec![]}.into()
    }

    #[test]
    #[should_panic]
    fn comment_panics_on_repr() {
        make_comment().repr();
    }

    #[test]
    #[should_panic]
    fn comment_panics_on_span() {
        make_comment().span();
    }

    // === Import ===

    fn make_import() -> Shape<Ast> {
        Import {path : vec![]}.into()
    }

    #[test]
    #[should_panic]
    fn import_panics_on_repr() {
        make_import().repr();
    }

    #[test]
    #[should_panic]
    fn import_panics_on_span() {
        make_import().span();
    }

    // === Mixfix ===

    fn make_mixfix() -> Shape<Ast> {
        Mixfix {
            name : vec![],
            args : vec![]
        }.into()
    }

    #[test]
    #[should_panic]
    fn mixfix_panics_on_repr() {
        make_mixfix().repr();
    }

    #[test]
    #[should_panic]
    fn mixfix_panics_on_span() {
        make_mixfix().span();
    }

    // === Group ===

    fn make_group() -> Shape<Ast> {
        Group {body : None}.into()
    }

    #[test]
    #[should_panic]
    fn group_panics_on_repr() {
        make_group().repr();
    }

    #[test]
    #[should_panic]
    fn group_panics_on_span() {
        make_group().span();
    }

    // === Def ===

    fn make_def() -> Shape<Ast> {
        Def {
            name : Ast::cons("Foo"),
            args : vec![],
            body : None
        }.into()
    }

    #[test]
    #[should_panic]
    fn def_panics_on_repr() {
        make_def().repr();
    }

    #[test]
    #[should_panic]
    fn def_panics_on_span() {
        make_def().span();
    }

    // === Foreign ===

    fn make_foreign() -> Shape<Ast> {
        Foreign {
            indent : 0,
            lang   : "Python".into(),
            code   : vec![]
        }.into()
    }

    #[test]
    #[should_panic]
    fn foreign_panics_on_repr() {
        make_foreign().repr();
    }

    #[test]
    #[should_panic]
    fn foreign_panics_on_span() {
        make_foreign().span();
    }
}
gui/lib/ide/ast/macros/Cargo.toml (new file, 26 lines)
@@ -0,0 +1,26 @@
[package]
name    = "ast-macros"
version = "0.1.0"
authors = ["Enso Team <contact@luna-lang.org>"]
edition = "2018"

[lib]
proc-macro = true

[features]
default = []

[dependencies]
proc-macro2 = "1.0"
quote       = "1.0"
Inflector   = "0.11.4"

macro-utils  = { version = "0.1.0" , path = "../../../macro-utils" }
enso-prelude = { version = "0.1.0" , path = "../../../prelude" }

[dependencies.syn]
version  = "1.0"
features = [
    'extra-traits',
    'full' # for syn::File and syn::ItemFn
]
gui/lib/ide/ast/macros/src/lib.rs (new file, 323 lines)
@@ -0,0 +1,323 @@
//! Helper macros used when defining AST structures.

#![warn(missing_docs)]

extern crate proc_macro;

mod repr;

use crate::prelude::*;
use crate::repr::ReprDescription;

use enso_prelude as prelude;
use macro_utils::gather_all_type_reprs;
use macro_utils::repr;
use proc_macro2::TokenStream;
use proc_macro2::Ident;
use proc_macro2::Span;
use quote::quote;
use syn;


// ==============
// === Macros ===
// ==============

/// A macro that shall be applied to all AST nodes.
///
/// Derives all the traits that are expected to be implemented by AST nodes.
///
/// Implicitly applied by `ast` on target and generated types. User should not
/// need to use this macro directly.
#[proc_macro_attribute]
pub fn ast_node
( _meta: proc_macro::TokenStream
, input: proc_macro::TokenStream
) -> proc_macro::TokenStream {
    let input: TokenStream = input.into();
    let output = quote! {
        #[derive(Eq, PartialEq, Debug)]
        #[derive(Iterator)]
        #[derive(Serialize, Deserialize)]
        #input
    };
    output.into()
}

/// Marks target declaration as `ast_node`. If it is an enumeration, also
/// applies `to_variant_types`.
#[proc_macro_attribute]
pub fn ast
( attrs : proc_macro::TokenStream
, input : proc_macro::TokenStream
) -> proc_macro::TokenStream {
    let attrs: TokenStream = attrs.into();
    let decl   = syn::parse_macro_input!(input as syn::DeriveInput);
    let output = match &decl.data {
        syn::Data::Enum { .. } => quote! {
            #[to_variant_types(#attrs)]
            #[ast_node]
            #decl
        },
        _ => quote! {
            #[ast_node]
            #decl
        }
    };
    output.into()
}


// ==============
// === Macros ===
// ==============

// Note [Expansion Example]
// ~~~~~~~~~~~~~~~~~~~~~~~~
// In order to make the definition easier to read, an example expansion of the
// following definition was provided for each quotation:
//
// #[to_variant_types]
// pub enum Shape<T> {
//     Var(Var),
//     App(App<T>),
// }

/// Produces declaration of the structure for given source enum variant.
fn mk_product_type
( is_flat : bool
, decl    : &syn::DeriveInput
, variant : &syn::Variant
) -> syn::ItemStruct {
    use syn::ItemStruct;
    let fields       = &variant.fields;
    let fields       = fields.iter();
    let types        = fields.flat_map(|f| {gather_all_type_reprs(&f.ty) });
    let types        = types.collect::<HashSet<_>>();
    let ty_vars      = decl.generics.params.iter().cloned();
    let params       = ty_vars.filter(|v| types.contains(&repr(&v))).collect();
    let attrs        = decl.attrs.clone();
    let vis          = decl.vis.clone();
    let struct_token = syn::token::Struct { span: Span::call_site() };
    let ident_flat   = variant.ident.clone();
    let ident_nested = format!("{}{}", decl.ident, variant.ident);
    let ident_nested = Ident::new(&ident_nested, Span::call_site());
    let ident        = if is_flat { ident_flat } else { ident_nested };
    let generics     = syn::Generics { params, .. default() };
    let mut fields   = variant.fields.clone();
    let semi_token   = None;
    fields.iter_mut().for_each(|f| f.vis = vis.clone());
    ItemStruct { attrs, vis, struct_token, ident, generics, fields, semi_token }
}

/// Generates rewritten enumeration declaration.
///
/// Each constructor will be a single-elem tuple holder for extracted type.
fn gen_variant_decl
(ident: &syn::Ident, variant: &syn::ItemStruct) -> TokenStream {
    let variant_ident = &variant.ident;
    let params = variant.generics.params.iter();
    quote! {
        // See note [Expansion Example]
        // App(ShapeApp<T>),
        // Var(ShapeVar),
        #ident(#variant_ident<#(#params),*>)
    }
}

/// Generate `From` trait implementations converting from each of extracted
/// types back into primary enumeration.
/// Generate `TryFrom` implementation from primary enumeration into each
/// extracted type.
#[allow(clippy::cognitive_complexity)]
fn gen_from_impls
( ident  : &syn::Ident
, decl   : &syn::DeriveInput
, variant: &syn::ItemStruct
) -> TokenStream {
    let sum_label     = &decl.ident;
    let variant_label = &variant.ident;
    let variant_name  = variant_label.to_string();

    let sum_params = &decl.generics.params
        .iter().cloned().collect::<Vec<_>>();
    let variant_params = &variant.generics.params
        .iter().cloned().collect::<Vec<_>>();

    quote! {
        // See note [Expansion Example]
        // impl<T> From<App<T>> for Shape<T> {
        //     fn from(t: App<T>) -> Self { Shape::App(t) }
        // }
        // ...
        impl<#(#sum_params),*> From<#variant_label<#(#variant_params),*>>
        for #sum_label<#(#sum_params),*> {
            fn from(t: #variant_label<#(#variant_params),*>) -> Self {
                #sum_label::#ident(t)
            }
        }

        // impl<'t, T> TryFrom<&'t Shape<T>> for &'t Infix<T> {
        //     type Error = WrongEnum;
        //     fn try_from(value: &'t Shape<T>) -> Result<Self, Self::Error> {
        //         match value {
        //             Shape::Infix(elem) => Ok (elem),
        //             _ => {
        //                 let error = WrongEnum {
        //                     expected_con : "Infix" };
        //                 Err(error)
        //             },
        //         }
        //     }
        // }
        impl<'t, #(#sum_params),*> TryFrom<&'t #sum_label<#(#sum_params),*>>
        for &'t #variant_label<#(#variant_params),*> {
            type Error = WrongEnum;

            fn try_from
            (value: &'t #sum_label<#(#sum_params),*>)
            -> Result<Self, Self::Error> {
                match value {
                    #sum_label::#ident(elem) => Ok(elem),
                    _ => {
                        let error = WrongEnum {
                            expected_con: #variant_name.to_string() };
                        Err(error)
                    },
                }
            }
        }

        // same as above but for values
        impl<#(#sum_params),*> TryFrom<#sum_label<#(#sum_params),*>>
        for #variant_label<#(#variant_params),*> {
            type Error = WrongEnum;

            fn try_from
            (value: #sum_label<#(#sum_params),*>)
            -> Result<Self, Self::Error> {
                match value {
                    #sum_label::#ident(elem) => Ok(elem),
                    _ => {
                        let error = WrongEnum {
                            expected_con: #variant_name.to_string() };
                        Err(error)
                    },
                }
            }
        }
    }
}

/// Rewrites enum definition by creating a new type for each constructor.
///
/// Each nested constructor will be converted to a new `struct` and placed in
/// the parent scope. The created type name will be {EnumName}{ConstructorName}.
/// To name generated types with only their constructor name, use `flat`
/// attribute: `#[ast(flat)]`.
#[proc_macro_attribute]
pub fn to_variant_types
( attrs: proc_macro::TokenStream
, input: proc_macro::TokenStream
) -> proc_macro::TokenStream {
    let attrs: TokenStream = attrs.into();
    let decl     = syn::parse_macro_input!(input as syn::DeriveInput);
    let ident    = &decl.ident;
    let ty_vars  = &decl.generics.params;
    let variants = match &decl.data {
        syn::Data::Enum(ref data) => data.variants.iter(),
        _ => unimplemented!()
    }.collect::<Vec<_>>();

    let is_flat = repr(&attrs) == "flat";
    let structs = variants.iter().map(|v| mk_product_type(is_flat, &decl, v));
    let structs = structs.collect::<Vec<_>>();

    let variant_idents = variants.iter().map(|v| &v.ident).collect::<Vec<_>>();
    let variant_decls  = variant_idents.iter().zip(structs.iter())
        .map(|(i,v)| gen_variant_decl(i,v));
    let variant_froms  = variant_idents.iter().zip(structs.iter())
        .map(|(i,v)| gen_from_impls(i, &decl, &v));

    // Handle single value, unnamed params as created by user.
    let structs = structs.iter().filter(|v| match &v.fields {
        syn::Fields::Unnamed(f) => f.unnamed.len() != 1,
        _ => true
    });

    let decl_attrs = &decl.attrs;
    let output = quote! {
        #(#decl_attrs)*
        pub enum #ident <#ty_vars> {
            #(#variant_decls),*
        }
        #(#structs)*
        #(#variant_froms)*
    };
    output.into()
}

/// Creates HasRepr and HasSpan implementations for a given enum type.
///
/// Given type may only consist of single-elem tuple-like variants.
/// The implementation uses underlying HasRepr and HasSpan implementations for
/// stored values.
#[proc_macro_derive(HasRepr)]
pub fn derive_has_span
(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let decl = syn::parse_macro_input!(input as syn::DeriveInput);
    let ret  = match decl.data {
        syn::Data::Enum(ref e) => repr::derive_for_enum(&decl, &e),
        _ => quote! {},
    };
    proc_macro::TokenStream::from(ret)
}

/// Same as `make_repr_span` but provides only `HasSpan` implementation.
#[proc_macro]
pub fn make_span(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let maker = syn::parse::<ReprDescription>(input).unwrap();
    maker.make_span().into()
}

/// Generates `HasRepr` and `HasSpan` instances that are just sum of their
/// parts.
///
/// Takes 1+ parameters:
/// * first goes the typename for which implementations are generated (can take
///   type parameters, as long as they implement `HasRepr` and `HasSpan`)
/// * then arbitrary number (0 or more) of expressions, that shall yield values
///   implementing `HasRepr` and `HasSpan`. The `self` can be used in the
///   expressions.
///
/// For example, for invocation:
/// ```ignore
/// make_repr_span!(SegmentExpr<T>, EXPR_QUOTE, self.value, EXPR_QUOTE);
/// ```
/// the following output is produced:
/// ```ignore
/// impl<T: HasRepr> HasRepr for SegmentExpr<T> {
///     fn write_repr(&self, target: &mut String) {
///         EXPR_QUOTE.write_repr(target);
///         self.value.write_repr(target);
///         EXPR_QUOTE.write_repr(target);
///     }
/// }
///
/// impl<T: HasSpan> HasSpan for SegmentExpr<T> {
///     fn span(&self) -> usize {
///         0 + EXPR_QUOTE.span() + self.value.span() + EXPR_QUOTE.span()
///     }
/// }
/// ```
#[proc_macro]
pub fn make_repr_span(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let maker = syn::parse::<ReprDescription>(input).unwrap();
    maker.make_repr_span().into()
}

/// Generates `HasRepr` and `HasSpan` implementations that panic when used.
#[proc_macro]
pub fn not_supported_repr(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    crate::repr::not_supported(input)
}
gui/lib/ide/ast/macros/src/repr.rs (new file, 146 lines)
@@ -0,0 +1,146 @@
use crate::prelude::*;

use macro_utils::path_segment_generic_args;
use proc_macro2::TokenStream;
use quote::quote;
use syn::Expr;
use syn::GenericArgument;
use syn::PathSegment;
use syn::Token;
use syn::punctuated::Punctuated;

/// Generates `HasRepr` and `HasSpan` that just panic when called.
pub fn not_supported
(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let target  = syn::parse::<PathSegment>(input).unwrap();
    let ty_args = path_segment_generic_args(&target);
    let ret = quote!{
        // Sample expansion for: Import<T>
        //
        // impl<T> HasSpan for Import<T> {
        //     fn span(&self) -> usize {
        //         panic!("HasSpan is not supported for Spaceless AST!")
        //     }
        // }
        // impl<T> HasRepr for Import<T> {
        //     fn write_repr(&self, target:&mut String) {
        //         panic!("HasRepr not supported for Spaceless AST!")
        //     }
        // }
        impl<#(#ty_args),*> HasSpan for #target {
            fn span(&self) -> usize {
                panic!("HasSpan not supported for Spaceless AST!")
            }
        }
        impl<#(#ty_args),*> HasRepr for #target {
            fn write_repr(&self, target:&mut String) {
                panic!("HasRepr not supported for Spaceless AST!")
            }
        }
    };
    ret.into()
}

/// Inner logic for `derive_has_span`.
pub fn derive_for_enum
(decl:&syn::DeriveInput, data:&syn::DataEnum)
-> TokenStream {
    let ident     = &decl.ident;
    let params    = decl.generics.params.iter().collect_vec();
    let span_arms = data.variants.iter().map(|v| {
        let con_ident = &v.ident;
        quote!( #ident::#con_ident (elem) => elem.span() )
    });
    let repr_arms = data.variants.iter().map(|v| {
        let con_ident = &v.ident;
        quote!( #ident::#con_ident (elem) => elem.write_repr(target) )
    });
    let ret = quote! {
        impl<#(#params:HasSpan),*> HasSpan for #ident<#(#params),*> {
            fn span(&self) -> usize {
                match self {
                    #(#span_arms),*
                }
            }
        }
        impl<#(#params:HasRepr),*> HasRepr for #ident<#(#params),*> {
            fn write_repr(&self, target:&mut String) {
                match self {
                    #(#repr_arms),*
                }
            }
        }
    };
    ret
}

/// Structure representing input to macros like `make_repr_span!`.
///
/// Basically it consists of a typename (with optional generic arguments) and
/// sequence of expressions that yield values we use to obtain sub-repr or
/// sub-spans.
pub struct ReprDescription {
    pub ty      : PathSegment,
    pub ty_args : Vec<GenericArgument>,
    pub exprs   : Vec<Expr>,
}

impl syn::parse::Parse for ReprDescription {
    /// Parses user-provided input to the macro into our structure.
    ///
    /// First should go a type for which implementation is to be provided,
    /// then arbitrary sequence of expressions.
    /// Panics on invalid input, which is actually fair for a macro code.
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        let ty:PathSegment = input.parse()?;
        input.parse::<Option<syn::token::Comma>>()?;
        let exprs   = Punctuated::<Expr,Token![,]>::parse_terminated(input)?;
        let exprs   = exprs.iter().cloned().collect::<Vec<_>>();
        let ty_args = path_segment_generic_args(&ty);
        let ty_args = ty_args.into_iter().cloned().collect(); // get rid of &
        Ok(ReprDescription {ty,ty_args,exprs})
    }
}

impl ReprDescription {
    /// Fills a trait implementation template with given methods.
    pub fn make_impl
    (&self, trait_name:&str, methods:&TokenStream) -> TokenStream {
        let trait_name = syn::parse_str::<syn::TypePath>(trait_name).unwrap();
        let ty         = &self.ty;
        let ty_args    = &self.ty_args;
        quote! {
            impl<#(#ty_args:#trait_name),*> #trait_name for #ty {
                #methods
            }
        }
    }

    /// Generates `HasRepr` instances using user-provided input.
    pub fn make_repr(&self) -> TokenStream {
        let exprs = &self.exprs;
        self.make_impl("HasRepr", &quote!{
            fn write_repr(&self, target:&mut String) {
                #(#exprs.write_repr(target);)*
            }
        })
    }

    /// Generates `HasSpan` instances using user-provided input.
    pub fn make_span(&self) -> TokenStream {
        let exprs = &self.exprs;
        self.make_impl("HasSpan", &quote!{
            fn span(&self) -> usize {
                0 #(+ #exprs.span())*
            }
        })
    }

    /// Generates `HasRepr` and `HasSpan` instances using user-provided input.
    pub fn make_repr_span(&self) -> TokenStream {
        let mut ret = self.make_repr();
        ret.extend(self.make_span());
        ret
    }
}
gui/lib/ide/file-manager/Cargo.toml (new file, 21 lines)
@@ -0,0 +1,21 @@
[package]
name    = "file-manager-client"
version = "0.1.0"
authors = ["Enso Team <contact@luna-lang.org>"]
edition = "2018"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
futures    = "0.3.1"
paste      = "0.1.6"
serde_json = "1.0"

chrono = { version = "0.4" , features = ["serde"] }
serde  = { version = "1.0" , features = ["derive"] }
uuid   = { version = "0.8" , features = ["serde", "v5"] }

json-rpc     = { version = "0.1.0" , path = "../json-rpc" }
enso-prelude = { version = "0.1.0" , path = "../../prelude" }
utils        = { version = "0.1.0" , path = "../utils" }
gui/lib/ide/file-manager/README.md (new file, 136 lines)
@@ -0,0 +1,136 @@
File Manager consists of a server and client pair. They communicate by exchanging
messages, following JSON-RPC 2.0.

# Setup
Establish a websocket connection.

In the future it is expected that some kind of authorization will be required by
the server. As of now, its details remain unspecified.

# General protocol
Remote calls made between the File Manager client and server follow the
[JSON-RPC 2.0 protocol](https://www.jsonrpc.org/specification).

There are two primary cases:
* RPC calls from client to [server methods](#Methods);
* [Notifications](#Notifications) sent from server to client.

All messages are text with JSON-encoded values.

File Manager accepts only method calls (request objects).

File Manager responds with call results and may send notifications.

# Methods
| Method        | Input                        | Result     |
|---------------|------------------------------|------------|
| copyDirectory | {from:Path, to:Path}         | ()         |
| copyFile      | {from:Path, to:Path}         | ()         |
| deleteFile    | {path:Path}                  | ()         |
| exists        | {path:Path}                  | Boolean    |
| list          | {path:Path}                  | [Path]     |
| moveDirectory | {from:Path, to:Path}         | ()         |
| moveFile      | {from:Path, to:Path}         | ()         |
| read          | {path:Path}                  | String     |
| status        | {path:Path}                  | Attributes |
| touch         | {path:Path}                  | ()         |
| write         | {path:Path, contents:String} | ()         |
| createWatch   | {path:Path}                  | UUID       |
| deleteWatch   | {watchId:UUID}               | ()         |

Where `()` is a unit value.

# Notifications
Notifications are emitted by the server.

| Method          | Input                       | Result |
|-----------------|-----------------------------|--------|
| filesystemEvent | {path:Path, kind:EventKind} | N/A    |

The `filesystemEvent` notification is emitted for paths that are tracked by a
watch (i.e. the subtree of a location passed to the `createWatch` method).

It should be noted that watch notifications are not reliable and are
significantly OS-dependent.

# Types
```
Attributes = struct {
    creationTime     : FileTime,
    lastAccessTime   : FileTime,
    lastModifiedTime : FileTime,
    fileKind         : FileKind,
    byteSize         : u64,
}

EventKind = enum { Created, Deleted, Modified, Overflow }
FileKind  = enum { Directory, RegularFile, SymbolicLink, Other }
```

# JSON Encoding
Struct values are serialized as a map, e.g. `{ "field_name" : field_value }`.

Enum values are serialized as a map `{ "variant_name" : variant_value }` or just
`"variant_name"` if the variant has no inner value.
Transitive enums (i.e. enums of enums) are flattened; no intermediate variant
names shall appear.

`()` (unit value) is serialized as `null`.

`FileTime` value is serialized as a string compliant with RFC3339 / ISO8601 text
format, e.g. `"2020-01-07T21:25:26Z"`.

`Path` is serialized as a JSON string value, e.g. `"./Main.luna"`.

`UUID` is serialized as a string using the 8-4-4-4-12 format, e.g.
`"02723954-fbb0-4641-af53-cec0883f260a"`.

`u64` is an unsigned 64-bit integer value.

## Examples

### Call to `exists` method
#### Request (call)
```json
{
    "jsonrpc" : "2.0",
    "id"      : 0,
    "method"  : "exists",
    "input"   : { "path" : "./Main.luna" }
}
```
#### Response
```json
{
    "jsonrpc" : "2.0",
    "id"      : 0,
    "result"  : true
}
```

### Filesystem Event Notification
#### Request (notification)
```json
{
    "jsonrpc" : "2.0",
    "method"  : "filesystemEvent",
    "params"  : { "path" : "./Main.luna", "kind" : "Modified" }
}
```

Notification requests get no response.

### `Attributes` structure
An `Attributes` value may be serialized to the following JSON:
```json
{
    "creationTime"     : "2020-01-07T21:25:26Z",
    "lastAccessTime"   : "2020-01-21T22:16:51.123994500+00:00",
    "lastModifiedTime" : "2020-01-07T21:25:26Z",
    "fileKind"         : "RegularFile",
    "sizeInBytes"      : 125125
}
```
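As a complement to the protocol description above, here is a minimal, hypothetical sketch of how the Rust client introduced in the next file (`file-manager-client`) could be driven. The `transport` value is assumed to be some `json_rpc::Transport` implementation (e.g. a WebSocket wrapper), which this crate does not provide itself:

```rust
// Hypothetical usage sketch of the file-manager-client crate defined below.
use file_manager_client::{Client, Path};

fn example(transport: impl json_rpc::Transport + 'static) {
    let mut client = Client::new(transport);
    // Issues the `exists` call documented in the Methods table above;
    // the returned value is a Future yielding Result<bool>.
    let exists_future = client.exists(Path::new("./Main.luna"));
    // The future completes once `process_events` has handled the server's
    // response (see `Client::process_events` in the client below).
    client.process_events();
    let _ = exists_future;
}
```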
gui/lib/ide/file-manager/src/lib.rs (new file, 443 lines)
@@ -0,0 +1,443 @@
//! Client library for the JSON-RPC-based File Manager service.

#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]
#![warn(unsafe_code)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]

use crate::prelude::*;

pub use enso_prelude as prelude;
use json_rpc::api::Result;
use json_rpc::Handler;
use futures::Stream;
use serde::Serialize;
use serde::Deserialize;
use std::future::Future;
use uuid::Uuid;


// =============
// === Event ===
// =============

/// Event emitted by the File Manager `Client`.
pub type Event = json_rpc::handler::Event<Notification>;


// ============
// === Path ===
// ============

/// Path to a file.
#[derive(Clone,Debug,Display,Eq,Hash,PartialEq,PartialOrd,Ord)]
#[derive(Serialize, Deserialize)]
#[derive(Shrinkwrap)]
pub struct Path(pub String);

impl Path {
    /// Wraps a `String`-like entity into a new `Path`.
    pub fn new<S>(s:S) -> Path where S:Into<String> {
        Path(s.into())
    }
}


// ====================
// === Notification ===
// ====================

/// Notification generated by the File Manager.
#[derive(Clone,Debug,PartialEq)]
#[derive(Serialize, Deserialize)]
#[serde(tag="method", content="params")]
pub enum Notification {
    /// Filesystem event occurred for a watched path.
    #[serde(rename = "filesystemEvent")]
    FilesystemEvent(FilesystemEvent),
}


// =======================
// === FilesystemEvent ===
// =======================

/// Filesystem event notification, generated by an active file watch.
#[derive(Clone,Debug,PartialEq)]
#[derive(Serialize, Deserialize)]
pub struct FilesystemEvent {
    /// Path of the file that the event is about.
    pub path : Path,
    /// What kind of event is it.
    pub kind : FilesystemEventKind
}

/// Describes kind of filesystem event (was the file created or deleted, etc.)
#[derive(Clone,Copy,Debug,PartialEq)]
#[derive(Serialize, Deserialize)]
pub enum FilesystemEventKind {
    /// A new file under path was created.
    Created,
    /// Existing file under path was deleted.
    Deleted,
    /// File under path was modified.
    Modified,
    /// An overflow occurred and some events were lost.
    Overflow
}


// ==================
// === Attributes ===
// ==================

/// Attributes of the file in the filesystem.
#[derive(Clone,Copy,Debug,PartialEq)]
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Attributes{
    /// When the file was created.
    pub creation_time : FileTime,
    /// When the file was last accessed.
    pub last_access_time : FileTime,
    /// When the file was last modified.
    pub last_modified_time : FileTime,
    /// What kind of file is this.
    pub file_kind : FileKind,
    /// Size of the file in bytes.
    /// (size of files not being `RegularFile`s is unspecified).
    pub byte_size : u64
}

/// A filesystem's timestamp.
pub type FileTime = chrono::DateTime<chrono::FixedOffset>;

/// What kind of file (regular, directory, symlink) is this.
#[derive(Clone,Copy,Debug,PartialEq)]
#[derive(Serialize, Deserialize)]
pub enum FileKind {
    /// File being a directory.
    Directory,
    /// File being a symbolic link.
    SymbolicLink,
    /// File being a regular file with opaque content.
    RegularFile,
    /// File being none of the above, e.g. a physical device or a pipe.
    Other
}


// ==============
// === Client ===
// ==============

/// File Manager client. Contains numerous asynchronous methods for remote calls
/// on File Manager server. Also, allows obtaining events stream by calling
/// `events`.
#[derive(Debug)]
pub struct Client {
    /// JSON-RPC protocol handler.
    handler : Handler<Notification>,
}

impl Client {
    /// Create a new File Manager client that will use given transport.
    pub fn new(transport:impl json_rpc::Transport + 'static) -> Client {
        let handler = Handler::new(transport);
        Client { handler }
    }

    /// Asynchronous event stream with notification and errors.
    ///
    /// On a repeated call, previous stream is closed.
    pub fn events(&mut self) -> impl Stream<Item = Event> {
        self.handler.events()
    }

    /// Method that should be called on each frame.
    ///
    /// Processes incoming transport events, generating File Manager events and
    /// driving asynchronous calls to completion.
    pub fn process_events(&mut self) {
        self.handler.process_events()
    }
}


// ===================
// === RPC Methods ===
// ===================

// === Helper macro ===

/// Macro that generates an asynchronous method making the relevant RPC call to
/// the server. The first three args are the name appropriately in CamelCase,
/// snake_case, camelCase. Then goes the function signature, in form of
/// `(arg:Arg) -> Ret`.
///
/// Macro generates:
/// * a method in Client named `snake_case` that takes `(arg:Arg)` and returns
///   `Future<Ret>`.
/// * a structure named `CamelCase` that stores function arguments as fields and
///   its JSON serialization conforms to JSON-RPC (yielding `method` and
///   `params` fields).
/// * `camelCase` is the name of the remote method.
macro_rules! make_rpc_method {
    ( $name_typename:ident
      $name:ident
      $name_ext:ident
      ($($arg:ident : $type:ty),* $(,)?) -> $out:ty ) => {
    paste::item! {
        impl Client {
            /// Remote call to the method on the File Manager Server.
            pub fn $name
            (&mut self, $($arg:$type),*) -> impl Future<Output=Result<$out>> {
                let input = [<$name_typename Input>] { $($arg:$arg),* };
                self.handler.open_request(input)
            }
        }

        /// Structure transporting method arguments.
        #[derive(Serialize,Deserialize,Debug,PartialEq)]
        #[serde(rename_all = "camelCase")]
        struct [<$name_typename Input>] {
            $($arg : $type),*
        }

        impl json_rpc::RemoteMethodCall for [<$name_typename Input>] {
            const NAME:&'static str = stringify!($name_ext);
            type Returned = $out;
        }
    }}
}

// === Remote API definition ===

make_rpc_method!(CopyDirectory copy_directory copyDirectory (from:Path, to:Path)         -> ()        );
make_rpc_method!(CopyFile      copy_file      copyFile      (from:Path, to:Path)         -> ()        );
make_rpc_method!(DeleteFile    delete_file    deleteFile    (path:Path)                  -> ()        );
make_rpc_method!(Exists        exists         exists        (path:Path)                  -> bool      );
make_rpc_method!(List          list           list          (path:Path)                  -> Vec<Path> );
make_rpc_method!(MoveDirectory move_directory moveDirectory (from:Path, to:Path)         -> ()        );
make_rpc_method!(MoveFile      move_file      moveFile      (from:Path, to:Path)         -> ()        );
make_rpc_method!(Read          read           read          (path:Path)                  -> String    );
make_rpc_method!(Status        status         status        (path:Path)                  -> Attributes);
make_rpc_method!(Touch         touch          touch         (path:Path)                  -> ()        );
make_rpc_method!(Write         write          write         (path:Path, contents:String) -> ()        );
make_rpc_method!(CreateWatch   create_watch   createWatch   (path:Path)                  -> Uuid      );
make_rpc_method!(DeleteWatch   delete_watch   deleteWatch   (watch_id:Uuid)              -> ()        );
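To make the macro's effect concrete, here is an approximate, hand-written sketch of what a single invocation such as `make_rpc_method!(Exists exists exists (path:Path) -> bool)` expands to. This is a simplified illustration; the real code is produced by the `paste::item!` block in the macro above:

```rust
// Approximate expansion sketch for:
// make_rpc_method!(Exists exists exists (path:Path) -> bool);
impl Client {
    /// Remote call to the `exists` method on the File Manager Server.
    pub fn exists(&mut self, path:Path) -> impl Future<Output=Result<bool>> {
        let input = ExistsInput { path };
        self.handler.open_request(input)
    }
}

/// Structure transporting method arguments.
#[derive(Serialize,Deserialize,Debug,PartialEq)]
#[serde(rename_all = "camelCase")]
struct ExistsInput {
    path : Path,
}

impl json_rpc::RemoteMethodCall for ExistsInput {
    const NAME:&'static str = "exists";
    type Returned = bool;
}
```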
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use super::FileKind::RegularFile;
|
||||
|
||||
use json_rpc::messages::Message;
|
||||
use json_rpc::messages::RequestMessage;
|
||||
use json_rpc::test_util::transport::mock::MockTransport;
|
||||
use serde_json::json;
|
||||
use serde_json::Value;
|
||||
use std::future::Future;
|
||||
use utils::test::poll_future_output;
|
||||
use utils::test::poll_stream_output;
|
||||
|
||||
fn setup_fm() -> (MockTransport, Client) {
|
||||
let transport = MockTransport::new();
|
||||
let client = Client::new(transport.clone());
|
||||
(transport,client)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_notification() {
|
||||
let (mut transport, mut client) = setup_fm();
|
||||
let mut events = Box::pin(client.events());
|
||||
assert!(poll_stream_output(&mut events).is_none());
|
||||
|
||||
let expected_notification = FilesystemEvent {
|
||||
path : Path::new("./Main.luna"),
|
||||
kind : FilesystemEventKind::Modified,
|
||||
};
|
||||
let notification_text = r#"{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "filesystemEvent",
|
||||
"params": {"path" : "./Main.luna", "kind" : "Modified"}
|
||||
}"#;
|
||||
transport.mock_peer_message_text(notification_text);
|
||||
assert!(poll_stream_output(&mut events).is_none());
|
||||
client.process_events();
|
||||
let event = poll_stream_output(&mut events);
|
||||
if let Some(Event::Notification(n)) = event {
|
||||
assert_eq!(n, Notification::FilesystemEvent(expected_notification));
|
||||
} else {
|
||||
panic!("expected notification event");
|
||||
}
|
||||
}
|
||||
|
||||
/// Tests making a request using file manager:
|
||||
/// * creates an FM client and uses `make_request` to make a request
|
||||
/// * checks that request is made for `expected_method`
|
||||
/// * checks that request input is `expected_input`
|
||||
/// * mocks receiving a response from server with `result`
|
||||
/// * checks that FM-returned Future yields `expected_output`
|
||||
fn test_request<Fun, Fut, T>
|
||||
( make_request:Fun
|
||||
, expected_method:&str
|
||||
, expected_input:Value
|
||||
, result:Value
|
||||
, expected_output:T )
|
||||
where Fun : FnOnce(&mut Client) -> Fut,
|
||||
Fut : Future<Output = Result<T>>,
|
||||
T : Debug + PartialEq {
|
||||
let (mut transport, mut client) = setup_fm();
|
||||
let mut fut = Box::pin(make_request(&mut client));
|
||||
|
||||
let request = transport.expect_message::<RequestMessage<Value>>();
|
||||
assert_eq!(request.method, expected_method);
|
||||
assert_eq!(request.input, expected_input);
|
||||
|
||||
let response = Message::new_success(request.id, result);
|
||||
transport.mock_peer_message(response);
|
||||
|
||||
client.process_events();
|
||||
let output = poll_future_output(&mut fut).unwrap().unwrap();
|
||||
assert_eq!(output, expected_output);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_requests() {
|
||||
let main = Path::new("./Main.luna");
|
||||
let target = Path::new("./Target.luna");
|
||||
let path_main = json!({"path" : "./Main.luna"});
|
||||
let from_main_to_target = json!({
|
||||
"from" : "./Main.luna",
|
||||
"to" : "./Target.luna"
|
||||
});
|
||||
let true_json = json!(true);
|
||||
let unit_json = json!(null);
|
||||
|
||||
test_request(
|
||||
|client| client.copy_directory(main.clone(), target.clone()),
|
||||
"copyDirectory",
|
||||
from_main_to_target.clone(),
|
||||
unit_json.clone(),
|
||||
());
|
||||
test_request(
|
||||
|client| client.copy_file(main.clone(), target.clone()),
|
||||
"copyFile",
|
||||
from_main_to_target.clone(),
|
||||
unit_json.clone(),
|
||||
());
|
||||
test_request(
|
||||
|client| client.delete_file(main.clone()),
|
||||
"deleteFile",
|
||||
path_main.clone(),
|
||||
unit_json.clone(),
|
||||
());
|
||||
test_request(
|
||||
|client| client.exists(main.clone()),
|
||||
"exists",
|
||||
path_main.clone(),
|
||||
true_json,
|
||||
true);
|
||||
|
||||
let list_response_json = json!([ "Bar.luna", "Foo.luna" ]);
|
||||
let list_response_value = vec! [Path::new("Bar.luna"),Path::new("Foo.luna")];
|
||||
test_request(
|
||||
|client| client.list(main.clone()),
|
||||
"list",
|
||||
path_main.clone(),
|
||||
list_response_json,
|
||||
list_response_value);
|
||||
test_request(
|
||||
|client| client.move_directory(main.clone(), target.clone()),
|
||||
"moveDirectory",
|
||||
from_main_to_target.clone(),
|
||||
unit_json.clone(),
|
||||
());
|
||||
test_request(
|
||||
|client| client.move_file(main.clone(), target.clone()),
|
||||
"moveFile",
|
||||
from_main_to_target.clone(),
|
||||
unit_json.clone(),
|
||||
());
|
||||
test_request(
|
||||
|client| client.read(main.clone()),
|
||||
"read",
|
||||
path_main.clone(),
|
||||
json!("Hello world!"),
|
||||
"Hello world!".into());
|
||||
|
||||
let parse_rfc3339 = |s| {
|
||||
chrono::DateTime::parse_from_rfc3339(s).unwrap()
|
||||
};
|
||||
let expected_attributes = Attributes {
|
||||
creation_time : parse_rfc3339("2020-01-07T21:25:26Z"),
|
||||
last_access_time : parse_rfc3339("2020-01-21T22:16:51.123994500+00:00"),
|
||||
last_modified_time : parse_rfc3339("2020-01-07T21:25:26Z"),
|
||||
file_kind : RegularFile,
|
||||
byte_size : 125125,
|
||||
};
|
||||
let sample_attributes_json = json!({
|
||||
"creationTime" : "2020-01-07T21:25:26Z",
|
||||
"lastAccessTime" : "2020-01-21T22:16:51.123994500+00:00",
|
||||
"lastModifiedTime" : "2020-01-07T21:25:26Z",
|
||||
"fileKind" : "RegularFile",
|
||||
"byteSize" : 125125
|
||||
});
|
||||
test_request(
|
||||
|client| client.status(main.clone()),
|
||||
"status",
|
||||
path_main.clone(),
|
||||
sample_attributes_json,
|
||||
expected_attributes);
|
||||
test_request(
|
||||
|client| client.touch(main.clone()),
|
||||
"touch",
|
||||
path_main.clone(),
|
||||
unit_json.clone(),
|
||||
());
|
||||
test_request(
|
||||
|client| client.write(main.clone(), "Hello world!".into()),
|
||||
"write",
|
||||
json!({"path" : "./Main.luna", "contents" : "Hello world!"}),
|
||||
unit_json.clone(),
|
||||
());
|
||||
|
||||
let uuid_value = uuid::Uuid::parse_str("02723954-fbb0-4641-af53-cec0883f260a").unwrap();
|
||||
let uuid_json = json!("02723954-fbb0-4641-af53-cec0883f260a");
|
||||
test_request(
|
||||
|client| client.create_watch(main.clone()),
|
||||
"createWatch",
|
||||
path_main.clone(),
|
||||
uuid_json.clone(),
|
||||
uuid_value);
|
||||
let watch_id = json!({
|
||||
"watchId" : "02723954-fbb0-4641-af53-cec0883f260a"
|
||||
});
|
||||
test_request(
|
||||
|client| client.delete_watch(uuid_value.clone()),
|
||||
"deleteWatch",
|
||||
watch_id.clone(),
|
||||
unit_json.clone(),
|
||||
());
|
||||
}
|
||||
}
|
17
gui/lib/ide/json-rpc/Cargo.toml
Normal file
17
gui/lib/ide/json-rpc/Cargo.toml
Normal file
@ -0,0 +1,17 @@
|
||||
[package]
|
||||
name = "json-rpc"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <contact@luna-lang.org>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
enso-prelude = { version = "0.1.0" , path = "../../prelude" }
|
||||
utils = { version = "0.1.0" , path = "../utils" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
futures = "0.3.1"
|
||||
failure = "0.1.6"
|
||||
serde_json = "1.0"
|
||||
shrinkwraprs = "0.3.0"
|
43
gui/lib/ide/json-rpc/src/api.rs
Normal file
43
gui/lib/ide/json-rpc/src/api.rs
Normal file
@ -0,0 +1,43 @@
|
||||
//! This module contains entities used by a client implementor to describe the remote API.
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use crate::error::RpcError;
|
||||
use crate::messages::Id;
|
||||
use crate::messages::Message;
|
||||
use crate::messages::RequestMessage;
|
||||
|
||||
use serde::Serialize;
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Result ===
|
||||
// ==============
|
||||
|
||||
/// A result of an RPC-call.
|
||||
pub type Result<T> = std::result::Result<T, RpcError>;
|
||||
|
||||
|
||||
|
||||
// ========================
|
||||
// === RemoteMethodCall ===
|
||||
// ========================
|
||||
|
||||
/// Structure describing a call to a remote method.
|
||||
///
|
||||
/// A serialized value of this trait represents the method's input arguments.
|
||||
pub trait RemoteMethodCall : Serialize + Debug {
|
||||
/// Name of the remote method.
|
||||
const NAME:&'static str;
|
||||
|
||||
/// A type of value returned from successful remote call.
|
||||
type Returned:DeserializeOwned;
|
||||
}
|
||||
|
||||
/// Make a request message from given RemoteMethodInput value.
|
||||
pub fn into_request_message<In:RemoteMethodCall>
|
||||
(input:In, id:Id) -> RequestMessage<In> {
|
||||
Message::new_request(id,In::NAME,input)
|
||||
}
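// Sketch (illustrative, using a hypothetical `ExistsInput` type): a client
// describes each call by implementing `RemoteMethodCall` and then turns it
// into a message with `into_request_message`:
//
//     #[derive(serde::Serialize, Debug)]
//     struct ExistsInput { path: String }
//
//     impl RemoteMethodCall for ExistsInput {
//         const NAME: &'static str = "exists";
//         type Returned = bool;
//     }
//
//     let msg = into_request_message(ExistsInput { path: "./Main.luna".into() }, Id(0));
//     // `msg` serializes to: {"jsonrpc":"2.0","id":0,"method":"exists","input":{...}}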
|
69
gui/lib/ide/json-rpc/src/error.rs
Normal file
69
gui/lib/ide/json-rpc/src/error.rs
Normal file
@ -0,0 +1,69 @@
|
||||
//! Error types used by this crate. `RpcError` is used in remote call results,
|
||||
//! while `HandlingError` can be raised by `Handler`.
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use crate::messages::Error;
|
||||
use crate::messages::Response;
|
||||
|
||||
use futures::channel::oneshot::Canceled;
|
||||
|
||||
|
||||
|
||||
// ================
|
||||
// === RpcError ===
|
||||
// ================
|
||||
|
||||
/// Errors that can cause a remote call to fail.
|
||||
#[derive(Debug, Fail)]
|
||||
pub enum RpcError {
|
||||
/// Error returned by the remote server.
|
||||
#[fail(display = "Peer has replied with an error: {:?}.", _0)]
|
||||
RemoteError(Error),
|
||||
|
||||
/// Lost connection while waiting for response.
|
||||
#[fail(display = "Lost connection before receiving a reply.")]
|
||||
LostConnection,
|
||||
|
||||
/// Failed to deserialize message from server.
|
||||
#[fail(display = "Failed to deserialize a message: {}.", _0)]
|
||||
DeserializationFailed(serde_json::Error),
|
||||
}
|
||||
|
||||
impl From<Canceled> for RpcError {
|
||||
fn from(_:Canceled) -> Self {
|
||||
RpcError::LostConnection
|
||||
}
|
||||
}
|
||||
|
||||
impl From<serde_json::Error> for RpcError {
|
||||
fn from(e:serde_json::Error) -> Self {
|
||||
RpcError::DeserializationFailed(e)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =====================
|
||||
// === HandlingError ===
|
||||
// =====================
|
||||
|
||||
/// Errors specific to the Handler itself, not any specific request.
|
||||
///
|
||||
/// Caused either by internal errors in the handler or by bugs in the server.
|
||||
#[derive(Debug, Fail)]
|
||||
pub enum HandlingError {
|
||||
/// When incoming text message can't be decoded.
|
||||
#[fail(display = "Failed to decode incoming text message: {}.", _0)]
|
||||
InvalidMessage(#[cause] serde_json::Error),
|
||||
|
||||
/// Server responded to an identifier that does not match to any known
|
||||
/// ongoing request.
|
||||
#[fail(display = "Server generated a response with no matching request: id={:?}.", _0)]
|
||||
UnexpectedResponse(Response<serde_json::Value>),
|
||||
|
||||
/// The server sent a message that is a notification but the client wasn't able to
|
||||
/// decode it.
|
||||
#[fail(display = "Failed to decode a notification: {}.", _0)]
|
||||
InvalidNotification(#[cause] serde_json::Error),
|
||||
}
|
324
gui/lib/ide/json-rpc/src/handler.rs
Normal file
324
gui/lib/ide/json-rpc/src/handler.rs
Normal file
@ -0,0 +1,324 @@
|
||||
//! Module providing `Handler` and related types used by its API.
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use crate::api;
|
||||
use crate::api::Result;
|
||||
use crate::error::HandlingError;
|
||||
use crate::error::RpcError;
|
||||
use crate::messages;
|
||||
use crate::messages::Id;
|
||||
use crate::transport::Transport;
|
||||
use crate::transport::TransportEvent;
|
||||
|
||||
use futures::FutureExt;
|
||||
use futures::Stream;
|
||||
use futures::channel::mpsc::unbounded;
|
||||
use futures::channel::mpsc::UnboundedSender;
|
||||
use futures::channel::oneshot;
|
||||
use serde::de::DeserializeOwned;
|
||||
use std::future::Future;
|
||||
use std::sync::mpsc::TryRecvError;
|
||||
|
||||
|
||||
|
||||
// ====================
|
||||
// === ReplyMessage ===
|
||||
// ====================
|
||||
|
||||
/// Partially decoded reply message.
|
||||
///
|
||||
/// Known if `Error` or `Success` but returned value remains in JSON form.
|
||||
pub type ReplyMessage = messages::Result<serde_json::Value>;
|
||||
|
||||
/// Converts remote message with JSON-serialized result into `Result<Ret>`.
|
||||
pub fn decode_result<Ret:DeserializeOwned>
|
||||
(result:messages::Result<serde_json::Value>) -> Result<Ret> {
|
||||
match result {
|
||||
messages::Result::Success(ret) =>
|
||||
Ok(serde_json::from_value::<Ret>(ret.result)?),
|
||||
messages::Result::Error(err) =>
|
||||
Err(RpcError::RemoteError(err)),
|
||||
}
|
||||
}
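// Illustration (comment-only sketch): decoding a successful reply into `i64`.
//
//     let ret = messages::Success { result: serde_json::json!(4) };
//     let out = decode_result::<i64>(messages::Result::Success(ret));
//     assert_eq!(out.unwrap(), 4);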
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === IdGenerator ===
|
||||
// ===================
|
||||
|
||||
/// Simple counter-based struct used to generate unique `Id`s.
|
||||
///
|
||||
/// The generated ids form the sequence 0, 1, 2, …
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct IdGenerator {
|
||||
/// Next Id value to be returned.
|
||||
pub counter: i64,
|
||||
}
|
||||
|
||||
impl IdGenerator {
|
||||
/// Obtain the new Id.
|
||||
pub fn generate(&mut self) -> Id {
|
||||
let id = self.counter;
|
||||
self.counter += 1;
|
||||
Id(id)
|
||||
}
|
||||
|
||||
/// Create a new IdGenerator counting from 0.
|
||||
fn new() -> IdGenerator {
|
||||
IdGenerator::new_from(0)
|
||||
}
|
||||
|
||||
/// Create a new IdGenerator that gives Ids beginning with given number.
|
||||
fn new_from(counter:i64) -> IdGenerator {
|
||||
IdGenerator { counter }
|
||||
}
|
||||
}
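// Illustration (comment-only sketch, usable within this module since the
// constructors are private): consecutive calls yield consecutive ids.
//
//     let mut gen = IdGenerator::new_from(0);
//     assert_eq!(gen.generate(), Id(0));
//     assert_eq!(gen.generate(), Id(1));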
|
||||
|
||||
|
||||
|
||||
// ====================
|
||||
// === SharedBuffer ===
|
||||
// ====================
|
||||
|
||||
/// The buffer shared between `Handler` and `Transport`.
|
||||
///
|
||||
/// The `Transport` callbacks store any input there. Then, `Handler` consumes it
|
||||
/// when prompted with the `tick` method.
|
||||
#[derive(Debug,Default)]
|
||||
pub struct SharedBuffer {
|
||||
/// Incoming text messages.
|
||||
pub incoming: Vec<String>,
|
||||
|
||||
/// Whether the transport was closed. This means that the current transport
|
||||
/// cannot be used anymore.
|
||||
pub closed: bool,
|
||||
}
|
||||
|
||||
impl SharedBuffer {
|
||||
/// Create a new empty buffer.
|
||||
pub fn new() -> SharedBuffer { default() }
|
||||
|
||||
/// Returns a new buffer with all the data moved from self.
|
||||
///
|
||||
/// After the call, the incoming messages list in self is empty; however, the
|
||||
/// status of the `closed` flag is not changed.
|
||||
pub fn take(&mut self) -> SharedBuffer {
|
||||
let incoming = std::mem::replace(&mut self.incoming, Vec::new());
|
||||
let closed = self.closed;
|
||||
SharedBuffer {incoming,closed}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Event ===
|
||||
// =============
|
||||
|
||||
/// Event emitted by the `Handler<N>`.
|
||||
#[derive(Debug)]
|
||||
pub enum Event<N> {
|
||||
/// Transport has been closed.
|
||||
Closed,
|
||||
/// Error occurred.
|
||||
Error(HandlingError),
|
||||
/// Notification received.
|
||||
Notification(N),
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === Handler ===
|
||||
// ===============
|
||||
|
||||
/// Container that stores `Sender`s for ongoing calls. Each call, identified by
|
||||
/// its id, has its own sender. After the reply is received, the call is removed
|
||||
/// from this container.
|
||||
pub type OngoingCalls = HashMap<Id,oneshot::Sender<ReplyMessage>>;
|
||||
|
||||
/// Handler is the main provider of the RPC protocol. Given a transport capable
|
||||
/// of carrying text messages, it manages the whole communication with a peer.
|
||||
///
|
||||
/// It allows making requests, where method calls are described by values
|
||||
/// implementing `RemoteMethodCall`. The response is returned as a `Future`.
|
||||
///
|
||||
/// Notifications and internal messages are emitted using the `events` stream.
|
||||
///
|
||||
/// `Notification` is a type for notifications. It should implement
|
||||
/// `DeserializeOwned` and deserialize from JSON maps with `method` and `params`
|
||||
/// fields.
|
||||
#[derive(Debug)]
|
||||
pub struct Handler<Notification> {
|
||||
/// Contains handles to calls that were made but have not yet received a response.
|
||||
pub ongoing_calls : OngoingCalls,
|
||||
/// Provides identifiers for requests.
|
||||
pub id_generator : IdGenerator,
|
||||
/// Transports text messages between this handler and the peer.
|
||||
pub transport : Box<dyn Transport>,
|
||||
/// Allows receiving events from the `Transport`.
|
||||
pub incoming_events : std::sync::mpsc::Receiver<TransportEvent>,
|
||||
/// Handle to send outgoing events.
|
||||
pub outgoing_events : Option<UnboundedSender<Event<Notification>>>,
|
||||
}
|
||||
|
||||
impl<Notification> Handler<Notification> {
|
||||
/// Creates a new handler working on a given `Transport`.
|
||||
///
|
||||
/// `Transport` must be functional (e.g. not in the process of opening).
|
||||
pub fn new(transport:impl Transport + 'static) -> Handler<Notification> {
|
||||
let (event_tx, event_rx) = std::sync::mpsc::channel();
|
||||
let mut ret = Handler {
|
||||
ongoing_calls : OngoingCalls::new(),
|
||||
id_generator : IdGenerator::new(),
|
||||
transport : Box::new(transport),
|
||||
incoming_events : event_rx,
|
||||
outgoing_events : None,
|
||||
};
|
||||
ret.transport.set_event_tx(event_tx);
|
||||
ret
|
||||
}
|
||||
|
||||
/// Sends a request to the peer and returns a `Future` that shall yield a
|
||||
/// reply message. It is automatically decoded into the expected type.
|
||||
pub fn open_request<In:api::RemoteMethodCall>
|
||||
(&mut self, input:In) -> impl Future<Output = Result<In::Returned>> {
|
||||
let (sender, receiver) = oneshot::channel::<ReplyMessage>();
|
||||
let ret = receiver.map(|result_or_cancel| {
|
||||
let result = result_or_cancel?;
|
||||
decode_result(result)
|
||||
});
|
||||
|
||||
let id = self.id_generator.generate();
|
||||
let message = api::into_request_message(input,id);
|
||||
self.ongoing_calls.insert(message.payload.id, sender);
|
||||
|
||||
let serialized_message = serde_json::to_string(&message).unwrap();
|
||||
if self.transport.send_text(serialized_message).is_err() {
|
||||
// If the message cannot be sent, the returned future must be cancelled.
|
||||
self.ongoing_calls.remove(&id);
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
/// Deal with `Response` message from the peer.
|
||||
///
|
||||
/// It shall be either matched with an open request or yield an error.
|
||||
pub fn process_response
|
||||
(&mut self, message:messages::Response<serde_json::Value>) {
|
||||
if let Some(sender) = self.ongoing_calls.remove(&message.id) {
|
||||
// Disregard any error. We do not care if RPC caller already
|
||||
// dropped the future.
|
||||
sender.send(message.result).ok();
|
||||
} else {
|
||||
self.error_occurred(HandlingError::UnexpectedResponse(message));
|
||||
}
|
||||
}
|
||||
|
||||
/// Deal with `Notification` message from the peer.
|
||||
///
|
||||
/// If possible, emits an event with the notification. In case of failure,
|
||||
/// emits a relevant error.
|
||||
pub fn process_notification
|
||||
(&mut self, message:messages::Notification<serde_json::Value>)
|
||||
where Notification: DeserializeOwned {
|
||||
match serde_json::from_value(message.0) {
|
||||
Ok(notification) => {
|
||||
let event = Event::Notification(notification);
|
||||
self.send_event(event);
|
||||
},
|
||||
Err(e) => {
|
||||
let err = HandlingError::InvalidNotification(e);
|
||||
self.error_occurred(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Deal with incoming text message from the peer.
|
||||
///
|
||||
/// The message must conform either to the `Response` or to the
|
||||
/// `Notification` JSON-serialized format. Otherwise, an error is raised.
|
||||
pub fn process_incoming_message(&mut self, message:String)
|
||||
where Notification: DeserializeOwned {
|
||||
match messages::decode_incoming_message(message) {
|
||||
Ok(messages::IncomingMessage::Response(response)) =>
|
||||
self.process_response(response),
|
||||
Ok(messages::IncomingMessage::Notification(notification)) =>
|
||||
self.process_notification(notification),
|
||||
Err(err) =>
|
||||
self.error_occurred(HandlingError::InvalidMessage(err)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Deal with a handling error. Emits an `Event::Error` to notify the
|
||||
/// owner.
|
||||
pub fn error_occurred(&mut self, error: HandlingError) {
|
||||
self.send_event(Event::Error(error))
|
||||
}
|
||||
|
||||
/// Processes a single transport event.
|
||||
///
|
||||
/// Each event either completes a request or is translated into an `Event`.
|
||||
pub fn process_event(&mut self, event:TransportEvent)
|
||||
where Notification: DeserializeOwned {
|
||||
match event {
|
||||
TransportEvent::TextMessage(msg) =>
|
||||
self.process_incoming_message(msg),
|
||||
TransportEvent::Closed => {
|
||||
// Dropping all ongoing calls will mark their futures as
|
||||
// cancelled.
|
||||
self.ongoing_calls.clear();
|
||||
self.send_event(Event::Closed);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Processes all incoming events. Returns as soon as there are no more
|
||||
/// messages pending.
|
||||
///
|
||||
/// This will decode the incoming messages, providing input to the futures
|
||||
/// returned from RPC calls.
|
||||
/// It also cancels any ongoing calls if the connection was lost.
|
||||
pub fn process_events(&mut self)
|
||||
where Notification: DeserializeOwned {
|
||||
loop {
|
||||
match self.incoming_events.try_recv() {
|
||||
Ok(event) => self.process_event(event),
|
||||
Err(TryRecvError::Disconnected) =>
|
||||
panic!("transport dropped the event sender"),
|
||||
Err(TryRecvError::Empty) => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Sends a handler event to the event stream.
|
||||
pub fn send_event(&mut self, event:Event<Notification>) {
|
||||
if let Some(tx) = self.outgoing_events.as_mut() {
|
||||
match tx.unbounded_send(event) {
|
||||
Ok(()) => {},
|
||||
Err(e) =>
|
||||
if e.is_full() {
|
||||
// Impossible, as per `futures` library docs.
|
||||
panic!("unbounded channel should never be full")
|
||||
} else if e.is_disconnected() {
|
||||
// It is ok for receiver to disconnect and ignore events.
|
||||
} else {
|
||||
// Never happens unless `futures` library changes API.
|
||||
panic!("unknown unexpected error")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new stream with events from this handler.
|
||||
///
|
||||
/// If such a stream already existed, it will be finished (and
|
||||
/// continuations should be able to process any remaining events).
|
||||
pub fn events(&mut self) -> impl Stream<Item = Event<Notification>> {
|
||||
let (tx,rx) = unbounded();
|
||||
self.outgoing_events = Some(tx);
|
||||
rx
|
||||
}
|
||||
}
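// End-to-end sketch (illustrative comment; mirrors what this crate's tests do,
// with hypothetical `MyNotification` and `MyCall` types): a request is opened,
// the peer's reply is fed in through the transport, and processing the events
// completes the returned future.
//
//     let mut transport = MockTransport::new();                 // from `test_util`
//     let mut handler   = Handler::<MyNotification>::new(transport.clone());
//     let mut events    = Box::pin(handler.events());
//     let mut fut       = Box::pin(handler.open_request(MyCall { /* ... */ }));
//     transport.mock_peer_message(/* matching response message */);
//     handler.process_events();                                 // resolves `fut`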
|
28
gui/lib/ide/json-rpc/src/lib.rs
Normal file
28
gui/lib/ide/json-rpc/src/lib.rs
Normal file
@ -0,0 +1,28 @@
|
||||
//! This is a library aimed to facilitate implementing JSON-RPC protocol
|
||||
//! clients. The main type is `Handler` that a client should build upon.
|
||||
|
||||
#![feature(trait_alias)]
|
||||
#![warn(missing_docs)]
|
||||
#![warn(trivial_casts)]
|
||||
#![warn(trivial_numeric_casts)]
|
||||
#![warn(unused_import_braces)]
|
||||
#![warn(unused_qualifications)]
|
||||
#![warn(unsafe_code)]
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
|
||||
|
||||
pub mod api;
|
||||
pub mod error;
|
||||
pub mod handler;
|
||||
pub mod messages;
|
||||
pub mod test_util;
|
||||
pub mod transport;
|
||||
|
||||
pub use api::RemoteMethodCall;
|
||||
pub use api::Result;
|
||||
pub use enso_prelude as prelude;
|
||||
pub use transport::Transport;
|
||||
pub use transport::TransportEvent;
|
||||
pub use handler::Event;
|
||||
pub use handler::Handler;
|
342
gui/lib/ide/json-rpc/src/messages.rs
Normal file
342
gui/lib/ide/json-rpc/src/messages.rs
Normal file
@ -0,0 +1,342 @@
|
||||
//! This module provides data structures that follow the JSON-RPC 2.0 scheme. Their
|
||||
//! serialization and deserialization via serde_json is compatible with
|
||||
//! JSON-RPC compliant peers.
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use shrinkwraprs::Shrinkwrap;
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === Message ===
|
||||
// ===============
|
||||
|
||||
/// All JSON-RPC messages bear `jsonrpc` version number.
|
||||
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
|
||||
#[derive(Shrinkwrap)]
|
||||
pub struct Message<T> {
|
||||
/// JSON-RPC Protocol version, should be 2.0.
|
||||
pub jsonrpc: Version,
|
||||
|
||||
/// Payload, either a Request or Response or Notification in direct
|
||||
/// or serialized form.
|
||||
#[serde(flatten)]
|
||||
#[shrinkwrap(main_field)]
|
||||
pub payload: T
|
||||
}
|
||||
|
||||
// === Common Message Subtypes ===
|
||||
|
||||
/// A request message.
|
||||
pub type RequestMessage<In> = Message<Request<MethodCall<In>>>;
|
||||
|
||||
/// A response message.
|
||||
pub type ResponseMessage<Ret> = Message<Response<Ret>>;
|
||||
|
||||
/// A notification message.
|
||||
pub type NotificationMessage<Ret> = Message<Notification<MethodCall<Ret>>>;
|
||||
|
||||
// === `new` Functions ===
|
||||
|
||||
impl<T> Message<T> {
|
||||
/// Wraps given payload into a JSON-RPC 2.0 message.
|
||||
pub fn new(t:T) -> Message<T> {
|
||||
Message {
|
||||
jsonrpc : Version::V2,
|
||||
payload : t,
|
||||
}
|
||||
}
|
||||
|
||||
/// Construct a request message.
|
||||
pub fn new_request
|
||||
(id:Id, method:&str, input:T) -> RequestMessage<T> {
|
||||
let call = MethodCall {method: method.into(),input};
|
||||
let request = Request::new(id,call);
|
||||
Message::new(request)
|
||||
}
|
||||
|
||||
/// Construct a successful response message.
|
||||
pub fn new_success(id:Id, result:T) -> ResponseMessage<T> {
|
||||
let result = Result::Success(Success {result});
|
||||
let response = Response {id,result};
|
||||
Message::new(response)
|
||||
}
|
||||
|
||||
/// Construct an error response message.
|
||||
pub fn new_error
|
||||
(id:Id, code:i64, message:String, data:Option<serde_json::Value>)
|
||||
-> ResponseMessage<T> {
|
||||
let result = Result::Error(Error{code,message,data});
|
||||
let response = Response {id,result};
|
||||
Message::new(response)
|
||||
}
|
||||
|
||||
/// Construct a notification message.
|
||||
pub fn new_notification
|
||||
(method:&'static str, input:T) -> NotificationMessage<T> {
|
||||
let call = MethodCall {method: method.into(),input};
|
||||
let notification = Notification(call);
|
||||
Message::new(notification)
|
||||
}
|
||||
}
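// Serialization sketch (illustrative comment): with the flattened payloads
// above, `Message::new_request(Id(0), "copyFile", input)` serializes roughly
// to the following JSON (assuming `input` itself serializes to an object):
//
//     {"jsonrpc":"2.0","id":0,"method":"copyFile","input":{ ... }}
//
// Note that the arguments land under the "input" key, which is what the tests
// at the bottom of this file check via `protocol::INPUT`.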
|
||||
|
||||
|
||||
|
||||
// ========================
|
||||
// === Message Subparts ===
|
||||
// ========================
|
||||
|
||||
/// An id identifying the call request.
|
||||
///
|
||||
/// Each request made by the client should get a unique id (unique in the context of
|
||||
/// the current session). An auto-incrementing integer is a common choice.
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)]
|
||||
#[derive(Display)]
|
||||
#[derive(Shrinkwrap)]
|
||||
pub struct Id(pub i64);
|
||||
|
||||
/// JSON-RPC protocol version. Only 2.0 is supported.
|
||||
#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)]
|
||||
pub enum Version {
|
||||
/// JSON-RPC 2.0 specification. The supported version.
|
||||
#[serde(rename = "2.0")]
|
||||
V2,
|
||||
}
|
||||
|
||||
/// A non-notification request.
|
||||
///
|
||||
/// `Call` must be a type that upon JSON serialization provides `method` and
|
||||
/// `params` fields, like `MethodCall`.
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
#[derive(Shrinkwrap)]
|
||||
pub struct Request<Call> {
|
||||
/// An identifier for this request that will allow matching the response.
|
||||
pub id: Id,
|
||||
#[serde(flatten)]
|
||||
#[shrinkwrap(main_field)]
|
||||
/// method and its params
|
||||
pub call: Call,
|
||||
}
|
||||
|
||||
impl<M> Request<M> {
|
||||
/// Create a new request.
|
||||
pub fn new(id:Id, call:M) -> Request<M> {
|
||||
Request {id,call}
|
||||
}
|
||||
}
|
||||
|
||||
/// A notification request.
|
||||
///
|
||||
/// `Call` must be a type that upon JSON serialization provides `method` and
|
||||
/// `params` fields, like `MethodCall`.
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
pub struct Notification<Call>(pub Call);
|
||||
|
||||
/// A response to a `Request`. Depending on `result` value it might be
|
||||
/// successful or not.
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
pub struct Response<Res> {
|
||||
/// Identifier, matching the value given in `Request` when call was made.
|
||||
pub id: Id,
|
||||
/// Call result.
|
||||
#[serde(flatten)]
|
||||
pub result: Result<Res>
|
||||
}
|
||||
|
||||
/// Result of the remote call — either a returned value or an error.
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
#[serde(untagged)]
|
||||
pub enum Result<Res> {
|
||||
/// Returned value of a successful call.
|
||||
Success(Success<Res>),
|
||||
/// Error value from a call that failed on the remote side.
|
||||
Error(Error),
|
||||
}
|
||||
|
||||
/// Value yielded by a successful remote call.
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
pub struct Success<Ret> {
|
||||
/// A value returned from a successful remote call.
|
||||
pub result: Ret,
|
||||
}
|
||||
|
||||
/// Error raised on a failed remote call.
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
pub struct Error {
|
||||
/// A number indicating what type of error occurred.
|
||||
pub code : i64,
|
||||
/// A short description of the error.
|
||||
pub message : String,
|
||||
/// Optional value with additional information about the error.
|
||||
pub data : Option<serde_json::Value>
|
||||
}
|
||||
|
||||
/// A message that can come from Server to Client — either a response or
|
||||
/// a notification.
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
#[serde(untagged)]
|
||||
pub enum IncomingMessage {
|
||||
/// A response to a call made by the client.
|
||||
Response (Response <serde_json::Value>),
|
||||
/// A notification call (initiated by the server).
|
||||
Notification(Notification<serde_json::Value>),
|
||||
}
|
||||
|
||||
/// Partially decodes incoming message.
|
||||
///
|
||||
/// This checks that the message has the `jsonrpc` version string, and whether it is a
|
||||
/// response or a notification.
|
||||
pub fn decode_incoming_message
|
||||
(message:String) -> serde_json::Result<IncomingMessage> {
|
||||
use serde_json::Value;
|
||||
use serde_json::from_str;
|
||||
use serde_json::from_value;
|
||||
let message = from_str::<Message<Value>>(&message)?;
|
||||
from_value::<IncomingMessage>(message.payload)
|
||||
}
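// Illustration (comment-only sketch): telling a response apart from a
// notification.
//
//     let text = r#"{"jsonrpc":"2.0","id":1,"result":{}}"#.to_string();
//     match decode_incoming_message(text) {
//         Ok(IncomingMessage::Response(response)) => assert_eq!(response.id, Id(1)),
//         other                                   => panic!("unexpected: {:?}", other),
//     }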
|
||||
|
||||
/// A call to a method, with its arguments; used in both requests and notifications.
|
||||
///
|
||||
/// `In` is any serializable (or already serialized) representation of the
|
||||
/// method arguments passed in this call.
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
#[derive(Shrinkwrap)]
|
||||
pub struct MethodCall<In> {
|
||||
/// Name of the method that is being called.
|
||||
pub method : String,
|
||||
/// Method arguments.
|
||||
#[shrinkwrap(main_field)]
|
||||
pub input : In
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use serde_json::Map;
|
||||
use serde_json::Value;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
struct MockRequest {
|
||||
number: i64
|
||||
}
|
||||
impl MockRequest {
|
||||
const FIELD_COUNT : usize = 1;
|
||||
const FIELD_NAME : &'static str = "number";
|
||||
}
|
||||
|
||||
mod protocol {
|
||||
// === Field Names ===
|
||||
pub const JSONRPC : &str = "jsonrpc";
|
||||
pub const METHOD : &str = "method";
|
||||
pub const INPUT : &str = "input";
|
||||
pub const ID : &str = "id";
|
||||
|
||||
// === Version strings ===
|
||||
pub const VERSION2_STRING:&str = "2.0";
|
||||
|
||||
// === Other ===
|
||||
pub const FIELD_COUNT_IN_REQUEST : usize = 4;
|
||||
pub const FIELD_COUNT_IN_NOTIFICATION : usize = 3;
|
||||
}
|
||||
|
||||
fn expect_field<'a,Obj:'a>
|
||||
(obj:&'a Map<String, Value>, field_name:&str) -> &'a Value
|
||||
where &'a Obj:Into<&'a Value> {
|
||||
let missing_msg = format!("missing field {}",field_name);
|
||||
obj.get(field_name).expect(&missing_msg)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_request_serialization() {
|
||||
let id = Id(50);
|
||||
let method = "mockMethod";
|
||||
let number = 124;
|
||||
let input = MockRequest {number};
|
||||
let call = MethodCall {method:method.into(),input};
|
||||
let request = Request::new(id,call);
|
||||
let message = Message::new(request);
|
||||
|
||||
let json = serde_json::to_value(message).expect("serialization error");
|
||||
let json = json.as_object().expect("expected an object");
|
||||
assert_eq!(json.len(), protocol::FIELD_COUNT_IN_REQUEST);
|
||||
let jsonrpc_field = expect_field(json, protocol::JSONRPC);
|
||||
assert_eq!(jsonrpc_field, protocol::VERSION2_STRING);
|
||||
assert_eq!(expect_field(json, protocol::ID), id.0);
|
||||
assert_eq!(expect_field(json, protocol::METHOD), method);
|
||||
let input_json = expect_field(json, protocol::INPUT);
|
||||
let input_json = input_json.as_object().expect("input must be object");
|
||||
assert_eq!(input_json.len(), MockRequest::FIELD_COUNT);
|
||||
assert_eq!(expect_field(input_json, MockRequest::FIELD_NAME), number);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_notification_serialization() {
|
||||
let method = "mockNotification";
|
||||
let number = 125;
|
||||
let input = MockRequest {number};
|
||||
let call = MethodCall {method:method.into(),input};
|
||||
let notification = Notification(call);
|
||||
let message = Message::new(notification);
|
||||
|
||||
println!("{}", serde_json::to_string(&message).unwrap());
|
||||
|
||||
let json = serde_json::to_value(message).expect("serialization error");
|
||||
let json = json.as_object().expect("expected an object");
|
||||
assert_eq!(json.len(), protocol::FIELD_COUNT_IN_NOTIFICATION);
|
||||
let jsonrpc_field = expect_field(json, protocol::JSONRPC);
|
||||
assert_eq!(jsonrpc_field, protocol::VERSION2_STRING);
|
||||
assert_eq!(expect_field(json, protocol::METHOD), method);
|
||||
let input_json = expect_field(json, protocol::INPUT);
|
||||
let input_json = input_json.as_object().expect("input must be object");
|
||||
assert_eq!(input_json.len(), MockRequest::FIELD_COUNT);
|
||||
assert_eq!(expect_field(input_json, MockRequest::FIELD_NAME), number);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_response_deserialization() {
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct MockResponse { exists: bool }
|
||||
|
||||
let response = r#"{"jsonrpc":"2.0","id":0,"result":{"exists":true}}"#;
|
||||
let msg = serde_json::from_str(&response).unwrap();
|
||||
if let IncomingMessage::Response(resp) = msg {
|
||||
assert_eq!(resp.id, Id(0));
|
||||
if let Result::Success(ret) = resp.result {
|
||||
let obj = ret.result.as_object().expect("expected object ret");
|
||||
assert_eq!(obj.len(), 1);
|
||||
let exists = obj.get("exists").unwrap().as_bool().unwrap();
|
||||
assert_eq!(exists, true)
|
||||
}
|
||||
else {
|
||||
panic!("Expected a success result")
|
||||
}
|
||||
} else {
|
||||
panic!("Expected a response!");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_serialization_and_deserialization() {
|
||||
use serde_json::from_str;
|
||||
use protocol::VERSION2_STRING;
|
||||
let expected_json = Value::String(VERSION2_STRING.into());
|
||||
let expected_json_text = serde_json::to_string(&expected_json);
|
||||
let expected_json_text = expected_json_text.unwrap();
|
||||
let got_json_text = serde_json::to_string(&Version::V2).unwrap();
|
||||
assert_eq!(got_json_text, expected_json_text);
|
||||
|
||||
let got_value = from_str::<Version>(&expected_json_text).unwrap();
|
||||
assert_eq!(got_value, Version::V2);
|
||||
}
|
||||
}
|
5
gui/lib/ide/json-rpc/src/test_util/mod.rs
Normal file
5
gui/lib/ide/json-rpc/src/test_util/mod.rs
Normal file
@ -0,0 +1,5 @@
|
||||
//! Test utilities. Should not be used in production code.
|
||||
//!
|
||||
//! Reusable code for other crates that want to test usage of this crate.
|
||||
|
||||
pub mod transport;
|
150
gui/lib/ide/json-rpc/src/test_util/transport/mock.rs
Normal file
150
gui/lib/ide/json-rpc/src/test_util/transport/mock.rs
Normal file
@ -0,0 +1,150 @@
|
||||
//! This module provides a `MockTransport` that implements `Transport`.
|
||||
//!
|
||||
//! It is meant to be used in tests.
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use crate::transport::Transport;
|
||||
use crate::transport::TransportEvent;
|
||||
|
||||
use std::collections::VecDeque;
|
||||
use failure::Error;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::Serialize;
|
||||
|
||||
|
||||
|
||||
// ====================
|
||||
// === SendingError ===
|
||||
// ====================
|
||||
|
||||
/// Errors emitted by the `MockTransport`.
|
||||
#[derive(Clone,Copy,Debug,Fail)]
|
||||
pub enum SendError {
|
||||
/// Cannot send message while the connection is closed.
|
||||
#[fail(display = "Cannot send message when socket is closed.")]
|
||||
TransportClosed,
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ========================
|
||||
// === Transport Status ===
|
||||
// ========================
|
||||
|
||||
/// Status of the `MockTransport`.
|
||||
#[derive(Clone,Copy,Debug)]
|
||||
pub enum Status {
|
||||
/// Transport is functional, can send messages.
|
||||
Open,
|
||||
/// Transport is not functional at the moment, cannot send messages.
|
||||
Closed
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ======================
|
||||
// === Transport Data ===
|
||||
// ======================
|
||||
|
||||
/// Mock transport shared data. Collects all the messages sent by the owner.
|
||||
///
|
||||
/// Allows mocking messages from the peer.
|
||||
#[derive(Debug,Default)]
|
||||
pub struct MockTransportData {
|
||||
/// Events sink.
|
||||
pub event_tx : Option<std::sync::mpsc::Sender<TransportEvent>>,
|
||||
/// Messages sent by the user.
|
||||
pub sent_msgs : VecDeque<String>,
|
||||
/// Transport status.
|
||||
pub is_closed : bool,
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ======================
|
||||
// === Mock Transport ===
|
||||
// ======================
|
||||
|
||||
/// Shareable wrapper over `MockTransportData`.
|
||||
#[derive(Clone,Debug,Default)]
|
||||
pub struct MockTransport(Rc<RefCell<MockTransportData>>);
|
||||
|
||||
impl Transport for MockTransport {
|
||||
fn send_text(&mut self, text:String) -> Result<(), Error> {
|
||||
self.with_mut_data(|data| {
|
||||
if data.is_closed {
|
||||
Err(SendError::TransportClosed.into())
|
||||
} else {
|
||||
data.sent_msgs.push_back(text.clone());
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn set_event_tx(&mut self, tx:std::sync::mpsc::Sender<TransportEvent>) {
|
||||
self.with_mut_data(|data| {
|
||||
data.event_tx = Some(tx);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl MockTransport {
|
||||
/// Create a new `MockTransport`.
|
||||
pub fn new() -> MockTransport {
|
||||
MockTransport::default()
|
||||
}
|
||||
|
||||
/// Executes given function with access to borrowed mutable data reference.
|
||||
pub fn with_mut_data<R,F>(&mut self, f:F) -> R
|
||||
where F: FnOnce(&mut MockTransportData) -> R {
|
||||
let mut data = self.0.borrow_mut();
|
||||
f(&mut data)
|
||||
}
|
||||
|
||||
/// Generates an event that mocks receiving a text message from a peer.
|
||||
pub fn mock_peer_message_text<S:Into<String>>(&mut self, message:S) {
|
||||
let message = message.into();
|
||||
if let Some(ref tx) = self.0.borrow_mut().event_tx {
|
||||
let _ = tx.send(TransportEvent::TextMessage(message));
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates an event that mocks receiving a text message from a peer with
|
||||
/// serialized JSON contents.
|
||||
pub fn mock_peer_message<T:Serialize>(&mut self, message:T) {
|
||||
let text = serde_json::to_string(&message);
|
||||
let text = text.expect("failed to serialize mock message");
|
||||
self.mock_peer_message_text(text)
|
||||
}
|
||||
|
||||
/// Mocks the event generated when the peer closes the socket (or the connection
|
||||
/// is lost for any other reason).
|
||||
pub fn mock_connection_closed(&mut self) {
|
||||
self.with_mut_data(|data| {
|
||||
if let Some(ref tx) = data.event_tx {
|
||||
data.is_closed = true;
|
||||
let _ = tx.send(TransportEvent::Closed);
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Takes the message sent by the client and returns its text.
|
||||
///
|
||||
/// If the client did not send any messages, panics.
|
||||
/// If the client sent multiple messages, the first one is returned.
|
||||
/// Further messages can be obtained by subsequent calls.
|
||||
pub fn expect_message_text(&mut self) -> String {
|
||||
self.with_mut_data(|data| {
|
||||
data.sent_msgs.pop_front().expect("client should have sent request")
|
||||
})
|
||||
}
|
||||
|
||||
/// Similar to `expect_message_text` but deserializes the message into
|
||||
/// given type `T` from JSON.
|
||||
pub fn expect_message<T:DeserializeOwned>(&mut self) -> T {
|
||||
let text = self.expect_message_text();
|
||||
let res = serde_json::from_str(&text);
|
||||
res.expect("failed to deserialize client's message")
|
||||
}
|
||||
}
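// Typical test-side usage (illustrative comment):
//
//     let mut transport = MockTransport::new();
//     // Hand `transport.clone()` to the code under test and let it send a request,
//     // then inspect the outgoing text and fake a reply:
//     let sent: String = transport.expect_message_text();
//     transport.mock_peer_message_text(r#"{"jsonrpc":"2.0","id":0,"result":null}"#);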
|
3
gui/lib/ide/json-rpc/src/test_util/transport/mod.rs
Normal file
3
gui/lib/ide/json-rpc/src/test_util/transport/mod.rs
Normal file
@ -0,0 +1,3 @@
|
||||
//! Test utilities for the Transport.
|
||||
|
||||
pub mod mock;
|
29
gui/lib/ide/json-rpc/src/transport.rs
Normal file
29
gui/lib/ide/json-rpc/src/transport.rs
Normal file
@ -0,0 +1,29 @@
|
||||
//! Traits providing abstraction over transport used by the JSON-RPC client.
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use failure::Error;
|
||||
|
||||
/// A transport that facilitates the JSON-RPC protocol.
|
||||
///
|
||||
/// Must allow sending and receiving text messages. Additionally, the connection
|
||||
/// might be lost at any point for any reason.
|
||||
///
|
||||
/// A typical implementation would use WebSockets, but it can also be a mock for
|
||||
/// tests.
|
||||
pub trait Transport : Debug {
|
||||
/// Send a text message.
|
||||
fn send_text(&mut self, message:String) -> Result<(), Error>;
|
||||
|
||||
/// Set a callback that gets notified on transport events.
|
||||
fn set_event_tx(&mut self, tx:std::sync::mpsc::Sender<TransportEvent>);
|
||||
}
|
||||
|
||||
/// An event generated by the `Transport`.
|
||||
#[derive(Debug)]
|
||||
pub enum TransportEvent {
|
||||
/// A text message has been received.
|
||||
TextMessage(String),
|
||||
/// A socket has been closed by the peer.
|
||||
Closed,
|
||||
}
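// Sketch of a trivial implementation (illustrative comment; a hypothetical
// `NullTransport` that drops every outgoing message and emits no events):
//
//     #[derive(Debug)]
//     struct NullTransport;
//
//     impl Transport for NullTransport {
//         fn send_text(&mut self, _message:String) -> Result<(), Error> { Ok(()) }
//         fn set_event_tx(&mut self, _tx:std::sync::mpsc::Sender<TransportEvent>) {}
//     }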
|
285
gui/lib/ide/json-rpc/tests/test.rs
Normal file
285
gui/lib/ide/json-rpc/tests/test.rs
Normal file
@ -0,0 +1,285 @@
|
||||
use prelude::*;
|
||||
|
||||
use futures::FutureExt;
|
||||
use futures::Stream;
|
||||
use json_rpc::*;
|
||||
use json_rpc::api::RemoteMethodCall;
|
||||
use json_rpc::api::Result;
|
||||
use json_rpc::error::RpcError;
|
||||
use json_rpc::error::HandlingError;
|
||||
use json_rpc::messages::Id;
|
||||
use json_rpc::messages::Message;
|
||||
use json_rpc::messages::Version;
|
||||
use json_rpc::test_util::transport::mock::MockTransport;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::future::Future;
|
||||
use std::pin::Pin;
|
||||
use std::sync::mpsc::TryRecvError;
|
||||
use utils::test::poll_future_output;
|
||||
use utils::test::poll_stream_output;
|
||||
|
||||
type MockEvent = json_rpc::handler::Event<MockNotification>;
|
||||
|
||||
|
||||
|
||||
// =====================
|
||||
// === Mock Protocol ===
|
||||
// =====================
|
||||
|
||||
|
||||
// === Remote Method ===
|
||||
|
||||
fn pow_impl(msg:MockRequestMessage) -> MockResponseMessage {
|
||||
let ret = MockResponse { result : msg.i * msg.i };
|
||||
Message::new_success(msg.id,ret)
|
||||
}
|
||||
|
||||
|
||||
// === Protocol Data ===
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
struct MockRequest {i:i64}
|
||||
|
||||
impl RemoteMethodCall for MockRequest {
|
||||
const NAME:&'static str = "pow";
|
||||
type Returned = MockResponse;
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
struct MockResponse { result:i64 }
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
|
||||
#[serde(tag = "method", content="params")]
|
||||
pub enum MockNotification {
|
||||
Meow {text:String},
|
||||
Bark {text:String},
|
||||
}
|
||||
|
||||
|
||||
// === Helper Aliases ===
|
||||
|
||||
type MockRequestMessage = messages::RequestMessage<MockRequest>;
|
||||
|
||||
type MockResponseMessage = messages::ResponseMessage<MockResponse>;
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === Mock Client ===
|
||||
// ===================
|
||||
|
||||
pub struct Client {
|
||||
pub handler : Handler<MockNotification>,
|
||||
pub events_stream : Pin<Box<dyn Stream<Item = MockEvent>>>,
|
||||
}
|
||||
|
||||
impl Client {
|
||||
pub fn new(transport:impl Transport + 'static) -> Client {
|
||||
let mut handler = Handler::new(transport);
|
||||
|
||||
let events_stream = Box::pin(handler.events());
|
||||
Client {
|
||||
handler,
|
||||
events_stream,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pow(&mut self, i:i64) -> impl Future<Output = Result<i64>> {
|
||||
let input = MockRequest { i };
|
||||
self.handler.open_request(input).map(|result| result.map(|r| r.result))
|
||||
}
|
||||
|
||||
pub fn process_events(&mut self) {
|
||||
self.handler.process_events()
|
||||
}
|
||||
|
||||
pub fn try_get_event(&mut self) -> Option<MockEvent> {
|
||||
poll_stream_output(&mut self.events_stream)
|
||||
}
|
||||
|
||||
pub fn try_get_notification(&mut self) -> Option<MockNotification> {
|
||||
let event = self.try_get_event()?;
|
||||
if let MockEvent::Notification(n) = event {
|
||||
Some(n)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_notification(&mut self) -> MockNotification {
|
||||
self.try_get_notification().expect("expected notification event")
|
||||
}
|
||||
|
||||
pub fn expect_handling_error(&mut self) -> HandlingError {
|
||||
let event = self.try_get_event().expect("no events, while expected error event");
|
||||
if let json_rpc::handler::Event::Error(err) = event {
|
||||
err
|
||||
} else {
|
||||
panic!("expected error event, encountered: {:?}", event)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// === Test ===
|
||||
// ============
|
||||
|
||||
fn setup() -> (MockTransport, Client) {
|
||||
let transport = MockTransport::new();
|
||||
let client = Client::new(transport.clone());
|
||||
(transport,client)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_success_call() {
|
||||
let (mut transport, mut client) = setup();
|
||||
let call_input = 8;
|
||||
let mut fut = Box::pin(client.pow(8));
|
||||
let expected_first_request_id = Id(0);
|
||||
|
||||
// validate request sent
|
||||
let req_msg = transport.expect_message::<MockRequestMessage>();
|
||||
assert_eq!(req_msg.id, expected_first_request_id);
|
||||
assert_eq!(req_msg.method, MockRequest::NAME);
|
||||
assert_eq!(req_msg.i, call_input);
|
||||
assert_eq!(req_msg.jsonrpc, Version::V2);
|
||||
|
||||
assert!(poll_future_output(&mut fut).is_none()); // no reply
|
||||
|
||||
// let's reply
|
||||
let reply = pow_impl(req_msg);
|
||||
transport.mock_peer_message(reply);
|
||||
|
||||
// before the tick the message should stay in the buffer and callbacks should not
|
||||
// complete
|
||||
assert!(poll_future_output(&mut fut).is_none()); // not ticked
|
||||
|
||||
// now tick
|
||||
client.process_events();
|
||||
if let Err(TryRecvError::Empty) = client.handler.incoming_events.try_recv() {
|
||||
// ok
|
||||
} else {
|
||||
panic!("All messages from the buffer should be already processed");
|
||||
}
|
||||
let result = poll_future_output(&mut fut);
|
||||
let result = result.expect("result should be present");
|
||||
let result = result.expect("result should be a success");
|
||||
assert_eq!(result, 8*8);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_error_call() {
|
||||
let (mut transport, mut client) = setup();
|
||||
let mut fut = Box::pin(client.pow(8));
|
||||
assert!(poll_future_output(&mut fut).is_none()); // no reply
|
||||
|
||||
// reply with error
|
||||
let req_msg = transport.expect_message::<MockRequestMessage>();
|
||||
let error_code = 5;
|
||||
let error_description = "wrong!";
|
||||
let error_data = None;
|
||||
let error_msg: MockResponseMessage = Message::new_error(
|
||||
req_msg.id,
|
||||
error_code,
|
||||
error_description.into(),
|
||||
error_data.clone(),
|
||||
);
|
||||
transport.mock_peer_message(error_msg);
|
||||
|
||||
// receive error
|
||||
client.process_events();
|
||||
let result = poll_future_output(&mut fut);
|
||||
let result = result.expect("result should be present");
|
||||
let result = result.expect_err("result should be a failure");
|
||||
if let RpcError::RemoteError(e) = result {
|
||||
assert_eq!(e.code, error_code);
|
||||
assert_eq!(e.data, error_data);
|
||||
assert_eq!(e.message, error_description);
|
||||
} else {
|
||||
panic!("Expected an error to be RemoteError");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_garbage_reply_error() {
|
||||
let (mut transport, mut client) = setup();
|
||||
let mut fut = Box::pin(client.pow(8));
|
||||
assert!(poll_future_output(&mut fut).is_none()); // no reply
|
||||
transport.mock_peer_message_text("hello, nice to meet you");
|
||||
client.process_events();
|
||||
assert!(poll_future_output(&mut fut).is_none()); // no valid reply
|
||||
let internal_error = client.expect_handling_error();
|
||||
if let HandlingError::InvalidMessage(_) = internal_error {
|
||||
} else {
|
||||
panic!("Expected an error to be InvalidMessage");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_disconnect_error() {
|
||||
let (mut transport, mut client) = setup();
|
||||
let mut fut = Box::pin(client.pow(8));
|
||||
assert!(poll_future_output(&mut fut).is_none()); // no reply
|
||||
transport.mock_connection_closed();
|
||||
assert!(poll_future_output(&mut fut).is_none()); // no reply
|
||||
client.process_events();
|
||||
let result = poll_future_output(&mut fut);
|
||||
let result = result.expect("result should be present");
|
||||
let result = result.expect_err("result should be a failure");
|
||||
if let RpcError::LostConnection = result {} else {
|
||||
panic!("Expected an error to be RemoteError");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sending_while_disconnected() {
|
||||
let (mut transport, mut client) = setup();
|
||||
transport.mock_connection_closed();
|
||||
let mut fut = Box::pin(client.pow(8));
|
||||
let result = poll_future_output(&mut fut).unwrap();
|
||||
assert!(result.is_err())
|
||||
}
|
||||
|
||||
fn test_notification(mock_notif:MockNotification) {
|
||||
let (mut transport, mut client) = setup();
|
||||
let message = Message::new(mock_notif.clone());
|
||||
assert!(client.try_get_notification().is_none());
|
||||
transport.mock_peer_message(message.clone());
|
||||
assert!(client.try_get_notification().is_none());
|
||||
client.process_events();
|
||||
let notification = client.try_get_notification();
|
||||
assert_eq!(notification.is_none(), false);
|
||||
assert_eq!(notification, Some(mock_notif));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recognizing_notifications() {
|
||||
let meow_notification = MockNotification::Meow {text:"meow!".into()};
|
||||
test_notification(meow_notification);
|
||||
|
||||
let bark_notification = MockNotification::Bark {text:"woof!".into()};
|
||||
test_notification(bark_notification);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_handling_invalid_notification() {
|
||||
let other_notification = r#"{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "update",
|
||||
"params": [1,2,3,4,5]
|
||||
}"#;
|
||||
|
||||
let (mut transport, mut client) = setup();
|
||||
assert!(client.try_get_notification().is_none());
|
||||
transport.mock_peer_message_text(other_notification);
|
||||
assert!(client.try_get_notification().is_none());
|
||||
client.process_events();
|
||||
let internal_error = client.expect_handling_error();
|
||||
if let HandlingError::InvalidNotification(_) = internal_error {}
|
||||
else {
|
||||
panic!("expected InvalidNotification error");
|
||||
}
|
||||
}
|
33
gui/lib/ide/parser/Cargo.toml
Normal file
33
gui/lib/ide/parser/Cargo.toml
Normal file
@ -0,0 +1,33 @@
|
||||
[package]
|
||||
name = "parser"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <contact@luna-lang.org>"]
|
||||
edition = "2018"
|
||||
build = "build.rs"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
ast = { version = "0.1.0", path = "../ast/impl" }
|
||||
enso-prelude = { version = "0.1.0", path = "../../prelude" }
|
||||
|
||||
console_error_panic_hook = "0.1.6"
|
||||
failure = "0.1"
|
||||
js-sys = "0.3"
|
||||
matches = "0.1"
|
||||
serde = { version = "1.0" , features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
shrinkwraprs = "0.2.1"
|
||||
wasm-bindgen = "0.2"
|
||||
wasm-bindgen-test = "0.2"
|
||||
|
||||
[build-dependencies]
|
||||
basegl-build-utilities = { version = "0.1.0", path = "../../../build-utilities" }
|
||||
bytes = "0.5.4"
|
||||
futures = "0.3.1"
|
||||
reqwest = "0.10.1"
|
||||
tokio = { version = "0.2.10", features = ["macros"] }
|
||||
|
||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
|
||||
websocket = "0.23.0"
|
8
gui/lib/ide/parser/README.md
Normal file
8
gui/lib/ide/parser/README.md
Normal file
@ -0,0 +1,8 @@
|
||||
You can run the tests by calling:
|
||||
```
|
||||
sbt syntaxJS/fullOptJS
|
||||
wasm-pack test --headless --firefox --chrome --safari common/rust/parser
|
||||
```
|
||||
|
||||
Note that --safari wasn't tested and there is also an external
|
||||
[issue](https://github.com/rustwasm/wasm-pack/issues/611) with --chrome.
|
141
gui/lib/ide/parser/build.rs
Normal file
141
gui/lib/ide/parser/build.rs
Normal file
@ -0,0 +1,141 @@
|
||||
//! This build script is responsible for ensuring that, if the parser targets wasm,
|
||||
//! the JS Parser package is available at the expected location for
|
||||
//! the `wasm_bindgen` tool.
|
||||
|
||||
#![feature(option_result_contains)]
|
||||
|
||||
use basegl_build_utilities::PathRef;
|
||||
use basegl_build_utilities::absolute_path;
|
||||
use basegl_build_utilities::targeting_wasm;
|
||||
|
||||
use std::fs::File;
|
||||
use std::fs::create_dir_all;
|
||||
use std::io::prelude::*;
|
||||
use std::path::PathBuf;
|
||||
|
||||
|
||||
|
||||
// =========================
|
||||
// == Hardcoded constants ==
|
||||
// =========================
|
||||
|
||||
/// Where the crate expects to find the file with the compiled parser.
|
||||
/// Path relative to the crate directory.
|
||||
const PARSER_PATH: &str = "./pkg/scala-parser.js";
|
||||
|
||||
/// Commit from the `enso` repository from which the parser will be obtained.
|
||||
const PARSER_COMMIT: &str = "417323deb2cbd26f1d61c914828eb0b1abdf28ff";
|
||||
|
||||
/// Magic code that needs to be prepended to the ScalaJS-generated parser due to:
|
||||
/// https://github.com/scala-js/scala-js/issues/3677/
|
||||
const PARSER_PREAMBLE: &str = "var __ScalaJSEnv = { global: window };";
|
||||
|
||||
/// Obtains a URL where this parser version can be downloaded.
|
||||
pub fn parser_url(version:&ParserVersion) -> reqwest::Url {
|
||||
let url_string = format!(
|
||||
"https://packages.luna-lang.org/parser-js/nightly/{}/scala-parser.js",
|
||||
version.commit);
|
||||
let invalid_url_msg = format!("{} is an invalid URL.", url_string);
|
||||
reqwest::Url::parse(&url_string).expect(&invalid_url_msg)
|
||||
}
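// For example (illustrative comment), with the pinned `PARSER_COMMIT` above
// this resolves to:
//
//     https://packages.luna-lang.org/parser-js/nightly/417323deb2cbd26f1d61c914828eb0b1abdf28ff/scala-parser.js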
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// == ParserVersion ==
|
||||
// ===================
|
||||
|
||||
/// Parser version described as a commit hash from the `enso` repository.
|
||||
#[derive(Clone,Debug,PartialEq)]
|
||||
pub struct ParserVersion{ pub commit:String }
|
||||
|
||||
impl ParserVersion {
|
||||
/// Create a version described by given commit hash.
|
||||
pub fn from_commit(commit:String) -> ParserVersion { ParserVersion{commit} }
|
||||
|
||||
/// The JS parser version required for this crate.
|
||||
pub fn required() -> ParserVersion {
|
||||
ParserVersion { commit: PARSER_COMMIT.into() }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ========================
|
||||
// == Downloading parser ==
|
||||
// ========================
|
||||
|
||||
/// Stores information about which parser version should be provided and where.
|
||||
///
|
||||
/// The implementation provides methods that download the desired parser version, patch
|
||||
/// it and store it to the file, so the parser can be consumed by `wasm_bindgen`.
|
||||
struct ParserProvider {
|
||||
/// Required parser version.
|
||||
version : ParserVersion,
|
||||
/// The path where JS file needs to be provided.
|
||||
parser_path : PathBuf,
|
||||
}
|
||||
|
||||
impl ParserProvider {
|
||||
/// Creates a provider that places the given parser version at the given path.
|
||||
pub fn new(version:ParserVersion, parser_path:impl PathRef) -> ParserProvider {
|
||||
let parser_path = PathBuf::from(parser_path.as_ref());
|
||||
ParserProvider {version,parser_path}
|
||||
}
|
||||
|
||||
/// Downloads the contents of the JS parser into memory.
|
||||
pub async fn download(&self) -> bytes::Bytes {
|
||||
let url = parser_url(&self.version);
|
||||
let get_error = format!("Failed to get response from {}.", url);
|
||||
let download_error = format!("Failed to download contents of {}.", url);
|
||||
let response = reqwest::get(url).await.expect(&get_error);
|
||||
response.bytes().await.expect(&download_error)
|
||||
}
|
||||
|
||||
/// Stores the JS parser into a file, after prepending the `PARSER_PREAMBLE`.
|
||||
pub fn patch_and_store(&self, js_parser:bytes::Bytes) {
|
||||
let display_path = self.parser_path.display();
|
||||
let open_error = format!("Failed to open {}.", display_path);
|
||||
let write_error = format!("Failed to write {}.",display_path);
|
||||
let flush_error = format!("Failed to flush {}.",display_path);
|
||||
|
||||
let mut file = File::create(&self.parser_path).expect(&open_error);
|
||||
file.write_all(PARSER_PREAMBLE.as_bytes()).expect(&write_error);
|
||||
file.write_all(&js_parser).expect(&write_error);
|
||||
file.flush().expect(&flush_error);
|
||||
}
|
||||
|
||||
/// Ensures that target's parent directory exists.
|
||||
pub fn prepare_target_location(&self) {
|
||||
if let Some(parent_directory) = self.parser_path.parent() {
|
||||
let create_dir_error = format!(
|
||||
"Failed to create directory: {}.",
|
||||
parent_directory.display());
|
||||
create_dir_all(parent_directory).expect(&create_dir_error);
|
||||
}
|
||||
}
|
||||
|
||||
/// Places required parser version in the target location.
|
||||
pub async fn run(&self) {
|
||||
self.prepare_target_location();
|
||||
let parser_js = self.download().await;
|
||||
self.patch_and_store(parser_js);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ==========
|
||||
// == main ==
|
||||
// ==========
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> std::result::Result<(), Box<dyn std::error::Error>> {
|
||||
if targeting_wasm() {
|
||||
let required_version = ParserVersion::required();
|
||||
let parser_path = absolute_path(PARSER_PATH)?;
|
||||
let provider = ParserProvider::new(required_version,&parser_path);
|
||||
provider.run().await;
|
||||
}
|
||||
Ok(())
|
||||
}
|
gui/lib/ide/parser/src/api.rs (new file, 38 lines)
@ -0,0 +1,38 @@
|
||||
use crate::prelude::*;
|
||||
|
||||
pub type Ast = ast::Ast;
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// == Parser ==
|
||||
// ============
|
||||
|
||||
/// An entity able to parse Luna programs into Luna's AST.
|
||||
pub trait IsParser {
|
||||
fn parse(&mut self, program: String) -> Result<Ast>;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===========
|
||||
// == Error ==
|
||||
// ===========
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
#[derive(Debug, Fail)]
|
||||
pub enum Error {
|
||||
/// Error due to inner workings of the parser.
|
||||
#[fail(display = "Internal parser error: {:?}", _0)]
|
||||
ParsingError(String),
|
||||
/// Error related to wrapping, i.e. communication with the parser service.
|
||||
#[fail(display = "Interop error: {}", _0)]
|
||||
InteropError(#[cause] Box<dyn failure::Fail>),
|
||||
}
|
||||
|
||||
/// Wraps an arbitrary `failure::Fail` error as an `InteropError`.
|
||||
pub fn interop_error<T>(error: T) -> Error
|
||||
where T: Fail {
|
||||
Error::InteropError(Box::new(error))
|
||||
}
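/// Illustrative usage, not part of the original file: any `failure::Fail`
/// value can be wrapped into the `InteropError` variant; here we reuse
/// `Error::ParsingError` as a convenient stand-in inner error.
#[test]
fn interop_error_wraps_fail_values() {
    let inner = Error::ParsingError("mock inner error".into());
    match interop_error(inner) {
        Error::InteropError(_) => (),
        other => panic!("expected InteropError, got {:?}", other),
    }
}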
|
gui/lib/ide/parser/src/jsclient.rs (new file, 62 lines)
@ -0,0 +1,62 @@
|
||||
#![cfg(target_arch = "wasm32")]
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use crate::api;
|
||||
use crate::api::IsParser;
|
||||
use crate::api::Error::ParsingError;
|
||||
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
#[derive(Debug, Fail)]
|
||||
pub enum Error {
|
||||
#[fail(display = "JSON (de)serialization failed: {:?}", _0)]
|
||||
JsonSerializationError(#[cause] serde_json::error::Error),
|
||||
|
||||
#[fail(display = "Scala parser threw an unexpected exception.")]
|
||||
ScalaException(),
|
||||
}
|
||||
|
||||
impl From<Error> for api::Error {
|
||||
fn from(e: Error) -> Self {
|
||||
api::interop_error(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<serde_json::error::Error> for Error {
|
||||
fn from(error: serde_json::error::Error) -> Self {
|
||||
Error::JsonSerializationError(error)
|
||||
}
|
||||
}
|
||||
|
||||
#[wasm_bindgen(module = "/pkg/scala-parser.js")]
|
||||
extern "C" {
|
||||
#[wasm_bindgen(catch)]
|
||||
fn parse(input: String) -> std::result::Result<String, JsValue>;
|
||||
#[wasm_bindgen(catch)]
|
||||
#[wasm_bindgen(js_name = parseWithIDs)]
|
||||
fn parse_with_IDs
|
||||
(input: String, ids: String) -> std::result::Result<String, JsValue>;
|
||||
}
|
||||
|
||||
/// Wrapper over the JS-compiled parser.
|
||||
///
|
||||
/// Can only be used when targeting WebAssembly.
|
||||
pub struct Client {}
|
||||
|
||||
impl Client {
|
||||
pub fn new() -> Result<Client> {
|
||||
Ok(Client {})
|
||||
}
|
||||
}
|
||||
|
||||
impl IsParser for Client {
|
||||
fn parse(&mut self, _program: String) -> api::Result<api::Ast> {
|
||||
match parse(_program) {
|
||||
Ok(json_ast) => Err(ParsingError(json_ast)),
|
||||
Err(_) => Err(api::interop_error(Error::ScalaException())),
|
||||
}
|
||||
}
|
||||
}
|
gui/lib/ide/parser/src/lib.rs (new file, 51 lines)
@ -0,0 +1,51 @@
|
||||
pub mod api;
|
||||
|
||||
mod jsclient;
|
||||
mod wsclient;
|
||||
|
||||
pub use enso_prelude as prelude;
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use std::panic;
|
||||
use std::ops::DerefMut;
|
||||
extern crate console_error_panic_hook;
|
||||
|
||||
/// Handle to a parser implementation.
|
||||
///
|
||||
/// Currently this component is implemented as a wrapper over the parser
/// written in Scala. Depending on the compilation target (native or wasm) it
/// uses the implementation provided by either `wsclient` or `jsclient`.
|
||||
#[derive(Shrinkwrap)]
|
||||
#[shrinkwrap(mutable)]
|
||||
pub struct Parser(pub Box<dyn api::IsParser>);
|
||||
|
||||
impl Parser {
|
||||
/// Obtains a default parser implementation.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn new() -> api::Result<Parser> {
|
||||
let client = wsclient::Client::new()?;
|
||||
let parser = Box::new(client);
|
||||
Ok(Parser(parser))
|
||||
}
|
||||
|
||||
/// Obtains a default parser implementation.
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
pub fn new() -> api::Result<Parser> {
|
||||
let client = jsclient::Client::new()?;
|
||||
let parser = Box::new(client);
|
||||
Ok(Parser(parser))
|
||||
}
|
||||
|
||||
/// Obtains a default parser implementation, panicking in case of failure.
|
||||
pub fn new_or_panic() -> Parser {
|
||||
Parser::new()
|
||||
.unwrap_or_else(|e| panic!("Failed to create a parser: {:?}", e))
|
||||
}
|
||||
}
|
||||
|
||||
impl api::IsParser for Parser {
|
||||
fn parse(&mut self, program: String) -> api::Result<api::Ast> {
|
||||
self.deref_mut().parse(program)
|
||||
}
|
||||
}
|
gui/lib/ide/parser/src/main.rs (new file, 16 lines)
@ -0,0 +1,16 @@
|
||||
use parser::api::IsParser;
|
||||
|
||||
/// Simple interactive tester: calls the parser with its argument (or a
/// hardcoded default) and prints the result.
|
||||
fn main() {
|
||||
let default_input = String::from("import Foo.Bar\nfoo = a + 2");
|
||||
let program = std::env::args().nth(1).unwrap_or(default_input);
|
||||
println!("Will parse: {}", program);
|
||||
|
||||
let mut parser = parser::Parser::new_or_panic();
|
||||
let output = parser.parse(program);
|
||||
match output {
|
||||
Ok(result) => println!("Parser responded with: {:?}", result),
|
||||
Err(e) => println!("Failed to obtain a response: {:?}", e),
|
||||
}
|
||||
}
|
gui/lib/ide/parser/src/wsclient.rs (new file, 228 lines)
@ -0,0 +1,228 @@
|
||||
#![cfg(not(target_arch = "wasm32"))]
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use websocket::{
|
||||
stream::sync::TcpStream, ClientBuilder, Message, OwnedMessage,
|
||||
};
|
||||
|
||||
use crate::api;
|
||||
use api::Error::*;
|
||||
use Error::*;
|
||||
|
||||
type WsTcpClient = websocket::sync::Client<TcpStream>;
|
||||
|
||||
|
||||
|
||||
// ==========================
|
||||
// == Constants & literals ==
|
||||
// ==========================
|
||||
|
||||
pub const LOCALHOST: &str = "localhost";
|
||||
pub const DEFAULT_PORT: i32 = 30615;
|
||||
pub const DEFAULT_HOSTNAME: &str = LOCALHOST;
|
||||
|
||||
pub const HOSTNAME_VAR: &str = "ENSO_PARSER_HOSTNAME";
|
||||
pub const PORT_VAR: &str = "ENSO_PARSER_PORT";
|
||||
|
||||
|
||||
|
||||
// ===========
|
||||
// == Error ==
|
||||
// ===========
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
#[derive(Debug, Fail)]
|
||||
pub enum Error {
|
||||
#[fail(display = "Failed to parse given address url: {}", _0)]
|
||||
WrongUrl(#[cause] websocket::client::ParseError),
|
||||
|
||||
#[fail(display = "Connection error: {}", _0)]
|
||||
ConnectivityError(#[cause] websocket::WebSocketError),
|
||||
|
||||
#[fail(display = "Peer has closed the connection")]
|
||||
PeerClosedConnection,
|
||||
|
||||
#[fail(display = "Received non-text response: {:?}", _0)]
|
||||
NonTextResponse(websocket::OwnedMessage),
|
||||
|
||||
#[fail(display = "JSON (de)serialization failed: {:?}", _0)]
|
||||
JsonSerializationError(#[cause] serde_json::error::Error),
|
||||
|
||||
#[fail(display = "JSON deserialization failed: {:?}, JSON was: {}", _0, _1)]
|
||||
JsonDeserializationError(#[cause] serde_json::error::Error, String),
|
||||
}
|
||||
|
||||
impl From<Error> for api::Error {
|
||||
fn from(e: Error) -> Self {
|
||||
api::interop_error(e)
|
||||
}
|
||||
}
|
||||
impl From<websocket::client::ParseError> for Error {
|
||||
fn from(error: websocket::client::ParseError) -> Self {
|
||||
WrongUrl(error)
|
||||
}
|
||||
}
|
||||
impl From<websocket::WebSocketError> for Error {
|
||||
fn from(error: websocket::WebSocketError) -> Self {
|
||||
ConnectivityError(error)
|
||||
}
|
||||
}
|
||||
impl From<serde_json::error::Error> for Error {
|
||||
fn from(error: serde_json::error::Error) -> Self {
|
||||
JsonSerializationError(error)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// == Protocol ==
|
||||
// ==============
|
||||
|
||||
/// All requests supported by the Parser Service.
|
||||
#[derive(Debug, serde::Serialize, serde::Deserialize)]
|
||||
pub enum Request {
|
||||
ParseRequest { program: String },
|
||||
}
|
||||
|
||||
/// All responses that Parser Service might reply with.
|
||||
#[derive(Debug, serde::Serialize, serde::Deserialize)]
|
||||
pub enum Response {
|
||||
Success { ast_json: String },
|
||||
Error { message: String },
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// == Config ==
|
||||
// ============
|
||||
|
||||
/// Describes a WS endpoint.
|
||||
pub struct Config {
|
||||
pub host: String,
|
||||
pub port: i32,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
/// Formats URL String describing a WS endpoint.
|
||||
pub fn address_string(&self) -> String {
|
||||
format!("ws://{}:{}", self.host, self.port)
|
||||
}
|
||||
|
||||
/// Obtains the default WS endpoint used to connect to the parser service,
/// taken from environment variables or, if they are not set, from hardcoded
/// defaults.
|
||||
pub fn from_env() -> Config {
|
||||
let host = env_var_or(HOSTNAME_VAR, DEFAULT_HOSTNAME);
|
||||
let port = env_var_or(PORT_VAR, Default::default())
|
||||
.parse()
|
||||
.unwrap_or(DEFAULT_PORT);
|
||||
Config { host, port }
|
||||
}
|
||||
}
|
||||
|
||||
pub fn env_var_or(varname: &str, default_value: &str) -> String {
|
||||
std::env::var(varname).unwrap_or_else(|_| default_value.into())
|
||||
}
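/// A minimal sketch, not part of the original file: with neither variable
/// set, `Config::from_env` falls back to the hardcoded defaults above.
#[test]
fn default_endpoint_address() {
    std::env::remove_var(HOSTNAME_VAR);
    std::env::remove_var(PORT_VAR);
    let config = Config::from_env();
    assert_eq!(config.address_string(), "ws://localhost:30615");
}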
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// == Client ==
|
||||
// ============
|
||||
|
||||
/// Client to the Parser Service written in Scala.
|
||||
///
|
||||
/// Connects through WebSocket to the running service.
|
||||
pub struct Client {
|
||||
connection: WsTcpClient,
|
||||
}
|
||||
|
||||
mod internal {
|
||||
use super::*;
|
||||
impl Client {
|
||||
/// Serializes `Request` to JSON and sends to peer as a text message.
|
||||
pub fn send_request(&mut self, request: Request) -> Result<()> {
|
||||
let request_txt = serde_json::to_string(&request)?;
|
||||
let message = Message::text(request_txt);
|
||||
self.connection.send_message(&message)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Obtains a text message from peer and deserializes it using JSON
|
||||
/// into a `Response`.
|
||||
///
|
||||
/// Should be called exactly once after each `send_request` invocation.
|
||||
pub fn recv_response(&mut self) -> Result<Response> {
|
||||
let response = self.connection.recv_message()?;
|
||||
match response {
|
||||
OwnedMessage::Text(text) => Ok(serde_json::from_str(&text)?),
|
||||
_ => Err(NonTextResponse(response)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Sends given `Request` to peer and receives a `Response`.
|
||||
///
|
||||
/// Both request and response are exchanged in JSON using text messages
|
||||
/// over WebSocket.
|
||||
pub fn rpc_call(&mut self, request: Request) -> Result<Response> {
|
||||
self.send_request(request)?;
|
||||
self.recv_response()
|
||||
}
|
||||
}
|
||||
|
||||
/// Deserialize AST from JSON text received from WS Parser Service.
|
||||
pub fn from_json(json_text: &str) -> api::Result<api::Ast> {
|
||||
let ast = serde_json::from_str::<api::Ast>(json_text);
|
||||
Ok(ast.map_err(|e| JsonDeserializationError(e, json_text.into()))?)
|
||||
}
|
||||
}
|
||||
|
||||
impl Client {
|
||||
/// Creates a new `Client` connected to the already running parser service.
|
||||
pub fn from_conf(config: &Config) -> Result<Client> {
|
||||
let address = config.address_string();
|
||||
let mut builder = ClientBuilder::new(&address)?;
|
||||
let connection = builder.connect_insecure()?;
|
||||
Ok(Client { connection })
|
||||
}
|
||||
|
||||
/// Creates a `Client` using configuration defined by environment or
|
||||
/// defaults if environment is not set.
|
||||
pub fn new() -> Result<Client> {
|
||||
let config = Config::from_env();
|
||||
println!("Connecting to {}", config.address_string());
|
||||
let client = Client::from_conf(&config)?;
|
||||
println!("Established connection with {}", config.address_string());
|
||||
Ok(client)
|
||||
}
|
||||
}
|
||||
|
||||
impl api::IsParser for Client {
|
||||
fn parse(&mut self, program: String) -> api::Result<api::Ast> {
|
||||
let request = Request::ParseRequest { program };
|
||||
let response = self.rpc_call(request)?;
|
||||
match response {
|
||||
Response::Success { ast_json } => internal::from_json(&ast_json),
|
||||
Response::Error { message } => Err(ParsingError(message)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===========
|
||||
// == tests ==
|
||||
// ===========
|
||||
|
||||
#[test]
|
||||
fn wrong_url_reported() {
|
||||
let invalid_hostname = String::from("bgjhkb 7");
|
||||
let wrong_config = Config { host: invalid_hostname, port: 8080 };
|
||||
let client = Client::from_conf(&wrong_config);
|
||||
let got_wrong_url_error = matches::matches!(client, Err(WrongUrl(_)));
|
||||
assert!(got_wrong_url_error, "expected WrongUrl error");
|
||||
}
|
gui/lib/ide/parser/tests/parsing.rs (new file, 502 lines)
@ -0,0 +1,502 @@
|
||||
#![feature(generators, generator_trait)]
|
||||
|
||||
use parser::prelude::*;
|
||||
|
||||
use ast::*;
|
||||
use parser::api::IsParser;
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === Helpers ===
|
||||
// ===============
|
||||
|
||||
/// Takes an Ast that is a module with a single line and returns that line's AST.
|
||||
fn expect_single_line(ast:&Ast) -> &Ast {
|
||||
let module:&Module<Ast> = expect_shape(ast);
|
||||
let (line,) = (&module.lines).expect_tuple();
|
||||
line.elem.as_ref().unwrap()
|
||||
}
|
||||
|
||||
/// "Downcasts" given AST's Shape to `T`.
|
||||
fn expect_shape<'t,T>(ast:&'t Ast) -> &'t T
|
||||
where &'t Shape<Ast>: TryInto<&'t T> {
|
||||
match ast.shape().try_into() {
|
||||
Ok(shape) => shape,
|
||||
_ => {
|
||||
let expected_typename = std::any::type_name::<T>();
|
||||
panic!("failed converting shape into {}",expected_typename)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Asserts that given AST is a Var with given name.
|
||||
fn assert_var<StringLike: Into<String>>(ast:&Ast, name:StringLike) {
|
||||
let actual : &Var = expect_shape(ast);
|
||||
let expected = Var {name:name.into()};
|
||||
assert_eq!(*actual,expected);
|
||||
}
|
||||
|
||||
/// Asserts that given AST is an Opr with given name.
|
||||
fn assert_opr<StringLike:Into<String>>(ast:&Ast, name:StringLike) {
|
||||
let actual : &Opr = expect_shape(ast);
|
||||
let expected = Opr {name:name.into()};
|
||||
assert_eq!(*actual,expected);
|
||||
}
|
||||
|
||||
/// Checks if all nodes in the subtree have declared spans equal to the spans
/// we calculate.
|
||||
fn validate_spans(ast:&Ast) {
|
||||
for node in ast.iter_recursive() {
|
||||
let calculated = node.shape().span();
|
||||
let declared = node.wrapped.wrapped.span;
|
||||
assert_eq!(calculated, declared
|
||||
, "`{}` part of `{}`", node.repr(), ast.repr());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === ExpectTuple ===
|
||||
// ===================
|
||||
|
||||
// === Trait ===
|
||||
/// Helper allowing conversion between collections and tuples. Does heavy
/// unwrapping, so it shouldn't be used outside the test environment.
|
||||
trait ExpectTuple<T> {
|
||||
/// Converts `Self` to the tuple `T`. Panics if the collection has a
/// different number of elements.
|
||||
fn expect_tuple(self) -> T;
|
||||
}
|
||||
|
||||
|
||||
// === Implementations ===
|
||||
// TODO [MWU] boilerplate below should be generated with macro
|
||||
|
||||
impl<Collection:IntoIterator>
|
||||
ExpectTuple<(Collection::Item,)> for Collection {
|
||||
fn expect_tuple(self) -> (Collection::Item,) {
|
||||
let mut iter = self.into_iter();
|
||||
let v1 = iter.next().unwrap();
|
||||
assert!(iter.next().is_none());
|
||||
(v1,)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Collection: IntoIterator>
|
||||
ExpectTuple<(Collection::Item,Collection::Item)>
|
||||
for Collection {
|
||||
fn expect_tuple(self) -> (Collection::Item,Collection::Item) {
|
||||
let mut iter = self.into_iter();
|
||||
let v1 = iter.next().unwrap();
|
||||
let v2 = iter.next().unwrap();
|
||||
assert!(iter.next().is_none());
|
||||
(v1,v2)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Collection: IntoIterator>
|
||||
ExpectTuple<(Collection::Item,Collection::Item,Collection::Item)>
|
||||
for Collection {
|
||||
fn expect_tuple
|
||||
(self) -> (Collection::Item,Collection::Item,Collection::Item) {
|
||||
let mut iter = self.into_iter();
|
||||
let v1 = iter.next().unwrap();
|
||||
let v2 = iter.next().unwrap();
|
||||
let v3 = iter.next().unwrap();
|
||||
assert!(iter.next().is_none());
|
||||
(v1,v2,v3)
|
||||
}
|
||||
}
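/// Illustrative usage, not part of the original test file: the target tuple
/// arity is driven by the pattern on the left-hand side, and `expect_tuple`
/// panics if the element count does not match it.
#[test]
fn expect_tuple_usage() {
    let (a,b) = vec![10,20].expect_tuple();
    assert_eq!((a,b),(10,20));
}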
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === Fixture ===
|
||||
// ===============
|
||||
|
||||
/// Persists the parser (which is expensive to construct, so we want to reuse
/// it between tests). Additionally, hosts a number of helper methods.
|
||||
struct Fixture(parser::Parser);
|
||||
|
||||
impl Fixture {
|
||||
|
||||
|
||||
// === Helper methods ===
|
||||
|
||||
/// Create a new fixture, obtaining a default parser.
|
||||
fn new() -> Fixture {
|
||||
Fixture(parser::Parser::new_or_panic())
|
||||
}
|
||||
|
||||
/// Runs parser on given input, panics on any error.
|
||||
fn parse(&mut self, program:&str) -> Ast {
|
||||
println!("parsing {}", program);
|
||||
let ast = self.0.parse(program.into()).unwrap();
|
||||
assert_eq!(ast.shape().span(), program.len());
|
||||
validate_spans(&ast);
|
||||
assert_eq!(ast.repr(), program, "{:?}", ast);
|
||||
ast
|
||||
}
|
||||
|
||||
/// The program is expected to be a single-line module. The line's AST is
/// returned. Panics otherwise.
|
||||
fn parse_line(&mut self, program:&str) -> Ast {
|
||||
let ast = self.parse(program);
|
||||
let line = expect_single_line(&ast);
|
||||
line.clone()
|
||||
}
|
||||
|
||||
/// The program is expected to be a single-line module. The line's Shape
/// subtype is obtained and passed to `tester`.
|
||||
fn test_shape<T,F>(&mut self, program:&str, tester:F)
|
||||
where for<'t> &'t Shape<Ast>: TryInto<&'t T>,
|
||||
F : FnOnce(&T) -> () {
|
||||
let ast = self.parse_line(program);
|
||||
let shape = expect_shape(&ast);
|
||||
tester(shape);
|
||||
}
|
||||
|
||||
|
||||
// === Test Methods ===
|
||||
|
||||
fn deserialize_unrecognized(&mut self) {
|
||||
let unfinished = "`";
|
||||
self.test_shape(unfinished,|shape:&Unrecognized| {
|
||||
assert_eq!(shape.str,"`");
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_invalid_quote(&mut self) {
|
||||
let unfinished = "'a''";
|
||||
self.test_shape(unfinished,|shape:&Prefix<Ast>| {
|
||||
// shape.func is ignored here; it is a TextUnclosed, tested elsewhere
|
||||
let arg:&InvalidQuote = expect_shape(&shape.arg);
|
||||
let expected_quote = Text {str:"''".into()};
|
||||
assert_eq!(arg.quote,expected_quote.into());
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_inline_block(&mut self) {
|
||||
let unfinished = "'''a";
|
||||
self.test_shape(unfinished,|shape:&Prefix<Ast>| {
|
||||
let func:&InlineBlock = expect_shape(&shape.func);
|
||||
let expected_quote = Text {str: "'''".into()};
|
||||
assert_eq!(func.quote,expected_quote.into());
|
||||
assert_var(&shape.arg,"a");
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_blank(&mut self) {
|
||||
let expect_blank = |_:&Blank| {};
|
||||
let _ast = self.test_shape("_",expect_blank);
|
||||
}
|
||||
|
||||
fn deserialize_var(&mut self) {
|
||||
self.test_shape("foo",|var: &Var| {
|
||||
let expected_var = Var {name:"foo".into()};
|
||||
assert_eq!(var,&expected_var);
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_cons(&mut self) {
|
||||
let name = "FooBar";
|
||||
self.test_shape(name,|shape:&Cons| {
|
||||
assert_eq!(shape.name,name);
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_mod(&mut self) {
|
||||
self.test_shape("+=",|shape:&Mod| {
|
||||
assert_eq!(shape.name,"+");
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_invalid_suffix(&mut self) {
|
||||
self.test_shape("foo'bar",|shape:&InvalidSuffix<Ast>| {
|
||||
assert_var(&shape.elem,"foo'");
|
||||
assert_eq!(shape.suffix,"bar");
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_number(&mut self) {
|
||||
self.test_shape("127",|shape:&Number| {
|
||||
assert_eq!(shape.base,None);
|
||||
assert_eq!(shape.int,"127");
|
||||
});
|
||||
|
||||
self.test_shape("16_ff",|shape:&Number| {
|
||||
assert_eq!(shape.base.as_ref().unwrap(),"16");
|
||||
assert_eq!(shape.int,"ff");
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_text_line_raw(&mut self) {
|
||||
self.test_shape("\"foo\"",|shape:&TextLineRaw| {
|
||||
let (segment,) = (&shape.text).expect_tuple();
|
||||
let expected = SegmentPlain{value: "foo".to_string()};
|
||||
assert_eq!(*segment,expected.into());
|
||||
});
|
||||
|
||||
let tricky_raw = r#""\\\"\n""#;
|
||||
self.test_shape(tricky_raw,|shape:&TextLineRaw| {
|
||||
let segments: (_,_,_) = (&shape.text).expect_tuple();
|
||||
assert_eq!(*segments.0,Slash {}.into());
|
||||
assert_eq!(*segments.1,RawQuote {}.into());
|
||||
assert_eq!(*segments.2,Invalid {str: 'n'}.into());
|
||||
// Quote (fmt one) cannot be escaped in raw string. So no test for
|
||||
// it, even though it belongs to the same enum.
|
||||
});
|
||||
}
|
||||
|
||||
fn test_text_fmt_segment<F>(&mut self, program:&str, tester:F)
|
||||
where F: FnOnce(&SegmentFmt<Ast>) -> () {
|
||||
self.test_shape(program,|shape:&TextLineFmt<Ast>| {
|
||||
let (segment,) = (&shape.text).expect_tuple();
|
||||
tester(segment)
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_text_line_fmt(&mut self) {
|
||||
use SegmentFmt::SegmentExpr;
|
||||
|
||||
// plain segment
|
||||
self.test_shape("'foo'",|shape:&TextLineFmt<Ast>| {
|
||||
let (segment,) = (&shape.text).expect_tuple();
|
||||
let expected = SegmentPlain{value: "foo".into()};
|
||||
assert_eq!(*segment,expected.into());
|
||||
});
|
||||
|
||||
// escapes
|
||||
let tricky_fmt = r#"'\\\'\"'"#;
|
||||
self.test_shape(tricky_fmt,|shape:&TextLineFmt<Ast>| {
|
||||
let segments: (_,_,_) = (&shape.text).expect_tuple();
|
||||
assert_eq!(*segments.0,Slash{}.into() );
|
||||
assert_eq!(*segments.1,Quote{}.into() );
|
||||
assert_eq!(*segments.2,Invalid{ str: '"'}.into() );
|
||||
});
|
||||
|
||||
// expression empty
|
||||
let expr_fmt = r#"'``'"#;
|
||||
self.test_text_fmt_segment(expr_fmt,|segment| {
|
||||
match segment {
|
||||
SegmentExpr(expr) => assert_eq!(expr.value,None),
|
||||
_ => panic!("wrong segment type received"),
|
||||
}
|
||||
});
|
||||
|
||||
// expression non-empty
|
||||
let expr_fmt = r#"'`foo`'"#;
|
||||
self.test_text_fmt_segment(expr_fmt,|segment| {
|
||||
match segment {
|
||||
SegmentExpr(expr) =>
|
||||
assert_var(expr.value.as_ref().unwrap(),"foo"),
|
||||
_ => panic!("wrong segment type received"),
|
||||
}
|
||||
});
|
||||
|
||||
self.test_text_fmt_segment(r#"'\n'"#,|segment| {
|
||||
let expected = EscapeCharacter{c:'n'};
|
||||
assert_eq!(*segment,expected.into());
|
||||
});
|
||||
self.test_text_fmt_segment(r#"'\u0394'"#,|segment| {
|
||||
let expected = EscapeUnicode16{digits: "0394".into()};
|
||||
assert_eq!(*segment,expected.into());
|
||||
});
|
||||
// TODO [MWU] We don't test Unicode21 as it is not yet supported by the
|
||||
// parser.
|
||||
self.test_text_fmt_segment(r#"'\U0001f34c'"#,|segment| {
|
||||
let expected = EscapeUnicode32{digits: "0001f34c".into()};
|
||||
assert_eq!(*segment,expected.into());
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_text_block_raw(&mut self) {
|
||||
let program = "\"\"\" \n \n X";
|
||||
self.test_shape(program,|shape:&TextBlockRaw| {
|
||||
assert_eq!(shape.spaces,1);
|
||||
assert_eq!(shape.offset,0);
|
||||
|
||||
let (line,) = (&shape.text).expect_tuple();
|
||||
let (empty_line,) = (&line.empty_lines).expect_tuple();
|
||||
assert_eq!(*empty_line,2);
|
||||
|
||||
let (segment,) = (&line.text).expect_tuple();
|
||||
let expected_segment = SegmentPlain { value: " X".into() };
|
||||
assert_eq!(*segment,expected_segment.into());
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_text_block_fmt(&mut self) {
|
||||
let program = "''' \n\n X\n Y";
|
||||
self.test_shape(program,|shape:&TextBlockFmt<Ast>| {
|
||||
assert_eq!(shape.spaces,2);
|
||||
assert_eq!(shape.offset,0);
|
||||
assert_eq!(shape.text.len(),2);
|
||||
|
||||
let (line1,line2) = (&shape.text).expect_tuple();
|
||||
let (empty_line,) = (&line1.empty_lines).expect_tuple();
|
||||
assert_eq!(*empty_line,0);
|
||||
let (segment,) = (&line1.text).expect_tuple();
|
||||
let expected_segment = SegmentPlain { value: " X".into() };
|
||||
assert_eq!(*segment,expected_segment.into());
|
||||
|
||||
assert!(line2.empty_lines.is_empty());
|
||||
let (segment,) = (&line2.text).expect_tuple();
|
||||
let expected_segment = SegmentPlain { value: " Y".into() };
|
||||
assert_eq!(*segment,expected_segment.into());
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
fn deserialize_unfinished_text(&mut self) {
|
||||
let unfinished = r#""\"#;
|
||||
self.test_shape(unfinished,|shape:&TextUnclosed<Ast>| {
|
||||
let line = &shape.line;
|
||||
let line:&TextLineRaw = line.try_into().unwrap();
|
||||
|
||||
let (segment,) = (&line.text).expect_tuple();
|
||||
let expected = Unfinished {};
|
||||
assert_eq!(*segment,expected.into());
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_dangling_base(&mut self) {
|
||||
self.test_shape("16_",|shape:&DanglingBase| {
|
||||
assert_eq!(shape.base,"16");
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_prefix(&mut self) {
|
||||
self.test_shape("foo bar",|shape:&Prefix<Ast>| {
|
||||
assert_var(&shape.func,"foo");
|
||||
assert_eq!(shape.off,3);
|
||||
assert_var(&shape.arg,"bar");
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_infix(&mut self) {
|
||||
self.test_shape("foo + bar",|shape:&Infix<Ast>| {
|
||||
assert_var(&shape.larg,"foo");
|
||||
assert_eq!(shape.loff,1);
|
||||
assert_opr(&shape.opr,"+");
|
||||
assert_eq!(shape.roff,2);
|
||||
assert_var(&shape.rarg,"bar");
|
||||
});
|
||||
}
|
||||
fn deserialize_left(&mut self) {
|
||||
self.test_shape("foo +",|shape:&SectionLeft<Ast>| {
|
||||
assert_var(&shape.arg,"foo");
|
||||
assert_eq!(shape.off,1);
|
||||
assert_opr(&shape.opr,"+");
|
||||
});
|
||||
}
|
||||
fn deserialize_right(&mut self) {
|
||||
self.test_shape("+ bar",|shape:&SectionRight<Ast>| {
|
||||
assert_opr(&shape.opr,"+");
|
||||
assert_eq!(shape.off,1);
|
||||
assert_var(&shape.arg,"bar");
|
||||
});
|
||||
}
|
||||
fn deserialize_sides(&mut self) {
|
||||
self.test_shape("+",|shape:&SectionSides<Ast>| {
|
||||
assert_opr(&shape.opr,"+");
|
||||
});
|
||||
}
|
||||
|
||||
fn deserialize_block(&mut self) {
|
||||
self.test_shape(" foo\n bar",|block:&Block<Ast>| {
|
||||
assert_eq!(block.ty,BlockType::Continuous{});
|
||||
assert_eq!(block.indent,1);
|
||||
assert_eq!(block.empty_lines.len(),0);
|
||||
assert_eq!(block.is_orphan,true);
|
||||
|
||||
let first_line = &block.first_line;
|
||||
assert_eq!(first_line.off,0);
|
||||
assert_var(&first_line.elem,"foo");
|
||||
|
||||
let (second_line,) = (&block.lines).expect_tuple();
|
||||
assert_eq!(second_line.off,0);
|
||||
assert_var(second_line.elem.as_ref().unwrap(),"bar");
|
||||
});
|
||||
}
|
||||
|
||||
/// Tests parsing a number of sample macro usages.
|
||||
///
|
||||
/// As macros usually generate really huge ASTs, this test only checks
/// that we are able to deserialize the response and that it is a macro
/// match node. Node contents are not covered.
|
||||
fn deserialize_macro_matches(&mut self) {
|
||||
let macro_usages = vec!
|
||||
[ "foo -> bar"
|
||||
, "()"
|
||||
, "(foo -> bar)"
|
||||
, "a b c -> bar"
|
||||
, "type Maybe a\n Just val:a"
|
||||
, "foreign Python3\n bar"
|
||||
, "if foo > 8 then 10 else 9"
|
||||
, "skip bar"
|
||||
, "freeze bar"
|
||||
, "case foo of\n bar"
|
||||
];
|
||||
|
||||
for macro_usage in macro_usages.iter() {
|
||||
let ast = self.parse_line(macro_usage);
|
||||
expect_shape::<Match<Ast>>(&ast);
|
||||
};
|
||||
}
|
||||
|
||||
fn deserialize_macro_ambiguous(&mut self) {
|
||||
self.test_shape("if foo",|shape:&Ambiguous| {
|
||||
let segment = &shape.segs.head;
|
||||
assert_var(&segment.head,"if");
|
||||
|
||||
let segment_body = segment.body.as_ref().unwrap();
|
||||
assert_eq!(segment_body.off,2);
|
||||
assert_var(&segment_body.wrapped,"foo");
|
||||
});
|
||||
}
|
||||
|
||||
fn run(&mut self) {
|
||||
// Shapes not covered by separate test:
|
||||
// * Opr (doesn't parse on its own, covered by Infix and other)
|
||||
// * Module (covered by every single test, as parser wraps everything
|
||||
// into module)
|
||||
self.deserialize_unrecognized();
|
||||
self.deserialize_invalid_quote();
|
||||
self.deserialize_inline_block();
|
||||
self.deserialize_blank();
|
||||
self.deserialize_var();
|
||||
self.deserialize_cons();
|
||||
self.deserialize_mod();
|
||||
self.deserialize_invalid_suffix();
|
||||
self.deserialize_number();
|
||||
self.deserialize_text_line_raw();
|
||||
self.deserialize_text_line_fmt();
|
||||
self.deserialize_text_block_raw();
|
||||
self.deserialize_text_block_fmt();
|
||||
self.deserialize_unfinished_text();
|
||||
self.deserialize_dangling_base();
|
||||
self.deserialize_prefix();
|
||||
self.deserialize_infix();
|
||||
self.deserialize_left();
|
||||
self.deserialize_right();
|
||||
self.deserialize_sides();
|
||||
self.deserialize_block();
|
||||
self.deserialize_macro_matches();
|
||||
self.deserialize_macro_ambiguous();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// A single entry point for all the tests here using external parser.
|
||||
///
|
||||
/// Setting up the parser is costly, so we run all tests as a single batch.
|
||||
/// Until proper CI solution for calling external parser is devised, this
|
||||
/// test is marked with `#[ignore]`.
|
||||
#[test]
|
||||
#[ignore]
|
||||
fn parser_tests() {
|
||||
Fixture::new().run()
|
||||
}
|
gui/lib/ide/parser/tests/web.rs (new file, 23 lines)
@ -0,0 +1,23 @@
|
||||
use parser::Parser;
|
||||
use wasm_bindgen_test::{wasm_bindgen_test_configure, wasm_bindgen_test};
|
||||
use parser::api::Error::ParsingError;
|
||||
|
||||
wasm_bindgen_test_configure!(run_in_browser);
|
||||
|
||||
|
||||
#[wasm_bindgen_test]
|
||||
fn web_test() {
|
||||
let mut parser = Parser::new_or_panic();
|
||||
|
||||
let mut parse = |input| {
|
||||
match parser.parse(String::from(input)) {
|
||||
Err(ParsingError(str)) => str,
|
||||
_ => panic!("Not implemented.")
|
||||
}
|
||||
};
|
||||
|
||||
assert_eq!(parse(""), "\
|
||||
{\"shape\":{\"Module\":{\"lines\":[{\"elem\":null,\"off\":0}]}}\
|
||||
,\"span\":0}"
|
||||
);
|
||||
}
|
gui/lib/ide/utils/Cargo.toml (new file, 12 lines)
@ -0,0 +1,12 @@
|
||||
[package]
|
||||
name = "utils"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <contact@luna-lang.org>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
futures = "0.3.1"
|
||||
enso-prelude = { version = "0.1.0" , path = "../../prelude" }
|
gui/lib/ide/utils/src/lib.rs (new file, 15 lines)
@ -0,0 +1,15 @@
|
||||
//! General purpose functions to be reused between components, not belonging to
//! any other crate and yet not worth being split into their own crates.
|
||||
|
||||
#![warn(missing_docs)]
|
||||
#![warn(trivial_casts)]
|
||||
#![warn(trivial_numeric_casts)]
|
||||
#![warn(unused_import_braces)]
|
||||
#![warn(unused_qualifications)]
|
||||
#![warn(unsafe_code)]
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
|
||||
pub use enso_prelude as prelude;
|
||||
|
||||
pub mod test;
|
gui/lib/ide/utils/src/test.rs (new file, 39 lines)
@ -0,0 +1,39 @@
|
||||
//! Module with general purpose utilities meant to be used in tests.
|
||||
|
||||
use futures::Stream;
|
||||
use std::future::Future;
|
||||
use std::pin::Pin;
|
||||
use std::task::Context;
|
||||
use std::task::Poll;
|
||||
|
||||
|
||||
|
||||
// ======================
|
||||
// === Task Execution ===
|
||||
// ======================
|
||||
|
||||
/// Polls the future, performing any available work.
|
||||
///
|
||||
/// If the future is complete, returns its result. Otherwise, returns control
/// when stalled.
///
/// It is not legal to call this on a future that has already completed.
|
||||
pub fn poll_future_output<F : Future>(f:&mut Pin<Box<F>>) -> Option<F::Output> {
|
||||
let mut ctx = Context::from_waker(futures::task::noop_waker_ref());
|
||||
match f.as_mut().poll(&mut ctx) {
|
||||
Poll::Ready(result) => Some(result),
|
||||
Poll::Pending => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Polls the stream, performing any available work. If a new value is
|
||||
/// ready, returns it.
|
||||
///
|
||||
/// Note that this API hides the difference between value not being available
|
||||
/// yet and stream being finished.
|
||||
pub fn poll_stream_output<S : Stream + ?Sized>(f:&mut Pin<Box<S>>) -> Option<S::Item> {
|
||||
let mut ctx = Context::from_waker(futures::task::noop_waker_ref());
|
||||
match f.as_mut().poll_next(&mut ctx) {
|
||||
Poll::Ready(result) => result,
|
||||
Poll::Pending => None,
|
||||
}
|
||||
}
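/// A minimal sketch, not part of the original file: a future that is already
/// ready yields its output on the first poll.
#[test]
fn poll_ready_future() {
    let mut future = Box::pin(async { 42 });
    assert_eq!(poll_future_output(&mut future), Some(42));
}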
|
gui/lib/macro-utils/Cargo.toml (new file, 17 lines)
@ -0,0 +1,17 @@
|
||||
[package]
|
||||
name = "macro-utils"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <contact@luna-lang.org>"]
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
enso-prelude = { version = "0.1.0" , path = "../prelude" }
|
||||
proc-macro2 = "1.0"
|
||||
quote = "1.0"
|
||||
|
||||
[dependencies.syn]
|
||||
version = "1.0"
|
||||
features = [
|
||||
'extra-traits',
|
||||
'full' # for syn::ItemStruct
|
||||
]
|
gui/lib/macro-utils/src/lib.rs (new file, 302 lines)
@ -0,0 +1,302 @@
|
||||
//! A number of helper functions meant to be used in procedural macro
//! definitions.
|
||||
|
||||
#![warn(missing_docs)]
|
||||
|
||||
pub use enso_prelude as prelude;
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use quote::quote;
|
||||
use syn;
|
||||
use syn::visit::Visit;
|
||||
use proc_macro2::TokenStream;
|
||||
use proc_macro2::TokenTree;
|
||||
|
||||
|
||||
|
||||
// =========================
|
||||
// === Token Stream Utils ===
|
||||
// =========================
|
||||
|
||||
/// Maps all the tokens in the stream using a given function.
|
||||
pub fn map_tokens<F:Fn(TokenTree) -> TokenTree>
|
||||
(input:TokenStream, f:F) -> TokenStream {
|
||||
let ret_iter = input.into_iter().map(f);
|
||||
TokenStream::from_iter(ret_iter)
|
||||
}
|
||||
|
||||
/// Rewrites the stream, replacing each token with a sequence of tokens
/// returned by the given function. Groups (e.g. a token tree within braces)
/// are unpacked, rewritten, and repacked into groups -- the function is
/// applied recursively.
|
||||
pub fn rewrite_stream
|
||||
<F:Fn(TokenTree) -> TokenStream + Copy>
|
||||
(input:TokenStream, f:F) -> TokenStream {
|
||||
let mut ret = TokenStream::new();
|
||||
for token in input.into_iter() {
|
||||
match token {
|
||||
proc_macro2::TokenTree::Group(group) => {
|
||||
let delim = group.delimiter();
|
||||
let span = group.span();
|
||||
let rewritten = rewrite_stream(group.stream(), f);
|
||||
let mut new_group = proc_macro2::Group::new(delim,rewritten);
|
||||
new_group.set_span(span);
|
||||
let new_group = vec![TokenTree::from(new_group)];
|
||||
ret.extend(new_group.into_iter())
|
||||
}
|
||||
_ => ret.extend(f(token)),
|
||||
}
|
||||
}
|
||||
ret
|
||||
}
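/// Illustrative usage, not part of the original file: `rewrite_stream`
/// duplicates plain tokens while descending into, and repacking, groups.
#[test]
fn rewrite_stream_duplicates_tokens() {
    let input:TokenStream = "foo (bar)".parse().unwrap();
    let duplicate = |token:TokenTree| {
        let mut stream = TokenStream::new();
        stream.extend(vec![token.clone(), token]);
        stream
    };
    let output = rewrite_stream(input, duplicate);
    // The top level now contains `foo`, `foo` and the single repacked group.
    assert_eq!(output.into_iter().count(), 3);
}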
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === Token Utils ===
|
||||
// ===================
|
||||
|
||||
/// Is the given token an identifier matching a given string?
|
||||
pub fn matching_ident(token:&TokenTree, name:&str) -> bool {
|
||||
match token {
|
||||
TokenTree::Ident(ident) => *ident == name,
|
||||
_ => false,
|
||||
}
|
||||
}
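/// Illustrative check, not part of the original file, of `matching_ident`.
#[test]
fn matching_ident_example() {
    let foo   = proc_macro2::Ident::new("foo", proc_macro2::Span::call_site());
    let token = TokenTree::from(foo);
    assert!( matching_ident(&token,"foo"));
    assert!(!matching_ident(&token,"bar"));
}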
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// === Repr ===
|
||||
// ============
|
||||
|
||||
/// Obtains text representation of given `ToTokens`-compatible input.
|
||||
pub fn repr<T: quote::ToTokens>(t:&T) -> String {
|
||||
quote!(#t).to_string()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === Field Utils ===
|
||||
// ===================
|
||||
|
||||
/// Collects all fields, named or not.
|
||||
pub fn fields_list(fields:&syn::Fields) -> Vec<&syn::Field> {
|
||||
match fields {
|
||||
syn::Fields::Named (ref f) => f.named .iter().collect(),
|
||||
syn::Fields::Unnamed(ref f) => f.unnamed.iter().collect(),
|
||||
syn::Fields::Unit => default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the token that refers to the field.
///
/// It is the field name for named fields and the field index for unnamed
/// fields.
|
||||
pub fn field_ident_token(field:&syn::Field, index:syn::Index) -> TokenStream {
|
||||
match &field.ident {
|
||||
Some(ident) => quote!(#ident),
|
||||
None => quote!(#index),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =======================
|
||||
// === Type Path Utils ===
|
||||
// =======================
|
||||
|
||||
/// Obtain list of generic arguments on the path's segment.
|
||||
pub fn path_segment_generic_args
|
||||
(segment:&syn::PathSegment) -> Vec<&syn::GenericArgument> {
|
||||
match segment.arguments {
|
||||
syn::PathArguments::AngleBracketed(ref args) =>
|
||||
args.args.iter().collect(),
|
||||
_ =>
|
||||
Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Obtain list of generic arguments on the path's last segment.
|
||||
///
|
||||
/// Empty, if path contains no segments.
|
||||
pub fn ty_path_generic_args
|
||||
(ty_path:&syn::TypePath) -> Vec<&syn::GenericArgument> {
|
||||
ty_path.path.segments.last().map_or(Vec::new(), path_segment_generic_args)
|
||||
}
|
||||
|
||||
/// Obtain list of type arguments on the path's last segment.
|
||||
pub fn ty_path_type_args
|
||||
(ty_path:&syn::TypePath) -> Vec<&syn::Type> {
|
||||
ty_path_generic_args(ty_path).iter().filter_map( |generic_arg| {
|
||||
match generic_arg {
|
||||
syn::GenericArgument::Type(t) => Some(t),
|
||||
_ => None,
|
||||
}
|
||||
}).collect()
|
||||
}
|
||||
|
||||
/// Last type argument of the last segment on the type path.
|
||||
pub fn last_type_arg(ty_path:&syn::TypePath) -> Option<&syn::GenericArgument> {
|
||||
ty_path_generic_args(ty_path).last().copied()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =====================
|
||||
// === Collect Types ===
|
||||
// =====================
|
||||
|
||||
/// Visitor that accumulates all visited `syn::TypePath`.
|
||||
#[derive(Default)]
|
||||
pub struct TypeGatherer<'ast> {
|
||||
/// Observed types accumulator.
|
||||
pub types: Vec<&'ast syn::TypePath>
|
||||
}
|
||||
|
||||
impl<'ast> Visit<'ast> for TypeGatherer<'ast> {
|
||||
fn visit_type_path(&mut self, node:&'ast syn::TypePath) {
|
||||
self.types.push(node);
|
||||
syn::visit::visit_type_path(self, node);
|
||||
}
|
||||
}
|
||||
|
||||
/// All `TypePath`s in the given `Type`'s subtree.
|
||||
pub fn gather_all_types(node:&syn::Type) -> Vec<&syn::TypePath> {
|
||||
let mut type_gather = TypeGatherer::default();
|
||||
type_gather.visit_type(node);
|
||||
type_gather.types
|
||||
}
|
||||
|
||||
/// All text representations of `TypePath`s in the given `Type`'s subtree.
|
||||
pub fn gather_all_type_reprs(node:&syn::Type) -> Vec<String> {
|
||||
gather_all_types(node).iter().map(|t| repr(t)).collect()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =======================
|
||||
// === Type Dependency ===
|
||||
// =======================
|
||||
|
||||
/// Naive type equality test by comparing its representation with a string.
|
||||
pub fn type_matches_repr(ty:&syn::Type, target_repr:&str) -> bool {
|
||||
repr(ty) == target_repr
|
||||
}
|
||||
|
||||
/// Naive type equality test by comparing their text representations.
|
||||
pub fn type_matches(ty:&syn::Type, target_param:&syn::GenericParam) -> bool {
|
||||
type_matches_repr(ty, &repr(target_param))
|
||||
}
|
||||
|
||||
/// Checks whether the type depends on the given type parameter.
|
||||
pub fn type_depends_on(ty:&syn::Type, target_param:&syn::GenericParam) -> bool {
|
||||
let target_param = repr(target_param);
|
||||
let relevant_types = gather_all_types(ty);
|
||||
relevant_types.iter().any(|ty| repr(ty) == target_param)
|
||||
}
|
||||
|
||||
/// Checks whether the enum variant depends on the given type parameter.
|
||||
pub fn variant_depends_on
|
||||
(var:&syn::Variant, target_param:&syn::GenericParam) -> bool {
|
||||
var.fields.iter().any(|field| type_depends_on(&field.ty, target_param))
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
fn parse<T:syn::parse::Parse>(code:&str) -> T {
|
||||
syn::parse_str(code).unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn repr_round_trips() {
|
||||
let program = "pub fn repr<T: quote::ToTokens>(t: &T) -> String {}";
|
||||
let tokens = parse::<TokenStream>(program);
|
||||
let quoted_program = repr(&tokens);
|
||||
let tokens2 = parse::<TokenStream>(&quoted_program);
|
||||
// check only second round-trip, first is allowed to break whitespace
|
||||
assert_eq!(repr(&tokens), repr(&tokens2));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fields_list_test() {
|
||||
let tuple_like = "struct Unnamed(i32, String, T);";
|
||||
let proper_struct = "struct Named{i: i32, s: String, t: T}";
|
||||
let expected_types = vec!["i32", "String", "T"];
|
||||
|
||||
fn assert_field_types(program:&str, expected_types:&[&str]) {
|
||||
let tokens = parse::<syn::ItemStruct>(program);
|
||||
let fields = fields_list(&tokens.fields);
|
||||
let types = fields.iter().map(|f| repr(&f.ty));
|
||||
assert_eq!(Vec::from_iter(types), expected_types);
|
||||
}
|
||||
|
||||
assert_field_types(tuple_like, &expected_types);
|
||||
assert_field_types(proper_struct, &expected_types);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn type_dependency() {
|
||||
let param:syn::GenericParam = parse("T");
|
||||
let depends = |code| {
|
||||
let ty:syn::Type = parse(code);
|
||||
type_depends_on(&ty, &param)
|
||||
};
|
||||
|
||||
// sample types that depend on `T`
|
||||
let dependents = vec!{
|
||||
"T",
|
||||
"Option<T>",
|
||||
"Pair<T, U>",
|
||||
"Pair<U, T>",
|
||||
"Pair<U, (T,)>",
|
||||
"&T",
|
||||
"&'t mut T",
|
||||
};
|
||||
// sample types that do not depend on `T`
|
||||
let independents = vec!{
|
||||
"Tt",
|
||||
"Option<Tt>",
|
||||
"Pair<Tt, U>",
|
||||
"Pair<U, Tt>",
|
||||
"Pair<U, Tt>",
|
||||
"i32",
|
||||
"&str",
|
||||
};
|
||||
for dependent in dependents {
|
||||
assert!(depends(dependent), "{} must depend on {}"
|
||||
, repr(&dependent), repr(&param));
|
||||
}
|
||||
for independent in independents {
|
||||
assert!(!depends(independent), "{} must not depend on {}"
|
||||
, repr(&independent), repr(&param));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn collecting_type_path_args() {
|
||||
fn check(expected_type_args:Vec<&str>, ty_path:&str) {
|
||||
let ty_path = parse(ty_path);
|
||||
let args = super::ty_path_type_args(&ty_path);
|
||||
assert_eq!(expected_type_args.len(), args.len());
|
||||
let zipped = expected_type_args.iter().zip(args.iter());
|
||||
for (expected,got) in zipped {
|
||||
assert_eq!(expected, &repr(got));
|
||||
}
|
||||
}
|
||||
check(vec!["T"] , "std::Option<T>");
|
||||
check(vec!["U"] , "std::Option<U>");
|
||||
check(vec!["A", "B"], "Either<A,B>");
|
||||
assert_eq!(super::last_type_arg(&parse("i32")), None);
|
||||
assert_eq!(repr(&super::last_type_arg(&parse("Foo<C>"))), "C");
|
||||
}
|
||||
}
|
@ -9,4 +9,4 @@ edition = "2018"
|
||||
[features]
|
||||
|
||||
[dependencies]
|
||||
basegl-prelude = { version = "0.1.0", path = "../prelude" }
|
||||
enso-prelude = { version = "0.1.0", path = "../prelude" }
|
||||
|
@ -7,7 +7,7 @@
|
||||
#![allow(unused_macros)]
|
||||
#![allow(clippy::option_map_unit_fn)]
|
||||
|
||||
use basegl_prelude::*;
|
||||
use enso_prelude::*;
|
||||
|
||||
// ================
|
||||
// === TypeList ===
|
||||
|
@ -1,5 +1,5 @@
|
||||
[package]
|
||||
name = "basegl-prelude"
|
||||
name = "enso-prelude"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <contact@luna-lang.org>"]
|
||||
edition = "2018"
|
||||
|
@ -260,7 +260,7 @@ impl<T> RcOps for Rc<T> {
|
||||
// ===================
|
||||
|
||||
/// Like `Display` trait but for types. However, unlike `Display` it defaults to
|
||||
/// `core::any::type_name` if not provided with explicit implementation.
|
||||
/// `impl::any::type_name` if not provided with explicit implementation.
|
||||
pub trait TypeDisplay {
|
||||
fn type_display() -> String;
|
||||
}
|
||||
@ -346,8 +346,8 @@ impl <T:Scalar,R:DimName,C:DimName,S> TypeDisplay for Matrix<T,R,C,S> {
|
||||
/////
|
||||
///// ```compile_fail
|
||||
///// use std::rc::Rc;
|
||||
///// use core::cell::RefCell;
|
||||
///// use core::cell::Ref;
|
||||
///// use impl::cell::RefCell;
|
||||
///// use impl::cell::Ref;
|
||||
/////
|
||||
///// pub struct SharedDirtyFlag<T> {
|
||||
///// data: Rc<RefCell<T>>
|
||||
|
@ -11,4 +11,4 @@ default = []
|
||||
|
||||
[dependencies]
|
||||
shapely-macros = { version = "0.1.0" , path = "../macros" }
|
||||
basegl-prelude = { version = "0.1.0" , path = "../../prelude" }
|
||||
enso-prelude = { version = "0.1.0" , path = "../../prelude" }
|
||||
|
gui/lib/shapely/impl/src/generator.rs (new file, 90 lines)
@ -0,0 +1,90 @@
|
||||
//! Helper code meant to be used by the code generated through usage of macros
|
||||
//! from the `shapely-macros` crate.
|
||||
|
||||
pub use shapely_macros::*;
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use std::ops::Generator;
|
||||
use std::ops::GeneratorState;
|
||||
use std::pin::Pin;
|
||||
|
||||
|
||||
|
||||
// ==========================
|
||||
// === GeneratingIterator ===
|
||||
// ==========================
|
||||
|
||||
/// Iterates over values yielded from the wrapped `Generator`.
|
||||
#[derive(Debug)]
|
||||
pub struct GeneratingIterator<G: Generator>(pub G);
|
||||
|
||||
impl<G> Iterator for GeneratingIterator<G>
|
||||
where G: Generator<Return = ()> + Unpin {
|
||||
type Item = G::Yield;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match Pin::new(&mut self.0).resume() {
|
||||
GeneratorState::Yielded(element) => Some(element),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =====================
|
||||
// === EmptyIterator ===
|
||||
// =====================
|
||||
|
||||
/// An `Iterator` type that yields no values of the given type `T`.
|
||||
#[derive(Derivative)]
|
||||
#[derivative(Debug,Default(bound=""))]
|
||||
pub struct EmptyIterator<T>(PhantomData<T>);
|
||||
|
||||
impl<T> EmptyIterator<T> {
|
||||
/// Create a new empty iterator.
|
||||
pub fn new() -> Self {
|
||||
default()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Iterator for EmptyIterator<T> {
|
||||
type Item = T;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn empty_iterator_works_for_any_type() {
|
||||
for elem in EmptyIterator::new() {
|
||||
elem: i32;
|
||||
}
|
||||
for elem in EmptyIterator::new() {
|
||||
elem: String;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generating_iterator_works() {
|
||||
let generator = || {
|
||||
yield 0;
|
||||
yield 1;
|
||||
yield 2;
|
||||
};
|
||||
let expected_numbers = vec!(0, 1, 2);
|
||||
let generator_iter = GeneratingIterator(generator);
|
||||
let collected_result: Vec<_> = generator_iter.collect();
|
||||
assert_eq!(collected_result, expected_numbers);
|
||||
}
|
||||
}
|
@ -10,18 +10,21 @@
|
||||
#![warn(missing_debug_implementations)]
|
||||
#![feature(generators, generator_trait)]
|
||||
#![feature(specialization)]
|
||||
#![feature(type_ascription)]
|
||||
#![feature(overlapping_marker_traits)]
|
||||
|
||||
pub mod generator;
|
||||
pub mod shared;
|
||||
pub mod singleton;
|
||||
pub mod cartesian;
|
||||
|
||||
pub use enso_prelude as prelude;
|
||||
pub use shapely_macros::*;
|
||||
|
||||
use std::ops::Generator;
|
||||
use std::ops::GeneratorState;
|
||||
use std::pin::Pin;
|
||||
use basegl_prelude::*;
|
||||
pub use generator::EmptyIterator;
|
||||
pub use generator::GeneratingIterator;
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
|
||||
/// Generates a newtype wrapper for the provided types. It also generates a lot of impls,
|
||||
@ -83,45 +86,6 @@ macro_rules! derive_clone_plus {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ========================
|
||||
// === IterForGenerator ===
|
||||
// ========================
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct IterForGenerator<G: Generator>(pub G);
|
||||
|
||||
impl<G> Iterator for IterForGenerator<G>
|
||||
where G: Generator<Return = ()> + Unpin {
|
||||
type Item = G::Yield;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match { Pin::new(&mut self.0).resume() } {
|
||||
GeneratorState::Yielded(element) => Some(element),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ======================
|
||||
// === EmptyGenerator ===
|
||||
// ======================
|
||||
|
||||
#[derive(Derivative)]
|
||||
#[derivative(Debug,Default(bound=""))]
|
||||
pub struct EmptyGenerator<T>(PhantomData<T>);
|
||||
|
||||
impl<T> EmptyGenerator<T> {
|
||||
pub fn new() -> Self { default() }
|
||||
}
|
||||
|
||||
impl<T> Iterator for EmptyGenerator<T> {
|
||||
type Item = T;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
gui/lib/shapely/impl/tests/derivation.rs (new file, 174 lines)
@ -0,0 +1,174 @@
|
||||
#![feature(generators)]
|
||||
#![feature(type_alias_impl_trait)]
|
||||
|
||||
use shapely::*;
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Utils ===
|
||||
// =============
|
||||
|
||||
/// To fail compilation if `T` is not `IntoIterator`.
|
||||
fn is_into_iterator<T: IntoIterator>(){}
|
||||
|
||||
fn to_vector<T>(t: T) -> Vec<T::Item>
|
||||
where T : IntoIterator,
|
||||
T::Item: Copy {
|
||||
t.into_iter().collect()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =====================================
|
||||
// === Struct with single type param ===
|
||||
// =====================================
|
||||
|
||||
#[derive(Iterator, IteratorMut, Eq, PartialEq, Debug)]
|
||||
pub struct PairTT<T>(T, T);
|
||||
|
||||
#[test]
|
||||
fn derive_iterator_single_t() {
|
||||
is_into_iterator::<& PairTT<i32>>();
|
||||
is_into_iterator::<&mut PairTT<i32>>();
|
||||
|
||||
let get_pair = || PairTT(4, 49);
|
||||
|
||||
// just collect values
|
||||
let pair = get_pair();
|
||||
let collected = pair.iter().copied().collect::<Vec<i32>>();
|
||||
assert_eq!(collected, vec![4, 49]);
|
||||
|
||||
// IntoIterator for &mut Val
|
||||
let mut pair = get_pair();
|
||||
for i in &mut pair {
|
||||
*i = *i + 1
|
||||
}
|
||||
assert_eq!(pair, PairTT(5, 50));
|
||||
|
||||
// iter_mut
|
||||
for i in pair.iter_mut() {
|
||||
*i = *i + 1
|
||||
}
|
||||
assert_eq!(pair, PairTT(6, 51));
|
||||
|
||||
// IntoIterator for & Val
|
||||
let pair = get_pair(); // not mut anymore
|
||||
let mut sum = 0;
|
||||
for i in &pair {
|
||||
sum += i;
|
||||
}
|
||||
assert_eq!(sum, pair.0 + pair.1)
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===================================
|
||||
// === Struct with two type params ===
|
||||
// ===================================
|
||||
|
||||
#[derive(Iterator, IteratorMut, Eq, PartialEq, Debug)]
|
||||
pub struct PairUV<U,V>(U,V);
|
||||
|
||||
#[test]
|
||||
fn two_params() {
|
||||
// verify that iter uses only the last type param field
|
||||
let pair = PairUV(5, 10);
|
||||
assert_eq!(to_vector(pair.iter().copied()), vec![10]);
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ======================================
|
||||
// === Struct without any type params ===
|
||||
// ======================================
|
||||
|
||||
#[derive(Iterator, Eq, PartialEq, Debug)]
|
||||
pub struct Monomorphic(i32);
|
||||
|
||||
#[test]
|
||||
fn no_params() {
|
||||
// `derive(Iterator)` is a no-op for structures with no type parameters.
// We just make sure that it does not cause a compilation error.
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ========================
|
||||
// === Enumeration Type ===
|
||||
// ========================
|
||||
|
||||
#[derive(Iterator)]
|
||||
#[warn(dead_code)] // value is never read and shouldn't be
|
||||
pub struct Unrecognized{ pub value : String }
|
||||
|
||||
#[derive(Iterator)]
|
||||
pub enum Foo<U, T> {
|
||||
Con1(PairUV<U, T>),
|
||||
Con2(PairTT<T>),
|
||||
Con3(Unrecognized)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enum_is_into_iterator() {
|
||||
is_into_iterator::<&Foo<i32, i32>>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enum_iter1() {
|
||||
let v = Foo::Con1(PairUV(4, 50));
|
||||
let mut v_iter = v.into_iter();
|
||||
assert_eq!(*v_iter.next().unwrap(),50);
|
||||
assert!(v_iter.next().is_none());
|
||||
}
|
||||
#[test]
|
||||
fn enum_iter2() {
|
||||
let v: Foo<i32, i32> = Foo::Con2(PairTT(6,60));
|
||||
let mut v_iter = v.into_iter();
|
||||
assert_eq!(*v_iter.next().unwrap(),6);
|
||||
assert_eq!(*v_iter.next().unwrap(),60);
|
||||
assert!(v_iter.next().is_none());
|
||||
}
|
||||
#[test]
|
||||
fn enum_iter3() {
|
||||
let v: Foo<i32, i32> = Foo::Con3(Unrecognized{value:"foo".into()});
|
||||
let mut v_iter = v.into_iter();
|
||||
assert!(v_iter.next().is_none());
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =======================
|
||||
// === Dependent Types ===
|
||||
// =======================
|
||||
|
||||
#[derive(Iterator)]
|
||||
#[derive(IteratorMut)]
|
||||
pub struct DependentTest<U, T> {
|
||||
a:T,
|
||||
b:(T,U,PairUV<U, T>),
|
||||
// is never used, as it doesn't depend on `T` (last param)
|
||||
#[allow(dead_code)]
|
||||
c:PairTT<U>,
|
||||
d:(i32, Option<Vec<T>>),
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dependent_test_iter() {
|
||||
let val = DependentTest{
|
||||
a : 1,
|
||||
b : (2,3,PairUV(4,5)),
|
||||
c : PairTT(6,6),
|
||||
d : (7, Some(vec![8,9])),
|
||||
};
|
||||
let mut v_iter = val.into_iter();
|
||||
assert_eq!(*v_iter.next().unwrap(), 1);
|
||||
assert_eq!(*v_iter.next().unwrap(), 2);
|
||||
// 3 is `U` in tuple
|
||||
// 4 is `U` in <U,T> pair
|
||||
assert_eq!(*v_iter.next().unwrap(), 5);
|
||||
// 7 is `i32` in tuple
|
||||
assert_eq!(*v_iter.next().unwrap(), 8);
|
||||
assert_eq!(*v_iter.next().unwrap(), 9);
|
||||
assert!(v_iter.next().is_none());
|
||||
}
|
@ -11,11 +11,12 @@ proc-macro = true
|
||||
default = []
|
||||
|
||||
[dependencies]
|
||||
basegl-prelude = { version = "0.1.0" , path = "../../prelude" }
|
||||
proc-macro2 = "1.0"
|
||||
quote = "1.0"
|
||||
Inflector = "0.11.4"
|
||||
itertools = "0.8.1"
|
||||
enso-prelude = { version = "0.1.0" , path = "../../prelude" }
|
||||
macro-utils = { version = "0.1.0" , path = "../../macro-utils" }
|
||||
proc-macro2 = "1.0"
|
||||
quote = "1.0"
|
||||
Inflector = "0.11.4"
|
||||
itertools = "0.8.1"
|
||||
|
||||
[dependencies.syn]
|
||||
version = "1.0"
|
||||
|
447
gui/lib/shapely/macros/src/derive_iterator.rs
Normal file
@ -0,0 +1,447 @@
|
||||
use crate::prelude::*;
|
||||
|
||||
use macro_utils::fields_list;
|
||||
use macro_utils::field_ident_token;
|
||||
use macro_utils::type_depends_on;
|
||||
use macro_utils::type_matches;
|
||||
use macro_utils::ty_path_type_args;
|
||||
use macro_utils::variant_depends_on;
|
||||
use inflector::Inflector;
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === IsMut ===
|
||||
// =============
|
||||
|
||||
/// Describes whether a mutable or immutable iterator is being derived.
|
||||
#[derive(Clone,Copy,Debug,PartialEq)]
|
||||
pub enum IsMut {
|
||||
Mutable,
|
||||
Immutable,
|
||||
}
|
||||
|
||||
impl IsMut {
|
||||
fn is_mut(self) -> bool {
|
||||
self == IsMut::Mutable
|
||||
}
|
||||
|
||||
/// Returns `mut` token for mutable iterator derivation.
|
||||
fn to_token(self) -> Option<syn::Token![mut]> {
|
||||
self.is_mut().as_some(<syn::Token![mut]>::default())
|
||||
}
|
||||
|
||||
/// Name of method for generating iterator.
|
||||
fn iter_method(self) -> TokenStream {
|
||||
if self.is_mut() {
|
||||
quote!(iter_mut)
|
||||
} else {
|
||||
quote!(iter)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ======================
|
||||
// === DependentValue ===
|
||||
// ======================
|
||||
|
||||
/// A value dependent on the target parameter.
|
||||
///
|
||||
/// Helper methods can be used to generate code yielding values from this.
|
||||
pub struct DependentValue<'t> {
|
||||
/// Type of the value (ref-stripped).
|
||||
pub ty : &'t syn::Type,
|
||||
/// Tokens yielding the value.
|
||||
pub value : TokenStream,
|
||||
/// Parameter type we want to iterate over.
|
||||
pub target_param: &'t syn::GenericParam,
|
||||
/// Whether the value is yielded by reference.
|
||||
pub through_ref : bool
|
||||
}
|
||||
|
||||
impl<'t> DependentValue<'t> {
|
||||
/// Returns `Some` when the type depends on the target parameter and `None` otherwise.
|
||||
pub fn try_new
|
||||
(ty: &'t syn::Type, value:TokenStream, target_param:&'t syn::GenericParam)
|
||||
-> Option<DependentValue<'t>> {
|
||||
if type_depends_on(ty, target_param) {
|
||||
Some(DependentValue{ty,value,target_param,through_ref:false})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Collects dependent sub-values from the tuple value.
|
||||
pub fn collect_tuple
|
||||
(tuple:&'t syn::TypeTuple, target_param:&'t syn::GenericParam)
|
||||
-> Vec<DependentValue<'t>> {
|
||||
tuple.elems.iter().enumerate().filter_map(|(ix,ty)| {
|
||||
let ix = syn::Index::from(ix);
|
||||
let ident = quote!(t.#ix);
|
||||
DependentValue::try_new(ty,ident,target_param)
|
||||
}).collect()
|
||||
}
|
||||
|
||||
/// Generates code yielding all values of target type accessible from this
|
||||
/// value.
|
||||
pub fn yield_value(&self, is_mut:IsMut) -> TokenStream {
|
||||
match self.ty {
|
||||
syn::Type::Tuple(tuple) => self.yield_tuple_value(tuple, is_mut),
|
||||
syn::Type::Path(path) => {
|
||||
if type_matches(&self.ty, &self.target_param) {
|
||||
self.yield_direct_value(is_mut)
|
||||
} else {
|
||||
self.yield_dependent_ty_path_value(path,is_mut)
|
||||
}
|
||||
}
|
||||
_ =>
|
||||
panic!("Don't know how to yield value of type {} from type {}"
|
||||
, repr(&self.target_param), repr(&self.ty)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Code yielding value that directly matches the target parameter type.
|
||||
pub fn yield_direct_value
|
||||
(&self, is_mut:IsMut) -> TokenStream {
|
||||
let value = &self.value;
|
||||
let opt_mut = is_mut.to_token();
|
||||
let opt_ref = (!self.through_ref).as_some(quote!( & #opt_mut ));
|
||||
|
||||
// yield &mut value;
|
||||
quote!( yield #opt_ref #value; )
|
||||
}
|
||||
|
||||
/// Code yielding values from tuple dependent on the target parameter type.
|
||||
pub fn yield_tuple_value
|
||||
(&self, ty:&syn::TypeTuple,is_mut:IsMut)
|
||||
-> TokenStream {
|
||||
let value = &self.value;
|
||||
let mut_kwd = is_mut.to_token();
|
||||
let subfields = DependentValue::collect_tuple(ty, self.target_param);
|
||||
let yield_sub = subfields.iter().map(|f| {
|
||||
f.yield_value(is_mut)
|
||||
}).collect_vec();
|
||||
|
||||
// yield &mut t.0;
|
||||
// yield &mut t.2;
|
||||
quote!( {
|
||||
let t = & #mut_kwd #value;
|
||||
#(#yield_sub)*
|
||||
})
|
||||
}
|
||||
|
||||
/// Obtain the type of iterator-yielded value.
|
||||
///
|
||||
/// Panics when given a type which is not supported for derivation, like
|
||||
/// having a dependent type in a non-last position.
|
||||
pub fn type_path_elem_type(&self, ty_path:&'t syn::TypePath) -> &syn::Type {
|
||||
let mut type_args = ty_path_type_args(ty_path);
|
||||
let last_arg = match type_args.pop() {
|
||||
Some(arg) => arg,
|
||||
None => panic!("Type {} has no type arguments!", repr(&ty_path))
|
||||
};
|
||||
|
||||
// Only the last type argument may depend on the target parameter.
|
||||
for non_last_segment in type_args {
|
||||
assert!(!type_depends_on(non_last_segment, self.target_param)
|
||||
, "Type {} has non-last argument {} that depends on {}"
|
||||
, repr(ty_path)
|
||||
, repr(non_last_segment)
|
||||
, repr(self.target_param)
|
||||
);
|
||||
}
|
||||
assert!(type_depends_on(last_arg, self.target_param));
|
||||
last_arg
|
||||
}
|
||||
|
||||
/// Code yielding values from data dependent on the target parameter type.
|
||||
pub fn yield_dependent_ty_path_value
|
||||
(&self, ty_path:&'t syn::TypePath, is_mut:IsMut)
|
||||
-> TokenStream {
|
||||
let opt_mut = is_mut.to_token();
|
||||
let elem_ty = self.type_path_elem_type(ty_path);
|
||||
let elem = quote!(t);
|
||||
|
||||
let elem_info = DependentValue{
|
||||
value : elem.clone(),
|
||||
target_param : self.target_param,
|
||||
ty : elem_ty,
|
||||
through_ref : true,
|
||||
};
|
||||
let yield_elem = elem_info.yield_value(is_mut);
|
||||
let value = &self.value;
|
||||
let iter_method = if is_mut.is_mut() {
|
||||
quote!(iter_mut)
|
||||
} else {
|
||||
quote!(iter)
|
||||
};
|
||||
|
||||
quote! {
|
||||
for #opt_mut #elem in #value.#iter_method() {
|
||||
#yield_elem
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Collects dependent values from the relevant fields of the struct definition.
|
||||
pub fn collect_struct
|
||||
(data:&'t syn::DataStruct, target_param:&'t syn::GenericParam)
|
||||
-> Vec<DependentValue<'t>> {
|
||||
let fields = fields_list(&data.fields);
|
||||
let dep_field = fields.iter().enumerate().filter_map(|(i,f)| {
|
||||
let ident = field_ident_token(f,i.into());
|
||||
let value = quote!(t.#ident);
|
||||
DependentValue::try_new(&f.ty,value,target_param)
|
||||
});
|
||||
dep_field.collect()
|
||||
}
|
||||
}
|
||||
|
||||
/// Parts of derivation output that are specific to enum- or struct- target.
|
||||
pub struct OutputParts<'ast> {
|
||||
pub iterator_tydefs : TokenStream,
|
||||
pub iter_body : TokenStream,
|
||||
pub iterator_params : Vec<&'ast syn::GenericParam>,
|
||||
}
|
||||
|
||||
/// Common data used when generating derived Iterator impls.
|
||||
///
|
||||
/// Examples are given for `pub struct Foo<S, T> { foo: T }`
|
||||
pub struct DerivingIterator<'ast> {
|
||||
pub data : &'ast syn::Data, // { foo: T }
|
||||
pub ident : &'ast syn::Ident, // Foo
|
||||
pub params : Vec<&'ast syn::GenericParam>, // <S, T>
|
||||
pub t_iterator : syn::Ident, // FooIterator{Mut}
|
||||
pub iterator : syn::Ident, // foo_iterator{_mut}
|
||||
pub target_param : &'ast syn::GenericParam, // T
|
||||
pub is_mut : IsMut, // are we mutable iterator?
|
||||
}
|
||||
|
||||
impl DerivingIterator<'_> {
|
||||
pub fn new<'ast>
|
||||
( decl :&'ast syn::DeriveInput
|
||||
, target_param:&'ast syn::GenericParam
|
||||
, is_mut :IsMut
|
||||
) -> DerivingIterator<'ast> {
|
||||
let mut_or_not = if is_mut.is_mut() { "Mut" } else { "" };
|
||||
let data = &decl.data;
|
||||
let params = decl.generics.params.iter().collect();
|
||||
let ident = &decl.ident;
|
||||
let t_iterator = format!("{}Iterator{}", ident, mut_or_not);
|
||||
let iterator = t_iterator.to_snake_case();
|
||||
let t_iterator = syn::Ident::new(&t_iterator, Span::call_site());
|
||||
let iterator = syn::Ident::new(&iterator , Span::call_site());
|
||||
DerivingIterator {
|
||||
data,
|
||||
ident,
|
||||
params,
|
||||
t_iterator,
|
||||
iterator,
|
||||
target_param,
|
||||
is_mut,
|
||||
}
|
||||
}
|
||||
|
||||
/// Handles all enum-specific parts.
|
||||
pub fn prepare_parts_enum(&self, data:&syn::DataEnum) -> OutputParts {
|
||||
let opt_mut = &self.is_mut.to_token();
|
||||
let t_iterator = &self.t_iterator;
|
||||
let ident = &self.ident;
|
||||
let target_param = &self.target_param;
|
||||
let iterator_params = vec!(self.target_param);
|
||||
let iterator_tydefs = quote!(
|
||||
// type FooIterator<'t, U> =
|
||||
// Box<dyn Iterator<Item=&'t U> + 't>;
|
||||
// type FooIteratorMut<'t, U> =
|
||||
// Box<dyn Iterator<Item=&'t mut U> + 't>;
|
||||
type #t_iterator<'t, #(#iterator_params),*> =
|
||||
Box<dyn Iterator<Item=&'t #opt_mut #target_param> + 't>;
|
||||
);
|
||||
// For variants that use the target type parameter, refer to their
|
||||
// `IntoIterator` implementation. Otherwise, use `EmptyIterator`.
|
||||
let arms = data.variants.iter().map(|var| {
|
||||
let con = &var.ident;
|
||||
let iter = if variant_depends_on(var, target_param) {
|
||||
quote!(elem.into_iter())
|
||||
} else {
|
||||
quote!(shapely::EmptyIterator::new())
|
||||
};
|
||||
quote!(#ident::#con(elem) => Box::new(#iter))
|
||||
});
|
||||
|
||||
// match t {
|
||||
// Foo::Con1(elem) => Box::new(elem.into_iter()),
|
||||
// Foo::Con2(elem) => Box::new(shapely::EmptyIterator::new()),
|
||||
// }
|
||||
let iter_body = quote!( match t { #(#arms,)* } );
|
||||
OutputParts{iterator_tydefs,iter_body,iterator_params}
|
||||
}
|
||||
|
||||
/// Handles all struct-specific parts.
|
||||
pub fn prepare_parts_struct(&self, data:&syn::DataStruct) -> OutputParts {
|
||||
let opt_mut = &self.is_mut.to_token();
|
||||
let t_iterator = &self.t_iterator;
|
||||
let target_param = &self.target_param;
|
||||
let iterator_params = self.params.clone();
|
||||
let iterator_tydefs = quote!(
|
||||
// type FooIterator<'t, T> = impl Iterator<Item = &'t T>;
|
||||
// type FooIteratorMut<'t, T> = impl Iterator<Item = &'t mut T>;
|
||||
type #t_iterator<'t, #(#iterator_params),*> =
|
||||
impl Iterator<Item = &'t #opt_mut #target_param>;
|
||||
);
|
||||
let matched_fields = DependentValue::collect_struct(data, target_param);
|
||||
let yield_fields = matched_fields.iter().map(|field| {
|
||||
field.yield_value(self.is_mut)
|
||||
}).collect_vec();
|
||||
|
||||
// shapely::EmptyIterator::new()
|
||||
let empty_body = quote! { shapely::EmptyIterator::new() };
|
||||
|
||||
// shapely::GeneratingIterator(move || {
|
||||
// yield &t.foo;
|
||||
// })
|
||||
// shapely::GeneratingIterator(move || {
|
||||
// yield &mut t.foo;
|
||||
// })
|
||||
let body = quote! {
|
||||
shapely::GeneratingIterator
|
||||
(move || { #(#yield_fields)* })
|
||||
};
|
||||
|
||||
let iter_body = if matched_fields.is_empty() {
|
||||
empty_body
|
||||
} else {
|
||||
body
|
||||
};
|
||||
OutputParts{iterator_tydefs,iter_body,iterator_params}
|
||||
}
|
||||
|
||||
/// Handles common (between enum and struct) code and assembles it all
|
||||
/// into a final derivation output.
|
||||
#[allow(clippy::cognitive_complexity)]
|
||||
pub fn assemble_output(&self, parts:OutputParts) -> TokenStream {
|
||||
let iterator_tydefs = &parts.iterator_tydefs;
|
||||
let iter_body = &parts.iter_body;
|
||||
let iterator_params = &parts.iterator_params;
|
||||
let opt_mut = &self.is_mut.to_token();
|
||||
let iterator = &self.iterator;
|
||||
let t_iterator = &self.t_iterator;
|
||||
let params = &self.params;
|
||||
let ident = &self.ident;
|
||||
let target_param = &self.target_param;
|
||||
let iter_method = &self.is_mut.iter_method();
|
||||
|
||||
quote!{
|
||||
#iterator_tydefs
|
||||
|
||||
// pub fn foo_iterator<'t, T>
|
||||
// (t: &'t Foo<T>) -> FooIterator<'t, T> {
|
||||
// shapely::GeneratingIterator(move || {
|
||||
// yield &t.foo;
|
||||
// })
|
||||
// }
|
||||
// pub fn foo_iterator_mut<'t, T>
|
||||
// (t: &'t mut Foo<T>) -> FooIteratorMut<'t, T> {
|
||||
// shapely::GeneratingIterator(move || {
|
||||
// yield &t.foo;
|
||||
// })
|
||||
// }
|
||||
pub fn #iterator<'t, #(#params),*>
|
||||
(t: &'t #opt_mut #ident<#(#params),*>)
|
||||
-> #t_iterator<'t, #(#iterator_params),*> {
|
||||
#iter_body
|
||||
}
|
||||
|
||||
// impl<'t, T>
|
||||
// IntoIterator for &'t Foo<T> {
|
||||
// type Item = &'t T;
|
||||
// type IntoIter = FooIterator<'t, T>;
|
||||
// fn into_iter(self) -> FooIterator<'t, T> {
|
||||
// foo_iterator(self)
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// impl<'t, T>
|
||||
// IntoIterator for &'t mut Foo<T> {
|
||||
// type Item = &'t mut T;
|
||||
// type IntoIter = FooIteratorMut<'t, T>;
|
||||
// fn into_iter(self) -> FooIteratorMut<'t, T> {
|
||||
// foo_iterator_mut(self)
|
||||
// }
|
||||
// }
|
||||
impl<'t, #(#params),*>
|
||||
IntoIterator for &'t #opt_mut #ident<#(#params),*> {
|
||||
type Item = &'t #opt_mut #target_param;
|
||||
type IntoIter = #t_iterator<'t, #(#iterator_params),*>;
|
||||
fn into_iter(self) -> #t_iterator<'t, #(#iterator_params),*> {
|
||||
#iterator(self)
|
||||
}
|
||||
}
|
||||
|
||||
// impl Foo<T> {
|
||||
// pub fn iter(&self) -> FooIterator<'_, T> {
|
||||
// #foo_iterator(self)
|
||||
// }
|
||||
// pub fn iter_mut(&mut self) -> FooIteratorMut<'_, T> {
|
||||
// #foo_iterator_mut (self)
|
||||
// }
|
||||
// }
|
||||
impl<#(#params),*> #ident<#(#params),*> {
|
||||
pub fn #iter_method
|
||||
(& #opt_mut self) -> #t_iterator<'_, #(#iterator_params),*> {
|
||||
#iterator(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates the code that derives desired iterator.
|
||||
pub fn output(&self) -> TokenStream {
|
||||
let parts = match self.data {
|
||||
syn::Data::Struct(data) => self.prepare_parts_struct(data),
|
||||
syn::Data::Enum (data) => self.prepare_parts_enum (data),
|
||||
_ =>
|
||||
panic!("Only Structs and Enums can derive(Iterator)!"),
|
||||
};
|
||||
self.assemble_output(parts)
|
||||
}
|
||||
}
|
||||
|
||||
/// Common implementation for deriving iterator through `derive(Iterator)` and
|
||||
/// `derive(IteratorMut)`.
|
||||
pub fn derive
|
||||
(input:proc_macro::TokenStream, is_mut:IsMut) -> proc_macro::TokenStream {
|
||||
let decl = syn::parse_macro_input!(input as syn::DeriveInput);
|
||||
let params = &decl.generics.params.iter().collect::<Vec<_>>();
|
||||
let output = match params.last() {
|
||||
Some(last_param) => {
|
||||
let der = DerivingIterator::new(&decl,last_param,is_mut);
|
||||
der.output()
|
||||
}
|
||||
None =>
|
||||
TokenStream::new(),
|
||||
};
|
||||
output.into()
|
||||
}
|
||||
|
||||
// Note [Expansion Example]
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
// In order to make the definition easier to read, an example expansion of the
|
||||
// following definition was provided for each quotation:
|
||||
//
|
||||
// #[derive(Iterator)]
|
||||
// pub struct Foo<S, T> { foo: T }
|
||||
//
|
||||
// When different output is generated for mutable and immutable content, both
|
||||
// expansions are presented.
|
||||
//
|
||||
// For examples that are enum-specific rather than struct-specific, the
|
||||
// following definition is assumed:
|
||||
//
|
||||
// #[derive(Iterator)]
|
||||
// pub enum Foo<T> {
|
||||
// Con1(Bar<T>),
|
||||
// Con2(Baz),
|
||||
// }
|
||||
|
@ -1,397 +1,79 @@
|
||||
//! This crate defines a custom derive macro `Iterator`. Should not be used
|
||||
//! directly, but only through the `shapely` crate, as it provides utilities
|
||||
//! necessary for the generated code to compile.
|
||||
|
||||
#![warn(missing_docs)]
|
||||
#![warn(trivial_casts)]
|
||||
#![warn(trivial_numeric_casts)]
|
||||
#![warn(unused_import_braces)]
|
||||
#![warn(unused_qualifications)]
|
||||
#![warn(unsafe_code)]
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
|
||||
extern crate proc_macro;
|
||||
|
||||
use basegl_prelude::*;
|
||||
mod derive_iterator;
|
||||
mod overlappable;
|
||||
|
||||
use inflector::Inflector;
|
||||
use proc_macro2::{TokenStream, Ident, Span};
|
||||
use quote::quote;
|
||||
use syn;
|
||||
mod prelude {
|
||||
pub use enso_prelude::*;
|
||||
|
||||
////////////////////////////////////////////////
|
||||
pub use macro_utils::repr;
|
||||
pub use proc_macro2::Span;
|
||||
pub use proc_macro2::TokenStream;
|
||||
pub use quote::quote;
|
||||
}
|
||||
|
||||
/// In order to make the definition easier to read, an example expansion of the
|
||||
/// following definition was provided for each quotation:
|
||||
use crate::derive_iterator::IsMut;
|
||||
|
||||
/// For `struct Foo<T>` or `enum Foo<T>` provides:
|
||||
/// * `IntoIterator` implementations for `&'t Foo<T>`, `iter` and `into_iter`
|
||||
/// methods.
|
||||
///
|
||||
/// #[derive(Iterator)]
|
||||
/// pub struct Foo<S, T> { foo: T }
|
||||
/// The iterators will:
|
||||
/// * for structs: go over each field whose declared type is the same as the
|
||||
/// struct's last type parameter.
|
||||
/// * for enums: delegate to the current constructor's nested value's iterator.
|
||||
///
|
||||
/// Enum variants are required to be single-element tuple-like variants. This
|
||||
/// limitation should be lifted in the future.
|
||||
///
|
||||
/// Any dependent type stored in a struct, tuple, or enum variant should depend
|
||||
/// only on its last type parameter. All dependent types that are neither
|
||||
/// tuples nor directly the yielded type are required to provide an `iter`
|
||||
/// method that returns a compatible iterator (possibly also derived).
|
||||
///
|
||||
/// Caller must have the following features enabled:
|
||||
/// ```
|
||||
/// #![feature(generators)]
|
||||
/// #![feature(type_alias_impl_trait)]
|
||||
/// ```
|
||||
///
|
||||
/// When used on a type that takes no type parameters, like `struct Foo`, it
|
||||
/// generates nothing and yields no errors.
|
||||
#[proc_macro_derive(Iterator)]
|
||||
pub fn derive_iterator
|
||||
(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let decl = syn::parse_macro_input!(input as syn::DeriveInput);
|
||||
let params = &decl.generics.params.iter().collect::<Vec<_>>();
|
||||
match params.last() {
|
||||
Some(last_param) => derive_iterator_for(&decl, &last_param),
|
||||
None => proc_macro::TokenStream::from(quote! {})
|
||||
}
|
||||
derive_iterator::derive(input,IsMut::Immutable)
|
||||
}
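// Example usage (a hedged sketch, not code from this repository): it assumes
// the derive is re-exported by the `shapely` crate, and the struct and field
// names below are purely illustrative. The calling crate must enable the
// nightly features listed in the doc comment above.
//
// #![feature(generators)]
// #![feature(type_alias_impl_trait)]
//
// #[derive(shapely::Iterator)]
// pub struct Foo<S, T> { label: S, first: T, second: T }
//
// fn sum_foo(foo: &Foo<String, i32>) -> i32 {
//     // Only fields typed with the last parameter (`T`) are yielded.
//     foo.iter().copied().sum()
// }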
|
||||
|
||||
#[allow(clippy::cognitive_complexity)]
|
||||
fn derive_iterator_for
|
||||
( decl : &syn::DeriveInput
|
||||
, target_param : &syn::GenericParam
|
||||
) -> proc_macro::TokenStream {
|
||||
let data = &decl.data;
|
||||
let params = &decl.generics.params.iter().collect::<Vec<_>>();
|
||||
let target_param_str = repr(&target_param);
|
||||
let matched_fields: Vec<TokenStream> = match *data {
|
||||
syn::Data::Struct(ref data) => {
|
||||
fields_list(&data.fields).iter().enumerate().filter_map(|(i, f)| {
|
||||
let type_matched = repr(&f.ty) == target_param_str;
|
||||
type_matched.as_some_from(|| {
|
||||
match &f.ident {
|
||||
Some(ident) => quote!(#ident),
|
||||
None => {
|
||||
let ix = syn::Index::from(i);
|
||||
quote!(#ix)
|
||||
}
|
||||
}
|
||||
})
|
||||
}).collect()
|
||||
}
|
||||
syn::Data::Enum(_) | syn::Data::Union(_) => unimplemented!(),
|
||||
};
|
||||
let data = &decl.ident;
|
||||
let t_iterator = format!("{}Iterator" , data);
|
||||
let t_iterator_mut = format!("{}IteratorMut" , data);
|
||||
let iterator = t_iterator.to_snake_case();
|
||||
let iterator_mut = t_iterator_mut.to_snake_case();
|
||||
let t_iterator = Ident::new(&t_iterator , Span::call_site());
|
||||
let t_iterator_mut = Ident::new(&t_iterator_mut , Span::call_site());
|
||||
let iterator = Ident::new(&iterator , Span::call_site());
|
||||
let iterator_mut = Ident::new(&iterator_mut , Span::call_site());
|
||||
let iter_body_ref = quote! {
|
||||
structology::IterForGenerator
|
||||
(move || { #(yield &t.#matched_fields;)* })
|
||||
};
|
||||
let iter_body_mut = quote! {
|
||||
structology::IterForGenerator
|
||||
(move || { #(yield &mut t.#matched_fields;)* })
|
||||
};
|
||||
let iter_body_dummy = quote! { structology::EmptyGenerator::new() };
|
||||
let empty = matched_fields.is_empty();
|
||||
let iter_body = if empty { &iter_body_dummy } else { &iter_body_ref };
|
||||
let iter_body_mut = if empty { &iter_body_dummy } else { &iter_body_mut };
|
||||
let expanded = quote! {
|
||||
// type FooIterator<'t, T> = impl Iterator<Item = &'t T>;
|
||||
type #t_iterator<'t, #(#params),*> =
|
||||
impl Iterator<Item = &'t #target_param>;
|
||||
|
||||
// pub fn foo_iterator<'t, T>
|
||||
// (t: &'t Foo<T>) -> FooIterator<'t, T> {
|
||||
// shapely::IterForGenerator(move || {
|
||||
// yield &t.foo;
|
||||
// })
|
||||
// }
|
||||
pub fn #iterator<'t, #(#params),*>
|
||||
(t: &'t #data<#(#params),*>) -> #t_iterator<'t, #(#params),*> {
|
||||
#iter_body
|
||||
}
|
||||
|
||||
// type FooIteratorMut<'t, T> = impl Iterator<Item = &'t mut T>;
|
||||
type #t_iterator_mut<'t, #(#params),*> =
|
||||
impl Iterator<Item = &'t mut #target_param>;
|
||||
|
||||
// pub fn foo_iterator_mut<'t, T>
|
||||
// (t: &'t mut Foo<T>) -> FooIteratorMut<'t, T> {
|
||||
// shapely::IterForGenerator(move || {
|
||||
// yield &t.foo;
|
||||
// })
|
||||
// }
|
||||
pub fn #iterator_mut<'t, #(#params),*>
|
||||
(t: &'t mut #data<#(#params),*>) -> #t_iterator_mut<'t, #(#params),*> {
|
||||
#iter_body_mut
|
||||
}
|
||||
|
||||
// impl<'t, T> IntoIterator for &'t Foo<T> {
|
||||
// type Item = &'t T;
|
||||
// type IntoIter = FooIterator<'t, T>;
|
||||
// fn into_iter(self) -> FooIterator<'t, T> {
|
||||
// foo_iterator(self)
|
||||
// }
|
||||
// }
|
||||
impl<'t, #(#params),*> IntoIterator for &'t #data<#(#params),*> {
|
||||
type Item = &'t #target_param;
|
||||
type IntoIter = #t_iterator<'t, #(#params),*>;
|
||||
fn into_iter(self) -> #t_iterator<'t, #(#params),*> {
|
||||
#iterator(self)
|
||||
}
|
||||
}
|
||||
|
||||
// impl<'t, T> IntoIterator for &'t mut Foo<T> {
|
||||
// type Item = &'t mut T;
|
||||
// type IntoIter = FooIteratorMut<'t, T>;
|
||||
// fn into_iter(self) -> FooIteratorMut<'t, T> {
|
||||
// foo_iterator_mut(self)
|
||||
// }
|
||||
// }
|
||||
impl<'t, #(#params),*> IntoIterator for &'t mut #data<#(#params),*> {
|
||||
type Item = &'t mut #target_param;
|
||||
type IntoIter = #t_iterator_mut<'t, #(#params),*>;
|
||||
fn into_iter(self) -> #t_iterator_mut<'t, #(#params),*> {
|
||||
#iterator_mut(self)
|
||||
}
|
||||
}
|
||||
|
||||
// impl Foo<T> {
|
||||
// pub fn iter(&self) -> FooIterator<'_, T> {
|
||||
// #foo_iterator(self)
|
||||
// }
|
||||
// pub fn iter_mut(&mut self) -> FooIteratorMut<'_, T> {
|
||||
// #foo_iterator_mut (self)
|
||||
// }
|
||||
// }
|
||||
impl<#(#params),*> #data<#(#params),*> {
|
||||
pub fn iter(&self) -> #t_iterator<'_, #(#params),*> {
|
||||
#iterator(self)
|
||||
}
|
||||
pub fn iter_mut(&mut self) -> #t_iterator_mut<'_, #(#params),*> {
|
||||
#iterator_mut(self)
|
||||
}
|
||||
}
|
||||
};
|
||||
proc_macro::TokenStream::from(expanded)
|
||||
}
|
||||
|
||||
fn fields_list(fields: &syn::Fields) -> Vec<&syn::Field> {
|
||||
match fields {
|
||||
syn::Fields::Named (ref f) => { f.named.iter().collect() }
|
||||
syn::Fields::Unnamed (ref f) => { f.unnamed.iter().collect() }
|
||||
syn::Fields::Unit => default()
|
||||
}
|
||||
}
|
||||
|
||||
fn _field_type(field: &syn::Field) -> String {
|
||||
let tp = &field.ty;
|
||||
quote!(#tp).to_string()
|
||||
}
|
||||
|
||||
#[proc_macro_derive(AstNode)]
|
||||
pub fn derive_ast_node
|
||||
(_input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let expanded = quote! {
|
||||
#[derive(Debug)]
|
||||
};
|
||||
proc_macro::TokenStream::from(expanded)
|
||||
}
|
||||
|
||||
#[proc_macro_attribute]
|
||||
pub fn ast_node
|
||||
(_meta: proc_macro::TokenStream, input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let input: TokenStream = input.into();
|
||||
let output = quote! {
|
||||
#[derive(Eq, PartialEq, Debug)]
|
||||
#[derive(Iterator)]
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#input
|
||||
};
|
||||
output.into()
|
||||
}
|
||||
|
||||
#[proc_macro_attribute]
|
||||
pub fn ast
|
||||
( attrs : proc_macro::TokenStream
|
||||
, input : proc_macro::TokenStream
|
||||
) -> proc_macro::TokenStream {
|
||||
let attrs: TokenStream = attrs.into();
|
||||
let decl = syn::parse_macro_input!(input as syn::DeriveInput);
|
||||
let output = match &decl.data {
|
||||
syn::Data::Enum { .. } => quote! {
|
||||
#[to_variant_types(#attrs)]
|
||||
#[ast_node]
|
||||
#decl
|
||||
},
|
||||
_ => quote! {
|
||||
#[ast_node]
|
||||
#decl
|
||||
}
|
||||
};
|
||||
output.into()
|
||||
}
|
||||
|
||||
|
||||
|
||||
fn repr<T: quote::ToTokens>(t: &T) -> String {
|
||||
quote!(#t).to_string()
|
||||
}
|
||||
|
||||
|
||||
use syn::visit::{self, Visit};
|
||||
|
||||
struct TypeGather {
|
||||
pub types: Vec<String>
|
||||
}
|
||||
|
||||
impl TypeGather {
|
||||
pub fn new() -> Self {
|
||||
let types = default();
|
||||
Self { types }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'ast> Visit<'ast> for TypeGather {
|
||||
fn visit_type_path(&mut self, node: &'ast syn::TypePath) {
|
||||
self.types.push(repr(node));
|
||||
visit::visit_type_path(self, node);
|
||||
}
|
||||
}
|
||||
|
||||
fn gather_all_types(node: &syn::Type) -> Vec<String> {
|
||||
let mut type_gather = TypeGather::new();
|
||||
type_gather.visit_type(node);
|
||||
type_gather.types
|
||||
}
|
||||
|
||||
|
||||
fn mk_product_type
|
||||
( is_flat : bool
|
||||
, decl : &syn::DeriveInput
|
||||
, variant : &syn::Variant
|
||||
) -> syn::ItemStruct {
|
||||
use syn::ItemStruct;
|
||||
let fields = &variant.fields;
|
||||
let types = fields.iter().flat_map(|f| {gather_all_types(&f.ty) });
|
||||
let types = types.collect::<HashSet<_>>();
|
||||
let ty_vars = decl.generics.params.iter().cloned();
|
||||
let params = ty_vars.filter(|v| types.contains(&repr(&v))).collect();
|
||||
let attrs = decl.attrs.clone();
|
||||
let vis = decl.vis.clone();
|
||||
let struct_token = syn::token::Struct { span: Span::call_site() };
|
||||
let ident_flat = variant.ident.clone();
|
||||
let ident_nested = format!("{}{}", decl.ident, variant.ident);
|
||||
let ident_nested = Ident::new(&ident_nested, Span::call_site());
|
||||
let ident = if is_flat { ident_flat } else { ident_nested };
|
||||
let generics = syn::Generics { params, .. default() };
|
||||
let mut fields = variant.fields.clone();
|
||||
let semi_token = None;
|
||||
fields.iter_mut().for_each(|f| f.vis = vis.clone());
|
||||
ItemStruct { attrs, vis, struct_token, ident, generics, fields, semi_token }
|
||||
}
|
||||
|
||||
fn gen_variant_decl
|
||||
(ident: &syn::Ident, variant: &syn::ItemStruct) -> TokenStream {
|
||||
let variant_ident = &variant.ident;
|
||||
let params = variant.generics.params.iter();
|
||||
quote! {
|
||||
// App(ShapeApp<T>),
|
||||
// Var(ShapeVar),
|
||||
#ident(#variant_ident<#(#params),*>)
|
||||
}
|
||||
}
|
||||
|
||||
fn gen_from_impls
|
||||
(ident: &syn::Ident, decl: &syn::DeriveInput, variant: &syn::ItemStruct) -> TokenStream {
|
||||
let sum_label = &decl.ident;
|
||||
let variant_label = &variant.ident;
|
||||
let sum_params = &decl.generics.params.iter().cloned().collect::<Vec<_>>();
|
||||
let variant_params = &variant.generics.params.iter().cloned().collect::<Vec<_>>();
|
||||
quote! {
|
||||
// impl<T> From<App<T>> for Shape<T> {
|
||||
// fn from(t: App<T>) -> Self { Shape::App(t) }
|
||||
// }
|
||||
// ...
|
||||
impl<#(#sum_params),*> From<#variant_label<#(#variant_params),*>>
|
||||
for #sum_label<#(#sum_params),*> {
|
||||
fn from(t: #variant_label<#(#variant_params),*>) -> Self {
|
||||
#sum_label::#ident(t)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// In order to make the definition easier to read, an example expansion of the
|
||||
/// following definition was provided for each quotation:
|
||||
/// Same as `derive(Iterator)` but generates a mutable iterator.
|
||||
///
|
||||
/// #[to_variant_types]
|
||||
/// pub enum Shape<T> {
|
||||
/// Var(Var),
|
||||
/// App(App<T>),
|
||||
/// }
|
||||
#[proc_macro_attribute]
|
||||
pub fn to_variant_types
|
||||
( attrs: proc_macro::TokenStream
|
||||
, input: proc_macro::TokenStream
|
||||
) -> proc_macro::TokenStream {
|
||||
let attrs: TokenStream = attrs.into();
|
||||
let decl = syn::parse_macro_input!(input as syn::DeriveInput);
|
||||
let ident = &decl.ident;
|
||||
let ty_vars = &decl.generics.params;
|
||||
let variants = match &decl.data {
|
||||
syn::Data::Enum(ref data) => data.variants.iter(),
|
||||
_ => unimplemented!()
|
||||
}.collect::<Vec<_>>();
|
||||
|
||||
let is_flat = repr(&attrs) == "flat";
|
||||
let variant_idents = variants.iter().map(|v| &v.ident).collect::<Vec<_>>();
|
||||
let structs = variants.iter().map(|v| mk_product_type(is_flat, &decl, v));
|
||||
let structs = structs.collect::<Vec<_>>();
|
||||
let variant_decls = variant_idents.iter().zip(structs.iter()).map(|(i,v)| gen_variant_decl(i,&v));
|
||||
let variant_froms = variant_idents.iter().zip(structs.iter()).map(|(i,v)| gen_from_impls(i, &decl, &v));
|
||||
|
||||
// Single-value, unnamed variants reuse the type created by the user, so no wrapper struct is generated for them.
|
||||
let structs = structs.iter().filter(|v| match &v.fields {
|
||||
syn::Fields::Unnamed(f) => f.unnamed.len() != 1,
|
||||
_ => true
|
||||
});
|
||||
|
||||
let output = quote! {
|
||||
#[derive(Eq, PartialEq, Debug)]
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub enum #ident <#ty_vars> {
|
||||
#(#variant_decls),*
|
||||
}
|
||||
#(#structs)*
|
||||
#(#variant_froms)*
|
||||
};
|
||||
output.into()
|
||||
/// It is separate, as some types allow deriving the immutable iterator but
|
||||
/// not the mutable one.
|
||||
#[proc_macro_derive(IteratorMut)]
|
||||
pub fn derive_iterator_mut
|
||||
(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
derive_iterator::derive(input,IsMut::Mutable)
|
||||
}
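// A minimal sketch of combining both derives (type and field names are
// illustrative only; the derives are assumed to be in scope via `shapely`):
//
// #[derive(shapely::Iterator, shapely::IteratorMut)]
// pub struct Pair<T>(T, T);
//
// let mut pair = Pair(1, 2);
// for i in pair.iter_mut() { *i += 1 }
// assert_eq!(pair.iter().copied().sum::<i32>(), 5);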
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[proc_macro_attribute]
|
||||
pub fn overlappable
|
||||
( attrs : proc_macro::TokenStream
|
||||
, input : proc_macro::TokenStream
|
||||
, input : proc_macro::TokenStream
|
||||
) -> proc_macro::TokenStream {
|
||||
let _attrs: TokenStream = attrs.into();
|
||||
let decl = syn::parse_macro_input!(input as syn::ItemImpl);
|
||||
// let mut path = decl.trait_.unwrap().1.clone();
|
||||
// let path = path.segments.last_mut().iter().map(|ident| {
|
||||
// Ident::new(&format!("MarketCtx_{}", repr(ident)) , Span::call_site());
|
||||
// });
|
||||
|
||||
let mut marker_ctx_impl = decl;
|
||||
let mut trait_ = marker_ctx_impl.trait_.as_mut();
|
||||
trait_.iter_mut().for_each(|t| {
|
||||
let path = &mut t.1;
|
||||
path.segments.last_mut().iter_mut().for_each(|s| {
|
||||
let rr = repr(&s);
|
||||
s.ident = Ident::new(&format!("MarketCtx_{}", rr) , Span::call_site());
|
||||
});
|
||||
});
|
||||
|
||||
// let mut marker_ctx_impl = decl.clone();
|
||||
// let path = &mut marker_ctx_impl.trait_.as_mut().unwrap().1;
|
||||
// path.segments.last_mut().iter_mut().for_each(|s| {
|
||||
// let rr = repr(&s);
|
||||
// s.ident = Ident::new(&format!("MarketCtx_{}", rr) , Span::call_site());
|
||||
// });
|
||||
|
||||
// let name = repr(path);
|
||||
|
||||
// let marker_ctx_impl = syn::ItemImpl {
|
||||
// .. decl
|
||||
// };
|
||||
|
||||
|
||||
let _output_tmp = quote! {
|
||||
#marker_ctx_impl
|
||||
};
|
||||
let output = quote! {
|
||||
|
||||
};
|
||||
// println!("------------------");
|
||||
// println!("{}", output_tmp);
|
||||
output.into()
|
||||
overlappable::overlappable(attrs,input)
|
||||
}
|
||||
|
||||
|
50
gui/lib/shapely/macros/src/overlappable.rs
Normal file
@ -0,0 +1,50 @@
|
||||
use crate::prelude::*;
|
||||
|
||||
use proc_macro2::Ident;
|
||||
|
||||
pub fn overlappable
|
||||
( attrs : proc_macro::TokenStream
|
||||
, input : proc_macro::TokenStream
|
||||
) -> proc_macro::TokenStream {
|
||||
let _attrs: TokenStream = attrs.into();
|
||||
let decl = syn::parse_macro_input!(input as syn::ItemImpl);
|
||||
// let mut path = decl.trait_.unwrap().1.clone();
|
||||
// let path = path.segments.last_mut().iter().map(|ident| {
|
||||
// Ident::new(&format!("MarketCtx_{}", repr(ident)) , Span::call_site());
|
||||
// });
|
||||
|
||||
let mut marker_ctx_impl = decl;
|
||||
let mut trait_ = marker_ctx_impl.trait_.as_mut();
|
||||
trait_.iter_mut().for_each(|t| {
|
||||
let path = &mut t.1;
|
||||
path.segments.last_mut().iter_mut().for_each(|s| {
|
||||
let rr = repr(&s);
|
||||
s.ident = Ident::new(&format!("MarketCtx_{}", rr) , Span::call_site());
|
||||
});
|
||||
});
|
||||
|
||||
// let mut marker_ctx_impl = decl.clone();
|
||||
// let path = &mut marker_ctx_impl.trait_.as_mut().unwrap().1;
|
||||
// path.segments.last_mut().iter_mut().for_each(|s| {
|
||||
// let rr = repr(&s);
|
||||
// s.ident = Ident::new(&format!("MarketCtx_{}", rr) , Span::call_site());
|
||||
// });
|
||||
|
||||
// let name = repr(path);
|
||||
|
||||
// let marker_ctx_impl = syn::ItemImpl {
|
||||
// .. decl
|
||||
// };
|
||||
|
||||
|
||||
let _output_tmp = quote! {
|
||||
#marker_ctx_impl
|
||||
};
|
||||
let output = quote! {
|
||||
|
||||
};
|
||||
// println!("------------------");
|
||||
// println!("{}", output_tmp);
|
||||
output.into()
|
||||
}
|
||||
|
@ -11,7 +11,7 @@ default = ["console_error_panic_hook"]
|
||||
|
||||
[dependencies]
|
||||
data = { version = "0.1.0" , path = "../../data" }
|
||||
basegl-prelude = { version = "0.1.0" , path = "../../prelude" }
|
||||
enso-prelude = { version = "0.1.0" , path = "../../prelude" }
|
||||
js-sys = { version = "0.3.28" }
|
||||
wasm-bindgen = { version = "^0.2" , features = ["nightly"] }
|
||||
failure = { version = "0.1.5" }
|
||||
|
@ -1,7 +1,7 @@
|
||||
//! This file contains the implementation of DOMContainer, a struct that helps
|
||||
//! us handle HTML elements and read their dimensions while avoiding style reflow.
|
||||
|
||||
use basegl_prelude::*;
|
||||
use enso_prelude::*;
|
||||
|
||||
use crate::get_element_by_id;
|
||||
use crate::dyn_into;
|
||||
|
@ -7,7 +7,7 @@
|
||||
pub mod resize_observer;
|
||||
pub mod dom;
|
||||
|
||||
use basegl_prelude::*;
|
||||
use enso_prelude::*;
|
||||
|
||||
use wasm_bindgen::prelude::Closure;
|
||||
use wasm_bindgen::JsCast;
|
||||
|
@ -10,7 +10,7 @@ edition = "2018"
|
||||
wasm-bindgen-test = "0.3.3"
|
||||
web-test-proc-macro = { version = "0.1.0" , path = "../web-test-proc-macro" }
|
||||
basegl = { version = "0.1.0" , path = "../core" }
|
||||
basegl-prelude = { version = "0.1.0" , path = "../prelude" }
|
||||
enso-prelude = { version = "0.1.0" , path = "../prelude" }
|
||||
basegl-system-web = { version = "0.1.0" , path = "../system/web" }
|
||||
wasm-bindgen = { version = "^0.2" , features = ["nightly"] }
|
||||
js-sys = { version = "0.3.28" }
|
||||
|
@ -7,7 +7,7 @@ mod system {
|
||||
pub use basegl_system_web as web;
|
||||
}
|
||||
|
||||
use basegl_prelude as prelude;
|
||||
use enso_prelude as prelude;
|
||||
|
||||
pub use wasm_bindgen_test::wasm_bindgen_test_configure as web_configure;
|
||||
pub use web_test_proc_macro::*;
|
||||
|
@ -1,2 +1,2 @@
|
||||
#!/bin/bash
|
||||
wasm-pack build $@ --no-typescript --out-dir '../../target/web' lib/core
|
||||
wasm-pack build $@ --no-typescript --out-dir '../../target/web' lib/impl
|
||||
|
@ -1,6 +1,14 @@
|
||||
#![feature(option_result_contains)]
|
||||
|
||||
fn get_workspace_members(cargo_toml_root : toml::Value) -> Vec<String> {
|
||||
match &cargo_toml_root["workspace"]["members"] {
|
||||
use std::path::PathBuf;
|
||||
use std::path::Path;
|
||||
|
||||
const BUILD_UTILITIES_DIR : &str = "build-utilities";
|
||||
|
||||
/// Lists members of given Cargo.toml workspace.
|
||||
fn get_workspace_members(cargo_toml_root:toml::Value) -> Vec<String> {
|
||||
let workspace = cargo_toml_root.get("workspace").expect("not a workspace");
|
||||
match &workspace["members"] {
|
||||
toml::Value::Array(list) => list.iter().map(|val| {
|
||||
match val {
|
||||
toml::Value::String(s) => s.clone(),
|
||||
@ -11,7 +19,30 @@ fn get_workspace_members(cargo_toml_root : toml::Value) -> Vec<String> {
|
||||
}
|
||||
}
|
||||
|
||||
const BUILD_UTILITIES_DIR : &str = "build-utilities";
|
||||
/// Parses file under given path as TOML value.
|
||||
fn parse_toml(path:impl AsRef<Path>) -> toml::Value {
|
||||
let path = path.as_ref();
|
||||
let data = std::fs::read_to_string(path).unwrap();
|
||||
data.parse().unwrap()
|
||||
}
|
||||
|
||||
/// Checks whether `wasm-pack test` should be run for the given workspace member.
|
||||
fn to_be_tested(member:&str) -> bool {
|
||||
let is_build_util = member.starts_with(BUILD_UTILITIES_DIR);
|
||||
!is_build_util && !is_proc_macro_crate(member)
|
||||
}
|
||||
|
||||
/// Checks if the given workspace member is a proc-macro crate.
|
||||
fn is_proc_macro_crate(member:&str) -> bool {
|
||||
let cargo_toml_path = PathBuf::from(member).join("Cargo.toml");
|
||||
let cargo_toml_root = parse_toml(cargo_toml_path);
|
||||
get_proc_macro(cargo_toml_root).contains(&true)
|
||||
}
|
||||
|
||||
/// Retrieves the `lib.proc-macro` field from Cargo.toml.
|
||||
fn get_proc_macro(cargo_toml:toml::Value) -> Option<bool> {
|
||||
cargo_toml.get("lib")?.get("proc-macro")?.as_bool()
|
||||
}
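// For reference, a proc-macro member's Cargo.toml carries a manifest entry
// like the following (sketch):
//
// [lib]
// proc-macro = true
//
// in which case `get_proc_macro` returns `Some(true)` and `to_be_tested`
// excludes that member from the wasm-pack test run.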
|
||||
|
||||
/// Call wasm-pack test for each workspace member
|
||||
///
|
||||
@ -19,13 +50,10 @@ const BUILD_UTILITIES_DIR : &str = "build-utilities";
|
||||
/// directory, and call `wasm-pack test` for each member. All script arguments
|
||||
/// are passed to the `wasm-pack` process.
|
||||
fn main() {
|
||||
let wasm_pack_args : Vec<String> = std::env::args().skip(1).collect();
|
||||
let cargo_toml_root = std::fs::read_to_string("Cargo.toml").unwrap()
|
||||
.parse::<toml::Value>().unwrap();
|
||||
|
||||
let all_members = get_workspace_members(cargo_toml_root);
|
||||
let tested_members = all_members.iter()
|
||||
.filter(|path| !path.starts_with(BUILD_UTILITIES_DIR));
|
||||
let wasm_pack_args = std::env::args().skip(1).collect::<Vec<_>>();
|
||||
let cargo_toml_root = parse_toml("Cargo.toml");
|
||||
let all_members = get_workspace_members(cargo_toml_root);
|
||||
let tested_members = all_members.iter().filter(|p| to_be_tested(&p));
|
||||
|
||||
for member in tested_members {
|
||||
println!("Running tests for {}:", member);
|
||||
|