Port graph editor to new AST (#4113)
Use the Rust parser rather than the Scala parser to parse Enso code in the IDE.

Implements:
- https://www.pivotaltracker.com/story/show/182975925
- https://www.pivotaltracker.com/story/show/182988419
- https://www.pivotaltracker.com/story/show/182970096
- https://www.pivotaltracker.com/story/show/182973659
- https://www.pivotaltracker.com/story/show/182974161
- https://www.pivotaltracker.com/story/show/182974205

There is additional functionality needed before the transition is fully complete; however, I think it's time for this to see review and testing, so I've opened separate issues. In rough order of urgency (these issues are also linked from the corresponding disabled tests):
- #5573
- #5571
- #5572
- #5574

# Important Notes
The implementation is based partly on translation and partly on new analysis. Method- and operator-related shapes are translated to the old `Ast` variants, so that the analysis applied to them doesn't need to be ported at this time. Everything else (mostly "macros" in the old AST) is implemented with new analysis.
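A minimal sketch of the test-migration pattern that accounts for most of the churn below (illustrative only; `example` is a made-up test name, while the parser calls are the ones used throughout this diff):

```rust
// Before: the Scala parser could fail to construct, and tests had to run in a
// browser via wasm-bindgen:
//
//     #[wasm_bindgen_test]
//     fn example() {
//         let parser = parser_scala::Parser::new_or_panic();
//         let ast = parser.parse_line_ast("foo bar").unwrap();
//         assert_eq!(ast.repr(), "foo bar");
//     }
//
// After: the Rust parser constructs infallibly, and the same tests run natively.
#[test]
fn example() {
    let parser = parser::Parser::new();
    let ast = parser.parse_line_ast("foo bar").unwrap();
    assert_eq!(ast.repr(), "foo bar");
}
```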
This commit is contained in:
parent b56d6d74b9 · commit d1af25793a
Cargo.lock (generated, 1293 lines changed): file diff suppressed because it is too large.
@@ -5,6 +5,7 @@ resolver = "2"
 # where plausible.
 members = [
     "app/gui",
+    "app/gui/language/parser",
     "app/gui/enso-profiler-enso-data",
     "build/cli",
     "build/macros",
@@ -32,10 +32,11 @@ ensogl-hardcoded-theme = { path = "../../lib/rust/ensogl/app/theme/hardcoded" }
 ensogl-drop-manager = { path = "../../lib/rust/ensogl/component/drop-manager" }
 fuzzly = { path = "../../lib/rust/fuzzly" }
 ast = { path = "language/ast/impl" }
+parser = { path = "language/parser" }
+parser-scala = { path = "language/parser-scala" }
 ide-view = { path = "view" }
 engine-protocol = { path = "controller/engine-protocol" }
 json-rpc = { path = "../../lib/rust/json-rpc" }
-parser-scala = { path = "language/parser" }
 span-tree = { path = "language/span-tree" }
 bimap = { version = "0.4.0" }
 console_error_panic_hook = { workspace = true }
@@ -9,8 +9,8 @@ crate-type = ["cdylib", "rlib"]

 [dependencies]
 ast = { path = "../../language/ast/impl" }
+parser = { path = "../../language/parser" }
 engine-protocol = { path = "../engine-protocol" }
-parser-scala = { path = "../../language/parser" }
 enso-data-structures = { path = "../../../../lib/rust/data-structures" }
 enso-prelude = { path = "../../../../lib/rust/prelude" }
 enso-profiler = { path = "../../../../lib/rust/profiler" }
@@ -240,13 +240,6 @@ impl AliasAnalyzer {
             self.process_assignment(&assignment);
         } else if let Some(lambda) = ast::macros::as_lambda(ast) {
             self.process_lambda(&lambda);
-        } else if let Ok(macro_match) = ast::known::Match::try_from(ast) {
-            // Macros (except for lambdas which were covered in the previous check) never introduce
-            // new scopes or different context. We skip the keywords ("if" in "if-then-else" is not
-            // an identifier) and process the matched subtrees as usual.
-            self.process_given_subtrees(macro_match.shape(), macro_match.iter_pat_match_subcrumbs())
-        } else if let Ok(ambiguous) = ast::known::Ambiguous::try_from(ast) {
-            self.process_given_subtrees(ambiguous.shape(), ambiguous.iter_pat_match_subcrumbs())
         } else if self.is_in_pattern() {
             // We are in the pattern (be it a lambda's or assignment's left side). Three options:
             // 1) This is a destructuring pattern match using infix syntax, like `head,tail`.
@@ -371,8 +364,6 @@ mod tests {
     use super::test_utils::*;
     use super::*;

-    wasm_bindgen_test_configure!(run_in_browser);
-
     /// Checks if actual observed sequence of located identifiers matches the expected one.
     /// Expected identifiers are described as code spans in the node's text representation.
     fn validate_identifiers(
@@ -386,7 +377,7 @@ mod tests {
     }

     /// Runs the test for the given test case description.
-    fn run_case(parser: &parser_scala::Parser, case: Case) {
+    fn run_case(parser: &parser::Parser, case: Case) {
         debug!("\n===========================================================================\n");
         debug!("Case: {}", case.code);
         let ast = parser.parse_line_ast(&case.code).unwrap();
@@ -397,15 +388,15 @@ mod tests {
     }

     /// Runs the test for the test case expressed using markdown notation. See `Case` for details.
-    fn run_markdown_case(parser: &parser_scala::Parser, marked_code: impl AsRef<str>) {
+    fn run_markdown_case(parser: &parser::Parser, marked_code: impl AsRef<str>) {
         debug!("Running test case for {}", marked_code.as_ref());
         let case = Case::from_markdown(marked_code.as_ref());
         run_case(parser, case)
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn test_alias_analysis() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let test_cases = [
             "»foo«",
             "«five» = 5",
@@ -433,21 +424,11 @@ mod tests {
             "»A« -> »b«",
             "a -> »A« -> a",
             "a -> a -> »A«",
-            "x»,«y -> »B«",
-            "x»,«y -> y",
-            "x »,« »Y« -> _",
             "(»foo«)",
             "(«foo») = (»bar«)",
-            "if »A« then »B«",
-            "if »a« then »b« else »c«",
-            "case »foo« of\n »Number« a -> a\n »Wildcard« -> »bar«\n a»,«b -> a",
-            // === Macros Ambiguous ===
-            "(»foo«",
-            "if »a«",
-            "case »a«",
             // "->»a«", // TODO [mwu] restore (and implement) when parser is able to parse this
             // "a ->", // TODO [mwu] restore (and implement) when parser is able to parse this

             // === Definition ===
             "«foo» a b c = »foo« a »d«",
             "«foo» a b c = d -> a d",
@@ -158,7 +158,7 @@ mod tests {

     use ast::crumbs;
     use ast::crumbs::InfixCrumb;
-    use parser_scala::Parser;
+    use parser::Parser;

     struct TestRun {
         graph: GraphInfo,
@@ -182,7 +182,7 @@ mod tests {
     }

     fn from_main_def(code: impl Str) -> TestRun {
-        let parser = Parser::new_or_panic();
+        let parser = Parser::new();
         let module = parser.parse_module(code, default()).unwrap();
         let definition = DefinitionInfo::from_root_line(&module.lines[0]).unwrap();
         Self::from_definition(definition)
@@ -199,15 +199,15 @@ mod tests {
         }
     }

-    #[wasm_bindgen_test]
+    #[test]
     pub fn connection_listing_test_plain() {
         use InfixCrumb::LeftOperand;
         use InfixCrumb::RightOperand;

         let code_block = r"
-d,e = p
+d = p
 a = d
-b = e
+b = d
 c = a + b
 fun a = a b
 f = fun 2";
@@ -221,21 +221,21 @@ f = fun 2";
         assert_eq!(&c.destination.crumbs, &crumbs![RightOperand, LeftOperand]);

         let c = &run.connections[1];
-        assert_eq!(run.endpoint_node_repr(&c.source), "b = e");
+        assert_eq!(run.endpoint_node_repr(&c.source), "b = d");
         assert_eq!(&c.source.crumbs, &crumbs![LeftOperand]);
         assert_eq!(run.endpoint_node_repr(&c.destination), "c = a + b");
         assert_eq!(&c.destination.crumbs, &crumbs![RightOperand, RightOperand]);

         let c = &run.connections[2];
-        assert_eq!(run.endpoint_node_repr(&c.source), "d,e = p");
-        assert_eq!(&c.source.crumbs, &crumbs![LeftOperand, LeftOperand]);
+        assert_eq!(run.endpoint_node_repr(&c.source), "d = p");
+        assert_eq!(&c.source.crumbs, &crumbs![LeftOperand]);
         assert_eq!(run.endpoint_node_repr(&c.destination), "a = d");
         assert_eq!(&c.destination.crumbs, &crumbs![RightOperand]);

         let c = &run.connections[3];
-        assert_eq!(run.endpoint_node_repr(&c.source), "d,e = p");
-        assert_eq!(&c.source.crumbs, &crumbs![LeftOperand, RightOperand]);
-        assert_eq!(run.endpoint_node_repr(&c.destination), "b = e");
+        assert_eq!(run.endpoint_node_repr(&c.source), "d = p");
+        assert_eq!(&c.source.crumbs, &crumbs![LeftOperand]);
+        assert_eq!(run.endpoint_node_repr(&c.destination), "b = d");
         assert_eq!(&c.destination.crumbs, &crumbs![RightOperand]);

         // Note that line `fun a = a b` does not introduce any connections, as it is a definition.
@@ -243,13 +243,13 @@ f = fun 2";
         assert_eq!(run.connections.len(), 4);
     }

-    #[wasm_bindgen_test]
+    #[test]
     pub fn inline_definition() {
         let run = TestRun::from_main_def("main = a");
         assert!(run.connections.is_empty());
     }

-    #[wasm_bindgen_test]
+    #[test]
     pub fn listing_dependent_nodes() {
         let code_block = "\
 f,g = p
@@ -259,7 +259,6 @@ f = fun 2";
 d = a + b
 e = b";
         let mut expected_dependent_nodes = HashMap::<&'static str, Vec<&'static str>>::new();
-        expected_dependent_nodes.insert("f,g = p", vec!["a = f", "b = g", "d = a + b", "e = b"]);
         expected_dependent_nodes.insert("a = f", vec!["d = a + b"]);
         expected_dependent_nodes.insert("b = g", vec!["d = a + b", "e = b"]);
         expected_dependent_nodes.insert("c = 2", vec![]);
@@ -11,7 +11,7 @@ use ast::crumbs::InfixCrumb;
 use ast::crumbs::Located;
 use ast::known;
 use ast::opr;
-use parser_scala::Parser;
+use parser::Parser;
 use std::iter::FusedIterator;


@@ -284,9 +284,7 @@ impl DefinitionInfo {
         let elem = line.elem.ok_or(MissingLineWithAst)?;
         let off = line.off;
         let first_line = ast::BlockLine { elem, off };
-        let is_orphan = false;
-        let ty = ast::BlockType::Discontinuous {};
-        let block = ast::Block { ty, indent, empty_lines, first_line, lines, is_orphan };
+        let block = ast::Block { indent, empty_lines, first_line, lines };
         let body_ast = Ast::new(block, None);
         self.set_body_ast(body_ast);
         Ok(())
@@ -603,10 +601,6 @@ mod tests {
     use crate::module;
     use crate::INDENT;

-    use wasm_bindgen_test::wasm_bindgen_test;
-
-    wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);
-
     fn assert_eq_strings(lhs: Vec<impl Str>, rhs: Vec<impl Str>) {
         let lhs = lhs.iter().map(|s| s.as_ref()).collect_vec();
         let rhs = rhs.iter().map(|s| s.as_ref()).collect_vec();
@@ -621,9 +615,9 @@ mod tests {
         format!(" {line}")
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn generating_definition_to_add() {
-        let parser = Parser::new_or_panic();
+        let parser = Parser::new();
         let mut to_add = ToAdd {
             name: DefinitionName::new_method("Main", "add"),
             explicit_parameter_names: vec!["arg1".into(), "arg2".into()],
@@ -649,9 +643,9 @@ mod tests {
         assert_eq!(ast.repr(), "Main.add arg1 arg2 =\n arg1 + arg2\n arg1 - arg2");
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn definition_name_tests() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let ast = parser.parse_line_ast("Foo.Bar.baz").unwrap();
         let name = DefinitionName::from_ast(&ast).unwrap();

@@ -664,16 +658,16 @@ mod tests {
         assert_eq!(ast.get_traversing(&name.extended_target[1].crumbs).unwrap().repr(), "Bar");
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn definition_name_rejecting_incomplete_names() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let ast = parser.parse_line_ast("Foo. .baz").unwrap();
         assert!(DefinitionName::from_ast(&ast).is_none());
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn definition_info_name() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let ast = parser.parse_line_ast("Foo.bar a b c = baz").unwrap();
         let definition = DefinitionInfo::from_root_line_ast(&ast).unwrap();

@@ -681,9 +675,9 @@ mod tests {
         assert_eq!(ast.get_traversing(&definition.name.crumbs).unwrap().repr(), "Foo.bar");
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn located_definition_args() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let ast = parser.parse_line_ast("foo bar baz = a + b + c").unwrap();
         let definition = DefinitionInfo::from_root_line_ast(&ast).unwrap();
         let (arg0, arg1) = definition.args.expect_tuple();
@@ -700,7 +694,7 @@ mod tests {
         assert_eq!(ast.get_traversing(&arg1.crumbs).unwrap(), &arg1.item);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn match_is_not_definition() {
         let cons = Ast::cons("Foo");
         let arg = Ast::number(5);
@@ -723,28 +717,24 @@ mod tests {
         assert!(def_opt.is_some());
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn list_definition_test() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();

-        // TODO [mwu]
-        //  Due to a parser bug, extension methods defining operators cannot be currently
-        //  correctly recognized. When it is fixed, the following should be also supported
-        //  and covered in test: `Int.+ a = _` and `Int.+ = _`.
-        //  Issue link: https://github.com/enso-org/enso/issues/565
         let definition_lines = vec![
             "main = _",
             "Foo.Bar.foo = _",
             "Foo.Bar.baz a b = _",
             "+ = _",
             "+ a = _",
+            "Int.+ a = _",
             "bar = _",
             "add a b = 50",
             "* a b = _",
         ];
         let expected_def_names_in_module =
-            vec!["main", "Foo.Bar.foo", "Foo.Bar.baz", "+", "bar", "add", "*"];
+            vec!["main", "Foo.Bar.foo", "Foo.Bar.baz", "+", "Int.+", "bar", "add", "*"];
         // In definition there are no extension methods nor arg-less definitions.
-        let expected_def_names_in_def = vec!["add", "*"];
+        let expected_def_names_in_def = vec!["+", "add", "*"];

         // === Program with definitions in root ===
         let program = definition_lines.join("\n");
@@ -770,7 +760,7 @@ mod tests {
         assert_eq_strings(to_names(&nested_defs), expected_def_names_in_def);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn finding_root_definition() {
         let program_to_expected_main_pos = vec![
             ("main = bar", 0),
@@ -780,7 +770,7 @@ mod tests {
             ("foo = bar\n\nmain = bar", 2),
         ];

-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let main_id = Id::new_plain_name("main");
         for (program, expected_line_index) in program_to_expected_main_pos {
             let module = parser.parse_module(program, default()).unwrap();
@@ -793,7 +783,7 @@ mod tests {
         }
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn getting_nested_definition() {
         let program = r"
 main =
@@ -806,7 +796,7 @@ main =

     add foo bar";

-        let module = parser_scala::Parser::new_or_panic().parse_module(program, default()).unwrap();
+        let module = parser::Parser::new().parse_module(program, default()).unwrap();
         let check_def = |id, expected_body| {
             let definition = module::get_definition(&module, &id).unwrap();
             assert_eq!(definition.body().repr(), expected_body);
@@ -225,29 +225,26 @@ mod tests {
     use ast::macros::DocumentationCommentInfo;
     use ast::test_utils::expect_single_line;
     use ast::HasRepr;
-    use wasm_bindgen_test::wasm_bindgen_test;
-
-    wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);

     /// Takes a program with main definition in root and returns main's graph.
-    fn main_graph(parser: &parser_scala::Parser, program: impl Str) -> GraphInfo {
-        let module = parser.parse_module(program.into(), default()).unwrap();
+    fn main_graph(parser: &parser::Parser, program: impl Str) -> GraphInfo {
+        let module = parser.parse_module(program.as_ref(), default()).unwrap();
         let name = DefinitionName::new_plain("main");
         let main = module.def_iter().find_by_name(&name).unwrap();
         GraphInfo::from_definition(main.item)
     }

-    fn find_graph(parser: &parser_scala::Parser, program: impl Str, name: impl Str) -> GraphInfo {
-        let module = parser.parse_module(program.into(), default()).unwrap();
+    fn find_graph(parser: &parser::Parser, program: impl Str, name: impl Str) -> GraphInfo {
+        let module = parser.parse_module(program.as_ref(), default()).unwrap();
         let crumbs = name.into().split('.').map(DefinitionName::new_plain).collect();
         let id = Id { crumbs };
         let definition = get_definition(&module, &id).unwrap();
         GraphInfo::from_definition(definition)
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn detect_a_node() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         // Each of these programs should have a `main` definition with a single `2+2` node.
         let programs = vec![
             "main = 2+2",
@@ -265,8 +262,8 @@ mod tests {
         }
     }

-    fn new_expression_node(parser: &parser_scala::Parser, expression: &str) -> NodeInfo {
-        let node_ast = parser.parse(expression.to_string(), default()).unwrap();
+    fn new_expression_node(parser: &parser::Parser, expression: &str) -> NodeInfo {
+        let node_ast = parser.parse(expression, default());
         let line_ast = expect_single_line(&node_ast).clone();
         NodeInfo::from_main_line_ast(&line_ast).unwrap()
     }
@@ -281,16 +278,16 @@ mod tests {
     fn assert_same(left: &NodeInfo, right: &NodeInfo) {
         assert_eq!(left.id(), right.id());
         assert_eq!(
-            left.documentation.as_ref().map(DocumentationCommentInfo::to_string),
-            right.documentation.as_ref().map(DocumentationCommentInfo::to_string)
+            left.documentation.as_ref().map(DocumentationCommentInfo::pretty_text),
+            right.documentation.as_ref().map(DocumentationCommentInfo::pretty_text)
         );
         assert_eq!(left.main_line.repr(), right.main_line.repr());
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn add_node_to_graph_with_single_line() {
         let program = "main = print \"hello\"";
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let mut graph = main_graph(&parser, program);
         let nodes = graph.nodes();
         assert_eq!(nodes.len(), 1);
@@ -310,14 +307,14 @@ mod tests {
         assert_all(nodes.as_slice(), &[node_to_add1, node_to_add0, initial_node]);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn add_node_to_graph_with_multiple_lines() {
         // TODO [dg] Also add test for binding node when it's possible to update its id.
         let program = r#"main =
     foo = node
     foo a = not_node
     print "hello""#;
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let mut graph = main_graph(&parser, program);

         let node_to_add0 = new_expression_node(&parser, "4 + 4");
@@ -365,7 +362,7 @@ mod tests {
         assert_eq!(graph.nodes()[1].expression().repr(), "not_node");
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn add_node_to_graph_with_blank_line() {
         // The trailing `foo` definition is necessary for the blank line after "node2" to be
         // included in the `main` block. Otherwise, the block would end on "node2" and the blank
@@ -376,7 +373,7 @@ mod tests {
     node2

 foo = 5";
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let mut graph = main_graph(&parser, program);

         let id2 = graph.nodes()[0].id();
@@ -396,15 +393,14 @@ foo = 5";
     node1
     node2
     node3
-    node4
-";
+    node4";
         // `foo` is not part of expected code, as it belongs to module, not `main` graph.
         graph.expect_code(expected_code);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn multiple_node_graph() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let program = r"
 main =
     ## Faux docstring
@@ -433,9 +429,9 @@ main =
         assert_eq!(nodes.len(), 4);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn removing_node_from_graph() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let program = r"
 main =
     foo = 2 + 2
@@ -459,9 +455,9 @@ main =
         graph.expect_code(expected_code);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn removing_last_node_from_graph() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let program = r"
 main =
     foo = 2 + 2";
@@ -477,9 +473,9 @@ main =
         graph.expect_code("main = Nothing");
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn add_first_node_to_empty_graph() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let program = r"main = Nothing";
         let mut graph = main_graph(&parser, program);
         assert!(graph.nodes().is_empty());
@@ -489,15 +485,14 @@ main =
         assert_eq!(graph.nodes()[0].expression().repr(), "node0");
     }


-    #[wasm_bindgen_test]
+    #[test]
     fn editing_nodes_expression_in_graph() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let program = r"
 main =
     foo = 2 + 2
     bar = 3 + 17";
-        let new_expression = parser.parse("print \"HELLO\"".to_string(), default()).unwrap();
+        let new_expression = parser.parse("print \"HELLO\"", default());
         let new_expression = expect_single_line(&new_expression).clone();

         let mut graph = main_graph(&parser, program);
@@ -5,11 +5,7 @@ use crate::prelude::*;
 use crate::name::NamePath;
 use crate::name::QualifiedName;

-use ast::known;
 use ast::Ast;
 use ast::HasRepr;
-use serde::Deserialize;
-use serde::Serialize;
 use std::collections::BTreeSet;
-

@@ -18,7 +14,6 @@ use std::collections::BTreeSet;
 // === Constants ===
 // =================

-const LIST_SEPARATOR: char = ',';
 const ALIAS_KEYWORD: &str = "as";
 const ALL_KEYWORD: &str = "all";
 const HIDING_KEYWORD: &str = "hiding";
@@ -40,7 +35,7 @@ pub type Id = u64;

 /// A structure describing what names are imported from the module in a specific import declaration.
 #[allow(missing_docs)]
-#[derive(Clone, Debug, Eq, Deserialize, Hash, Ord, PartialEq, PartialOrd, Serialize)]
+#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
 pub enum ImportedNames {
     /// The import is `import <module> [as <alias>]` and only module name is imported.
     Module { alias: Option<String> },
@@ -53,35 +48,8 @@ pub enum ImportedNames {
     List { names: BTreeSet<String> },
 }

-impl ImportedNames {
-    /// Create [`ImportedNames`] structure from the second `Match` segment body.
-    ///
-    /// The unqualified imports are always parsed as [`Match`](crate::Shape::Match) AST node, where
-    /// the second segment starts from `import` and ends with end of the import declaration. Thus,
-    /// the second segment body may be `all`, `all hiding <comma-separated-name-list>`, or just
-    /// comma separated name list.
-    fn from_unqualified_import_match_second_segment(segment: impl AsRef<str>) -> Self {
-        let is_token_sep = |c: char| c.is_ascii_whitespace() || c == LIST_SEPARATOR;
-        let scope_split = segment.as_ref().split(is_token_sep);
-        let mut scope_tokens = scope_split.filter(|tok| !tok.is_empty());
-        let first_token = scope_tokens.next();
-        let second_token = scope_tokens.next();
-        let third_and_further_tokens = scope_tokens;
-        match (first_token, second_token) {
-            (Some("all"), Some("hiding")) =>
-                Self::AllExcept { not_imported: third_and_further_tokens.map(Into::into).collect() },
-            (Some("all"), _) => Self::All,
-            (first_name, second_name) => {
-                let all_names =
-                    first_name.into_iter().chain(second_name).chain(third_and_further_tokens);
-                Self::List { names: all_names.map(Into::into).collect() }
-            }
-        }
-    }
-}
-
 /// Representation of a single import declaration.
-#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Deserialize, Serialize, Hash)]
+#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
 pub struct Info {
     /// The path of the qualified name of the imported module.
     pub module: NamePath,
@@ -110,55 +78,27 @@ impl Info {
         QualifiedName::from_all_segments(&self.module)
     }

-    /// Construct from an AST. Fails if the Ast is not an import declaration.
+    /// Construct from an AST, if the Ast is an import declaration.
     pub fn from_ast(ast: &Ast) -> Option<Self> {
-        let macro_match = known::Match::try_from(ast).ok()?;
-        Self::from_match(macro_match)
-    }
-
-    /// Construct from a macro match AST. Fails if the Ast is not an import declaration.
-    pub fn from_match(ast: known::Match) -> Option<Self> {
-        if ast::macros::is_match_qualified_import(&ast) {
-            Some(Self {
-                module: Self::module_name_from_str(ast.segs.head.body.repr()),
-                // TODO[ao] the current parser does not recognize aliases for imports. Should be
-                //     fixed with the new parser. Once new parser will be integrated, the alias
-                //     support will be implemented as task
-                //     https://www.pivotaltracker.com/story/show/183590537
-                imported: ImportedNames::Module { alias: None },
-            })
-        } else if ast::macros::is_match_unqualified_import(&ast) {
-            let module = ast.segs.head.body.repr();
-            let imported = ast.segs.tail.first().map_or_default(|s| s.body.repr());
-            Some(Self::from_module_and_scope_str(module, imported))
+        if let ast::Shape::Tree(ast::Tree {
+            type_info: ast::TreeType::Import { module, imported },
+            ..
+        }) = ast.shape()
+        {
+            let module = module.clone();
+            let imported = match imported.clone() {
+                ast::ImportedNames::All { except } if except.is_empty() => ImportedNames::All,
+                ast::ImportedNames::All { except } =>
+                    ImportedNames::AllExcept { not_imported: except },
+                ast::ImportedNames::List { names } => ImportedNames::List { names },
+                ast::ImportedNames::Module { alias } => ImportedNames::Module { alias },
+            };
+            Some(Info { module, imported })
         } else {
             None
         }
     }

-    /// Create [`Info`] from unqualified import segment's body representations.
-    ///
-    /// The unqualified imports are always parsed as [`Match`](crate::Shape::Match) AST node, where
-    /// the first segment contains keyword `from` and module name, and second segment the rest of
-    /// the import.
-    fn from_module_and_scope_str(module: impl AsRef<str>, imported: impl AsRef<str>) -> Self {
-        Self {
-            module: Self::module_name_from_str(module),
-            imported: ImportedNames::from_unqualified_import_match_second_segment(imported),
-        }
-    }
-
-    fn module_name_from_str(module: impl AsRef<str>) -> NamePath {
-        let name = module.as_ref().trim();
-        if name.is_empty() {
-            default()
-        } else {
-            let segments = name.split(ast::opr::predefined::ACCESS);
-            let trimmed = segments.map(str::trim);
-            trimmed.map(Into::into).collect()
-        }
-    }
-
     /// Return the ID of the import.
     ///
     /// The ID is based on a hash of the qualified name of the imported target. This ID is GUI
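The string-splitting heuristics deleted above are no longer needed because the parser now delivers imports pre-structured as `TreeType::Import`. A hedged usage sketch (the helper is hypothetical; `lines`, `elem`, and `Info::from_ast` appear in this diff):

```rust
/// Collect every import declaration in a parsed module (illustrative sketch).
fn imports_of(module: &ast::Module<Ast>) -> Vec<Info> {
    module
        .lines
        .iter()
        // A block line may be empty; keep only lines that carry an AST.
        .filter_map(|line| line.elem.as_ref())
        // Keep the ASTs that `Info::from_ast` recognizes as imports.
        .filter_map(Info::from_ast)
        .collect()
}
```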
@@ -211,7 +151,7 @@ impl Display for Info {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use parser_scala::Parser;
+    use parser::Parser;

     struct Fixture {
         parser: Parser,
@@ -219,7 +159,7 @@ mod tests {

     impl Fixture {
         fn new() -> Self {
-            Self { parser: Parser::new_or_panic() }
+            Self { parser: Parser::new() }
         }

         fn run_case(&self, code: &str, expected: Info) {
@@ -229,7 +169,7 @@ mod tests {
         }
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn qualified_import_info_from_ast() {
         let test = Fixture::new();
         let make_info = |module: &[&str]| Info {
@@ -241,16 +181,12 @@ mod tests {
         let normal_case_expected = make_info(&["Standard", "Base", "Data"]);
         test.run_case(normal_case, normal_case_expected);

-        let weird_spaces = "import Standard .Base . Data ";
-        let weird_spaces_expected = make_info(&["Standard", "Base", "Data"]);
-        test.run_case(weird_spaces, weird_spaces_expected);
-
         let single_segment = "import local";
         let single_segment_expected = make_info(&["local"]);
         test.run_case(single_segment, single_segment_expected);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn unrestricted_import_info_from_ast() {
         let test = Fixture::new();
         let make_info = |module: &[&str]| Info {
@@ -261,13 +197,9 @@ mod tests {
         let normal_case = "from Standard.Base import all";
         let normal_case_expected = make_info(&["Standard", "Base"]);
         test.run_case(normal_case, normal_case_expected);

-        let weird_spaces = "from Standard . Base import all ";
-        let weird_spaces_expected = make_info(&["Standard", "Base"]);
-        test.run_case(weird_spaces, weird_spaces_expected);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn restricted_import_info_from_ast() {
         let test = Fixture::new();
         let make_info = |module: &[&str], names: &[&str]| Info {
@@ -288,7 +220,7 @@ mod tests {
         test.run_case(single_name, single_name_expected);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn hiding_import_info_from_ast() {
         let test = Fixture::new();
         let make_info = |module: &[&str], hidden_names: &[&str]| Info {
@@ -65,11 +65,6 @@ pub mod prelude {
     pub use enso_prelude::*;
     pub use enso_profiler as profiler;
    pub use enso_profiler::prelude::*;
-
-    #[cfg(test)]
-    pub use wasm_bindgen_test::wasm_bindgen_test;
-    #[cfg(test)]
-    pub use wasm_bindgen_test::wasm_bindgen_test_configure;
 }


@@ -206,7 +201,7 @@ mod tests {
     use crate::definition::DefinitionProvider;

     use ast::macros::DocumentationCommentInfo;
-    use parser_scala::Parser;
+    use parser::Parser;


     /// Expect `main` method, where first line is a documentation comment.
@@ -229,9 +224,9 @@ mod tests {
         assert_eq!(doc.line().repr(), doc2.line().repr())
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn parse_single_line_comment() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();

         // Typical single line case.
         let code = r#"
@@ -266,15 +261,15 @@ main =
         run_case(&parser, code, expected);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn parse_multi_line_comment() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let code = r#"
 main =
     ## First line
       Second line
     node"#;
-        let expected = " First line\n Second line";
+        let expected = " First line\nSecond line";
         run_case(&parser, code, expected);
     }
 }
@@ -208,7 +208,7 @@ impl Info {
     // TODO [mwu]
     //  Ideally we should not require parser but should use some sane way of generating AST from
     //  the `ImportInfo` value.
-    pub fn add_import(&mut self, parser: &parser_scala::Parser, to_add: import::Info) -> usize {
+    pub fn add_import(&mut self, parser: &parser::Parser, to_add: import::Info) -> usize {
         // Find last import that is not "after" the added one lexicographically.
         let previous_import =
             self.enumerate_imports().take_while(|(_, import)| &to_add > import).last();
@@ -224,7 +224,7 @@ impl Info {
     /// For more details the mechanics see [`add_import`] documentation.
     pub fn add_import_if_missing(
         &mut self,
-        parser: &parser_scala::Parser,
+        parser: &parser::Parser,
         to_add: import::Info,
     ) -> Option<usize> {
         (!self.contains_import(to_add.id())).then(|| self.add_import(parser, to_add))
@@ -279,7 +279,7 @@ impl Info {
         &mut self,
         method: definition::ToAdd,
         location: Placement,
-        parser: &parser_scala::Parser,
+        parser: &parser::Parser,
     ) -> FallibleResult {
         let no_indent = 0;
         let definition_ast = method.ast(no_indent, parser)?;
@@ -509,13 +509,10 @@ mod tests {
     use crate::definition::DefinitionName;

     use engine_protocol::language_server::MethodPointer;
-    use wasm_bindgen_test::wasm_bindgen_test;
-
-    wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);

-    #[wasm_bindgen_test]
+    #[test]
     fn import_listing() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let expect_imports = |code: &str, expected: &[&[&str]]| {
             let ast = parser.parse_module(code, default()).unwrap();
             let info = Info { ast };
@@ -536,9 +533,9 @@ mod tests {
         ]]);
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn import_adding_and_removing() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let code = "import Foo.Bar.Baz";
         let ast = parser.parse_module(code, default()).unwrap();
         let mut info = Info { ast };
@@ -565,9 +562,9 @@ mod tests {
         info.expect_code("import Bar.Gar");
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn implicit_method_resolution() {
-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let module_name =
             QualifiedName::from_all_segments(["local", "ProjectName", "Main"]).unwrap();
         let expect_find = |method: &MethodPointer, code, expected: &definition::Id| {
@@ -623,7 +620,7 @@ mod tests {
         expect_not_found(&ptr, "bar a b = a + b");
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn test_definition_location() {
         let code = r"
 some def =
@@ -639,13 +636,13 @@ other def =

 last def = inline expression";

-        let parser = parser_scala::Parser::new_or_panic();
+        let parser = parser::Parser::new();
         let module = parser.parse_module(code, default()).unwrap();
         let module = Info { ast: module };

         let id = definition::Id::new_plain_name("other");
         let span = definition_span(&module.ast, &id).unwrap();
-        assert!(code[span].ends_with("last line of other def\n"));
+        assert!(code[span].ends_with("last line of other def"));

         let id = definition::Id::new_plain_name("last");
         let span = definition_span(&module.ast, &id).unwrap();
@@ -237,7 +237,7 @@ impl NodeInfo {
     }

     /// Obtain documentation text.
-    pub fn documentation_text(&self) -> Option<String> {
+    pub fn documentation_text(&self) -> Option<ImString> {
         self.documentation.as_ref().map(|doc| doc.pretty_text())
     }
 }
@@ -16,7 +16,7 @@ use crate::node::NodeInfo;

 use ast::crumbs::Located;
 use ast::BlockLine;
-use parser_scala::Parser;
+use parser::Parser;
 use std::collections::BTreeSet;


@@ -441,9 +441,9 @@ mod tests {
     }

     #[allow(unused_parens)] // False warning.
-    #[wasm_bindgen_test]
+    #[test]
     fn test_collapse() {
-        let parser = Parser::new_or_panic();
+        let parser = Parser::new();
         let module_name = "Main".to_owned();
         let introduced_name = Identifier::try_from("custom_new").unwrap();
         let refactored_name = DefinitionName::new_plain("custom_old");
@@ -185,7 +185,7 @@ mod test {

     use ast::HasIdMap;
     use enso_prelude::default;
-    use parser_scala::Parser;
+    use parser::Parser;
     use uuid::Uuid;

     /// A sample text edit used to test "text api" properties.
@@ -244,10 +244,8 @@ mod test {
         fn assert_edit_keeps_main_node_ids(&self, parser: &Parser) {
             let ast1 = parser.parse_module(&self.code, default()).unwrap();
             let mut id_map = ast1.id_map();
-
             apply_code_change_to_id_map(&mut id_map, &self.change, &self.code);
             let code2 = self.resulting_code();
-
             let ast2 = parser.parse_module(code2, id_map.clone()).unwrap();
             self.assert_same_node_ids(&ast1, &ast2);
         }
@@ -257,8 +255,6 @@ mod test {
         fn assert_same_node_ids(&self, ast1: &ast::known::Module, ast2: &ast::known::Module) {
             let ids1 = main_nodes(ast1);
             let ids2 = main_nodes(ast2);
-            debug!("IDs1: {ids1:?}");
-            debug!("IDs2: {ids2:?}");
             assert_eq!(ids1, ids2, "Node ids mismatch in {self:?}");
         }
     }
@@ -300,9 +296,9 @@ mod test {
         assert_eq!(case.resulting_code(), "fooc");
     }

-    #[wasm_bindgen_test]
+    #[test]
     fn applying_code_changes_to_id_map() {
-        let parser = Parser::new_or_panic();
+        let parser = Parser::new();

         // All the cases describe edit to a middle line in three line main definition.
         let cases = [
@@ -13,7 +13,6 @@ failure = { workspace = true }
 lazy_static = { workspace = true }
 regex = { workspace = true }
-serde = { version = "1.0", features = ["derive", "rc"] }
 serde_json = { workspace = true }
 uuid = { version = "0.8.1", features = ["serde", "v4", "wasm-bindgen"] }
 ast-macros = { path = "../macros" }
 enso-data-structures = { path = "../../../../../lib/rust/data-structures" }
(File diff suppressed because it is too large.)
@@ -45,9 +45,9 @@ impl IdMap {



-// ======================
-// === IdMapForParser ===
-// ======================
+// =================
+// === JsonIdMap ===
+// =================

 /// Strongly typed index of char.
 ///
@@ -9,11 +9,6 @@ use crate::HasTokens;
 use crate::Shape;
 use crate::TokenConsumer;

-use serde::Deserialize;
-use serde::Deserializer;
-use serde::Serialize;
-use serde::Serializer;
-


 // =================
@@ -165,25 +160,6 @@ impl<'a, T> From<&'a KnownAst<T>> for &'a Ast {
     }
 }

-impl<T> Serialize for KnownAst<T> {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where S: Serializer {
-        self.ast.serialize(serializer)
-    }
-}
-
-impl<'de, T, E> Deserialize<'de> for KnownAst<T>
-where
-    for<'t> &'t Shape<Ast>: TryInto<&'t T, Error = E>,
-    E: fmt::Display,
-{
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where D: Deserializer<'de> {
-        let ast = Ast::deserialize(deserializer)?;
-        Self::try_new(ast).map_err(serde::de::Error::custom)
-    }
-}
-
 impl<T> HasTokens for KnownAst<T> {
     fn feed_to(&self, consumer: &mut impl TokenConsumer) {
         self.ast.feed_to(consumer)
(File diff suppressed because it is too large.)
@@ -3,12 +3,9 @@

 use crate::prelude::*;

-use crate::crumbs::AmbiguousCrumb;
 use crate::crumbs::Located;
-use crate::crumbs::MatchCrumb;
 use crate::known;
 use crate::BlockLine;
-use crate::Shifted;


 // ==============
@@ -23,12 +20,6 @@ pub mod skip_and_freeze;
 // === Recognized Macros Keywords ===
 // ==================================

-/// The keyword introducing a disabled code line.
-pub const DISABLING_COMMENT_INTRODUCER: &str = "#";
-
-/// The keyword introducing a documentation block.
-pub const DOCUMENTATION_COMMENT_INTRODUCER: &str = "##";
-
 /// The keyword introducing a qualified import declaration. See:
 /// https://dev.enso.org/docs/enso/syntax/imports.html#import-syntax
 pub const QUALIFIED_IMPORT_KEYWORD: &str = "import";
@@ -36,29 +27,21 @@ pub const QUALIFIED_IMPORT_KEYWORD: &str = "import";

 /// The keyword introducing an unqualified import declaration.
 pub const UNQUALIFIED_IMPORT_KEYWORD: &str = "from";

-/// The keyword introducing an unqualified export declaration.
-pub const QUALIFIED_EXPORT_KEYWORD: &str = "export";
-


 // ========================
 // === Disable Comments ===
 // ========================

-/// Try Interpreting the line as disabling comment. Return the text after `#`.
-pub fn as_disable_comment(ast: &Ast) -> Option<String> {
-    let r#match = crate::known::Match::try_from(ast).ok()?;
-    let first_segment = &r#match.segs.head;
-    if crate::identifier::name(&first_segment.head) == Some(DISABLING_COMMENT_INTRODUCER) {
-        Some(first_segment.body.repr())
-    } else {
-        None
-    }
-}
-
 /// Check if this AST is a disabling comment.
 pub fn is_disable_comment(ast: &Ast) -> bool {
-    as_disable_comment(ast).is_some()
+    if let crate::Shape::Tree(tree) = ast.shape()
+        && tree.type_info == crate::TreeType::ExpressionWithComment
+        && !tree.span_info.iter().any(|e| matches!(e, crate::SpanSeed::Child(_))) {
+        true
+    } else {
+        false
+    }
 }
@@ -72,40 +55,32 @@ pub fn is_disable_comment(ast: &Ast) -> bool {

 /// Describes the AST of a documentation comment.
 #[derive(Clone, Debug)]
 pub struct DocumentationCommentAst {
-    ast:  known::Match,
-    body: crate::MacroPatternMatch<Shifted<Ast>>,
+    ast:      known::Tree,
+    rendered: ImString,
 }

 impl DocumentationCommentAst {
     /// Interpret given Ast as a documentation comment. Return `None` if it is not recognized.
     pub fn new(ast: &Ast) -> Option<Self> {
-        let ast = crate::known::Match::try_from(ast).ok()?;
-        let first_segment = &ast.segs.head;
-        let introducer = crate::identifier::name(&first_segment.head)?;
-        if introducer == DOCUMENTATION_COMMENT_INTRODUCER {
-            let body = first_segment.body.clone_ref();
-            Some(DocumentationCommentAst { ast, body })
+        let ast = crate::known::Tree::try_from(ast).ok()?;
+        if let crate::TreeType::Documentation { rendered } = &ast.type_info {
+            let rendered = rendered.clone();
+            Some(DocumentationCommentAst { ast, rendered })
         } else {
             None
         }
     }
-
-    /// Get the documentation comment's AST.
-    pub fn ast(&self) -> known::Match {
-        self.ast.clone_ref()
-    }
 }


 // === Line Description ===

 /// Describes the line with a documentation comment.
-#[derive(Clone, Debug, Deref)]
+#[derive(Clone, Debug)]
 pub struct DocumentationCommentLine {
     /// Stores the documentation AST and the trailing whitespace length.
-    #[deref]
-    line: BlockLine<known::Match>,
-    body: crate::MacroPatternMatch<Shifted<Ast>>,
+    line:     BlockLine<known::Tree>,
+    rendered: ImString,
 }

 impl DocumentationCommentLine {
@@ -117,22 +92,17 @@ impl DocumentationCommentLine {

     /// Treat given documentation AST as the line with a given trailing whitespace.
     pub fn from_doc_ast(ast_doc: DocumentationCommentAst, off: usize) -> Self {
-        Self { line: BlockLine { elem: ast_doc.ast, off }, body: ast_doc.body }
-    }
-
-    /// Get the documentation comment's AST.
-    pub fn ast(&self) -> known::Match {
-        self.line.elem.clone_ref()
+        Self { line: BlockLine { elem: ast_doc.ast, off }, rendered: ast_doc.rendered }
     }

     /// Get the line with this comment.
-    pub fn line(&self) -> &BlockLine<known::Match> {
+    fn line(&self) -> &BlockLine<known::Tree> {
         &self.line
     }

     /// Convenience function that throws away some information to return the line description that
     /// is used in AST blocks.
-    pub fn block_line(&self) -> BlockLine<Option<Ast>> {
+    fn block_line(&self) -> BlockLine<Option<Ast>> {
         self.line.as_ref().map(|known_ast| Some(known_ast.ast().clone_ref()))
     }
 }
@@ -142,10 +112,9 @@ impl DocumentationCommentLine {

 /// Structure holding the documentation comment AST and related information necessary to deal with
 /// them.
-#[derive(Clone, Debug, Deref)]
+#[derive(Clone, Debug)]
 pub struct DocumentationCommentInfo {
     /// Description of the line with the documentation comment.
-    #[deref]
     pub line: DocumentationCommentLine,
     /// The absolute indent of the block that contains the line with documentation comment.
     pub block_indent: usize,
@@ -157,18 +126,28 @@ impl DocumentationCommentInfo {
         Some(Self { line: DocumentationCommentLine::new(line)?, block_indent })
     }

+    /// Get the line with this comment.
+    pub fn line(&self) -> &BlockLine<known::Tree> {
+        self.line.line()
+    }
+
+    /// Get the documentation comment's AST.
+    pub fn ast(&self) -> known::Tree {
+        self.line.line.elem.clone_ref()
+    }
+
+    /// Convenience function that throws away some information to return the line description that
+    /// is used in AST blocks.
+    pub fn block_line(&self) -> BlockLine<Option<Ast>> {
+        self.line.block_line()
+    }
+
     /// Get the documentation text.
     ///
-    /// The text is pretty printed as per UI perspective -- all lines leading whitespace is stripped
-    /// up to the column following comment introducer (`##`).
-    pub fn pretty_text(&self) -> String {
-        let mut repr = self.body.repr();
-        // Trailing whitespace must be maintained.
-        repr.extend(std::iter::repeat(' ').take(self.line.off));
-        let indent = self.block_indent + DOCUMENTATION_COMMENT_INTRODUCER.len();
-        let old = format!("\n{}", " ".repeat(indent));
-        let new = "\n";
-        repr.replace(&old, new)
+    /// The text is pretty printed as per UI perspective--leading whitespace is stripped from all
+    /// lines up to the column following comment introducer (`##`).
+    pub fn pretty_text(&self) -> ImString {
+        self.line.rendered.clone()
     }

     /// Generates the source code text of the comment line from a pretty text.
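A worked example of the rendered ("pretty") text, taken from the `parse_multi_line_comment` test earlier in this diff (the exact whitespace rules are the parser's, not reimplemented here):

```rust
// Source code:
//     ## First line
//       Second line
// `pretty_text()` now simply returns the parser's rendered string:
let expected = " First line\nSecond line";
```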
@@ -177,25 +156,12 @@ impl DocumentationCommentInfo {
         let mut lines = text.lines();
         // First line must always exist, even for an empty comment.
         let first_line = format!("##{}", lines.next().unwrap_or_default());
-        let other_lines = lines.map(|line| format!("{indent} {line}"));
+        let other_lines = lines.map(|line| format!("{indent} {line}"));
         let mut out_lines = std::iter::once(first_line).chain(other_lines);
         out_lines.join("\n")
     }
 }

-impl AsRef<Ast> for DocumentationCommentInfo {
-    fn as_ref(&self) -> &Ast {
-        self.line.elem.ast()
-    }
-}
-
 impl Display for DocumentationCommentInfo {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.pretty_text())
     }
 }

 /// Check if given Ast stores a documentation comment.
 pub fn is_documentation_comment(ast: &Ast) -> bool {
     DocumentationCommentAst::new(ast).is_some()
@@ -203,112 +169,27 @@ pub fn is_documentation_comment(ast: &Ast) -> bool {



-// ===============
-// === Imports ===
-// ===============
-
-/// If the given AST node is an import declaration, returns it as a Match (which is the only shape
-/// capable of storing import declarations). Returns `None` otherwise.
-pub fn ast_as_import_match(ast: &Ast) -> Option<known::Match> {
-    let macro_match = known::Match::try_from(ast).ok()?;
-    is_match_import(&macro_match).then_some(macro_match)
-}
-
-/// If the given AST node is a qualified import declaration (`import <module name>`), returns it as
-/// a Match (which is the only shape capable of storing import declarations). Returns `None`
-/// otherwise.
-pub fn is_match_qualified_import(ast: &known::Match) -> bool {
-    let segment = &ast.segs.head;
-    let keyword = crate::identifier::name(&segment.head);
-    keyword.contains_if(|str| *str == QUALIFIED_IMPORT_KEYWORD)
-}
-
-/// If the given AST node is an unqualified import declaration (`from <module name> import <...>`),
-/// returns it as a Match (which is the only shape capable of storing import declarations). Returns
-/// `None` otherwise.
-pub fn is_match_unqualified_import(ast: &known::Match) -> bool {
-    let first_segment = &ast.segs.head;
-    let first_keyword = crate::identifier::name(&first_segment.head);
-    let second_segment = &ast.segs.tail.first();
-    let second_keyword = second_segment.and_then(|s| crate::identifier::name(&s.head));
-    first_keyword == Some(UNQUALIFIED_IMPORT_KEYWORD)
-        && second_keyword == Some(QUALIFIED_IMPORT_KEYWORD)
-}
-
-/// Check if the given macro match node is an import declaration.
-pub fn is_match_import(ast: &known::Match) -> bool {
-    is_match_qualified_import(ast) || is_match_unqualified_import(ast)
-}
-
-/// Check if the given ast node is an import declaration.
-pub fn is_ast_import(ast: &Ast) -> bool {
-    ast_as_import_match(ast).is_some()
-}
-
-
-
 // ===============
 // === Lambdas ===
 // ===============

-/// Describes the lambda-expression's three pieces: the argument, the arrow operator and the body.
+/// Describes the lambda-expression's pieces: the argument and the body.
 #[allow(missing_docs)]
 #[derive(Clone, Debug)]
 pub struct LambdaInfo<'a> {
     pub arg:  Located<&'a Ast>,
-    pub opr:  Located<&'a Ast>,
     pub body: Located<&'a Ast>,
 }

-/// If this is the builtin macro for `->` (lambda expression), returns it as known `Match`.
-pub fn as_lambda_match(ast: &Ast) -> Option<known::Match> {
-    let macro_match = known::Match::try_from(ast).ok()?;
-    let segment = &macro_match.segs.head;
-    crate::opr::is_arrow_opr(&segment.head).then_some(macro_match)
-}
-
 /// Describes the given Ast as lambda, if this is a matched `->` builtin macro.
 pub fn as_lambda(ast: &Ast) -> Option<LambdaInfo> {
-    let _ = as_lambda_match(ast)?;
-    let mut child_iter = ast.iter_subcrumbs();
-    let arg = ast.get_located(child_iter.next()?).ok()?;
-    let opr = ast.get_located(child_iter.next()?).ok()?;
-    let body = ast.get_located(child_iter.next()?).ok()?;
-    let is_arrow = crate::opr::is_arrow_opr(opr.item);
-    is_arrow.then_some(LambdaInfo { arg, opr, body })
-}
-
-
-
-// ===================
-// === Match Utils ===
-// ===================
-
-impl crate::Match<Ast> {
-    /// Iterates matched ASTs. Skips segment heads ("keywords").
-    /// For example, for `(a)` it iterates only over `a`, skipping segment heads `(` and `)`.
-    pub fn iter_pat_match_subcrumbs(&self) -> impl Iterator<Item = MatchCrumb> + '_ {
-        self.iter_subcrumbs().filter(|crumb| {
-            use crate::crumbs::SegmentMatchCrumb;
-            match crumb {
-                MatchCrumb::Segs { val, .. } => val != &SegmentMatchCrumb::Head,
-                _ => true,
-            }
-        })
-    }
-}
-
-
-
-// =======================
-// === Ambiguous Utils ===
-// =======================
-
-impl crate::Ambiguous<Ast> {
-    /// Iterates matched ASTs. Skips segment heads ("keywords").
-    /// For example, for `(a)` it iterates only over `a`, skipping segment heads `(` and `)`.
-    pub fn iter_pat_match_subcrumbs(&self) -> impl Iterator<Item = AmbiguousCrumb> + '_ {
-        self.iter_subcrumbs()
-            .filter(|crumb| crumb.field != crate::crumbs::AmbiguousSegmentCrumb::Head)
-    }
-}
+    if let crate::Shape::Tree(crate::Tree { type_info: crate::TreeType::Lambda, .. }) = ast.shape()
+    {
+        let mut iter = ast.iter_subcrumbs().map(|crumb| ast.get_located(crumb).unwrap());
+        let arg = iter.next().unwrap();
+        let body = iter.next().unwrap();
+        Some(LambdaInfo { arg, body })
+    } else {
+        None
+    }
+}
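The rewritten `as_lambda` above is one instance of the commit's "new analysis" approach: match the parser's generic `Tree` shape and dispatch on its `type_info`. A hedged sketch of the pattern (the `TreeType` variants appear in this diff; the function and the catch-all arms are illustrative, and `TreeType` is assumed to have further variants):

```rust
fn describe(ast: &Ast) -> &'static str {
    match ast.shape() {
        ast::Shape::Tree(tree) => match &tree.type_info {
            ast::TreeType::Lambda => "lambda",
            ast::TreeType::Documentation { .. } => "documentation comment",
            ast::TreeType::Import { .. } => "import declaration",
            ast::TreeType::ExpressionWithComment => "commented expression",
            _ => "other tree",
        },
        // Method- and operator-related code is still translated to old shapes.
        _ => "translated (non-tree) shape",
    }
}
```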
@@ -9,7 +9,6 @@ use enso_prelude::*;

 use crate::known;
 use crate::Ast;
-use crate::Crumbable;
 use crate::HasRepr;


@@ -132,26 +131,12 @@ fn preserving_macro(
 }

 /// Check if AST contains a prefix-like macro call with a given name.
-///
-/// We check for both [`known::Prefix`] and [`known::Match`], because first one is used when we
-/// modify AST using this module, and the second one can be provided by the Engine.
-///
-/// Using [`known::Match`] everywhere would be perfect, but it is extremely annoying to construct
-/// without using the parser. To construct [`known::Match`] we need to know exactly how the specific
-/// macro is represented in the parser and create a sequence of `MacroPatternMatch` segments. The
-/// parser generates a huge AST for a simple `skip foo` expression, and it is not wise to exactly
-/// repeat this AST generation here. If our generated AST is different from the one generated by the
-/// parser anyway, we would rather generate a much more simple [`known::Prefix`]. It is easier to
-/// both construct and deconstruct later.
 pub fn is_macro_call(ast: &Ast, identifier: &str) -> bool {
     if let Ok(prefix) = known::Prefix::try_from(ast) {
         let name = crate::identifier::name(&prefix.func);
         name == Some(identifier)
-    } else if let Ok(macro_match) = known::Match::try_from(ast) {
-        let first_segment = &macro_match.segs.head;
-        let name = crate::identifier::name(&first_segment.head);
-        name == Some(identifier)
     } else {
+        // TODO: Check for a [`Tree`] macro (https://github.com/enso-org/enso/issues/5572).
         false
     }
 }
@@ -160,11 +145,8 @@ pub fn is_macro_call(ast: &Ast, identifier: &str) -> bool {
 pub fn maybe_prefix_macro_body(ast: &Ast) -> Option<Ast> {
     if let Ok(prefix) = known::Prefix::try_from(ast) {
         Some(prefix.arg.clone())
-    } else if let Ok(macro_match) = known::Match::try_from(ast) {
-        let body_crumb = macro_match.iter_subcrumbs().nth(1)?;
-        let body_ast = macro_match.get(&body_crumb).ok()?;
-        Some(body_ast.clone())
     } else {
+        // TODO: Check for a [`Tree`] macro (https://github.com/enso-org/enso/issues/5572).
         None
     }
 }
@ -107,7 +107,7 @@ pub fn is_assignment(ast: &Ast) -> bool {
pub fn assignment() -> known::Opr {
    // TODO? We could cache and reuse, if we care.
    let name = predefined::ASSIGNMENT.into();
    let opr = Opr { name };
    let opr = Opr { name, right_assoc: false };
    known::Opr::new(opr, None)
}

@ -150,7 +150,10 @@ pub fn make_operator(opr: &Ast) -> Option<Operator> {

/// Describes associativity of the given operator AST.
pub fn assoc(ast: &known::Opr) -> Assoc {
    Assoc::of(&ast.name)
    match ast.right_assoc {
        true => Assoc::Right,
        false => Assoc::Left,
    }
}


@ -327,7 +330,7 @@ pub struct Chain {
    /// Subsequent operands applied to the `target`.
    pub args: Vec<ChainElement>,
    /// Operator AST. Generally all operators in the chain should be the same (except for id).
    /// It is not specified which exactly operator's in the chain this AST belongs to.
    /// It is not specified exactly which operators in the chain this AST belongs to.
    pub operator: known::Opr,
}
@ -53,66 +53,6 @@ pub const FMT_BLOCK_QUOTES: &str = "'''";



// ===============
// === Builder ===
// ===============

has_tokens!(Empty);
has_tokens!(Letter, self.char);
has_tokens!(Space, self);
has_tokens!(Text, self.str);
has_tokens!(Seq, self.first, self.second);


// =====================
// === TextBlockLine ===
// =====================

/// Not an instance of `Tokenizer`, as it needs to know parent block's offset.
impl<T: HasTokens> TextBlockLine<T> {
    fn feed_to(&self, consumer: &mut impl TokenConsumer, offset: usize) {
        for empty_line_spaces in &self.empty_lines {
            (NEWLINE, empty_line_spaces).feed_to(consumer);
        }
        (NEWLINE, offset, &self.text).feed_to(consumer);
    }
}



// =====================
// === Text Segments ===
// =====================

has_tokens!(SegmentPlain, self.value);
has_tokens!(SegmentRawEscape, BACKSLASH, self.code);
has_tokens!(SegmentExpr<T>, EXPR_QUOTE, self.value, EXPR_QUOTE);
has_tokens!(SegmentEscape, BACKSLASH, self.code);


// =================
// === RawEscape ===
// =================

has_tokens!(Unfinished);
has_tokens!(Invalid, self.str);
has_tokens!(Slash, BACKSLASH);
has_tokens!(Quote, FMT_QUOTE);
has_tokens!(RawQuote, RAW_QUOTE);


// ==============
// === Escape ===
// ==============

has_tokens!(EscapeCharacter, self.c);
has_tokens!(EscapeControl, self.name);
has_tokens!(EscapeNumber, self.digits);
has_tokens!(EscapeUnicode16, UNICODE16_INTRODUCER, self.digits);
has_tokens!(EscapeUnicode21, UNICODE21_OPENER.deref(), self.digits, UNICODE21_CLOSER.deref());
has_tokens!(EscapeUnicode32, UNICODE32_INTRODUCER, self.digits);


// =============
// === Block ===
// =============
@ -120,68 +60,20 @@ has_tokens!(EscapeUnicode32, UNICODE32_INTRODUCER, self.digits);
has_tokens!(BlockLine<T>, self.elem, self.off);


// =============
// === Macro ===
// =============

// === Macro Segments ==

has_tokens!(MacroMatchSegment<T>, self.head, self.body);
has_tokens!(MacroAmbiguousSegment<T>, self.head, self.body);


// === MacroPatternMatch subtypes ===

has_tokens!(MacroPatternMatchRawBegin);
has_tokens!(MacroPatternMatchRawEnd);
has_tokens!(MacroPatternMatchRawNothing);
has_tokens!(MacroPatternMatchRawSeq<T>, self.elem);
has_tokens!(MacroPatternMatchRawOr<T>, self.elem);
has_tokens!(MacroPatternMatchRawMany<T>, self.elem);
has_tokens!(MacroPatternMatchRawExcept<T>, self.elem);
has_tokens!(MacroPatternMatchRawBuild<T>, self.elem);
has_tokens!(MacroPatternMatchRawErr<T>, self.elem);
has_tokens!(MacroPatternMatchRawTag<T>, self.elem);
has_tokens!(MacroPatternMatchRawCls<T>, self.elem);
has_tokens!(MacroPatternMatchRawTok<T>, self.elem);
has_tokens!(MacroPatternMatchRawBlank<T>, self.elem);
has_tokens!(MacroPatternMatchRawVar<T>, self.elem);
has_tokens!(MacroPatternMatchRawCons<T>, self.elem);
has_tokens!(MacroPatternMatchRawOpr<T>, self.elem);
has_tokens!(MacroPatternMatchRawAnnotation<T>, self.elem);
has_tokens!(MacroPatternMatchRawMod<T>, self.elem);
has_tokens!(MacroPatternMatchRawNum<T>, self.elem);
has_tokens!(MacroPatternMatchRawText<T>, self.elem);
has_tokens!(MacroPatternMatchRawBlock<T>, self.elem);
has_tokens!(MacroPatternMatchRawMacro<T>, self.elem);
has_tokens!(MacroPatternMatchRawInvalid<T>, self.elem);
has_tokens!(MacroPatternMatchRawFailedMatch);


// === Switch ===

has_tokens!(Switch<T>, self.deref());


// ===============
// === Shifted ===
// ===============

has_tokens!(Shifted<T>, self.off, self.wrapped);
has_tokens!(ShiftedVec1<T>, self.head, self.tail);

// =============================================================================
// === Shape ===================================================================
// =============================================================================

// ===============
// === Invalid ===
// ===============

has_tokens!(Unrecognized, self.str);
has_tokens!(Unexpected<T>, self.stream);
has_tokens!(InvalidQuote, self.quote);
has_tokens!(InlineBlock, self.quote);


// ===================
// === Identifiers ===
@ -193,7 +85,6 @@ has_tokens!(Cons, self.name);
has_tokens!(Opr, self.name);
has_tokens!(Annotation, self.name);
has_tokens!(Mod, self.name, MOD_SUFFIX);
has_tokens!(InvalidSuffix<T>, self.elem, self.suffix);


// ==============
@ -205,55 +96,6 @@ struct NumberBase<T>(T);

has_tokens!(NumberBase<T>, self.0, NUMBER_BASE_SEPARATOR);
has_tokens!(Number, self.base.as_ref().map(NumberBase), self.int);
has_tokens!(DanglingBase, self.base, NUMBER_BASE_SEPARATOR);



// ============
// === Text ===
// ============


// === Lines ===

has_tokens!(TextLineRaw, RAW_QUOTE, self.text, RAW_QUOTE);
has_tokens!(TextLineFmt<T>, FMT_QUOTE, self.text, FMT_QUOTE);


// === TextBlockRaw ==

impl HasTokens for TextBlockRaw {
    fn feed_to(&self, consumer: &mut impl TokenConsumer) {
        (RAW_BLOCK_QUOTES, self.spaces).feed_to(consumer);
        for line in self.text.iter() {
            line.feed_to(consumer, self.offset);
        }
    }
}


// === TextBlockFmt ==

impl<T: HasTokens> HasTokens for TextBlockFmt<T> {
    fn feed_to(&self, consumer: &mut impl TokenConsumer) {
        (FMT_BLOCK_QUOTES, self.spaces).feed_to(consumer);
        for line in self.text.iter() {
            line.feed_to(consumer, self.offset);
        }
    }
}


// === TextUnclosed ==

impl<T: HasTokens> HasTokens for TextUnclosed<T> {
    fn feed_to(&self, consumer: &mut impl TokenConsumer) {
        match &self.line {
            TextLine::TextLineRaw(line) => (RAW_QUOTE, &line.text).feed_to(consumer),
            TextLine::TextLineFmt(line) => (FMT_QUOTE, &line.text).feed_to(consumer),
        }
    }
}



@ -291,7 +133,7 @@ impl<T: HasTokens> HasTokens for Module<T> {

impl<T: HasTokens> HasTokens for Block<T> {
    fn feed_to(&self, consumer: &mut impl TokenConsumer) {
        (!self.is_orphan).as_some(NEWLINE).feed_to(consumer);
        NEWLINE.feed_to(consumer);
        for empty_line_space in &self.empty_lines {
            (empty_line_space, NEWLINE).feed_to(consumer);
        }
@ -304,171 +146,26 @@ impl<T: HasTokens> HasTokens for Block<T> {

// ==============
// === Macros ===
// ==============
// ============
// === Tree ===
// ============

// === Match ==

impl<T: HasTokens> HasTokens for Match<T> {
impl<T: HasTokens> HasTokens for Tree<T> {
    fn feed_to(&self, consumer: &mut impl TokenConsumer) {
        for pat_match in &self.pfx {
            for sast in pat_match.iter() {
                // reverse the order for prefix: ast before spacing
                (&sast.wrapped, &sast.off).feed_to(consumer);
        if let Some(str) = &self.leaf_info {
            Token::Str(str).feed_to(consumer)
        } else {
            for element in &self.span_info {
                match element {
                    SpanSeed::Space(SpanSeedSpace { space }) =>
                        Token::Off(*space).feed_to(consumer),
                    SpanSeed::Token(SpanSeedToken { token }) => Token::Str(token).feed_to(consumer),
                    SpanSeed::Child(SpanSeedChild { node }) => node.feed_to(consumer),
                }
            }
        }
        self.segs.feed_to(consumer);
    }
}


// === Ambiguous ===

has_tokens!(Ambiguous<T>, self.segs);



// =====================
// === Spaceless AST ===
// =====================

spaceless_ast!(Comment);
spaceless_ast!(Documented<T>);
spaceless_ast!(Import<T>);
spaceless_ast!(Export<T>);
spaceless_ast!(JavaImport<T>);
spaceless_ast!(Mixfix<T>);
spaceless_ast!(Group<T>);
spaceless_ast!(SequenceLiteral<T>);
spaceless_ast!(TypesetLiteral<T>);
spaceless_ast!(Def<T>);
spaceless_ast!(Foreign);
spaceless_ast!(Modified<T>);

// =============
// === Tests ===
// =============

/// Tests for spaceless AST. Other AST is covered by parsing tests that verify
/// that correct lengths and text representation are generated. Only spaceless AST
/// is not returned by the parser and can't be covered in this way.
#[cfg(test)]
mod tests {
    use super::*;

    // === Comment ===

    fn make_comment() -> Shape<Ast> {
        Comment { lines: vec![] }.into()
    }

    #[test]
    #[should_panic]
    fn comment_panics_on_repr() {
        make_comment().repr();
    }

    #[test]
    #[should_panic]
    fn comment_panics_on_length() {
        make_comment().len();
    }


    // === Import ===

    fn make_import() -> Shape<Ast> {
        let path = vec![Ast::var("Target")];
        Import { path, rename: None, isAll: false, onlyNames: None, hidingNames: None }.into()
    }

    #[test]
    #[should_panic]
    fn import_panics_on_repr() {
        make_import().repr();
    }

    #[test]
    #[should_panic]
    fn import_panics_on_length() {
        make_import().len();
    }


    // === Mixfix ===

    fn make_mixfix() -> Shape<Ast> {
        Mixfix { name: vec![], args: vec![] }.into()
    }

    #[test]
    #[should_panic]
    fn mixfix_panics_on_repr() {
        make_mixfix().repr();
    }

    #[test]
    #[should_panic]
    fn mixfix_panics_on_length() {
        make_mixfix().len();
    }


    // === Group ===

    fn make_group() -> Shape<Ast> {
        Group { body: None }.into()
    }

    #[test]
    #[should_panic]
    fn group_panics_on_repr() {
        make_group().repr();
    }

    #[test]
    #[should_panic]
    fn group_panics_on_length() {
        make_group().len();
    }


    // === Def ===

    fn make_def() -> Shape<Ast> {
        Def { name: Ast::cons("Foo"), args: vec![], body: None }.into()
    }

    #[test]
    #[should_panic]
    fn def_panics_on_repr() {
        make_def().repr();
    }

    #[test]
    #[should_panic]
    fn def_panics_on_length() {
        make_def().len();
    }

    // === Foreign ===

    fn make_foreign() -> Shape<Ast> {
        Foreign { indent: 0, lang: "Python".into(), code: vec![] }.into()
    }

    #[test]
    #[should_panic]
    fn foreign_panics_on_repr() {
        make_foreign().repr();
    }

    #[test]
    #[should_panic]
    fn foreign_panics_on_length() {
        make_foreign().len();
        if let Some(str) = &self.trailing_token {
            Token::Str(str).feed_to(consumer)
        }
    }
}
@ -45,7 @@ pub fn ast_node(
    let output = quote! {
        #[derive(Clone,Eq,PartialEq,Debug)]
        #[derive(Iterator)]
        #[derive(Serialize,Deserialize)]
        #input
    };
    output.into()
@ -283,32 +282,3 @@ pub fn has_tokens(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let maker = syn::parse::<TokenDescription>(input).unwrap();
    maker.has_tokens().into()
}

/// Generates `HasTokens` instances that are just the sum of their parts.
///
/// Takes 1+ parameters:
/// * first goes the typename for which implementations are generated (can take type parameters, as
///   long as they implement `HasTokens`)
/// * then an arbitrary number (0 or more) of expressions that shall yield values implementing
///   `HasTokens`. `self` can be used in the expressions.
///
/// For example, for the invocation:
/// ```text
/// has_tokens!(SegmentExpr<T>, EXPR_QUOTE, self.value, EXPR_QUOTE);
/// ```
/// the following output is produced:
/// ```text
/// impl<T: HasTokens> HasTokens for SegmentExpr<T> {
///     fn feed_to(&self, consumer:&mut impl TokenConsumer) {
///         EXPR_QUOTE.feed(consumer);
///         self.value.feed(consumer);
///         EXPR_QUOTE.feed(consumer);
///     }
/// }
/// ```

/// Generates `HasTokens` implementations for spaceless AST that panic when used.
#[proc_macro]
pub fn spaceless_ast(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    crate::token::spaceless_ast(input)
}
@ -9,20 +9,6 @@ use syn::Token;



/// Generates `HasTokens` implementations for spaceless AST that panic when used.
pub fn spaceless_ast(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let target = syn::parse::<PathSegment>(input).unwrap();
    let ty_args = path_segment_generic_args(&target);
    let ret = quote! {
        impl<#(#ty_args),*> HasTokens for #target {
            fn feed_to(&self, consumer:&mut impl TokenConsumer) {
                panic!("HasTokens not supported for Spaceless AST!")
            }
        }
    };
    ret.into()
}

/// Inner logic for `derive_has_tokens`.
pub fn derive_for_enum(decl: &syn::DeriveInput, data: &syn::DataEnum) -> TokenStream {
    let ident = &decl.ident;
34 app/gui/language/parser-scala/Cargo.toml Normal file
@ -0,0 +1,34 @@
[package]
name = "parser-scala"
version = "0.1.0"
authors = ["Enso Team <contact@enso.org>"]
edition = "2021"
build = "build.rs"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
ast = { path = "../ast/impl" }
enso-prelude = { path = "../../../../lib/rust/prelude" }
enso-profiler = { path = "../../../../lib/rust/profiler" }
console_error_panic_hook = { workspace = true }
failure = { workspace = true }
js-sys = { workspace = true }
matches = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["unbounded_depth"] }
wasm-bindgen = { workspace = true }

[dev-dependencies]
wasm-bindgen-test = { workspace = true }

[build-dependencies]
ide-ci = { path = "../../../../build/ci_utils" }
bytes = { workspace = true }
futures = { workspace = true }
reqwest = { workspace = true }
tokio = { workspace = true }

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
websocket = "0.26.5"
29 app/gui/language/parser-scala/src/api.rs Normal file
@ -0,0 +1,29 @@
//! A module containing structures and traits used in the parser API.

use crate::prelude::*;



// ===========
// == Error ==
// ===========

/// A result of parsing code.
pub type Result<T> = std::result::Result<T, Error>;

/// An error which may be the result of parsing code.
#[derive(Debug, Fail)]
pub enum Error {
    /// Error due to inner workings of the parser.
    #[fail(display = "Internal parser error: {:?}.", _0)]
    ParsingError(String),
    /// Error related to the wrapper's communication with the parser service.
    #[fail(display = "Interop error: {}.", _0)]
    InteropError(#[cause] Box<dyn Fail>),
}

/// Wraps an arbitrary `std::error::Error` as an `InteropError`.
pub fn interop_error<T>(error: T) -> Error
where T: Fail {
    Error::InteropError(Box::new(error))
}
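A minimal usage sketch of this wrapper (illustrative only, not part of the diff; `ConnectionLost` is a hypothetical error type):

    // Hypothetical example: any type implementing `failure::Fail` can be wrapped.
    #[derive(Debug, Fail)]
    #[fail(display = "connection lost")]
    struct ConnectionLost;

    let error: Error = interop_error(ConnectionLost);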
@ -4,11 +4,6 @@ use crate::prelude::*;
use wasm_bindgen::prelude::*;

use crate::api;
use crate::api::Ast;
use crate::from_json_str_without_recursion_limit;

use ast::id_map::JsonIdMap;
use ast::IdMap;



@ -43,10 +38,6 @@ impl From<JsValue> for Error {

#[wasm_bindgen(module = "/pkg/scala-parser.js")]
extern "C" {
    #[wasm_bindgen(catch)]
    fn parse(input: String, ids: String) -> std::result::Result<String, JsValue>;
    #[wasm_bindgen(catch)]
    fn parse_with_metadata(content: String) -> std::result::Result<String, JsValue>;
    #[wasm_bindgen(catch)]
    fn doc_parser_generate_html_source(content: String) -> std::result::Result<String, JsValue>;
    #[wasm_bindgen(catch)]
@ -65,31 +56,6 @@ impl Client {
        Ok(Client {})
    }

    /// Parses Enso code with JS-based parser.
    pub fn parse(&self, program: String, ids: IdMap) -> api::Result<Ast> {
        let ast = || {
            let ids = JsonIdMap::from_id_map(&ids, &program.clone().into());
            let json_ids = serde_json::to_string(&ids)?;
            let json_ast = parse(program, json_ids)?;
            let ast = from_json_str_without_recursion_limit(&json_ast)?;
            Result::Ok(ast)
        };
        Ok(ast()?)
    }

    /// Parses Enso code with metadata.
    pub fn parse_with_metadata<M: api::Metadata>(
        &self,
        program: String,
    ) -> api::Result<api::ParsedSourceFile<M>> {
        let result = || {
            let json = &parse_with_metadata(program)?;
            let module = from_json_str_without_recursion_limit(json)?;
            Result::Ok(module)
        };
        Ok(result()?)
    }

    /// Calls JS doc parser to generate HTML from documented Enso code.
    pub fn generate_html_docs(&self, program: String) -> api::Result<String> {
        let html_code = || {
108 app/gui/language/parser-scala/src/lib.rs Normal file
@ -0,0 +1,108 @@
//! Crate wrapping the parser API in nice-to-use Rust code.
//!
//! The Parser is a library written in Scala. There are two Rust wrappers for it: one binds the
//! Scala parser compiled to WebAssembly into this crate, and the other calls a Parser running
//! remotely over WebSockets.

// === Features ===
#![feature(trait_alias)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]
// === Non-Standard Linter Configuration ===
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]


// ==============
// === Export ===
// ==============

pub mod api;



mod jsclient;
mod wsclient;

use crate::prelude::*;

use std::panic;



#[allow(missing_docs)]
pub mod prelude {
    pub use ast::traits::*;
    pub use enso_prelude::*;
    pub use enso_profiler as profiler;
    pub use enso_profiler::prelude::*;
}



// ==========================================
// === Documentation Parser and Generator ===
// ==========================================

/// Handle to a doc parser implementation.
///
/// Currently this component is implemented as a wrapper over the documentation
/// parser written in Scala. Depending on the compilation target (native or wasm)
/// it uses either the implementation provided by `wsclient` or `jsclient`.
#[derive(Clone, CloneRef, Debug, Deref, DerefMut)]
pub struct DocParser(pub Rc<RefCell<Client>>);

impl DocParser {
    /// Obtains a default doc parser implementation.
    #[cfg(not(target_arch = "wasm32"))]
    pub fn new() -> api::Result<DocParser> {
        let client = wsclient::Client::new()?;
        let doc_parser = Rc::new(RefCell::new(client));
        Ok(DocParser(doc_parser))
    }

    /// Obtains a default doc parser implementation.
    #[cfg(target_arch = "wasm32")]
    pub fn new() -> api::Result<DocParser> {
        let client = jsclient::Client::new()?;
        let doc_parser = Rc::new(RefCell::new(client));
        Ok(DocParser(doc_parser))
    }

    /// Obtains a default doc parser implementation, panicking in case of failure.
    pub fn new_or_panic() -> DocParser {
        DocParser::new().unwrap_or_else(|e| panic!("Failed to create doc parser: {e:?}"))
    }

    /// Parses a program with documentation and generates HTML code.
    /// If the program does not have any documentation, returns an empty string.
    pub fn generate_html_docs(&self, program: String) -> api::Result<String> {
        self.borrow_mut().generate_html_docs(program)
    }

    /// Parses pure documentation code and generates HTML code.
    /// Returns an empty string for an empty entry.
    pub fn generate_html_doc_pure(&self, code: String) -> api::Result<String> {
        self.borrow_mut().generate_html_doc_pure(code)
    }
}
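A minimal usage sketch of this handle (illustrative only, not part of the diff):

    // Hypothetical example: generating HTML docs for a documented snippet.
    let doc_parser = DocParser::new_or_panic();
    let html = doc_parser.generate_html_docs("## Some docs\nfoo = 1".into());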

// === Support ===

/// Websocket parser client.
/// Used as an interface for our (scala) parser.
#[cfg(not(target_arch = "wasm32"))]
type Client = wsclient::Client;
/// Javascript parser client.
/// Used as an interface for our (scala) parser.
#[cfg(target_arch = "wasm32")]
type Client = jsclient::Client;
@ -4,12 +4,7 @@ use crate::api::Error::*;
use crate::prelude::*;

use crate::api;
use crate::api::Ast;
use crate::api::Metadata;
use crate::api::ParsedSourceFile;

use ast::id_map::JsonIdMap;
use ast::IdMap;
use std::fmt::Formatter;
use websocket::stream::sync::TcpStream;
use websocket::ClientBuilder;
@ -93,24 +88,10 @@ impl From<serde_json::error::Error> for Error {
#[allow(clippy::enum_variant_names)]
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub enum Request {
    ParseRequest { program: String, ids: JsonIdMap },
    ParseRequestWithMetadata { content: String },
    DocParserGenerateHtmlSource { program: String },
    DocParserGenerateHtmlFromDoc { code: String },
}

/// All responses that Parser Service might reply with.
#[derive(Debug, serde::Deserialize)]
pub enum Response<M> {
    #[serde(bound(deserialize = "M: Metadata"))]
    Success {
        module: ParsedSourceFile<M>,
    },
    Error {
        message: String,
    },
}
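For reference, with serde's default externally tagged enum representation, these messages travel as JSON objects keyed by the variant name; a parse request would look roughly like this (illustrative values, the `ids` payload elided):

    { "ParseRequest": { "program": "main = 2 + 2", "ids": { ... } } }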

/// All responses that Doc Parser Service might reply with.
#[derive(Debug, serde::Deserialize)]
pub enum ResponseDoc {
@ -171,19 +152,6 @@ mod internal {
        Ok(())
    }

    /// Obtains a text message from the peer and deserializes it using JSON
    /// into a `Response`.
    ///
    /// Should be called exactly once after each `send_request` invocation.
    pub fn recv_response<M: Metadata>(&mut self) -> Result<Response<M>> {
        let response = self.connection.recv_message()?;
        match response {
            websocket::OwnedMessage::Text(text) =>
                crate::from_json_str_without_recursion_limit(&text).map_err(Into::into),
            _ => Err(Error::NonTextResponse(response)),
        }
    }

    /// Obtains a text message from the peer and deserializes it using JSON
    /// into a `ResponseDoc`.
    ///
@ -196,15 +164,6 @@ mod internal {
    }
}

/// Sends the given `Request` to the peer and receives a `Response`.
///
/// Both request and response are exchanged in JSON using text messages
/// over WebSocket.
pub fn rpc_call<M: Metadata>(&mut self, request: Request) -> Result<Response<M>> {
    self.send_request(request)?;
    self.recv_response()
}

/// Sends the given `Request` to the peer and receives a `ResponseDoc`.
///
/// Both request and response are exchanged in JSON using text messages
@ -237,30 +196,6 @@ impl Client {
        Ok(client)
    }

    /// Sends a request to the parser service to parse Enso code.
    pub fn parse(&mut self, program: String, ids: IdMap) -> api::Result<Ast> {
        let ids = JsonIdMap::from_id_map(&ids, &program.as_str().into());
        let request = Request::ParseRequest { program, ids };
        let response = self.rpc_call::<serde_json::Value>(request)?;
        match response {
            Response::Success { module } => Ok(module.ast.into()),
            Response::Error { message } => Err(ParsingError(message)),
        }
    }

    /// Sends a request to the parser service to parse code with metadata.
    pub fn parse_with_metadata<M: Metadata>(
        &mut self,
        program: String,
    ) -> api::Result<ParsedSourceFile<M>> {
        let request = Request::ParseRequestWithMetadata { content: program };
        let response = self.rpc_call(request)?;
        match response {
            Response::Success { module } => Ok(module),
            Response::Error { message } => Err(ParsingError(message)),
        }
    }

    /// Sends a request to the parser service to generate HTML code from documented Enso code.
    pub fn generate_html_docs(&mut self, program: String) -> api::Result<String> {
        let request = Request::DocParserGenerateHtmlSource { program };
@ -1,37 +1,19 @@
[package]
name = "parser-scala"
name = "parser"
version = "0.1.0"
authors = ["Enso Team <contact@enso.org>"]
edition = "2021"
build = "build.rs"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
ast = { path = "../ast/impl" }
enso-data-structures = { path = "../../../../lib/rust/data-structures" }
enso-parser = { path = "../../../../lib/rust/parser" }
enso-prelude = { path = "../../../../lib/rust/prelude" }
enso-profiler = { path = "../../../../lib/rust/profiler" }
enso-text = { path = "../../../../lib/rust/text" }
console_error_panic_hook = { workspace = true }
failure = { workspace = true }
js-sys = { workspace = true }
matches = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", features = ["unbounded_depth"] }
uuid = { version = "0.8", features = ["serde", "v5", "wasm-bindgen"] }
wasm-bindgen = { workspace = true }

[dev-dependencies]
wasm-bindgen-test = { workspace = true }

[build-dependencies]
ide-ci = { path = "../../../../build/ci_utils" }
bytes = { workspace = true }
futures = { workspace = true }
reqwest = { workspace = true }
tokio = { workspace = true }

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
websocket = "0.26.5"
enso-text = { path = "../../../../lib/rust/text" }
failure = { version = "0.1" }
uuid = { version = "0.8" }
@ -1,8 +1,7 @@
//! A module containing structures and traits used in the parser API.

use crate::prelude::*;
use enso_prelude::*;
use enso_text::index::*;
use enso_text::traits::*;
use enso_text::unit::*;

use ast::id_map::JsonIdMap;
@ -10,16 +9,66 @@ use ast::HasIdMap;
use ast::HasRepr;
use ast::IdMap;
use enso_text::Range;
use serde::de::DeserializeOwned;
use serde::Deserialize;
use serde::Serialize;


// ==============
// === Export ===
// ==============

pub use ast::Ast;
/// A parsed file containing source code and attached metadata.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ParsedSourceFile<M> {
    /// Ast representation.
    pub ast: ast::known::Module,
    /// Raw metadata in json.
    pub metadata: M,
}

const NEWLINES_BEFORE_TAG: usize = 3;
const METADATA_TAG: &str = "#### METADATA ####";

impl<M: Metadata> ParsedSourceFile<M> {
    /// Serialize to the SourceFile structure.
    pub fn serialize(&self) -> std::result::Result<SourceFile, serde_json::Error> {
        fn to_json_single_line(
            val: &impl serde::Serialize,
        ) -> std::result::Result<String, serde_json::Error> {
            let json = serde_json::to_string(val)?;
            let line = json.chars().filter(|c| *c != '\n' && *c != '\r').collect();
            Ok(line)
        }

        let code = self.ast.repr().into();
        let before_tag = "\n".repeat(NEWLINES_BEFORE_TAG);
        let before_idmap = "\n";
        let json_id_map = JsonIdMap::from_id_map(&self.ast.id_map(), &code);
        let id_map = to_json_single_line(&json_id_map)?;
        let before_metadata = "\n";
        let metadata = to_json_single_line(&self.metadata)?;

        let id_map_start =
            code.len().value + before_tag.len() + METADATA_TAG.len() + before_idmap.len();
        let id_map_start_bytes = Byte::from(id_map_start);
        let metadata_start = id_map_start + id_map.len() + before_metadata.len();
        let metadata_start_bytes = Byte::from(metadata_start);
        let content = format!(
            "{code}{before_tag}{METADATA_TAG}{before_idmap}{id_map}{before_metadata}{metadata}"
        );
        Ok(SourceFile {
            content,
            code: (0.byte()..code.len().to_byte()).into(),
            id_map: (id_map_start_bytes..id_map_start_bytes + ByteDiff::from(id_map.len())).into(),
            metadata: (metadata_start_bytes..metadata_start_bytes + ByteDiff::from(metadata.len()))
                .into(),
        })
    }
}
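For intuition, the on-disk layout produced by `serialize` looks like this (illustrative sketch; the test near the end of this file exercises a concrete case):

    <module code>
    <NEWLINES_BEFORE_TAG newlines>
    #### METADATA ####
    <id map as single-line JSON>
    <metadata as single-line JSON>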

impl<M: Metadata> Display for ParsedSourceFile<M> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.serialize() {
            Ok(serialized) => write!(f, "{serialized}"),
            Err(_) => write!(f, "[UNREPRESENTABLE SOURCE FILE]"),
        }
    }
}



@ -48,7 +97,9 @@ pub trait PruneUnusedIds {
}

/// Things that are metadata.
pub trait Metadata: Default + Serialize + DeserializeOwned + PruneUnusedIds {}
pub trait Metadata:
    Default + serde::de::DeserializeOwned + serde::Serialize + PruneUnusedIds {
}

/// Raw metadata.
impl PruneUnusedIds for serde_json::Value {}
@ -164,98 +215,6 @@ impl SourceFile {
}


// === Parsed Source File ===

/// Parsed file / module with metadata.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq)]
#[serde(bound = "M: Metadata")]
#[serde(from = "ParsedSourceFileWithUnusedIds<M>")]
pub struct ParsedSourceFile<M> {
    /// Ast representation.
    pub ast: ast::known::Module,
    /// Raw metadata in json.
    pub metadata: M,
}

/// Helper for deserialization. `metadata` is filled with the `default()` value if not present or
/// invalid.
///
/// [`PruneUnusedIds::prune_unused_ids`] is called on deserialization.
#[derive(Deserialize)]
struct ParsedSourceFileWithUnusedIds<Metadata> {
    ast: ast::known::Module,
    #[serde(bound(deserialize = "Metadata:Default+DeserializeOwned"))]
    #[serde(deserialize_with = "enso_prelude::deserialize_or_default")]
    metadata: Metadata,
}

impl<M: Metadata> From<ParsedSourceFileWithUnusedIds<M>> for ParsedSourceFile<M> {
    fn from(file: ParsedSourceFileWithUnusedIds<M>) -> Self {
        let ast = file.ast;
        let mut metadata = file.metadata;
        metadata.prune_unused_ids(&ast.id_map());
        Self { ast, metadata }
    }
}

impl<M: Metadata> TryFrom<&ParsedSourceFile<M>> for String {
    type Error = serde_json::Error;
    fn try_from(val: &ParsedSourceFile<M>) -> std::result::Result<String, Self::Error> {
        Ok(val.serialize()?.content)
    }
}

impl<M: Metadata> Display for ParsedSourceFile<M> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.serialize() {
            Ok(serialized) => write!(f, "{serialized}"),
            Err(_) => write!(f, "[NOT REPRESENTABLE SOURCE FILE]"),
        }
    }
}

// === Parsed Source File Serialization ===

const NEWLINES_BEFORE_TAG: usize = 3;

const METADATA_TAG: &str = "#### METADATA ####";

fn to_json_single_line(val: &impl Serialize) -> std::result::Result<String, serde_json::Error> {
    let json = serde_json::to_string(val)?;
    let line = json.chars().filter(|c| *c != '\n' && *c != '\r').collect();
    Ok(line)
}

impl<M: Metadata> ParsedSourceFile<M> {
    /// Serialize to the SourceFile structure.
    pub fn serialize(&self) -> std::result::Result<SourceFile, serde_json::Error> {
        let code = self.ast.repr().into();
        let before_tag = "\n".repeat(NEWLINES_BEFORE_TAG);
        let before_idmap = "\n";
        let json_id_map = JsonIdMap::from_id_map(&self.ast.id_map(), &code);
        let id_map = to_json_single_line(&json_id_map)?;
        let before_metadata = "\n";
        let metadata = to_json_single_line(&self.metadata)?;

        let id_map_start =
            code.len().value + before_tag.len() + METADATA_TAG.len() + before_idmap.len();
        let id_map_start_bytes = Byte::from(id_map_start);
        let metadata_start = id_map_start + id_map.len() + before_metadata.len();
        let metadata_start_bytes = Byte::from(metadata_start);
        Ok(SourceFile {
            content: format!(
                "{code}{before_tag}{METADATA_TAG}{before_idmap}{id_map}{before_metadata}{metadata}"
            ),
            code: (0.byte()..code.len().to_byte()).into(),
            id_map: (id_map_start_bytes..id_map_start_bytes + ByteDiff::from(id_map.len()))
                .into(),
            metadata: (metadata_start_bytes..metadata_start_bytes + ByteDiff::from(metadata.len()))
                .into(),
        })
    }
}



// ===========
// == Error ==
@ -293,55 +252,3 @@ pub fn interop_error<T>(error: T) -> Error
where T: Fail {
    Error::InteropError(Box::new(error))
}



// =============
// === Tests ===
// =============

#[cfg(test)]
mod test {
    use super::*;

    #[derive(Clone, Debug, Default, Deserialize, Serialize)]
    struct Metadata {
        foo: usize,
    }

    impl PruneUnusedIds for Metadata {}
    impl crate::api::Metadata for Metadata {}

    #[test]
    fn serializing_parsed_source_file() {
        let main = ast::Ast::var("main");
        let node = ast::Ast::infix_var("2", "+", "2");
        let infix = ast::Ast::infix(main, "=", node);
        let ast: ast::known::Module = ast::Ast::one_line_module(infix).try_into().unwrap();
        let repr = ast.repr().into();
        let metadata = Metadata { foo: 321 };
        let source = ParsedSourceFile { ast, metadata };
        let serialized = source.serialize().unwrap();

        let expected_json_id_map = JsonIdMap::from_id_map(&source.ast.id_map(), &repr);
        let expected_id_map = to_json_single_line(&expected_json_id_map).unwrap();
        let expected_metadata = to_json_single_line(&source.metadata).unwrap();
        let expected_content = format!(
            r#"main = 2 + 2


#### METADATA ####
{expected_id_map}
{expected_metadata}"#
        );

        assert_eq!(serialized.content, expected_content);
        assert_eq!(serialized.code_slice(), "main = 2 + 2");
        assert_eq!(serialized.id_map_slice(), expected_id_map.as_str());
        assert_eq!(serialized.metadata_slice(), expected_metadata.as_str());

        // Check that SourceFile round-trips.
        let source_file = SourceFile::new(serialized.content.clone());
        assert_eq!(source_file, serialized);
    }
}

@ -1,49 +0,0 @@
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]

use enso_prelude::*;



/// Simple interactive tester - calls parser with its argument (or a
/// hardcoded default) and prints the result, then calls doc parser
/// and prints the HTML code or an error message.
fn main() {
    let default_input = String::from("import Foo.Bar\nfoo = a + 2");
    let program = std::env::args().nth(1).unwrap_or(default_input);
    debug!("Will parse: {}", program);

    let parser = parser_scala::Parser::new_or_panic();
    let output = parser.parse(program, default());
    match output {
        Ok(result) => debug!("Parser responded with: {result:?}"),
        Err(e) => debug!("Failed to obtain a response: {e:?}"),
    }


    let default_input = String::from("##\n DEPRECATED\n Foo bar baz\ntype Foo\n type Bar");
    let program = std::env::args().nth(1).unwrap_or(default_input);
    debug!("Will parse: {}", program);

    let parser = parser_scala::DocParser::new_or_panic();
    let output = parser.generate_html_docs(program);
    match output {
        Ok(result) => debug!("Doc parser responded with: {result:?}"),
        Err(e) => debug!("Failed to obtain a response: {e:?}"),
    }


    let default_input = String::from("Computes the _logical_ conjunction of *two* booleans");
    let program = std::env::args().nth(1).unwrap_or(default_input);
    debug!("Will parse: {}", program);

    let parser = parser_scala::DocParser::new_or_panic();
    let output = parser.generate_html_doc_pure(program);
    match output {
        Ok(result) => debug!("Doc parser responded with: {result:?}"),
        Err(e) => debug!("Failed to obtain a response: {e:?}"),
    }
}
@ -1,11 +1,10 @@
//! Crate wrapping the parser API in nice-to-use Rust code.
//!
//! The Parser is a library written in Scala. There are two Rust wrappers for it: one binds the
//! Scala parser compiled to WebAssembly into this crate, and the other calls a Parser running
//! remotely over WebSockets.
//! [`Parser`] adapts a [`enso_syntax::Parser`] to produce the [`ast::AST`]/[`span_tree`]
//! representation used by the Graph Editor.

// === Features ===
#![feature(trait_alias)]
#![feature(extend_one)]
#![feature(let_chains)]
#![feature(if_let_guard)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
@ -13,12 +12,19 @@
#![allow(clippy::let_and_return)]
// === Non-Standard Linter Configuration ===
#![warn(missing_docs)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]

use enso_prelude::*;
use enso_profiler::prelude::*;

use ast::prelude::FallibleResult;
use ast::HasIdMap;
use ast::IdMap;
use enso_profiler as profiler;



mod translation;

// ==============
@ -29,126 +35,113 @@ pub mod api;



mod jsclient;
pub mod test_utils;
mod wsclient;

use crate::prelude::*;

use ast::Ast;
use ast::BlockLine;
use ast::IdMap;
use std::panic;



#[allow(missing_docs)]
pub mod prelude {
    pub use ast::traits::*;
    pub use enso_prelude::*;
    pub use enso_profiler as profiler;
    pub use enso_profiler::prelude::*;
}



// ==============
// === Parser ===
// ==============

/// Websocket parser client.
/// Used as an interface for our (scala) parser.
#[cfg(not(target_arch = "wasm32"))]
type Client = wsclient::Client;
/// Javascript parser client.
/// Used as an interface for our (scala) parser.
#[cfg(target_arch = "wasm32")]
type Client = jsclient::Client;
/// Parses Enso syntax.
#[derive(Debug, Default, Clone, CloneRef)]
pub struct Parser {
    parser: Rc<enso_parser::Parser>,
}

/// Handle to a parser implementation.
///
/// Currently this component is implemented as a wrapper over parser written
/// in Scala. Depending on compilation target (native or wasm) it uses either
/// implementation provided by `wsclient` or `jsclient`.
#[derive(Clone, CloneRef, Debug, Deref, DerefMut)]
pub struct Parser(pub Rc<RefCell<Client>>);

// === Core methods provided by the underlying parser ===

impl Parser {
    /// Obtains a default parser implementation.
    #[cfg(not(target_arch = "wasm32"))]
    pub fn new() -> api::Result<Parser> {
        let client = wsclient::Client::new()?;
        let parser = Rc::new(RefCell::new(client));
        Ok(Parser(parser))
    /// Create a new parser.
    pub fn new() -> Self {
        let parser = Rc::new(enso_parser::Parser::new());
        Self { parser }
    }

    /// Obtains a default parser implementation.
    #[cfg(target_arch = "wasm32")]
    pub fn new() -> api::Result<Parser> {
        let client = jsclient::Client::new()?;
        let parser = Rc::new(RefCell::new(client));
        Ok(Parser(parser))
    /// Parse the given source code with the specified ID map.
    #[profile(Task)]
    pub fn parse(&self, program: impl Str, ids: IdMap) -> ast::Ast {
        let tree = self.parser.run(program.as_ref());
        let ids = ids
            .vec
            .into_iter()
            .map(|(range, id)| ((range.start.value, range.end.value), id))
            .collect();
        translation::tree_to_ast(&tree, ids)
    }

    /// Obtains a default parser implementation, panicking in case of failure.
    pub fn new_or_panic() -> Parser {
        Parser::new().unwrap_or_else(|e| panic!("Failed to create a parser: {e:?}"))
    }

    /// Parse program.
    pub fn parse(&self, program: String, ids: IdMap) -> api::Result<Ast> {
        self.borrow_mut().parse(program, ids)
    }

    /// Parse contents of the program source file, where program code may be followed by idmap and
    /// metadata.
    ///
    /// If metadata deserialization fails, the error is ignored and the default value for metadata
    /// is used. Other errors are returned through `Result`.
    #[profile(Detail)]
    /// Parse the given source code, using metadata (including ID map) found in the input string.
    #[profile(Task)]
    pub fn parse_with_metadata<M: api::Metadata>(
        &self,
        program: String,
    ) -> api::Result<api::ParsedSourceFile<M>> {
        self.borrow_mut().parse_with_metadata(program)
        program: impl Str,
    ) -> api::ParsedSourceFile<M> {
        let (code, meta) = enso_parser::metadata::extract(program.as_ref());
        if meta.is_none() {
            info!("parse_with_metadata: No metadata found.");
        }
        let meta_lines = meta.and_then(|meta| meta.split_once('\n'));
        if meta.is_some() && meta_lines.is_none() {
            warn!("parse_with_metadata: Expected two lines of metadata.");
        }
        let ids = meta_lines.map(|lines| lines.0);
        let application_metadata = meta_lines.map(|lines| lines.1);
        let ids = enso_parser::metadata::parse_metadata(ids.unwrap_or_default());
        if ids.is_none() {
            warn!("parse_with_metadata: Failed to parse ID map.");
        }
        let ids = ids
            .unwrap_or_default()
            .iter()
            .map(|((start, len), id)| ((*start, start + len), uuid::Uuid::from_u128(id.as_u128())))
            .collect();
        let tree = self.parser.run(code);
        let metadata = application_metadata.and_then(|meta| serde_json::from_str(meta).ok());
        if application_metadata.is_some() && metadata.is_none() {
            warn!("parse_with_metadata: Failed to deserialize metadata.");
        }
        let ast = translation::tree_to_ast(&tree, ids);
        let id_map = ast.id_map();
        let ast = ast::known::Module::try_from(ast).unwrap();
        let mut metadata: M = metadata.unwrap_or_default();
        metadata.prune_unused_ids(&id_map);
        api::ParsedSourceFile { ast, metadata }
    }
}
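A minimal usage sketch of the new entry points (illustrative only, not part of the diff; `MyMetadata` is a hypothetical type implementing `api::Metadata`):

    // Hypothetical example of the new, infallible metadata-aware parse.
    let parser = Parser::new();
    let parsed: api::ParsedSourceFile<MyMetadata> =
        parser.parse_with_metadata("main = 2 + 2".to_string());
    let ast = parser.parse("main = 2 + 2", default());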

/// Parse program into module.

// === Convenience methods ===

impl Parser {
    /// Parse the given source code as a module, and return a [`ast::known::Module`].
    pub fn parse_module(&self, program: impl Str, ids: IdMap) -> api::Result<ast::known::Module> {
        let ast = self.parse(program.into(), ids)?;
        let ast = self.parse(program.as_ref(), ids);
        ast::known::Module::try_from(ast).map_err(|_| api::Error::NonModuleRoot)
    }

    /// Program is expected to be single non-empty line module. The line's AST is
    /// returned. The program is parsed with empty IdMap.
    #[profile(Debug)]
    pub fn parse_line_ast(&self, program: impl Str) -> FallibleResult<Ast> {
        self.parse_line_with_id_map(program, default()).map(|line| line.elem)
    /// Parse the given line of source code, and return just the [`ast::Ast`].
    pub fn parse_line_ast(&self, program: impl Str) -> FallibleResult<ast::Ast> {
        self.parse_line(program).map(|line| line.elem)
    }

    /// Program is expected to be single non-empty line module. The line's AST is
    /// returned. The program is parsed with empty IdMap.
    pub fn parse_line(&self, program: impl Str) -> FallibleResult<BlockLine<Ast>> {
    /// Parse the given line of source code.
    pub fn parse_line(&self, program: impl Str) -> FallibleResult<ast::BlockLine<ast::Ast>> {
        self.parse_line_with_id_map(program, default())
    }

    /// Program is expected to be single non-empty line module. The line's AST is returned.
    /// Parse the given line of source code, attaching the given IDs.
    pub fn parse_line_ast_with_id_map(
        &self,
        program: impl Str,
        id_map: IdMap,
    ) -> FallibleResult<Ast> {
    ) -> FallibleResult<ast::Ast> {
        self.parse_line_with_id_map(program, id_map).map(|line| line.elem)
    }

    /// Program is expected to be single non-empty line module. Return the parsed line.
    pub fn parse_line_with_id_map(
    fn parse_line_with_id_map(
        &self,
        program: impl Str,
        id_map: IdMap,
    ) -> FallibleResult<BlockLine<Ast>> {
    ) -> FallibleResult<ast::BlockLine<ast::Ast>> {
        let module = self.parse_module(program, id_map)?;

        let mut lines =
            module.lines.clone().into_iter().filter_map(|line| line.map(|elem| elem).transpose());
        if let Some(first_non_empty_line) = lines.next() {
@ -163,63 +156,68 @@ impl Parser {
        }
    }
}

/// Deserialize value from json text.
///
/// Unlike `serde_json::from_str` it runs with recursion limit disabled, allowing deserialization
/// of deeply nested ASTs.
pub fn from_json_str_without_recursion_limit<'de, Value: serde::Deserialize<'de>>(
    json_text: &'de str,
) -> Result<Value, serde_json::Error> {
    let mut de = serde_json::Deserializer::from_str(json_text);
    de.disable_recursion_limit();
    Value::deserialize(&mut de)
}
#[cfg(test)]
mod tests {
    use super::*;
    use ast::HasRepr;



// ==========================================
// === Documentation Parser and Generator ===
// ==========================================

/// Handle to a doc parser implementation.
///
/// Currently this component is implemented as a wrapper over documentation
/// parser written in Scala. Depending on compilation target (native or wasm)
/// it uses either implementation provided by `wsclient` or `jsclient`.
#[derive(Clone, CloneRef, Debug, Deref, DerefMut)]
pub struct DocParser(pub Rc<RefCell<Client>>);

impl DocParser {
    /// Obtains a default doc parser implementation.
    #[cfg(not(target_arch = "wasm32"))]
    pub fn new() -> api::Result<DocParser> {
        let client = wsclient::Client::new()?;
        let doc_parser = Rc::new(RefCell::new(client));
        Ok(DocParser(doc_parser))
    #[test]
    fn test_group_repr() {
        let code = "bar (Foo (a b))";
        let ast = Parser::new().parse_line_ast(code).unwrap();
        assert_eq!(ast.repr(), code);
    }

    /// Obtains a default doc parser implementation.
    #[cfg(target_arch = "wasm32")]
    pub fn new() -> api::Result<DocParser> {
        let client = jsclient::Client::new()?;
        let doc_parser = Rc::new(RefCell::new(client));
        Ok(DocParser(doc_parser))
    #[test]
    fn test_text_repr() {
        let code = "operator17 = operator16.order_by (Sort_Column.Name 'Orders Value' Sort_Direction.Descending)";
        let ast = Parser::new().parse_line_ast(code).unwrap();
        assert_eq!(ast.repr(), code);
    }

    /// Obtains a default doc parser implementation, panicking in case of failure.
    pub fn new_or_panic() -> DocParser {
        DocParser::new().unwrap_or_else(|e| panic!("Failed to create doc parser: {e:?}"))
    #[test]
    fn test_orders_repr() {
        let code = r#"
from Standard.Base import all
from Standard.Table import all
import Standard.Visualization
import Standard.Examples

main =
    ## The file contains three different sheets relating to operations of an
       online store.
    operator2 = enso_project.data / 'store_data.xlsx'
    ## Read the customers table.
    operator3 = operator2.read (Excel (Worksheet 'Customers'))
    ## Read the products table.
    operator4 = operator2.read (Excel (Worksheet 'Items'))
    ## Read the orders history.
    operator5 = operator2.read (Excel (Worksheet 'Orders'))
    ## Join the item data to the order history, to get information on item
       prices in the orders table.
    operator8 = operator5.join operator4 Join_Kind.Inner ['Item ID']
    operator1 = operator8.at 'Unit Price'
    operator9 = operator8.at 'Quantity'
    ## Multiply item prices and counts to get total order value.
    product1 = operator1 * operator9
    operator10 = operator8.set 'Order Value' product1
    ## Group all orders by the Customer ID, to compute the total value of orders
       placed by each client.
    operator11 = operator10.aggregate [Aggregate_Column.Group_By 'Customer ID', Aggregate_Column.Sum 'Order Value' 'Orders Value']
    ## Join the customer data into orders table, to include names in the final
       ranking.
    operator16 = operator3.join operator11 Join_Kind.Inner ["Customer ID"]
    ## Sort the customers by their lifetime value, with the most valuable
       customers at the start of the table.
    operator17 = operator16.order_by (Sort_Column.Name 'Orders Value' Sort_Direction.Descending)
"#;
        let ast = Parser::new().parse_module(code, default()).unwrap();
        assert_eq!(ast.repr(), code);
    }

    /// Parses program with documentation and generates HTML code.
    /// If the program does not have any documentation will return empty string.
    pub fn generate_html_docs(&self, program: String) -> api::Result<String> {
        self.borrow_mut().generate_html_docs(program)
    }

    /// Parses pure documentation code and generates HTML code.
    /// Will return empty string for empty entry.
    pub fn generate_html_doc_pure(&self, code: String) -> api::Result<String> {
        self.borrow_mut().generate_html_doc_pure(code)
    #[test]
    fn test_as_lambda() {
        let ast = Parser::new().parse_line_ast("a->4").unwrap();
        assert!(ast::macros::as_lambda(&ast).is_some(), "{ast:?}");
    }
}
998 app/gui/language/parser/src/translation.rs Normal file
@ -0,0 +1,998 @@
use enso_prelude::*;
use enso_profiler::prelude::*;

use ast::Ast;
use enso_parser::syntax;
use enso_parser::syntax::tree;
use enso_parser::syntax::Tree;
use enso_profiler as profiler;
use std::collections::BTreeMap;



/// Enable extra log messages and assertions.
const DEBUG: bool = false;



// =======================
// === Translation API ===
// =======================

/// Translates an [`AST`] from the [`Tree`] representation produced by [`enso_parser`] to the
/// [`Ast`] representation used by the GUI (see [`crate`] documentation for high-level overview).
/// The returned tree will contain IDs from the given map.
#[profile(Detail)]
pub fn tree_to_ast(mut tree: &Tree, ids: BTreeMap<(usize, usize), uuid::Uuid>) -> Ast {
    use ast::HasRepr;
    let mut context = Translate { ids, ..Default::default() };
    let ast = loop {
        match &*tree.variant {
            tree::Variant::BodyBlock(block) => break context.translate_module(block),
            tree::Variant::Invalid(tree::Invalid { ast, error }) => {
                warn!("Parser reports invalid module: {}", error.message);
                tree = ast
            }
            _ => unreachable!("enso_parser always returns a tree with a BodyBlock as root."),
        }
    };
    if DEBUG {
        debug_assert_eq!(ast.repr(), tree.code(), "Ast should represent same code as Tree.");
        if !context.ids.is_empty() || !context.ids_missed.is_empty() {
            warn!(
                "ids not matched: {:?}\nids missed: {:?}\nids assigned: {:?}",
                &context.ids, &context.ids_missed, &context.ids_assigned
            );
        }
    }
    ast
}
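A minimal usage sketch of this translation entry point (illustrative only, not part of the diff):

    // Hypothetical example: translate a freshly parsed tree with no pre-existing IDs.
    let tree = enso_parser::Parser::new().run("main = 2 + 2");
    let ast = tree_to_ast(&tree, std::collections::BTreeMap::new());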


// === Implementation ===

#[derive(Debug, Default)]
struct Translate {
    /// The offset, in bytes, from the beginning of the input source code. This must be tracked
    /// during [`Tree`] traversal because [`Tree`] doesn't have absolute source references, only
    /// token lengths.
    offset: usize,
    /// IDs to associate with byte ranges of the source code.
    ids: BTreeMap<(usize, usize), uuid::Uuid>,
    /// The [`AstBuilder`] interface supports associating IDs with byte ranges; however, it's
    /// important to ensure that the byte ranges don't include any leading whitespace. This would
    /// be a difficult invariant to maintain through careful usage of [`AstBuilder`]; instead,
    /// we record where whitespace occurs in [`space_after`], and use that to adjust the byte
    /// ranges.
    space_after: BTreeMap<usize, usize>,

    // === Diagnostic information used when [`DEBUG`] is enabled ===
    /// IDs that were in [`ids`], and now have been successfully attached to [`Ast`] nodes.
    ids_assigned: Vec<((usize, usize), uuid::Uuid)>,
    /// Byte ranges of [`Ast`] nodes that we didn't find any IDs for.
    ids_missed: Vec<(usize, usize)>,
}

impl Translate {
    /// This must be called at the beginning of each [`Tree`], as they are processed in depth-first
    /// order. It updates the internal counter to include the leading whitespace bytes, and returns
    /// the visible width (indent) of the leading space.
    fn visit_space(&mut self, span: &enso_parser::source::Span) -> usize {
        let space = span.left_offset.code.repr.len();
        self.space_after.insert(self.offset, space);
        self.offset += space;
        span.left_offset.visible.width_in_spaces
    }

    /// This must be called at the beginning of each [`Token`], as they are processed in depth-first
    /// order. It updates the internal counter for the token's bytes, and returns its contents.
    fn visit_token<T: Copy>(&mut self, token: &syntax::Token<T>) -> WithInitialSpace<String> {
        self.visit_token_ref(syntax::token::Ref::<T>::from(token))
    }

    /// This must be called at the beginning of each [`Token`], as they are processed in depth-first
    /// order. It updates the internal counter for the token's bytes, and returns its contents.
    fn visit_token_ref<T>(&mut self, token: syntax::token::Ref<T>) -> WithInitialSpace<String> {
        let space = token.left_offset.visible.width_in_spaces;
        let body = token.code.to_string();
        self.space_after.insert(self.offset, space);
        self.offset += token.left_offset.code.repr.len();
        self.offset += token.code.repr.len();
        WithInitialSpace { space, body }
    }
}
|
||||
|
||||
impl Translate {
|
||||
/// Translate a [`Tree`].
|
||||
/// The returned [`Ast`] can be [`None`] if an empty block is encountered.
|
||||
fn translate(&mut self, tree: &Tree) -> WithInitialSpace<Option<Ast>> {
|
||||
let space = self.visit_space(&tree.span);
|
||||
let body = self.translate_expression_body(tree);
|
||||
WithInitialSpace { space, body }
|
||||
}
|
||||
|
||||
/// Translate a [`Tree`], except for the leading space.
|
||||
/// This can return [`None`] if an empty block is encountered.
|
||||
fn translate_expression_body(&mut self, tree: &Tree) -> Option<Ast> {
|
||||
let builder = self.start_ast();
|
||||
Some(match &*tree.variant {
|
||||
tree::Variant::BodyBlock(block) => {
|
||||
let block = self.translate_block(&block.statements)?.expect_unspaced();
|
||||
self.finish_ast(block, builder)
|
||||
}
|
||||
tree::Variant::Ident(tree::Ident { token }) => {
|
||||
let name = self.visit_token(token).expect_unspaced();
|
||||
match token.is_type {
|
||||
true => self.finish_ast(ast::Cons { name }, builder),
|
||||
false => self.finish_ast(ast::Var { name }, builder),
|
||||
}
|
||||
}
|
||||
tree::Variant::Number(tree::Number { base, integer, fractional_digits }) => {
|
||||
let base = base.as_ref().map(|base| self.visit_token(base).expect_unspaced());
|
||||
let mut int = integer
|
||||
.as_ref()
|
||||
.map(|integer| self.visit_token(integer).expect_unspaced())
|
||||
.unwrap_or_default();
|
||||
if let Some(tree::FractionalDigits { dot, digits }) = fractional_digits {
|
||||
let dot = self.visit_token(dot).expect_unspaced();
|
||||
let digits = self.visit_token(digits).expect_unspaced();
|
||||
int = format!("{int}{dot}{digits}");
|
||||
}
|
||||
self.finish_ast(ast::Number { base, int }, builder)
|
||||
}
|
||||
tree::Variant::App(tree::App { func, arg }) => {
|
||||
let func = self.translate(func);
|
||||
let arg = self.translate(arg);
|
||||
let app = maybe_prefix(func, arg).expect_unspaced()?;
|
||||
self.finish_ast(app, builder)
|
||||
}
|
||||
tree::Variant::OprApp(tree::OprApp { lhs: Some(_), opr: Ok(opr), rhs: Some(_) })
|
||||
if opr.properties.is_arrow() =>
|
||||
{
|
||||
let ast = ast::Tree::lambda(self.translate_items(tree));
|
||||
self.finish_ast(ast, builder)
|
||||
}
|
||||
tree::Variant::OprApp(tree::OprApp { lhs, opr, rhs }) => {
|
||||
let larg = lhs.as_ref().map(|a| self.translate(a)).unwrap_or_default();
|
||||
let opr = self.translate_operators(opr);
|
||||
let rarg = rhs.as_ref().map(|a| self.translate(a)).unwrap_or_default();
|
||||
let opr_app = infix(larg, opr, rarg).expect_unspaced();
|
||||
self.finish_ast(opr_app, builder)
|
||||
}
|
||||
tree::Variant::OprSectionBoundary(tree::OprSectionBoundary { ast, .. }) =>
|
||||
self.translate(ast).expect_unspaced()?,
|
||||
tree::Variant::Function(func) => {
|
||||
let func = self.translate_function(func);
|
||||
self.finish_ast(func, builder)
|
||||
}
|
||||
tree::Variant::ForeignFunction(func) => {
|
||||
let func = self.translate_foreign_function(func);
|
||||
self.finish_ast(func, builder)
|
||||
}
|
||||
tree::Variant::UnaryOprApp(tree::UnaryOprApp { opr, rhs }) => {
|
||||
let opr = self.translate_operator(opr);
|
||||
if let Some(arg) = rhs {
|
||||
let non_block_operand = "Unary operator cannot be applied to an (empty) block.";
|
||||
let arg = self.translate(arg).expect(non_block_operand);
|
||||
let section = section_right(opr, arg).expect_unspaced();
|
||||
self.finish_ast(section, builder)
|
||||
} else {
|
||||
let opr = opr.expect_unspaced();
|
||||
self.finish_ast(ast::SectionSides { opr }, builder)
|
||||
}
|
||||
}
|
||||
tree::Variant::Assignment(tree::Assignment { pattern, equals, expr }) =>
|
||||
self.opr_app(pattern, equals, expr).expect_unspaced(),
|
||||
tree::Variant::OperatorBlockApplication(app) => {
|
||||
let tree::OperatorBlockApplication { lhs, expressions, excess } = app;
|
||||
let func = lhs.as_ref().map(|lhs| self.translate(lhs)).unwrap_or_default();
|
||||
let block = self.translate_operator_block(expressions, excess);
|
||||
let app = maybe_prefix(func, block).expect_unspaced()?;
|
||||
self.finish_ast(app, builder)
|
||||
}
|
||||
tree::Variant::TemplateFunction(tree::TemplateFunction { ast, .. }) =>
|
||||
self.translate(ast).expect_unspaced()?,
|
||||
tree::Variant::Wildcard(tree::Wildcard { token, .. }) => {
|
||||
self.visit_token(token).expect_unspaced();
|
||||
self.finish_ast(ast::Blank {}, builder)
|
||||
}
|
||||
tree::Variant::ArgumentBlockApplication(app) => {
|
||||
let tree::ArgumentBlockApplication { lhs, arguments } = app;
|
||||
let func = lhs.as_ref().map(|lhs| self.translate(lhs)).unwrap_or_default();
|
||||
let arg = self
|
||||
.translate_block(arguments)
|
||||
.map(|arg| arg.map(|arg| Some(Ast::from(arg))))
|
||||
.unwrap_or_default();
|
||||
let app = maybe_prefix(func, arg).expect_unspaced()?;
|
||||
self.finish_ast(app, builder)
|
||||
}
|
||||
tree::Variant::DefaultApp(tree::DefaultApp { func, default }) => {
|
||||
let func = self.translate(func);
|
||||
let arg_builder = self.start_ast();
|
||||
let default = self.visit_token(default);
|
||||
let arg = default.map(|name| self.finish_ast(ast::Var { name }, arg_builder));
|
||||
let app = maybe_prefix(func, arg).expect_unspaced()?;
|
||||
self.finish_ast(app, builder)
|
||||
}
|
||||
tree::Variant::NamedApp(tree::NamedApp { func, open, name, equals, arg, close }) => {
|
||||
let func = self.translate(func);
|
||||
let open = open.as_ref().map(|token| self.visit_token(token));
|
||||
let name = self.visit_token(name);
|
||||
let larg = name.map(|name| Ast::from(ast::Var { name }));
|
||||
let opr = self.translate_operator(equals);
|
||||
let non_block_operand = "Named-application operand cannot be an (empty) block.";
|
||||
let rarg = self.translate(arg).expect(non_block_operand);
|
||||
let mut arg = infix(larg, opr, rarg).map(Ast::from);
|
||||
let close = close.as_ref().map(|token| self.visit_token(token));
|
||||
if let Some(open) = open && let Some(close) = close {
|
||||
arg = open.map(|open| group(open, arg, close));
|
||||
}
|
||||
let app = maybe_prefix(func, arg).expect_unspaced()?;
|
||||
self.finish_ast(app, builder)
|
||||
}
|
||||
tree::Variant::TypeSignature(tree::TypeSignature { variable, operator, type_ }) =>
|
||||
self.opr_app(variable, operator, type_).expect_unspaced(),
|
||||
tree::Variant::TypeAnnotated(tree::TypeAnnotated { expression, operator, type_ }) =>
|
||||
self.opr_app(expression, operator, type_).expect_unspaced(),
|
||||
tree::Variant::AnnotatedBuiltin(tree::AnnotatedBuiltin {
|
||||
token,
|
||||
annotation,
|
||||
newlines,
|
||||
expression,
|
||||
}) => {
|
||||
let at = self.visit_token(token).expect_unspaced();
|
||||
let func = self.visit_token(annotation);
|
||||
let func =
|
||||
func.map(|func| Ast::from(ast::Annotation { name: format!("{at}{func}") }));
|
||||
let arg = expression.as_ref().map(|e| self.translate(e)).unwrap_or_default();
|
||||
if !newlines.is_empty() {
|
||||
error!("Multiline expression must be handled in translate_lines.");
|
||||
}
|
||||
let app = maybe_prefix(func, arg).expect_unspaced()?;
|
||||
self.finish_ast(app, builder)
|
||||
}
|
||||
tree::Variant::Documented(tree::Documented { documentation: _, expression }) => {
|
||||
warn!("Multiline expression should have been handled in translate_lines.");
|
||||
self.translate(expression.as_ref()?).without_space()?
|
||||
}
|
||||
tree::Variant::Import(import) => {
|
||||
let span_info = self.translate_items(tree);
|
||||
let type_info = analyze_import(import).unwrap_or_default();
|
||||
let ast = ast::Tree::expression(span_info).with_type_info(type_info);
|
||||
self.finish_ast(ast, builder)
|
||||
}
|
||||
tree::Variant::TextLiteral(_) => {
|
||||
self.translate_items(tree);
|
||||
let ast = ast::Tree::text(tree.trimmed_code());
|
||||
self.finish_ast(ast, builder)
|
||||
}
|
||||
tree::Variant::Group(_) => {
|
||||
let span_info = self.translate_items(tree);
|
||||
let ast = ast::Tree::group(span_info);
|
||||
self.finish_ast(ast, builder)
|
||||
}
|
||||
_ => {
|
||||
let ast = ast::Tree::expression(self.translate_items(tree));
|
||||
self.finish_ast(ast, builder)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Translate [`Tree`]s to [`Ast`]s in a multi-line context (i.e. when building a block or
|
||||
/// module).
|
||||
fn translate_lines(&mut self, tree: &Tree, out: &mut Vec<WithInitialSpace<Option<Ast>>>) {
|
||||
match &*tree.variant {
|
||||
tree::Variant::AnnotatedBuiltin(tree::AnnotatedBuiltin {
|
||||
token,
|
||||
annotation,
|
||||
newlines,
|
||||
expression,
|
||||
}) if !newlines.is_empty() => {
|
||||
let space = self.visit_space(&tree.span);
|
||||
let at = self.visit_token(token).expect_unspaced();
|
||||
let annotation = self.visit_token(annotation).expect_unspaced();
|
||||
let annotation = Ast::from(ast::Annotation { name: format!("{at}{annotation}") });
|
||||
out.extend_one(WithInitialSpace { space, body: Some(annotation) });
|
||||
self.translate_linebreaks(out, newlines);
|
||||
if let Some(expression) = expression {
|
||||
self.translate_lines(expression, out);
|
||||
}
|
||||
}
|
||||
tree::Variant::Annotated(tree::Annotated {
|
||||
token,
|
||||
annotation,
|
||||
argument,
|
||||
newlines,
|
||||
expression,
|
||||
}) => {
|
||||
let space = self.visit_space(&tree.span);
|
||||
let at = self.visit_token(token).expect_unspaced();
|
||||
let annotation = self.visit_token(annotation).expect_unspaced();
|
||||
let body = Ast::from(ast::Annotation { name: format!("{at}{annotation}") });
|
||||
let mut annotation = WithInitialSpace { space, body };
|
||||
let argument =
|
||||
argument.as_ref().and_then(|arg| ignore_space_if_empty(self.translate(arg)));
|
||||
if let Some(argument) = argument {
|
||||
annotation = prefix(annotation, argument).map(Ast::from);
|
||||
}
|
||||
out.extend_one(annotation.map(Some));
|
||||
self.translate_linebreaks(out, newlines);
|
||||
if let Some(expression) = expression {
|
||||
self.translate_lines(expression, out);
|
||||
}
|
||||
}
|
||||
tree::Variant::Documented(tree::Documented { documentation, expression }) => {
|
||||
let space = self.visit_space(&tree.span);
|
||||
self.translate_doc(space, documentation, out);
|
||||
if let Some(expression) = expression {
|
||||
self.translate_lines(expression, out);
|
||||
}
|
||||
}
|
||||
_ => out.extend_one(self.translate(tree)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Translate a sequence of line breaks and trailing comments to [`Ast`] representation.
|
||||
fn translate_linebreaks(
|
||||
&mut self,
|
||||
out: &mut Vec<WithInitialSpace<Option<Ast>>>,
|
||||
newlines: &[syntax::token::Newline],
|
||||
) {
|
||||
// In the [`Ast`] representation, each block line has one implicit newline.
|
||||
let out_newlines = newlines.len().saturating_sub(1);
|
||||
out.reserve(out_newlines);
|
||||
let mut prev = None;
|
||||
for token in newlines {
|
||||
let next = self.visit_token(token).split();
|
||||
if let Some((space, token)) = prev.replace(next) {
|
||||
if let Some(text) = into_comment(&token) {
|
||||
append_comment(out, space, text);
|
||||
} else {
|
||||
out.push(WithInitialSpace { space, body: None });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Translate a documentation comment to [`Ast`] representation.
|
||||
fn translate_doc(
|
||||
&mut self,
|
||||
space: usize,
|
||||
documentation: &tree::DocComment,
|
||||
out: &mut Vec<WithInitialSpace<Option<Ast>>>,
|
||||
) {
|
||||
let open = self.visit_token(&documentation.open);
|
||||
let mut span_info = SpanSeedBuilder::new();
|
||||
span_info.token(open);
|
||||
for element in &documentation.elements {
|
||||
span_info.token(match element {
|
||||
tree::TextElement::Section { text } => self.visit_token(text),
|
||||
tree::TextElement::Escape { token } => self.visit_token(token),
|
||||
tree::TextElement::Newline { newline } => self.visit_token(newline),
|
||||
tree::TextElement::Splice { .. } => {
|
||||
let error = "Lexer must not emit splices in documentation comments.";
|
||||
debug_assert!(false, "{error}");
|
||||
error!("{error}");
|
||||
continue;
|
||||
}
|
||||
})
|
||||
}
|
||||
let rendered = documentation.content().into();
|
||||
let type_info = ast::TreeType::Documentation { rendered };
|
||||
let span_info = span_info.build().expect_unspaced();
|
||||
let body = Some(Ast::from(ast::Tree::expression(span_info).with_type_info(type_info)));
|
||||
out.extend_one(WithInitialSpace { space, body });
|
||||
self.translate_linebreaks(out, &documentation.newlines);
|
||||
}
|
||||
|
||||
/// Lower a function definition to [`Ast`] representation.
|
||||
fn translate_function(&mut self, function: &tree::Function) -> ast::Shape<Ast> {
|
||||
let tree::Function { name, args, equals, body } = function;
|
||||
let non_empty_name = "A function name cannot be an (empty) block.";
|
||||
let name = self.translate(name).expect(non_empty_name);
|
||||
let mut lhs_terms = vec![name];
|
||||
lhs_terms.extend(args.iter().map(|a| self.translate_argument_definition(a)));
|
||||
let larg = lhs_terms
|
||||
.into_iter()
|
||||
.reduce(|func, arg| prefix(func, arg).map(Ast::from))
|
||||
.expect("`lhs_terms` has at least one value, because it was initialized with a value.");
|
||||
let opr = self.translate_operator(equals);
|
||||
let body = body.as_ref().map(|body| self.translate(body)).unwrap_or_default();
|
||||
infix(larg, opr, body).expect_unspaced()
|
||||
}
|
||||
|
||||
/// Lower a foreign-function definition to [`Ast`] representation.
|
||||
fn translate_foreign_function(&mut self, func: &tree::ForeignFunction) -> ast::Shape<Ast> {
|
||||
let tree::ForeignFunction { foreign, language, name, args, equals, body } = func;
|
||||
let mut lhs_terms: Vec<_> = [foreign, language, name]
|
||||
.into_iter()
|
||||
.map(|ident| self.visit_token(ident).map(|name| Ast::from(ast::Var { name })))
|
||||
.collect();
|
||||
lhs_terms.extend(args.iter().map(|a| self.translate_argument_definition(a)));
|
||||
let lhs = lhs_terms
|
||||
.into_iter()
|
||||
.reduce(|func, arg| prefix(func, arg).map(Ast::from))
|
||||
.expect("`lhs_terms` has at least one value, because it was initialized with values.");
|
||||
let equals = self.translate_operator(equals);
|
||||
let body = self.translate(body);
|
||||
infix(lhs, equals, body).expect_unspaced()
|
||||
}
|
||||
|
||||
/// Construct an operator application from [`Tree`] operands and a specific operator.
|
||||
fn opr_app(
|
||||
&mut self,
|
||||
lhs: &Tree,
|
||||
opr: &syntax::token::Operator,
|
||||
rhs: &Tree,
|
||||
) -> WithInitialSpace<Ast> {
|
||||
let builder = self.start_ast();
|
||||
let lhs = self.translate(lhs);
|
||||
let opr = self.translate_operator(opr);
|
||||
let rhs = self.translate(rhs);
|
||||
infix(lhs, opr, rhs).map(|opr_app| self.finish_ast(opr_app, builder))
|
||||
}
|
||||
|
||||
    /// Translate an operator or multiple-operator error into the [`Ast`] representation.
    fn translate_operators(&mut self, opr: &tree::OperatorOrError) -> WithInitialSpace<Ast> {
        match opr {
            Ok(name) => match name.code.repr.strip_suffix('=') {
                Some(mod_name) if mod_name.contains(|c| c != '=') => {
                    let opr_builder = self.start_ast();
                    let token = self.visit_token(name);
                    token.map(|_| {
                        let name = mod_name.to_string();
                        let opr = ast::Mod { name };
                        self.finish_ast(opr, opr_builder)
                    })
                }
                _ => self.translate_operator(name),
            },
            Err(names) => {
                let opr_builder = self.start_ast();
                let mut span_info = SpanSeedBuilder::new();
                for token in &names.operators {
                    span_info.token(self.visit_token(token));
                }
                let opr = span_info
                    .build()
                    .map(|span_info| ast::Shape::from(ast::Tree::expression(span_info)));
                opr.map(|opr| self.finish_ast(opr, opr_builder))
            }
        }
    }
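The `strip_suffix('=')` test above distinguishes modifier assignments such as `+=` (lowered to `ast::Mod` with the stem name) from operators that merely end in `=`, such as `==` or a bare `=`. A sketch of the predicate and the cases it separates (the name `modifier_stem` is hypothetical, introduced only for illustration):

    // fn modifier_stem(name: &str) -> Option<&str> {
    //     name.strip_suffix('=').filter(|stem| stem.contains(|c| c != '='))
    // }
    //
    // modifier_stem("+=") == Some("+")  // modifier: becomes `ast::Mod { name: "+" }`
    // modifier_stem("==") == None      // ordinary operator
    // modifier_stem("=")  == None      // ordinary operator
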

    /// Translate an operator token into its [`Ast`] representation.
    fn translate_operator(&mut self, token: &syntax::token::Operator) -> WithInitialSpace<Ast> {
        let opr_builder = self.start_ast();
        let right_assoc = token.properties.associativity() == syntax::token::Associativity::Right;
        self.visit_token(token)
            .map(|name| self.finish_ast(ast::Opr { name, right_assoc }, opr_builder))
    }

    /// Translate a [`tree::BodyBlock`] into an [`Ast`] module.
    fn translate_module(&mut self, block: &tree::BodyBlock) -> Ast {
        let (lines, _) =
            self.translate_block_lines(&block.statements).unwrap_or_default().expect_unspaced();
        Ast::new_no_id(ast::Module { lines })
    }

    /// Translate the lines of [`Tree`] block into the [`Ast`] block representation.
    fn translate_block<'a, 's: 'a>(
        &mut self,
        tree_lines: impl IntoIterator<Item = &'a tree::block::Line<'s>>,
    ) -> Option<WithInitialSpace<ast::Block<Ast>>> {
        let (space, (ast_lines, indent)) = self.translate_block_lines(tree_lines)?.split();
        let mut empty_lines = vec![];
        let mut first_line = None;
        let mut lines = vec![];
        for line in ast_lines {
            if first_line.is_none() {
                if let Some(elem) = line.elem {
                    first_line = Some(ast::BlockLine { elem, off: line.off });
                } else {
                    empty_lines.push(line.off);
                }
            } else {
                lines.push(line);
            }
        }
        let first_line = first_line?;
        let body = ast::Block { indent, empty_lines, first_line, lines };
        Some(WithInitialSpace { space, body })
    }

    /// Translate the lines of [`Tree`] block into [`Ast`] block lines.
    fn translate_block_lines<'a, 's: 'a>(
        &mut self,
        tree_lines: impl IntoIterator<Item = &'a tree::block::Line<'s>>,
    ) -> Option<WithInitialSpace<(Vec<ast::BlockLine<Option<Ast>>>, usize)>> {
        let tree_lines = tree_lines.into_iter();
        let mut ast_lines: Vec<ast::BlockLine<Option<Ast>>> =
            Vec::with_capacity(tree_lines.size_hint().0);
        let mut statement_lines = vec![];
        let mut initial_indent = None;
        let mut space = 0;
        for tree::block::Line { newline, expression } in tree_lines {
            // Mapping from [`Tree`]'s leading offsets to [`Ast`]'s trailing offsets:
            // Initially, we create each line with no trailing offset.
            let off = 0;
            // We write each line's leading offset into the trailing offset of the previous line
            // (or, for the first line, the initial offset).
            let (trailing_space, newline) = self.visit_token(newline).split();
            if let Some(prev_line_comment) = into_comment(&newline) {
                append_comment_ast(&mut ast_lines, trailing_space, prev_line_comment);
                continue;
            }
            *ast_lines.last_mut().map(|line| &mut line.off).unwrap_or(&mut space) = trailing_space;
            match &expression {
                Some(statement) => {
                    self.translate_lines(statement, &mut statement_lines);
                    if initial_indent.is_none() && let Some(first) = statement_lines.first() {
                        initial_indent = Some(first.space);
                    }
                    let new_lines = statement_lines
                        .drain(..)
                        .map(|elem| ast::BlockLine { elem: elem.without_space(), off });
                    ast_lines.extend(new_lines);
                }
                None => ast_lines.push(ast::BlockLine { elem: None, off }),
            }
        }
        let body = (ast_lines, initial_indent?);
        Some(WithInitialSpace { space, body })
    }
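The offset mapping performed above can be pictured on a two-line block: each `Tree` line carries its own leading offset, while each `Ast` line stores a trailing offset, so every leading offset is written one slot earlier. A schematic sketch (the line contents are hypothetical):

    // Tree (leading offsets):         Ast (trailing offsets):
    //   newline, indent=4, "foo"        line "foo", off = 4  (indent of the *next* line)
    //   newline, indent=4, "bar"        line "bar", off = 0  (no further lines)
    // The very first leading offset becomes the block's initial `space` instead.
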

    /// Lower an operator block into the [`Ast`] block representation.
    fn translate_operator_block<'a, 's: 'a>(
        &mut self,
        operator_lines: impl IntoIterator<Item = &'a tree::block::OperatorLine<'s>>,
        excess: impl IntoIterator<Item = &'a tree::block::Line<'s>>,
    ) -> WithInitialSpace<Ast> {
        let mut ast_lines: Vec<ast::BlockLine<_>> = vec![];
        let mut indent = None;
        let mut space = 0;
        let off = 0;
        for tree::block::OperatorLine { newline, expression } in operator_lines {
            let (trailing_space, newline) = self.visit_token(newline).split();
            if let Some(prev_line_comment) = into_comment(&newline) {
                append_comment_ast(&mut ast_lines, trailing_space, prev_line_comment);
                continue;
            }
            *ast_lines.last_mut().map(|line| &mut line.off).unwrap_or(&mut space) = trailing_space;
            let elem = expression.as_ref().map(|expression| {
                let opr = self.translate_operators(&expression.operator);
                let non_block_operand = "Expression in operand-line cannot be an (empty) block.";
                let arg = self.translate(&expression.expression).expect(non_block_operand);
                let (space, elem) = section_right(opr, arg).split();
                if indent.is_none() {
                    indent = Some(space);
                }
                Ast::from(elem)
            });
            ast_lines.push(ast::BlockLine { elem, off });
        }
        let non_empty_block = "An operator block must have at least one operator line.";
        let indent = indent.expect(non_empty_block);
        let mut statement_lines = vec![];
        for tree::block::Line { newline, expression } in excess {
            let (trailing_space, newline) = self.visit_token(newline).split();
            if let Some(prev_line_comment) = into_comment(&newline) {
                append_comment_ast(&mut ast_lines, trailing_space, prev_line_comment);
                continue;
            }
            *ast_lines.last_mut().map(|line| &mut line.off).unwrap_or(&mut space) = trailing_space;
            match &expression {
                Some(statement) => {
                    self.translate_lines(statement, &mut statement_lines);
                    let new_lines = statement_lines
                        .drain(..)
                        .map(|elem| ast::BlockLine { elem: elem.without_space(), off });
                    ast_lines.extend(new_lines);
                }
                None => ast_lines.push(ast::BlockLine { elem: None, off }),
            }
        }
        let mut first_line = None;
        let mut empty_lines = vec![];
        let mut lines = vec![];
        for ast::BlockLine { elem, off } in ast_lines {
            match (elem, &mut first_line) {
                (None, None) => empty_lines.push(off),
                (Some(elem), None) => first_line = Some(ast::BlockLine { elem, off }),
                (elem, Some(_)) => lines.push(ast::BlockLine { elem, off }),
            }
        }
        let first_line = first_line.expect(non_empty_block);
        let body = Ast::from(ast::Block { indent, empty_lines, first_line, lines });
        WithInitialSpace { space, body }
    }

    /// Lower an argument definition into the [`Ast`] representation.
    fn translate_argument_definition(
        &mut self,
        arg: &tree::ArgumentDefinition,
    ) -> WithInitialSpace<Ast> {
        let tree::ArgumentDefinition {
            open,
            open2,
            suspension,
            pattern,
            type_,
            close2,
            default,
            close,
        } = arg;
        let open = open.as_ref().map(|token| self.visit_token(token));
        let open2 = open2.as_ref().map(|token| self.visit_token(token));
        let suspension = suspension.as_ref().map(|token| self.translate_operator(token));
        let non_block_operand = "Pattern in argument definition cannot be an (empty) block.";
        let mut term = self.translate(pattern).expect(non_block_operand);
        if let Some(opr) = suspension {
            term = section_right(opr, term).map(Ast::from);
        }
        if let Some(tree::ArgumentType { operator, type_ }) = type_ {
            let opr = self.translate_operator(operator);
            let rarg = self.translate(type_);
            term = infix(term, opr, rarg).map(Ast::from);
        }
        let close2 = close2.as_ref().map(|token| self.visit_token(token));
        if let Some(open) = open2 && let Some(close) = close2 {
            term = open.map(|open| group(open, term, close));
        }
        if let Some(tree::ArgumentDefault { equals, expression }) = default {
            let opr = self.translate_operator(equals);
            let rarg = self.translate(expression);
            term = infix(term, opr, rarg).map(Ast::from);
        }
        let close = close.as_ref().map(|token| self.visit_token(token));
        if let Some(open) = open && let Some(close) = close {
            term = open.map(|open| group(open, term, close));
        }
        term
    }

    /// Analyze a [`Tree`] and produce a representation used by the graph editor.
    fn translate_items(&mut self, tree: &syntax::tree::Tree<'_>) -> Vec<ast::SpanSeed<Ast>> {
        let mut span_info = SpanSeedBuilder::new();
        tree.visit_items(|item| match item {
            syntax::item::Ref::Token(token) => span_info.token(self.visit_token_ref(token)),
            syntax::item::Ref::Tree(tree) => {
                if let Some(ast) = ignore_space_if_empty(self.translate(tree)) {
                    span_info.child(ast);
                }
            }
        });
        span_info.build().expect_unspaced()
    }
}



// === Span-tracking ===

/// Tracks what input bytes are visited during the construction of a particular [`Ast`], and uses
/// that information to assign an ID from the ID map.
struct AstBuilder {
    start: usize,
}

impl Translate {
    /// Marks the beginning of the input byte range that will be included in a particular [`Ast`]
    /// node.
    fn start_ast(&mut self) -> AstBuilder {
        AstBuilder { start: self.offset }
    }

    /// Marks the end of the input byte range that will be included in a particular [`Ast`] node,
    /// and constructs the node from an [`ast::Shape`], using the calculated byte range to assign
    /// an ID.
    fn finish_ast<S: Into<ast::Shape<Ast>>>(&mut self, shape: S, builder: AstBuilder) -> Ast {
        let AstBuilder { start } = builder;
        let start = start + self.space_after.get(&start).copied().unwrap_or_default();
        let end = self.offset;
        let id = self.ids.remove(&(start, end));
        if DEBUG {
            match id {
                Some(id) => self.ids_assigned.push(((start, end), id)),
                None => self.ids_missed.push((start, end)),
            }
        }
        Ast::new(shape, id)
    }
}
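Putting `start_ast`/`finish_ast` together with the `space_after` map: if a node's traversal began at a byte offset that points at leading whitespace, the recorded space is added so the ID lookup key excludes it. A worked sketch with hypothetical numbers:

    // Suppose the source is "foo  bar" and traversal of `bar` started at offset 3,
    // where `space_after[3] == 2` (two space bytes were recorded at that position).
    // `finish_ast` then looks the ID up under (3 + 2, 8) == (5, 8), i.e. exactly
    // the bytes of "bar", never the preceding whitespace.
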


// === Semantic Analysis ===

/// Analyze an import statement to identify the module referenced and the names imported.
// TODO: In place of this analysis (and a similar analysis in Java [`TreeToIr`]),
//   refactor [`tree::Import`] to a higher-level representation resembling
//   [`ast::ImportedNames`] (but with concrete tokens).
fn analyze_import(import: &tree::Import) -> Option<ast::TreeType> {
    let tree::Import { polyglot, from, import, all, as_, hiding } = import;
    fn parse_module(tree: &Tree) -> Option<Vec<ImString>> {
        let mut segments = vec![];
        for tree in tree.left_assoc_rev(".") {
            match &*tree.variant {
                tree::Variant::Ident(tree::Ident { token }) =>
                    segments.push(token.code.to_string().into()),
                _ => return None,
            }
        }
        segments.reverse();
        Some(segments)
    }
    fn parse_ident(tree: &Tree) -> Option<String> {
        match &*tree.variant {
            tree::Variant::Ident(tree::Ident { token }) => Some(token.code.to_string()),
            _ => None,
        }
    }
    fn parse_idents(tree: &Tree) -> Option<std::collections::BTreeSet<String>> {
        let mut names = std::collections::BTreeSet::new();
        for tree in tree.left_assoc_rev(",") {
            match &*tree.variant {
                tree::Variant::Ident(tree::Ident { token }) => {
                    names.insert(token.code.to_string());
                }
                _ => return None,
            }
        }
        Some(names)
    }
    let module;
    let imported;
    match (polyglot, from, all, as_, hiding) {
        (None, None, None, None, None) => {
            module = import.body.as_ref().and_then(parse_module)?;
            imported = ast::ImportedNames::Module { alias: None };
        }
        (None, None, None, Some(as_), None) => {
            module = import.body.as_ref().and_then(parse_module)?;
            let alias = as_.body.as_ref().and_then(parse_ident);
            imported = ast::ImportedNames::Module { alias };
        }
        (None, Some(from), None, None, None) => {
            module = from.body.as_ref().and_then(parse_module)?;
            let names = import.body.as_ref().and_then(parse_idents)?;
            imported = ast::ImportedNames::List { names };
        }
        (None, Some(from), Some(_), None, None) => {
            module = from.body.as_ref().and_then(parse_module)?;
            imported = ast::ImportedNames::All { except: Default::default() };
        }
        (None, Some(from), Some(_), None, Some(hiding)) => {
            module = from.body.as_ref().and_then(parse_module)?;
            let except = hiding.body.as_ref().and_then(parse_idents)?;
            imported = ast::ImportedNames::All { except };
        }
        _ => return None,
    }
    Some(ast::TreeType::Import { module, imported })
}
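For reference, the match above maps the surface import forms onto `ast::ImportedNames` as follows (the module paths are hypothetical examples):

    // import Foo.Bar                   -> Module { alias: None }
    // import Foo.Bar as Baz            -> Module { alias: Some("Baz") }
    // from Foo.Bar import Baz, Quux    -> List { names: {"Baz", "Quux"} }
    // from Foo.Bar import all          -> All { except: {} }
    // from Foo.Bar import all hiding X -> All { except: {"X"} }
    // Anything else (e.g. polyglot imports) falls through and yields `None`.
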

/// Distinguish a plain newline from a trailing comment.
fn into_comment(mut newline: &str) -> Option<String> {
    if let Some(text) = newline.strip_suffix('\n') {
        newline = text;
    }
    if let Some(text) = newline.strip_suffix('\r') {
        newline = text;
    }
    (!newline.is_empty()).then(|| newline.to_string())
}
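In other words, whatever text precedes the line terminator in a newline token is treated as a trailing comment. A few illustrative cases as a sketch:

#[test]
fn into_comment_sketch() {
    assert_eq!(into_comment("\n"), None);   // plain LF newline: no comment
    assert_eq!(into_comment("\r\n"), None); // CRLF newline: no comment
    // Text before the terminator is the comment, terminator stripped.
    assert_eq!(into_comment("# note\n"), Some("# note".to_string()));
}
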


// === WithInitialSpace ===

/// Helper for propagating spacing from children (Tree-style left offsets) to parents (Ast-style
/// top-down spacing).
#[derive(Debug, Default)]
struct WithInitialSpace<T> {
    space: usize,
    body:  T,
}

impl<T: Debug> WithInitialSpace<T> {
    /// If any initial space is present, emit a warning; forget the space and return the value.
    fn expect_unspaced(self) -> T {
        if DEBUG {
            debug_assert_eq!(self.space, 0, "Expected no space before term: {:?}", &self.body);
        } else if self.space != 0 {
            warn!("Expected no space before term: {:?}", &self.body);
        }
        self.body
    }
}

impl<T> WithInitialSpace<T> {
    /// Return the value, ignoring any initial space.
    fn without_space(self) -> T {
        self.body
    }

    /// Return the initial space and the value.
    fn split(self) -> (usize, T) {
        (self.space, self.body)
    }

    fn map<U>(self, f: impl FnOnce(T) -> U) -> WithInitialSpace<U> {
        let WithInitialSpace { space, body } = self;
        let body = f(body);
        WithInitialSpace { space, body }
    }
}

impl<T> WithInitialSpace<Option<T>> {
    /// Convenience function that applies [`Option::expect`] to the inner value.
    fn expect(self, message: &str) -> WithInitialSpace<T> {
        let WithInitialSpace { space, body } = self;
        WithInitialSpace { space, body: body.expect(message) }
    }
}

/// Convenience function that transposes an optional value that always has an initial space count
/// to an optional value that, if present, has an initial space count. Note that this is a lossy
/// operation.
fn ignore_space_if_empty<T>(spaced: WithInitialSpace<Option<T>>) -> Option<WithInitialSpace<T>> {
    spaced.body.map(|body| WithInitialSpace { space: spaced.space, body })
}
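The accessors differ only in how they treat the space: `split` preserves it, while `without_space` and `expect_unspaced` discard it (the latter warning if it was nonzero). A small sketch of the intended usage:

#[test]
fn with_initial_space_sketch() {
    let spaced = WithInitialSpace { space: 2, body: "foo" };
    // `split` hands back both the space count and the value.
    let (space, body) = spaced.split();
    assert_eq!((space, body), (2, "foo"));
    // `map` transforms the value while carrying the space along unchanged.
    let upper = WithInitialSpace { space, body }.map(str::to_uppercase);
    assert_eq!(upper.without_space(), "FOO");
}
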


// === Shape-building helpers ===

/// Construct an [`ast::Prefix`], representing the initial spaces of the inputs appropriately.
fn prefix(
    func: WithInitialSpace<Ast>,
    arg: WithInitialSpace<Ast>,
) -> WithInitialSpace<ast::Shape<Ast>> {
    func.map(|func| {
        let (off, arg) = arg.split();
        (ast::Prefix { func, off, arg }).into()
    })
}

/// Construct an [`ast::Prefix`] if both operands are present; otherwise, returns a single operand
/// if present.
fn maybe_prefix<T: Into<Option<Ast>>, U: Into<Option<Ast>>>(
    func: WithInitialSpace<T>,
    arg: WithInitialSpace<U>,
) -> WithInitialSpace<Option<ast::Shape<Ast>>> {
    func.map(|func| {
        let arg = ignore_space_if_empty(arg.map(|arg| arg.into()));
        match (func.into(), arg) {
            (Some(func), Some(arg)) => {
                let (off, arg) = arg.split();
                Some((ast::Prefix { func, off, arg }).into())
            }
            (Some(func), None) => Some(func.shape().clone()),
            (None, Some(arg)) => Some(arg.expect_unspaced().shape().clone()),
            (None, None) => None,
        }
    })
}

/// Constructs an operator section for an operator with only a right operand.
fn section_right(
    opr: WithInitialSpace<Ast>,
    arg: WithInitialSpace<Ast>,
) -> WithInitialSpace<ast::Shape<Ast>> {
    opr.map(|opr| {
        let (off, arg) = arg.split();
        (ast::SectionRight { opr, off, arg }).into()
    })
}

/// Constructs an infix-application node. If any of the inputs are [`Option`] types, then an
/// operator section will be produced if appropriate.
fn infix<T: Into<Option<Ast>>, U: Into<Option<Ast>>>(
    larg: WithInitialSpace<T>,
    opr: WithInitialSpace<Ast>,
    rarg: WithInitialSpace<U>,
) -> WithInitialSpace<ast::Shape<Ast>> {
    larg.map(|larg| {
        let (opr_off, opr) = opr.split();
        let rarg = ignore_space_if_empty(rarg.map(|arg| arg.into()));
        match (larg.into(), rarg) {
            (Some(larg), Some(rarg)) => {
                let (roff, rarg) = rarg.split();
                (ast::Infix { larg, loff: opr_off, opr, roff, rarg }).into()
            }
            (Some(arg), None) => (ast::SectionLeft { arg, off: opr_off, opr }).into(),
            (None, Some(arg)) => {
                let (off, arg) = arg.split();
                (ast::SectionRight { opr, off, arg }).into()
            }
            (None, None) => (ast::SectionSides { opr }).into(),
        }
    })
}
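The four arms of `infix` reproduce the old AST's operator-section taxonomy; which shape is produced depends only on which operands are present:

    // a + b -> Infix         (both operands)
    // a +   -> SectionLeft   (left operand only)
    //   + b -> SectionRight  (right operand only)
    //   +   -> SectionSides  (no operands)
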

/// Wrap an input in a parenthesized-group node.
fn group(open: String, body: WithInitialSpace<Ast>, close: WithInitialSpace<String>) -> Ast {
    let (body_space, body) = body.split();
    let (close_space, close) = close.split();
    let min_elements = 3; // There are always at least 3 elements: open, close, and body.
    let mut span_info = Vec::with_capacity(min_elements);
    span_info.push(ast::SpanSeed::Token(ast::SpanSeedToken { token: open }));
    span_info.extend(ast::SpanSeed::space(body_space));
    span_info.push(ast::SpanSeed::Child(ast::SpanSeedChild { node: body }));
    span_info.extend(ast::SpanSeed::space(close_space));
    span_info.push(ast::SpanSeed::Token(ast::SpanSeedToken { token: close }));
    Ast::from(ast::Tree::expression(span_info))
}

/// Append a trailing comment to the last line of the given block, creating a line to hold it if
/// the block is empty.
fn append_comment(lines: &mut Vec<WithInitialSpace<Option<Ast>>>, space: usize, comment: String) {
    let prev = lines.pop();
    let space_after_expression = match prev.as_ref().and_then(|spaced| spaced.body.as_ref()) {
        Some(_) => space,
        None => 0,
    };
    let prev = prev.unwrap_or(WithInitialSpace { space, body: None });
    let line = prev.map(|prev| {
        Some(Ast::from(ast::Tree::expression_with_comment(prev, space_after_expression, comment)))
    });
    lines.push(line);
}

/// Append a trailing comment to the last line of the given block, creating a line to hold it if
/// the block is empty.
fn append_comment_ast(lines: &mut Vec<ast::BlockLine<Option<Ast>>>, space: usize, comment: String) {
    let prev = lines.pop();
    let off = prev.as_ref().map(|line| line.off).unwrap_or_default();
    let prev = prev.and_then(|line| line.elem);
    // If there's no expression before the comment, the space to the left of the comment
    // is already represented as the indent.
    let trailing_space = match prev {
        Some(_) => space,
        None => 0,
    };
    let elem = ast::Tree::expression_with_comment(prev, trailing_space, comment);
    lines.push(ast::BlockLine { elem: Some(Ast::from(elem)), off });
}



// === SpanSeedBuilder ===

/// Constructs a sequence of [`ast::SpanSeed`] values.
#[derive(Debug, Default)]
pub struct SpanSeedBuilder {
    space: Option<usize>,
    spans: Vec<ast::SpanSeed<Ast>>,
}

impl SpanSeedBuilder {
    fn new() -> Self {
        Self::default()
    }

    /// Append a token.
    fn token(&mut self, value: WithInitialSpace<String>) {
        let (space, value) = value.split();
        if self.space.is_none() {
            self.space = Some(space);
        } else {
            self.spans.extend(ast::SpanSeed::space(space));
        }
        self.spans.push(ast::SpanSeed::Token(ast::SpanSeedToken { token: value }));
    }

    /// Append a node.
    fn child(&mut self, node: WithInitialSpace<Ast>) {
        let (space, node) = node.split();
        if self.space.is_none() {
            self.space = Some(space);
        } else {
            self.spans.extend(ast::SpanSeed::space(space));
        }
        self.spans.push(ast::SpanSeed::Child(ast::SpanSeedChild { node }));
    }

    /// Construct the sequence.
    fn build(self) -> WithInitialSpace<Vec<ast::SpanSeed<Ast>>> {
        let space = self.space.unwrap_or_default();
        let body = self.spans;
        WithInitialSpace { space, body }
    }
}
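A usage sketch for the builder: the first item's leading space is lifted out as the initial space of the whole sequence, while later items' spaces become space seeds between entries. This assumes `ast::SpanSeed::space(0)` yields nothing, consistent with how it is used above.

#[test]
fn span_seed_builder_sketch() {
    let mut builder = SpanSeedBuilder::new();
    builder.token(WithInitialSpace { space: 1, body: "(".to_string() });
    builder.token(WithInitialSpace { space: 0, body: ")".to_string() });
    let (space, seeds) = builder.build().split();
    assert_eq!(space, 1);       // Lifted out of the first token.
    assert_eq!(seeds.len(), 2); // Two tokens, no space seed between them.
}
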
@ -1,124 +0,0 @@
//! Tests specific to Ast rather than parser itself but placed here because they depend on parser
//! to easily generate test input.

// TODO: [mwu]
//  That means that likely either `parser` should be merged with `ast` or that we should have a
//  separate `ast_ops` crate that depends on both. For now it is better to have tests here than
//  none, but a decision should be made as to which way we want to go.

// === Non-Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]

use parser_scala::prelude::*;

use ast::opr;
use ast::opr::GeneralizedInfix;
use ast::prefix;
use ast::test_utils::expect_single_line;
use ast::HasRepr;
use wasm_bindgen_test::wasm_bindgen_test;



wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);

#[wasm_bindgen_test]
pub fn to_assignment_test() {
    let parser = parser_scala::Parser::new_or_panic();
    let is_assignment = |code: &str| {
        let ast = parser.parse(code.to_string(), default()).unwrap();
        let line = expect_single_line(&ast);
        ast::opr::to_assignment(line).is_some()
    };

    let expected_assignments = vec!["a = 5", "a=5", "foo bar = a b c", "(x,y) = pos"];
    let expected_not_assignments = vec!["= 5", "a=", "=", "foo", "a->b", "a+b"];

    for code in expected_assignments {
        assert!(is_assignment(code), "{code} expected to be recognized as assignment");
    }
    for code in expected_not_assignments {
        assert!(!is_assignment(code), "{code} expected to not be recognized as assignment");
    }
}

#[wasm_bindgen_test]
pub fn generalized_infix_test() {
    let parser = parser_scala::Parser::new_or_panic();
    let make_gen_infix = |code: &str| {
        let ast = parser.parse(code.to_string(), default()).unwrap();
        let line = expect_single_line(&ast);
        GeneralizedInfix::try_new(line)
    };

    let infix = make_gen_infix("a+b").unwrap();
    assert_eq!(infix.name(), "+");
    assert_eq!(infix.left.map(|op| op.arg).repr(), "a");
    assert_eq!(infix.right.map(|op| op.arg).repr(), "b");

    let right = make_gen_infix("+b").unwrap();
    assert_eq!(right.name(), "+");
    assert_eq!(right.right.map(|op| op.arg).repr(), "b");

    let left = make_gen_infix("a+").unwrap();
    assert_eq!(left.name(), "+");
    assert_eq!(left.left.map(|op| op.arg).repr(), "a");

    let sides = make_gen_infix("+").unwrap();
    assert_eq!(sides.name(), "+");

    let var_as_infix = make_gen_infix("a");
    assert!(var_as_infix.is_none());
}

#[wasm_bindgen_test]
pub fn flatten_prefix_test() {
    fn expect_pieces(flattened: &prefix::Chain, pieces: Vec<&str>) {
        let mut piece_itr = pieces.iter();
        assert_eq!(flattened.args.len() + 1, pieces.len()); // +1 because the `func` piece is a separate field
        assert_eq!(&flattened.func.repr(), piece_itr.next().unwrap());
        flattened.args.iter().zip(piece_itr).for_each(|(lhs, rhs)| {
            assert_eq!(&lhs.repr(), rhs);
        })
    }

    let parser = parser_scala::Parser::new_or_panic();
    let case = |code: &str, expected_pieces: Vec<&str>| {
        let ast = parser.parse(code.into(), default()).unwrap();
        let ast = ast::test_utils::expect_single_line(&ast);
        let flattened = prefix::Chain::from_ast_non_strict(ast);
        expect_pieces(&flattened, expected_pieces);
        assert_eq!(flattened.repr(), code);
    };

    case("a", vec!["a"]);
    case("a b c d", vec!["a", " b", " c", " d"]);
    case("+ a b c", vec!["+", " a", " b", " c"]);
    case("a b + c d", vec!["a b + c d"]); // nothing to flatten, this is infix, not prefix
}

#[wasm_bindgen_test]
pub fn flatten_infix_test() {
    fn expect_pieces(flattened: &opr::Chain, target: &str, pieces: Vec<&str>) {
        assert_eq!(flattened.target.as_ref().map(|a| &a.arg).repr(), target);

        let piece_itr = pieces.iter();
        assert_eq!(flattened.args.len(), pieces.len());
        flattened.args.iter().zip(piece_itr).for_each(|(lhs, rhs)| {
            assert_eq!(lhs.operand.as_ref().map(|a| &a.arg).repr(), *rhs);
        })
    }

    let parser = parser_scala::Parser::new_or_panic();
    let case = |code: &str, target: &str, expected_pieces: Vec<&str>| {
        let ast = parser.parse(code.into(), default()).unwrap();
        let ast = ast::test_utils::expect_single_line(&ast);
        let flattened = opr::Chain::try_new(ast).unwrap();
        expect_pieces(&flattened, target, expected_pieces);
    };

    case("a+b+c", "a", vec!["b", "c"]);
    case("a,b,c", "c", vec!["b", "a"]);
    case("a+b*c+d", "a", vec!["b*c", "d"]);
}
@ -1,41 +0,0 @@
//! Tests for cases where the parser currently fails. They are ignored and should be removed, or
//! placed elsewhere, as the parser gets fixed.

// === Non-Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]

use wasm_bindgen_test::wasm_bindgen_test;
use wasm_bindgen_test::wasm_bindgen_test_configure;



wasm_bindgen_test_configure!(run_in_browser);

#[wasm_bindgen_test]
fn no_doc_found() {
    let input = String::from("type Foo\n type Bar");
    let program = std::env::args().nth(1).unwrap_or(input);
    let parser = parser_scala::DocParser::new_or_panic();
    let gen_code = parser.generate_html_docs(program).unwrap();
    // gen_code should be empty.
    assert_eq!(gen_code.len(), 22, "Generated length differs from the expected\"{gen_code}\"");
}

#[wasm_bindgen_test]
fn extension_operator_methods() {
    let ast = parser_scala::Parser::new_or_panic().parse_line_ast("Int.+").unwrap();

    use ast::*;
    if let Shape::Infix(Infix { larg: _larg, loff: _loff, opr, roff: _roff, rarg }, ..) =
        ast.shape()
    {
        if let Shape::Opr(Opr { .. }) = opr.shape() {
            // TODO: should be Opr(+). https://github.com/enso-org/enso/issues/565
            if let Shape::Var(Var { .. }) = rarg.shape() {
                return;
            }
        }
    }
    panic!("Should have matched into return.");
}
@ -1,33 +0,0 @@
// === Non-Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]

use enso_prelude::*;

use ast::crumbs::Crumbable;
use ast::HasRepr;
use parser_scala::Parser;
use wasm_bindgen_test::wasm_bindgen_test;
use wasm_bindgen_test::wasm_bindgen_test_configure;



wasm_bindgen_test_configure!(run_in_browser);



#[wasm_bindgen_test]
fn macro_crumb_test() {
    let ast = Parser::new_or_panic().parse_line_ast("foo -> bar").unwrap();
    let crumbs = ast.iter_subcrumbs().collect_vec();

    assert_eq!(ast.get(&crumbs[0]).unwrap().repr(), "foo");
    assert_eq!(ast.get(&crumbs[1]).unwrap().repr(), "->");
    assert_eq!(ast.get(&crumbs[2]).unwrap().repr(), "bar");

    let ast = Parser::new_or_panic().parse_line_ast("( foo bar )").unwrap();
    let crumbs = ast.iter_subcrumbs().collect_vec();

    assert_eq!(ast.get(&crumbs[0]).unwrap().repr(), "(");
    assert_eq!(ast.get(&crumbs[1]).unwrap().repr(), "foo bar");
    assert_eq!(ast.get(&crumbs[2]).unwrap().repr(), ")");
}
@ -1,33 +0,0 @@
// === Non-Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]

use parser_scala::DocParser;
use wasm_bindgen_test::wasm_bindgen_test;
use wasm_bindgen_test::wasm_bindgen_test_configure;



wasm_bindgen_test_configure!(run_in_browser);

#[wasm_bindgen_test]
fn doc_gen_test() {
    // Case of pure documentation code.
    let input = String::from("Foo *Bar* Baz");
    let program = std::env::args().nth(1).unwrap_or(input);
    let parser = DocParser::new_or_panic();
    let gen_code = parser.generate_html_doc_pure(program).unwrap();
    assert_ne!(gen_code.len(), 0);

    let input = String::from("##\n foo\ntype Foo\n");
    let program = std::env::args().nth(1).unwrap_or(input);
    let parser = DocParser::new_or_panic();
    let gen_code = parser.generate_html_docs(program).unwrap();
    assert_ne!(gen_code.len(), 0);

    let input = String::from("##\n DEPRECATED\n Foo bar baz\ntype Foo\n type Bar");
    let program = std::env::args().nth(1).unwrap_or(input);
    let parser = DocParser::new_or_panic();
    let gen_code = parser.generate_html_docs(program).unwrap();
    assert_ne!(gen_code.len(), 0);
}
@ -1,35 +0,0 @@
// === Non-Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]

use parser_scala::prelude::*;

use ast::HasIdMap;
use parser_scala::Parser;
use wasm_bindgen_test::wasm_bindgen_test;
use wasm_bindgen_test::wasm_bindgen_test_configure;



wasm_bindgen_test_configure!(run_in_browser);

#[wasm_bindgen_test]
fn id_map_round_tripping() {
    let cases = [
        "main =\n 2 + 2",
        "main = \n \n 2 + 2\n foo = bar \n baz",
        "main = \n foo\n\n bar",
        "main = \n foo\n \n bar",
        "main = \n foo\n \n bar",
        "main = \n foo\n baz \n bar",
    ];

    let parser = Parser::new().unwrap();
    for case in cases.iter().copied() {
        let id_map = default();
        let ast1 = parser.parse_module(case, id_map).unwrap();
        let id_map = ast1.id_map();
        let ast2 = parser.parse_module(case, id_map).unwrap();
        assert_eq!(ast1, ast2)
    }
}
@ -1,82 +0,0 @@
|
||||
// === Non-Standard Linter Configuration ===
|
||||
#![deny(non_ascii_idents)]
|
||||
#![warn(unsafe_code)]
|
||||
|
||||
use parser_scala::prelude::*;
|
||||
|
||||
use parser_scala::Parser;
|
||||
use wasm_bindgen_test::wasm_bindgen_test;
|
||||
use wasm_bindgen_test::wasm_bindgen_test_configure;
|
||||
|
||||
|
||||
|
||||
wasm_bindgen_test_configure!(run_in_browser);
|
||||
|
||||
#[wasm_bindgen_test]
|
||||
fn import_utilities() {
|
||||
use ast::macros::ast_as_import_match;
|
||||
use ast::macros::is_ast_import;
|
||||
use ast::macros::is_match_import;
|
||||
|
||||
let parser = Parser::new_or_panic();
|
||||
let expect_import = |code: &str| {
|
||||
let ast = parser.parse_line_ast(code).unwrap();
|
||||
assert!(is_ast_import(&ast), "Not Ast import: {ast:?}");
|
||||
let ast_match = ast_as_import_match(&ast).unwrap();
|
||||
assert_eq!(&ast, ast_match.ast());
|
||||
assert!(is_match_import(&ast_match));
|
||||
};
|
||||
|
||||
let expect_not_import = |code: &str| {
|
||||
let ast = parser.parse_line_ast(code).unwrap();
|
||||
assert!(!is_ast_import(&ast));
|
||||
assert!(ast_as_import_match(&ast).is_none());
|
||||
};
|
||||
|
||||
expect_import("import");
|
||||
expect_import("import Foo");
|
||||
expect_import("import foo.Foo.Bar");
|
||||
expect_import("import foo.Foo.Bar");
|
||||
expect_import("import Foo.Bar");
|
||||
expect_import("import Foo.Bar.Baz");
|
||||
expect_import("from Foo import Bar");
|
||||
expect_import("from foo.Foo import all hiding Bar");
|
||||
expect_import("from Base.Data.List import all hiding Cons, Nil");
|
||||
|
||||
expect_not_import("type Foo");
|
||||
expect_not_import("type Foo as Bar");
|
||||
expect_not_import("if Foo then Bar else Baz");
|
||||
expect_not_import("Foo.Bar.Baz");
|
||||
expect_not_import("->");
|
||||
expect_not_import("export");
|
||||
expect_not_import("export Foo");
|
||||
expect_not_import("from Foo export all hiding Bar");
|
||||
}
|
||||
|
||||
#[wasm_bindgen_test]
|
||||
fn recognizing_lambdas() {
|
||||
let parser = Parser::new_or_panic();
|
||||
|
||||
let expect_lambda = |code: &str, arg: &str, body: &str| {
|
||||
let ast = parser.parse_line_ast(code).unwrap();
|
||||
let lambda = ast::macros::as_lambda(&ast).expect("failed to recognize lambda");
|
||||
assert_eq!(lambda.arg.repr(), arg);
|
||||
assert_eq!(lambda.body.repr(), body);
|
||||
assert_eq!(*lambda.arg, ast.get_traversing(&lambda.arg.crumbs).unwrap());
|
||||
assert_eq!(*lambda.body, ast.get_traversing(&lambda.body.crumbs).unwrap());
|
||||
};
|
||||
let expect_not_lambda = |code: &str| {
|
||||
let ast = parser.parse_line_ast(code).unwrap();
|
||||
assert!(ast::macros::as_lambda_match(&ast).is_none(), "wrongly recognized a lambda");
|
||||
};
|
||||
|
||||
expect_lambda("a->b", "a", "b");
|
||||
expect_lambda("foo->4+(4)", "foo", "4+(4)");
|
||||
expect_lambda("a->b->c", "a", "b->c");
|
||||
expect_lambda("(a->b)->c", "(a->b)", "c");
|
||||
|
||||
expect_not_lambda("(a->b)");
|
||||
expect_not_lambda("a+b");
|
||||
expect_not_lambda("'a+b'");
|
||||
expect_not_lambda("497");
|
||||
}
|
@ -1,550 +0,0 @@
|
||||
// === Features ===
|
||||
#![feature(generators, generator_trait)]
|
||||
// === Non-Standard Linter Configuration ===
|
||||
#![deny(non_ascii_idents)]
|
||||
#![warn(unsafe_code)]
|
||||
|
||||
use ast::*;
|
||||
use parser_scala::prelude::*;
|
||||
|
||||
use ast::test_utils::expect_shape;
|
||||
use parser_scala::api::Metadata;
|
||||
use parser_scala::api::ParsedSourceFile;
|
||||
use parser_scala::api::PruneUnusedIds;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use wasm_bindgen_test::wasm_bindgen_test;
|
||||
use wasm_bindgen_test::wasm_bindgen_test_configure;
|
||||
|
||||
|
||||
|
||||
wasm_bindgen_test_configure!(run_in_browser);
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === Helpers ===
|
||||
// ===============
|
||||
|
||||
/// Asserts that given AST is a Var with given name.
|
||||
fn assert_var<StringLike: Into<String>>(ast: &Ast, name: StringLike) {
|
||||
let actual: &Var = expect_shape(ast);
|
||||
let expected = Var { name: name.into() };
|
||||
assert_eq!(*actual, expected);
|
||||
}
|
||||
|
||||
/// Asserts that given AST is an Opr with given name.
|
||||
fn assert_opr<StringLike: Into<String>>(ast: &Ast, name: StringLike) {
|
||||
let actual: &Opr = expect_shape(ast);
|
||||
let expected = Opr { name: name.into() };
|
||||
assert_eq!(*actual, expected);
|
||||
}
|
||||
|
||||
fn roundtrip_program_with(parser: &parser_scala::Parser, program: &str) {
|
||||
let ast = parser.parse(program.to_string(), Default::default()).unwrap();
|
||||
assert_eq!(ast.repr(), program, "{ast:#?}");
|
||||
}
|
||||
|
||||
fn roundtrip_program(program: &str) {
|
||||
let parser = parser_scala::Parser::new_or_panic();
|
||||
roundtrip_program_with(&parser, program);
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ================
|
||||
// === Metadata ===
|
||||
// ================
|
||||
|
||||
/// Wrapper for using any serializable type as metadata.
|
||||
#[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)]
|
||||
struct FauxMetadata<T>(T);
|
||||
|
||||
impl<T> PruneUnusedIds for FauxMetadata<T> {}
|
||||
impl<T: Default + Serialize + DeserializeOwned> Metadata for FauxMetadata<T> {}
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === Fixture ===
|
||||
// ===============
|
||||
|
||||
/// Persists parser (which is expensive to construct, so we want to reuse it
|
||||
/// between tests. Additionally, hosts a number of helper methods.
|
||||
struct Fixture {
|
||||
parser: parser_scala::Parser,
|
||||
}
|
||||
|
||||
impl Fixture {
|
||||
// === Helper methods ===
|
||||
|
||||
/// Create a new fixture, obtaining a default parser.
|
||||
fn new() -> Fixture {
|
||||
Fixture { parser: parser_scala::Parser::new_or_panic() }
|
||||
}
|
||||
|
||||
/// Program is expected to be single line module. The line's Shape subtype
|
||||
/// is obtained and passed to `tester`.
|
||||
fn test_shape<T, F>(&mut self, program: &str, tester: F)
|
||||
where
|
||||
for<'t> &'t Shape<Ast>: TryInto<&'t T>,
|
||||
F: FnOnce(&T), {
|
||||
let ast = self.parser.parse_line_ast(program).unwrap();
|
||||
let shape = expect_shape(&ast);
|
||||
tester(shape);
|
||||
}


// === Test Methods ===

fn blank_line_round_trip(&mut self) {
let program = "main = \n foo\n \n bar";
let ast = self.parser.parse_module(program, default()).unwrap();
assert_eq!(ast.repr(), program);
}

fn deserialize_metadata(&mut self) {
let term = ast::Module { lines: vec![ast::BlockLine { elem: None, off: 0 }] };
let ast = known::KnownAst::new_no_id(term);
let file = ParsedSourceFile { ast, metadata: serde_json::json!({}) };
let code = String::try_from(&file).unwrap();
assert_eq!(self.parser.parse_with_metadata(code).unwrap(), file);
}

fn deserialize_unrecognized(&mut self) {
let unfinished = "`";
self.test_shape(unfinished, |shape: &Unrecognized| {
assert_eq!(shape.str, "`");
});
}

#[allow(dead_code)] // TODO [mwu] https://github.com/enso-org/enso/issues/1016
fn deserialize_unexpected(&mut self) {
let unexpected = "import";
let ast = self.parser.parse_line_ast(unexpected).unwrap();
// This does not deserialize to "Unexpected" but to a very complex macro match tree that has
// Unexpected somewhere within. We just make sure that it is somewhere, and that confirms
// that we are able to deserialize such a node.
let has_unexpected =
ast.iter_recursive().find(|ast| matches!(ast.shape(), Shape::Unexpected(_)));
assert!(has_unexpected.is_some());
}

fn deserialize_invalid_quote(&mut self) {
let unfinished = "'a''";
self.test_shape(unfinished, |shape: &Prefix<Ast>| {
// Ignore shape.func; being TextUnclosed, it is tested elsewhere.
let arg: &InvalidQuote = expect_shape(&shape.arg);
let expected_quote = Text { str: "''".into() };
assert_eq!(arg.quote, expected_quote.into());
});
}

fn deserialize_inline_block(&mut self) {
let unfinished = "'''a";
self.test_shape(unfinished, |shape: &Prefix<Ast>| {
let func: &InlineBlock = expect_shape(&shape.func);
let expected_quote = Text { str: "'''".into() };
assert_eq!(func.quote, expected_quote.into());
assert_var(&shape.arg, "a");
});
}

fn deserialize_blank(&mut self) {
let expect_blank = |_: &Blank| {};
self.test_shape("_", expect_blank);
}

fn deserialize_var(&mut self) {
self.test_shape("foo", |var: &Var| {
let expected_var = Var { name: "foo".into() };
assert_eq!(var, &expected_var);
});
}

fn deserialize_cons(&mut self) {
let name = "FooBar";
self.test_shape(name, |shape: &Cons| {
assert_eq!(shape.name, name);
});
}

fn deserialize_mod(&mut self) {
self.test_shape("+=", |shape: &Mod| {
assert_eq!(shape.name, "+");
});
}

fn deserialize_invalid_suffix(&mut self) {
self.test_shape("foo'bar", |shape: &InvalidSuffix<Ast>| {
assert_var(&shape.elem, "foo'");
assert_eq!(shape.suffix, "bar");
});
}

fn deserialize_number(&mut self) {
self.test_shape("127", |shape: &Number| {
assert_eq!(shape.base, None);
assert_eq!(shape.int, "127");
});

self.test_shape("16_ff", |shape: &Number| {
assert_eq!(shape.base.as_ref().unwrap(), "16");
assert_eq!(shape.int, "ff");
});
}
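// Note: `16_ff` exercises the base-prefixed number literal syntax; as the
// assertions above show, the part before `_` is parsed as the base and the
// part after it as the digits.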

fn deserialize_text_line_raw(&mut self) {
self.test_shape("\"foo\"", |shape: &TextLineRaw| {
let (segment,) = (&shape.text).expect_tuple();
let expected = SegmentPlain { value: "foo".to_string() };
assert_eq!(*segment, expected.into());
});

let tricky_raw = r#""\\\'\n""#;
self.test_shape(tricky_raw, |shape: &TextLineRaw| {
let segments: (_,) = (&shape.text).expect_tuple();
assert_eq!(*segments.0, SegmentPlain { value: r"\\\'\n".to_string() }.into());
});
}

fn test_text_fmt_segment<F>(&mut self, program: &str, tester: F)
where F: FnOnce(&SegmentFmt<Ast>) {
self.test_shape(program, |shape: &TextLineFmt<Ast>| {
let (segment,) = (&shape.text).expect_tuple();
tester(segment)
});
}

fn deserialize_text_line_fmt(&mut self) {
use SegmentFmt::SegmentExpr;

// plain segment
self.test_shape("'foo'", |shape: &TextLineFmt<Ast>| {
let (segment,) = (&shape.text).expect_tuple();
let expected = SegmentPlain { value: "foo".into() };
assert_eq!(*segment, expected.into());
});

// escapes
let tricky_fmt = r#"'\\\'\"'"#;
self.test_shape(tricky_fmt, |shape: &TextLineFmt<Ast>| {
let segments: (_, _, _) = (&shape.text).expect_tuple();
assert_eq!(*segments.0, Slash {}.into());
assert_eq!(*segments.1, Quote {}.into());
assert_eq!(*segments.2, Invalid { str: '"' }.into());
});

// expression empty
let expr_fmt = r#"'``'"#;
self.test_text_fmt_segment(expr_fmt, |segment| match segment {
SegmentExpr(expr) => assert_eq!(expr.value, None),
_ => panic!("wrong segment type received"),
});

// expression non-empty
let expr_fmt = r#"'`foo`'"#;
self.test_text_fmt_segment(expr_fmt, |segment| match segment {
SegmentExpr(expr) => assert_var(expr.value.as_ref().unwrap(), "foo"),
_ => panic!("wrong segment type received"),
});

self.test_text_fmt_segment(r#"'\n'"#, |segment| {
let expected = EscapeCharacter { c: 'n' };
assert_eq!(*segment, expected.into());
});
self.test_text_fmt_segment(r#"'\u0394'"#, |segment| {
let expected = EscapeUnicode16 { digits: "0394".into() };
assert_eq!(*segment, expected.into());
});
// TODO [MWU] We don't test Unicode21 as it is not yet supported by the
// parser.
self.test_text_fmt_segment(r#"'\U0001f34c'"#, |segment| {
let expected = EscapeUnicode32 { digits: "0001f34c".into() };
assert_eq!(*segment, expected.into());
});
}

fn deserialize_text_block_raw(&mut self) {
let program = "\"\"\" \n \n X";
self.test_shape(program, |shape: &TextBlockRaw| {
assert_eq!(shape.spaces, 1);
assert_eq!(shape.offset, 0);

let (line,) = (&shape.text).expect_tuple();
let (empty_line,) = (&line.empty_lines).expect_tuple();
assert_eq!(*empty_line, 2);

let (segment,) = (&line.text).expect_tuple();
let expected_segment = SegmentPlain { value: " X".into() };
assert_eq!(*segment, expected_segment.into());
});
}

fn deserialize_text_block_fmt(&mut self) {
let program = "''' \n\n X\n Y";
self.test_shape(program, |shape: &TextBlockFmt<Ast>| {
assert_eq!(shape.spaces, 2);
assert_eq!(shape.offset, 0);
assert_eq!(shape.text.len(), 2);

let (line1, line2) = (&shape.text).expect_tuple();
let (empty_line,) = (&line1.empty_lines).expect_tuple();
assert_eq!(*empty_line, 0);
let (segment,) = (&line1.text).expect_tuple();
let expected_segment = SegmentPlain { value: " X".into() };
assert_eq!(*segment, expected_segment.into());

assert!(line2.empty_lines.is_empty());
let (segment,) = (&line2.text).expect_tuple();
let expected_segment = SegmentPlain { value: " Y".into() };
assert_eq!(*segment, expected_segment.into());
});
}


fn deserialize_unfinished_text(&mut self) {
let unfinished = r#""\"#;
self.test_shape(unfinished, |shape: &TextUnclosed<Ast>| {
let line = &shape.line;
let line: &TextLineRaw = line.try_into().unwrap();

let (segment,) = (&line.text).expect_tuple();
let expected = SegmentPlain { value: r"\".into() };
assert_eq!(*segment, expected.into());
});
}

fn deserialize_dangling_base(&mut self) {
self.test_shape("16_", |shape: &DanglingBase| {
assert_eq!(shape.base, "16");
});
}

fn deserialize_prefix(&mut self) {
self.test_shape("foo bar", |shape: &Prefix<Ast>| {
assert_var(&shape.func, "foo");
assert_eq!(shape.off, 3);
assert_var(&shape.arg, "bar");
});
}

fn deserialize_infix(&mut self) {
self.test_shape("foo + bar", |shape: &Infix<Ast>| {
assert_var(&shape.larg, "foo");
assert_eq!(shape.loff, 1);
assert_opr(&shape.opr, "+");
assert_eq!(shape.roff, 2);
assert_var(&shape.rarg, "bar");
});
}
fn deserialize_left(&mut self) {
self.test_shape("foo +", |shape: &SectionLeft<Ast>| {
assert_var(&shape.arg, "foo");
assert_eq!(shape.off, 1);
assert_opr(&shape.opr, "+");
});
}
fn deserialize_right(&mut self) {
self.test_shape("+ bar", |shape: &SectionRight<Ast>| {
assert_opr(&shape.opr, "+");
assert_eq!(shape.off, 1);
assert_var(&shape.arg, "bar");
});
}
fn deserialize_sides(&mut self) {
self.test_shape("+", |shape: &SectionSides<Ast>| {
assert_opr(&shape.opr, "+");
});
}

fn deserialize_block(&mut self) {
self.test_shape(" foo\n bar", |block: &Block<Ast>| {
assert_eq!(block.ty, BlockType::Continuous {});
assert_eq!(block.indent, 1);
assert_eq!(block.empty_lines.len(), 0);
assert!(block.is_orphan);

let first_line = &block.first_line;
assert_eq!(first_line.off, 0);
assert_var(&first_line.elem, "foo");

let (second_line,) = (&block.lines).expect_tuple();
assert_eq!(second_line.off, 0);
assert_var(second_line.elem.as_ref().unwrap(), "bar");
});
}

fn deserialize_annotation(&mut self) {
self.test_shape("@Tail_call", |annotation: &Annotation| {
let expected_annotation = Annotation { name: "@Tail_call".into() };
assert_eq!(annotation, &expected_annotation);
});
}

/// Tests parsing a number of sample macro usages.
///
/// As macros usually generate really huge ASTs, this test only checks
/// that we are able to deserialize the response and that it is a macro
/// match node. Node contents are not covered.
fn deserialize_macro_matches(&mut self) {
let macro_usages = vec![
"[]",
"[1,2,3]",
"{x}",
"polyglot java import com.example.MyClass",
"foo -> bar",
"()",
"(foo -> bar)",
"a b c -> bar",
"type Maybe a\n Just val:a",
"if foo > 8 then 10 else 9",
"skip bar",
"freeze bar",
"case foo of\n bar",
"import foo",
"import",
"export bar",
"from bar import all",
"from bar export bo",
"a ->",
"-> a",
"(a -> b) -> c",
];

for macro_usage in macro_usages.iter() {
println!(">>>>>>>>>> {macro_usage}");
let ast = self.parser.parse_line_ast(*macro_usage).unwrap();
println!("{ast:?}");
expect_shape::<Match<Ast>>(&ast);
}
}

fn deserialize_macro_ambiguous(&mut self) {
self.test_shape("if foo", |shape: &Ambiguous<Ast>| {
let segment = &shape.segs.head;
assert_var(&segment.head, "if");

let segment_body = segment.body.as_ref().unwrap();
assert_eq!(segment_body.off, 2);
assert_var(&segment_body.wrapped, "foo");
});
}

fn run(&mut self) {
// Shapes not covered by separate test:
// * Opr (doesn't parse on its own, covered by Infix and other)
// * Module (covered by every single test, as parser wraps everything into module)
self.blank_line_round_trip();
self.deserialize_metadata();
self.deserialize_unrecognized();
//self.deserialize_unexpected(); // TODO [mwu] https://github.com/enso-org/enso/issues/1016
self.deserialize_invalid_quote();
self.deserialize_inline_block();
self.deserialize_blank();
self.deserialize_var();
self.deserialize_cons();
self.deserialize_mod();
self.deserialize_invalid_suffix();
self.deserialize_number();
self.deserialize_text_line_raw();
self.deserialize_text_line_fmt();
self.deserialize_text_block_raw();
self.deserialize_text_block_fmt();
self.deserialize_unfinished_text();
self.deserialize_dangling_base();
self.deserialize_prefix();
self.deserialize_infix();
self.deserialize_left();
self.deserialize_right();
self.deserialize_sides();
self.deserialize_block();
self.deserialize_annotation();
self.deserialize_macro_matches();
self.deserialize_macro_ambiguous();
}
}

/// A single entry point for all the tests here using the external parser.
///
/// Setting up the parser is costly, so we run all tests as a single batch.
/// Until a proper CI solution for calling the external parser is devised, this
/// test is marked with `#[ignore]`.
#[wasm_bindgen_test]
fn parser_tests() {
Fixture::new().run()
}

/// Test case for https://github.com/enso-org/ide/issues/296
#[wasm_bindgen_test]
fn block_roundtrip() {
let programs = vec![
"main = 10 + 10",
"main =\n a = 10\n b = 20\n a * b",
"main =\n foo a =\n a * 10\n foo 10\n print \"hello\"",
"main =\n foo\n \n bar",
"main =\n \n foo\n \n bar",
];
for program in programs {
roundtrip_program(program);
}
}

/// Test case for https://github.com/enso-org/ide/issues/296
#[wasm_bindgen_test]
fn nested_macros() {
let parser = parser_scala::Parser::new_or_panic();

// Generate nested brackets. Stop at 8 because it gets slower and slower.
// At 12 the deserialization fails on WASM.
// At 14 the parsing fails in parser-service.
for i in 0..8 {
let program = format!("{}{}{}", "[".repeat(i), "foo", "]".repeat(i));
roundtrip_program_with(&parser, &program);
}

// Cases from https://github.com/enso-org/ide/issues/1351
let program = r#"from Standard.Base import all

main =
operator13 = Json.from_pairs [["a", 42], ["foo", [1,2,3]]]
var1 = [operator13, operator13]"#;
roundtrip_program_with(&parser, program);

let program = r#"triplets n = 1.up_to n . to_vector . flat_map a->
a+1 . up_to n . to_vector . flat_map b->
b+1 . up_to n . to_vector . flat_map c->
if a+b+c == n then [[a,b,c]] else []
n = 10
here.triplets n
IO.println(here.triplets n)"#;
roundtrip_program_with(&parser, program);
}

#[wasm_bindgen_test]
fn dealing_with_invalid_metadata() {
let f = Fixture::new();

let id = ast::Id::from_str("52233542-5c73-430b-a2b7-a68aaf81341b").unwrap();
let var = ast::Ast::new(ast::Var { name: "variable1".into() }, Some(id));
let module = ast::Module::from_line(var);
let ast = known::Module::new_no_id(module);
let metadata = FauxMetadata("certainly_not_a_number".to_string());

// Make sure that our metadata cannot be deserialized as `FauxMetadata<i32>`.
let serialized_text_metadata = serde_json::to_string(&metadata).unwrap();
assert!(serde_json::from_str::<FauxMetadata<i32>>(&serialized_text_metadata).is_err());

let parsed_file = parser_scala::api::ParsedSourceFile { ast, metadata };
let generated = parsed_file.serialize().unwrap();
let expected_generated = r#"variable1


#### METADATA ####
[[{"index":{"value":0},"size":{"value":9}},"52233542-5c73-430b-a2b7-a68aaf81341b"]]
"certainly_not_a_number""#;
assert_eq!(generated.content, expected_generated);
let r = f.parser.parse_with_metadata::<FauxMetadata<i32>>(generated.content).unwrap();
assert_eq!(r.metadata, default());
}
@ -1,44 +0,0 @@
// === Non-Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]

use enso_prelude::*;

use ast::Ast;
use parser_scala::api::ParsedSourceFile;
use parser_scala::Parser;
use uuid::Uuid;
use wasm_bindgen_test::wasm_bindgen_test;
use wasm_bindgen_test::wasm_bindgen_test_configure;



wasm_bindgen_test_configure!(run_in_browser);

#[wasm_bindgen_test]
fn web_test() {
let uuid = Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap();

let parser = Parser::new_or_panic();

let parse = |input| parser.parse_with_metadata(input).unwrap();
let file = |term| ParsedSourceFile {
metadata: serde_json::json!({}),
ast: ast::known::KnownAst::new_no_id(term),
};


let line = |term| ast::Module { lines: vec![ast::BlockLine { elem: term, off: 0 }] };

let app = ast::Prefix { func: Ast::var("x"), off: 3, arg: Ast::var("y") };
let var = ast::Var { name: "x".into() };

let ast = file(line(None));
assert_eq!(parse(String::try_from(&ast).unwrap()), ast);

let ast = file(line(Some(Ast::new(var, Some(uuid)))));
assert_eq!(parse(String::try_from(&ast).unwrap()), ast);

let ast = file(line(Some(Ast::new(app, Some(uuid)))));
assert_eq!(parse(String::try_from(&ast).unwrap()), ast);
}
@ -13,5 +13,5 @@ enso-profiler = { path = "../../../../lib/rust/profiler" }
failure = { workspace = true }

[dev-dependencies]
parser-scala = { path = "../parser" }
parser = { path = "../parser" }
wasm-bindgen-test = { workspace = true }
@ -4,7 +4,6 @@
#![allow(clippy::bool_to_int_with_if)]
#![allow(clippy::let_and_return)]

use ast::crumbs::PatternMatchCrumb::*;
use ast::crumbs::*;
use enso_prelude::*;
use enso_text::traits::*;
@ -239,10 +239,9 @@ mod test {
use crate::SpanTree;

use ast::HasRepr;
use parser_scala::Parser;
use wasm_bindgen_test::wasm_bindgen_test;
use parser::Parser;

#[wasm_bindgen_test]
#[test]
fn actions_in_span_tree() {
#[derive(Debug)]
struct Case {
@ -262,11 +261,12 @@ mod test {
panic!("Invalid case {:?}: no node with span {:?}", self, self.span)
});
let arg = Ast::new(ast::Var { name: "foo".to_string() }, None);
let case = format!("{self:?}");
let result = match &self.action {
Set => node.set(&ast, arg),
Erase => node.erase(&ast),
}
.unwrap();
.expect(&case);
let result_repr = result.repr();
assert_eq!(result_repr, self.expected, "Wrong answer for case {self:?}");
assert_eq!(ast_id, result.id, "Changed AST ID in case {self:?}");
@ -280,9 +280,6 @@ mod test {
, Case{expr:"a + b" , span:4..5 , action:Set , expected:"a + foo" }
, Case{expr:"a + b + c" , span:0..1 , action:Set , expected:"foo + b + c" }
, Case{expr:"a + b + c" , span:4..5 , action:Set , expected:"a + foo + c" }
, Case{expr:"a , b , c" , span:0..1 , action:Set , expected:"foo , b , c" }
, Case{expr:"a , b , c" , span:4..5 , action:Set , expected:"a , foo , c" }
, Case{expr:"a , b , c" , span:8..9 , action:Set , expected:"a , b , foo" }
, Case{expr:"f a b" , span:0..1 , action:Set , expected:"foo a b" }
, Case{expr:"f a b" , span:2..3 , action:Set , expected:"f foo b" }
, Case{expr:"f a b" , span:4..5 , action:Set , expected:"f a foo" }
@ -298,10 +295,6 @@ mod test {
, Case{expr:"+ b" , span:3..3 , action:Set , expected:"+ b + foo" }
, Case{expr:"a + b + c" , span:0..0 , action:Set , expected:"foo + a + b + c"}
, Case{expr:"a + b + c" , span:5..5 , action:Set , expected:"a + b + foo + c"}
, Case{expr:"a , b , c" , span:0..0 , action:Set , expected:"foo , a , b , c"}
, Case{expr:"a , b , c" , span:4..4 , action:Set , expected:"a , foo , b , c"}
, Case{expr:"a , b , c" , span:8..8 , action:Set , expected:"a , b , foo , c"}
, Case{expr:"a , b , c" , span:9..9 , action:Set , expected:"a , b , c , foo"}
, Case{expr:", b" , span:3..3 , action:Set , expected:", b , foo" }
, Case{expr:"f a b" , span:2..2 , action:Set , expected:"f foo a b" }
, Case{expr:"f a b" , span:3..3 , action:Set , expected:"f a foo b" }
@ -314,21 +307,18 @@ mod test {
, Case{expr:"a + b + c" , span:0..1 , action:Erase, expected:"b + c" }
, Case{expr:"a + b + c" , span:4..5 , action:Erase, expected:"a + c" }
, Case{expr:"a + b + c" , span:8..9 , action:Erase, expected:"a + b" }
, Case{expr:"a , b , c" , span:0..1 , action:Erase, expected:"b , c" }
, Case{expr:"a , b , c" , span:4..5 , action:Erase, expected:"a , c" }
, Case{expr:"a , b , c" , span:8..9 , action:Erase, expected:"a , b" }
, Case{expr:"f a b" , span:2..3 , action:Erase, expected:"f b" }
, Case{expr:"f a b" , span:4..5 , action:Erase, expected:"f a" }
, Case{expr:"(a + b + c)", span:5..6 , action:Erase, expected: "(a + c)" }
, Case{expr:"(a + b + c" , span:5..6 , action:Erase, expected: "(a + c" }
];
let parser = Parser::new_or_panic();
let parser = Parser::new();
for case in cases {
case.run(&parser);
}
}

#[wasm_bindgen_test]
#[test]
fn possible_actions_in_span_tree() {
#[derive(Debug)]
struct Case {
@ -385,10 +375,9 @@ mod test {
Case { expr: "[a,b]", span: 4..5, expected: &[] },
Case { expr: "(a + b + c)", span: 5..6, expected: &[Set, Erase] },
Case { expr: "(a", span: 1..2, expected: &[Set] },
Case { expr: "(a", span: 0..1, expected: &[] },
Case { expr: "(a + b + c", span: 5..6, expected: &[Set, Erase] },
];
let parser = Parser::new_or_panic();
let parser = Parser::new();
for case in cases {
case.run(&parser);
}
@ -11,13 +11,11 @@ use crate::ArgumentInfo;
use crate::Node;
use crate::SpanTree;

use ast::assoc::Assoc;
use ast::crumbs::Located;
use ast::opr::GeneralizedInfix;
use ast::Ast;
use ast::HasRepr;
use ast::MacroAmbiguousSegment;
use ast::MacroMatchSegment;
use ast::SpanSeed;
use std::collections::VecDeque;


@ -26,7 +24,6 @@ use std::collections::VecDeque;
// ==============

pub mod context;
pub mod macros;

pub use context::Context;

@ -39,7 +36,7 @@ pub use context::Context;
/// A trait for all types from which we can generate referred SpanTree. Meant to be implemented for
/// all AST-like structures.
pub trait SpanTreeGenerator<T> {
/// Generate node with it's whole subtree.
/// Generate node with its whole subtree.
fn generate_node(
&self,
kind: impl Into<node::Kind>,
@ -272,13 +269,8 @@ fn generate_node_for_ast<T: Payload>(
match ast.shape() {
ast::Shape::Prefix(_) =>
ast::prefix::Chain::from_ast(ast).unwrap().generate_node(kind, context),
// Lambdas should fall into the `_` case, because we don't want to create subports for
// them.
ast::Shape::Match(_) if ast::macros::as_lambda_match(ast).is_none() =>
ast::known::Match::try_new(ast.clone_ref()).unwrap().generate_node(kind, context),
ast::Shape::Ambiguous(_) => ast::known::Ambiguous::try_new(ast.clone_ref())
.unwrap()
.generate_node(kind, context),
ast::Shape::Tree(tree) if tree.type_info != ast::TreeType::Lambda =>
tree_generate_node(tree, kind, context, ast.id),
_ => {
let size = (ast.len().value as i32).byte_diff();
let ast_id = ast.id;
@ -374,7 +366,7 @@ fn generate_node_for_opr_chain<T: Payload>(
}
gen.generate_empty_node(InsertionPointType::Append);

if ast::opr::assoc(&this.operator) == Assoc::Right {
if this.operator.right_assoc {
gen.reverse_children();
}
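// Note: the new `right_assoc` flag replaces the old
// `ast::opr::assoc(&this.operator) == Assoc::Right` lookup; in both versions
// the children generated for a right-associative chain are reversed.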
@ -461,135 +453,6 @@ fn generate_node_for_prefix_chain<T: Payload>(
}


// === Match ===

impl<T: Payload> SpanTreeGenerator<T> for ast::known::Match {
fn generate_node(
&self,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<Node<T>> {
generate_node_for_known_match(self, kind.into(), context)
}
}

fn generate_node_for_known_match<T: Payload>(
this: &ast::known::Match,
kind: node::Kind,
context: &impl Context,
) -> FallibleResult<Node<T>> {
let removable = false;
let children_kind = node::Kind::argument().with_removable(removable);
let mut gen = ChildGenerator::default();
if let Some(pat) = &this.pfx {
for macros::AstInPattern { ast, crumbs } in macros::all_ast_nodes_in_pattern(pat) {
let ast_crumb = ast::crumbs::MatchCrumb::Pfx { val: crumbs };
let located_ast = Located::new(ast_crumb, ast.wrapped);
gen.generate_ast_node(located_ast, children_kind.clone(), context)?;
gen.spacing(ast.off);
}
}
let first_segment_index = 0;
generate_children_from_segment(&mut gen, first_segment_index, &this.segs.head, context)?;
for (index, segment) in this.segs.tail.iter().enumerate() {
gen.spacing(segment.off);
generate_children_from_segment(&mut gen, index + 1, &segment.wrapped, context)?;
}
Ok(Node {
kind,
size: gen.current_offset,
children: gen.children,
ast_id: this.id(),
payload: default(),
})
}

fn generate_children_from_segment<T: Payload>(
gen: &mut ChildGenerator<T>,
index: usize,
segment: &MacroMatchSegment<Ast>,
context: &impl Context,
) -> FallibleResult {
// generate child for head
let ast = segment.head.clone_ref();
let segment_crumb = ast::crumbs::SegmentMatchCrumb::Head;
let ast_crumb = ast::crumbs::MatchCrumb::Segs { val: segment_crumb, index };
let located_ast = Located::new(ast_crumb, ast);
gen.generate_ast_node(located_ast, node::Kind::Token, context)?;

for macros::AstInPattern { ast, crumbs } in macros::all_ast_nodes_in_pattern(&segment.body) {
let child_kind = match crumbs.last() {
Some(ast::crumbs::PatternMatchCrumb::Tok) => node::Kind::Token,
_ => node::Kind::argument().into(),
};
gen.spacing(ast.off);
let segment_crumb = ast::crumbs::SegmentMatchCrumb::Body { val: crumbs };
let ast_crumb = ast::crumbs::MatchCrumb::Segs { val: segment_crumb, index };
let located_ast = Located::new(ast_crumb, ast.wrapped);
gen.generate_ast_node(located_ast, child_kind, context)?;
}
Ok(())
}
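// Note: as the `child_kind` match above shows, segment heads and pattern
// leaves matched as `Tok` become `Token` nodes, while everything else matched
// by the pattern becomes an `argument` child.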


// === Ambiguous ===

impl<T: Payload> SpanTreeGenerator<T> for ast::known::Ambiguous {
fn generate_node(
&self,
kind: impl Into<node::Kind>,
context: &impl Context,
) -> FallibleResult<Node<T>> {
generate_node_for_known_ambiguous(self, kind.into(), context)
}
}

fn generate_node_for_known_ambiguous<T: Payload>(
this: &ast::known::Ambiguous,
kind: node::Kind,
context: &impl Context,
) -> FallibleResult<Node<T>> {
let mut gen = ChildGenerator::default();
let first_segment_index = 0;
generate_children_from_ambiguous(&mut gen, first_segment_index, &this.segs.head, context)?;
for (index, segment) in this.segs.tail.iter().enumerate() {
gen.spacing(segment.off);
generate_children_from_ambiguous(&mut gen, index + 1, &segment.wrapped, context)?;
}
Ok(Node {
kind,
size: gen.current_offset,
children: gen.children,
ast_id: this.id(),
payload: default(),
})
}

fn generate_children_from_ambiguous<T: Payload>(
gen: &mut ChildGenerator<T>,
index: usize,
segment: &MacroAmbiguousSegment<Ast>,
context: &impl Context,
) -> FallibleResult {
let children_kind = node::Kind::argument();
// generate child for head
let ast = segment.head.clone_ref();
let segment_crumb = ast::crumbs::AmbiguousSegmentCrumb::Head;
let ast_crumb = ast::crumbs::AmbiguousCrumb { field: segment_crumb, index };
let located_ast = Located::new(ast_crumb, ast);
gen.generate_ast_node(located_ast, node::Kind::Token, context)?;

if let Some(sast) = &segment.body {
gen.spacing(sast.off);
let field = ast::crumbs::AmbiguousSegmentCrumb::Body;
let located_ast =
Located::new(ast::crumbs::AmbiguousCrumb { index, field }, sast.clone_ref());
gen.generate_ast_node(located_ast, children_kind, context)?;
}
Ok(())
}


// === Common Utility ===

/// Build a prefix application-like span tree structure where the prefix argument has not been
@ -632,6 +495,61 @@ fn generate_expected_arguments<T: Payload>(



// =========================
// === SpanTree for Tree ===
// =========================

fn tree_generate_node<T: Payload>(
tree: &ast::Tree<Ast>,
kind: impl Into<node::Kind>,
context: &impl Context,
ast_id: Option<Id>,
) -> FallibleResult<Node<T>> {
let kind = match &tree.type_info {
ast::TreeType::Group => node::Kind::Group,
_ => kind.into(),
};
let mut children = vec![];
let size;
if let Some(leaf_info) = &tree.leaf_info {
size = ByteDiff::from(leaf_info.len());
} else {
let mut offset = ByteDiff::from(0);
for (index, raw_span_info) in tree.span_info.iter().enumerate() {
match raw_span_info {
SpanSeed::Space(ast::SpanSeedSpace { space }) => offset += ByteDiff::from(space),
SpanSeed::Token(ast::SpanSeedToken { token }) => {
let kind = node::Kind::Token;
let size = ByteDiff::from(token.len());
let ast_crumbs = vec![ast::crumbs::TreeCrumb { index }.into()];
let node = Node { kind, size, ..default() };
children.push(node::Child { node, offset, ast_crumbs });
offset += size;
}
SpanSeed::Child(ast::SpanSeedChild { node }) => {
let kind = node::Kind::Argument(node::Argument {
removable: false,
name: None,
tp: None,
call_id: None,
tag_values: vec![],
});
let node = node.generate_node(kind, context)?;
let child_size = node.size;
let ast_crumbs = vec![ast::crumbs::TreeCrumb { index }.into()];
children.push(node::Child { node, offset, ast_crumbs });
offset += child_size;
}
}
}
size = offset;
}
let payload = default();
Ok(Node { kind, size, children, ast_id, payload })
}
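// Note: a leaf `Tree` takes its size directly from `leaf_info`, while for
// non-leaf trees the size is the final `offset` accumulated from spaces,
// tokens and child nodes while walking `span_info` above.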



// ===================
// === MockContext ===
// ===================
@ -676,21 +594,13 @@ mod test {
use crate::node::Payload;
use crate::ArgumentInfo;

use ast::crumbs::AmbiguousCrumb;
use ast::crumbs::AmbiguousSegmentCrumb;
use ast::crumbs::InfixCrumb;
use ast::crumbs::PatternMatchCrumb;
use ast::crumbs::PrefixCrumb;
use ast::crumbs::SectionLeftCrumb;
use ast::crumbs::SectionRightCrumb;
use ast::crumbs::SectionSidesCrumb;
use ast::Crumbs;
use ast::IdMap;
use parser_scala::Parser;
use wasm_bindgen_test::wasm_bindgen_test;
use wasm_bindgen_test::wasm_bindgen_test_configure;

wasm_bindgen_test_configure!(run_in_browser);
use parser::Parser;


/// A helper function which removes information about expression id from the tree rooted at
@ -716,9 +626,9 @@ mod test {
}
}

#[wasm_bindgen_test]
#[test]
fn generating_span_tree() {
let parser = Parser::new_or_panic();
let parser = Parser::new();
let mut id_map = IdMap::default();
id_map.generate(0..15);
id_map.generate(0..11);
@ -733,7 +643,7 @@ mod test {
let (span, id) = id_map_entry;
let node = tree.root_ref().find_by_span(&span);
assert!(node.is_some(), "Node with span {span} not found");
assert_eq!(node.unwrap().node.ast_id, Some(id));
assert_eq!(node.unwrap().node.ast_id, Some(id), "Span: {span}");
}

// Check the other fields:
@ -763,9 +673,9 @@ mod test {
assert_eq!(expected, tree)
}

#[wasm_bindgen_test]
#[test]
fn generate_span_tree_with_chains() {
let parser = Parser::new_or_panic();
let parser = Parser::new();
let ast = parser.parse_line_ast("2 + 3 + foo bar baz 13 + 5").unwrap();
let mut tree: SpanTree = ast.generate_tree(&context::Empty).unwrap();
clear_expression_ids(&mut tree.root);
@ -806,198 +716,54 @@ mod test {
assert_eq!(expected, tree);
}

#[wasm_bindgen_test]
#[test]
#[ignore]
fn generating_span_tree_from_right_assoc_operator() {
let parser = Parser::new_or_panic();
let ast = parser.parse_line_ast("1,2,3").unwrap();
let parser = Parser::new();
let ast = parser.parse_line_ast("1<|2<|3").unwrap();
let mut tree: SpanTree = ast.generate_tree(&context::Empty).unwrap();
clear_expression_ids(&mut tree.root);
clear_parameter_infos(&mut tree.root);

let expected = TreeBuilder::new(5)
let expected = TreeBuilder::new(7)
.add_empty_child(0, Append)
.add_leaf(0, 1, node::Kind::argument().removable(), InfixCrumb::LeftOperand)
.add_leaf(1, 1, node::Kind::operation(), InfixCrumb::Operator)
.add_child(2, 3, node::Kind::Chained, InfixCrumb::RightOperand)
.add_leaf(1, 2, node::Kind::operation(), InfixCrumb::Operator)
.add_child(3, 3, node::Kind::Chained, InfixCrumb::RightOperand)
.add_empty_child(0, Append)
.add_leaf(0, 1, node::Kind::argument().removable(), InfixCrumb::LeftOperand)
.add_leaf(1, 1, node::Kind::operation(), InfixCrumb::Operator)
.add_empty_child(2, AfterTarget)
.add_leaf(2, 1, node::Kind::this().removable(), InfixCrumb::RightOperand)
.add_empty_child(3, BeforeTarget)
.add_leaf(1, 2, node::Kind::operation(), InfixCrumb::Operator)
.add_empty_child(3, AfterTarget)
.add_leaf(3, 1, node::Kind::this().removable(), InfixCrumb::RightOperand)
.add_empty_child(4, BeforeTarget)
.done()
.build();

assert_eq!(expected, tree)
}

#[wasm_bindgen_test]
fn generating_span_tree_from_section() {
let parser = Parser::new_or_panic();
// The star makes the `SectionSides` AST one of the parameters of the + chain. The first +
// makes a SectionRight, and the last + makes a SectionLeft.
let ast = parser.parse_line_ast("+ * + + 2 +").unwrap();
let mut tree: SpanTree = ast.generate_tree(&context::Empty).unwrap();
clear_expression_ids(&mut tree.root);
clear_parameter_infos(&mut tree.root);

let expected = TreeBuilder::new(11)
.add_child(0, 9, node::Kind::Chained, SectionLeftCrumb::Arg)
.add_child(0, 5, node::Kind::Chained, InfixCrumb::LeftOperand)
.add_child(0, 3, node::Kind::Chained, SectionLeftCrumb::Arg)
.add_empty_child(0, BeforeTarget)
.add_leaf(0, 1, node::Kind::operation(), SectionRightCrumb::Opr)
.add_child(2, 1, node::Kind::argument().removable(), SectionRightCrumb::Arg)
.add_empty_child(0, BeforeTarget)
.add_leaf(0, 1, node::Kind::operation(), SectionSidesCrumb)
.add_empty_child(1, Append)
.done()
.add_empty_child(3, Append)
.done()
.add_leaf(4, 1, node::Kind::operation(), SectionLeftCrumb::Opr)
.add_empty_child(5, Append)
.done()
.add_leaf(6, 1, node::Kind::operation(), InfixCrumb::Operator)
.add_leaf(8, 1, node::Kind::argument().removable(), InfixCrumb::RightOperand)
.add_empty_child(9, Append)
.done()
.add_leaf(10, 1, node::Kind::operation(), SectionLeftCrumb::Opr)
.add_empty_child(11, Append)
.build();

assert_eq!(expected, tree);
}

#[wasm_bindgen_test]
#[test]
#[ignore]
fn generating_span_tree_from_right_assoc_section() {
let parser = Parser::new_or_panic();
let ast = parser.parse_line_ast(",2,").unwrap();
let parser = Parser::new();
let ast = parser.parse_line_ast("<|2<|").unwrap();
let mut tree: SpanTree = ast.generate_tree(&context::Empty).unwrap();
clear_expression_ids(&mut tree.root);
clear_parameter_infos(&mut tree.root);

let expected = TreeBuilder::new(3)
let expected = TreeBuilder::new(5)
.add_empty_child(0, Append)
.add_leaf(0, 1, node::Kind::operation(), SectionRightCrumb::Opr)
.add_child(1, 2, node::Kind::Chained, SectionRightCrumb::Arg)
.add_leaf(0, 2, node::Kind::operation(), SectionRightCrumb::Opr)
.add_child(2, 2, node::Kind::Chained, SectionRightCrumb::Arg)
.add_empty_child(0, Append)
.add_leaf(0, 1, node::Kind::argument().removable(), SectionLeftCrumb::Arg)
.add_leaf(1, 1, node::Kind::operation(), SectionLeftCrumb::Opr)
.add_empty_child(2, BeforeTarget)
.done()
.build();

assert_eq!(expected, tree);
}

#[wasm_bindgen_test]
fn generating_span_tree_from_matched_macros() {
use PatternMatchCrumb::*;

let parser = Parser::new_or_panic();
let mut id_map = IdMap::default();
let expected_id = id_map.generate(0..29);
let expression = "if foo then (a + b) x else ()";
let ast = parser.parse_line_ast_with_id_map(expression, id_map).unwrap();
let mut tree: SpanTree = ast.generate_tree(&context::Empty).unwrap();

// Check if expression id is set
assert_eq!(tree.root_ref().ast_id, Some(expected_id));

// Check the other fields
clear_expression_ids(&mut tree.root);
clear_parameter_infos(&mut tree.root);
let seq = Seq { right: false };
let if_then_else_cr = vec![seq, Or, Build];
let parens_cr = vec![seq, Or, Or, Build];

let expected = TreeBuilder::new(29)
.add_leaf(0, 2, node::Kind::Token, segment_head_crumbs(0))
.add_leaf(3, 3, node::Kind::argument(), segment_body_crumbs(0, &if_then_else_cr))
.add_leaf(7, 4, node::Kind::Token, segment_head_crumbs(1))
.add_child(12, 9, node::Kind::argument(), segment_body_crumbs(1, &if_then_else_cr))
.add_child(0, 7, node::Kind::operation(), PrefixCrumb::Func)
.add_leaf(0, 1, node::Kind::Token, segment_head_crumbs(0))
.add_child(1, 5, node::Kind::argument(), segment_body_crumbs(0, &parens_cr))
.add_empty_child(0, BeforeTarget)
.add_leaf(0, 1, node::Kind::this(), InfixCrumb::LeftOperand)
.add_empty_child(1, AfterTarget)
.add_leaf(2, 1, node::Kind::operation(), InfixCrumb::Operator)
.add_leaf(4, 1, node::Kind::argument(), InfixCrumb::RightOperand)
.add_empty_child(5, Append)
.done()
.add_leaf(6, 1, node::Kind::Token, segment_head_crumbs(1))
.done()
.add_empty_child(8, BeforeTarget)
.add_leaf(8, 1, node::Kind::this(), PrefixCrumb::Arg)
.add_empty_child(9, Append)
.done()
.add_leaf(22, 4, node::Kind::Token, segment_head_crumbs(2))
.add_child(27, 2, node::Kind::argument(), segment_body_crumbs(2, &if_then_else_cr))
.add_leaf(0, 1, node::Kind::Token, segment_head_crumbs(0))
.add_leaf(1, 1, node::Kind::Token, segment_head_crumbs(1))
.done()
.build();

assert_eq!(expected, tree);
}

#[wasm_bindgen_test]
fn generating_span_tree_from_matched_list_macro() {
use PatternMatchCrumb::*;

let parser = Parser::new_or_panic();
let expression = "[a,b]";
let ast = parser.parse_line_ast(expression).unwrap();
let mut tree: SpanTree = ast.generate_tree(&context::Empty).unwrap();

// Check the other fields
clear_expression_ids(&mut tree.root);
let left_seq = Seq { right: false };
let right_seq = Seq { right: true };
let many = Many { index: 0 };
let first_element_cr = vec![left_seq, Or, Or, left_seq, Build];
let second_element_cr = vec![left_seq, Or, Or, right_seq, many, right_seq, Build];
let comma_cr = vec![left_seq, Or, Or, right_seq, many, left_seq, Tok];

let expected = TreeBuilder::new(5)
.add_leaf(0, 1, node::Kind::Token, segment_head_crumbs(0))
.add_leaf(1, 1, node::Kind::argument(), segment_body_crumbs(0, &first_element_cr))
.add_leaf(2, 1, node::Kind::Token, segment_body_crumbs(0, &comma_cr))
.add_leaf(3, 1, node::Kind::argument(), segment_body_crumbs(0, &second_element_cr))
.add_leaf(4, 1, node::Kind::Token, segment_head_crumbs(1))
.build();

assert_eq!(expected, tree);
}

#[wasm_bindgen_test]
fn generating_span_tree_from_ambiguous_macros() {
let parser = Parser::new_or_panic();
let mut id_map = IdMap::default();
id_map.generate(0..2);
let ast = parser.parse_line_ast_with_id_map("(4", id_map.clone()).unwrap();
let mut tree: SpanTree = ast.generate_tree(&context::Empty).unwrap();

// Check the expression id:
let (_, expected_id) = id_map.vec.first().unwrap();
assert_eq!(tree.root_ref().ast_id, Some(*expected_id));

// Check the other fields:
clear_expression_ids(&mut tree.root);
let head_crumb = AmbiguousCrumb { index: 0, field: AmbiguousSegmentCrumb::Head };
let body_crumb = AmbiguousCrumb { index: 0, field: AmbiguousSegmentCrumb::Body };
let expected = TreeBuilder::new(2)
.add_leaf(0, 1, node::Kind::Token, head_crumb)
.add_leaf(1, 1, node::Kind::argument(), body_crumb)
.build();

assert_eq!(expected, tree);
}

#[wasm_bindgen_test]
#[test]
fn generating_span_tree_for_lambda() {
let parser = Parser::new_or_panic();
let parser = Parser::new();
let ast = parser.parse_line_ast("foo a-> b + c").unwrap();
let mut tree: SpanTree = ast.generate_tree(&context::Empty).unwrap();
clear_expression_ids(&mut tree.root);
@ -1013,9 +779,9 @@ mod test {
assert_eq!(expected, tree);
}

#[wasm_bindgen_test]
#[test]
fn generating_span_tree_for_unfinished_call() {
let parser = Parser::new_or_panic();
let parser = Parser::new();
let this_param = |call_id| ArgumentInfo {
name: Some("self".to_owned()),
tp: Some("Any".to_owned()),
@ -1143,17 +909,4 @@ mod test {
clear_parameter_infos(&mut tree.root);
assert_eq!(tree, expected);
}

fn segment_body_crumbs(
index: usize,
pattern_crumb: &[PatternMatchCrumb],
) -> ast::crumbs::MatchCrumb {
let val = ast::crumbs::SegmentMatchCrumb::Body { val: pattern_crumb.to_vec() };
ast::crumbs::MatchCrumb::Segs { val, index }
}

fn segment_head_crumbs(index: usize) -> ast::crumbs::MatchCrumb {
let val = ast::crumbs::SegmentMatchCrumb::Head;
ast::crumbs::MatchCrumb::Segs { val, index }
}
}
@ -1,139 +0,0 @@
//! A module with utilities for generating SpanTree from macros (Match and Ambiguous).

// TODO[ao] Duplicated with `pattern_subcrumbs` function in `ast::crumbs`, but adds information
// about spacing. All the 'crumblike' utilities should be merged into one solution.

use crate::prelude::*;

use ast::crumbs::PatternMatchCrumb;
use ast::Ast;
use ast::MacroPatternMatch;
use ast::Shifted;



// ======================
// === LocatedPattern ===
// ======================

/// A fragment of MacroPatternMatch localized by PatternMatchCrumbs.
#[allow(missing_docs)]
#[derive(Debug)]
pub struct LocatedPattern<'a> {
pub pattern: &'a MacroPatternMatch<Shifted<Ast>>,
pub crumbs: Vec<PatternMatchCrumb>,
}



// ==================
// === PatternDfs ===
// ==================

/// An iterator over all nodes in a MacroPatternMatch tree, traversing it with the DFS algorithm.
struct PatternDfs<'a> {
/// The LIFO stack of nodes to visit.
to_visit: Vec<LocatedPattern<'a>>,
}

impl<'a> Iterator for PatternDfs<'a> {
type Item = LocatedPattern<'a>;

fn next(&mut self) -> Option<Self::Item> {
let to_return = self.to_visit.pop();
if let Some(pattern) = &to_return {
self.push_children_to_visit(pattern);
}
to_return
}
}

impl<'a> PatternDfs<'a> {
/// Create an iterator which starts from the `root` node.
pub fn new(root: &'a MacroPatternMatch<Shifted<Ast>>) -> Self {
let first_to_visit = LocatedPattern { pattern: root, crumbs: vec![] };
PatternDfs { to_visit: vec![first_to_visit] }
}

/// Obtain all children of `pattern` and push them to the `to_visit` queue.
fn push_children_to_visit(&mut self, pattern: &LocatedPattern<'a>) {
use ast::MacroPatternMatchRaw::*;
match pattern.pattern.deref() {
Except(pat) => self.push_child_to_visit(pattern, &pat.elem, PatternMatchCrumb::Except),
Tag(pat) => self.push_child_to_visit(pattern, &pat.elem, PatternMatchCrumb::Tag),
Cls(pat) => self.push_child_to_visit(pattern, &pat.elem, PatternMatchCrumb::Cls),
Or(pat) => self.push_child_to_visit(pattern, &pat.elem, PatternMatchCrumb::Or),
Seq(pat) => {
let (left_elem, right_elem) = &pat.elem;
self.push_child_to_visit(pattern, right_elem, PatternMatchCrumb::Seq {
right: true,
});
self.push_child_to_visit(pattern, left_elem, PatternMatchCrumb::Seq {
right: false,
});
}
Many(pat) =>
for (index, elem) in pat.elem.iter().enumerate().rev() {
self.push_child_to_visit(pattern, elem, PatternMatchCrumb::Many { index });
},
// Other patterns do not have children.
_ => {}
}
}

fn push_child_to_visit(
&mut self,
pattern: &LocatedPattern<'a>,
child: &'a MacroPatternMatch<Shifted<Ast>>,
crumb: PatternMatchCrumb,
) {
let loc_pattern = LocatedPattern {
pattern: child,
crumbs: pattern.crumbs.iter().cloned().chain(std::iter::once(crumb)).collect(),
};
self.to_visit.push(loc_pattern);
}
}
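// Note: since `to_visit` is a stack, `Seq` children are pushed right before
// left (and `Many` children in reverse index order), so the left-most child
// is popped first and the iterator yields nodes in left-to-right pre-order.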


// ==========================================
// === Retrieving AST Nodes From Patterns ===
// ==========================================

/// An AST node located inside a Match node.
#[allow(missing_docs)]
#[derive(Debug)]
pub struct AstInPattern {
pub ast: Shifted<Ast>,
pub crumbs: Vec<PatternMatchCrumb>,
}

/// Helper function that returns all AST nodes lying on the leaves of a MacroPatternMatch.
pub fn all_ast_nodes_in_pattern(
pattern: &MacroPatternMatch<Shifted<Ast>>,
) -> impl Iterator<Item = AstInPattern> + '_ {
use ast::MacroPatternMatchRaw::*;

PatternDfs::new(pattern).filter_map(|pattern| {
let opt_ast_and_crumb = match pattern.pattern.deref() {
Build(pat) => Some((&pat.elem, PatternMatchCrumb::Build)),
Err(pat) => Some((&pat.elem, PatternMatchCrumb::Err)),
Tok(pat) => Some((&pat.elem, PatternMatchCrumb::Tok)),
Blank(pat) => Some((&pat.elem, PatternMatchCrumb::Blank)),
Var(pat) => Some((&pat.elem, PatternMatchCrumb::Var)),
Cons(pat) => Some((&pat.elem, PatternMatchCrumb::Cons)),
Opr(pat) => Some((&pat.elem, PatternMatchCrumb::Opr)),
Mod(pat) => Some((&pat.elem, PatternMatchCrumb::Mod)),
Num(pat) => Some((&pat.elem, PatternMatchCrumb::Num)),
Text(pat) => Some((&pat.elem, PatternMatchCrumb::Text)),
Block(pat) => Some((&pat.elem, PatternMatchCrumb::Block)),
Macro(pat) => Some((&pat.elem, PatternMatchCrumb::Macro)),
Invalid(pat) => Some((&pat.elem, PatternMatchCrumb::Invalid)),
_ => None,
};
opt_ast_and_crumb.map(|(ast, crumb)| AstInPattern {
ast: ast.clone(),
crumbs: pattern.crumbs.into_iter().chain(std::iter::once(crumb)).collect(),
})
})
}
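// A minimal usage sketch (with a hypothetical `pattern` value; the span-tree
// generator consumes this function in exactly this way):
//
//     for AstInPattern { ast, crumbs } in all_ast_nodes_in_pattern(&pattern) {
//         println!("leaf at {crumbs:?} with offset {}", ast.off);
//     }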
@ -8,7 +8,6 @@ use crate::iter::LeafIterator;
use crate::iter::TreeFragment;
use crate::ArgumentInfo;

use ast::crumbs::IntoCrumbs;
use enso_text as text;


@ -65,12 +64,6 @@ impl<T: Payload> Node<T> {
default()
}

/// Define a new child by using the `ChildBuilder` pattern. Consumes self.
pub fn new_child(mut self, f: impl FnOnce(ChildBuilder<T>) -> ChildBuilder<T>) -> Self {
ChildBuilder::apply_to_node(&mut self, f);
self
}

/// Payload mapping utility.
pub fn map<S>(self, f: impl Copy + Fn(T) -> S) -> Node<S> {
let kind = self.kind;
@ -86,15 +79,9 @@ impl<T: Payload> Node<T> {

#[allow(missing_docs)]
impl<T: Payload> Node<T> {
// FIXME[WD]: This is a hack, which just checks token placement, not a real solution.
/// Check whether the node is a parensed expression.
pub fn is_parensed(&self) -> bool {
let check = |t: Option<&Child<T>>| {
t.map(|t| t.kind == Kind::Token && t.size.value == 1) == Some(true)
};
check(self.children.first()) && check(self.children.last()) && self.children.len() == 3
self.kind == Kind::Group
}

pub fn is_root(&self) -> bool {
self.kind.is_root()
}
@ -216,116 +203,6 @@ impl<T> DerefMut for Child<T> {



// ====================
// === ChildBuilder ===
// ====================

/// A builder pattern for `SpanTree`. A thin wrapper for `Child` which adds useful methods for
/// building properties of the current node.
///
/// This builder exposes two main functions - `new_child`, and `add_child`. The former provides a
/// nice, user-friendly interface for building a `SpanTree`, while the latter provides a very
/// explicit argument-setting interface meant for building `SpanTree` for shape-testing purposes.
#[derive(Debug)]
#[allow(missing_docs)]
pub struct ChildBuilder<T = ()> {
pub child: Child<T>,
}

impl<T> Deref for ChildBuilder<T> {
type Target = Child<T>;
fn deref(&self) -> &Self::Target {
&self.child
}
}

impl<T> DerefMut for ChildBuilder<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.child
}
}

impl<T: Payload> ChildBuilder<T> {
/// Constructor.
pub fn new(child: Child<T>) -> Self {
Self { child }
}

/// Add a new child and use the `ChildBuilder` pattern to define its properties. This is a
/// smart child constructor: it automatically computes all properties that were not provided,
/// such as span or offset, and defaults all other missing fields.
pub fn new_child(mut self, f: impl FnOnce(Self) -> Self) -> Self {
Self::apply_to_node(&mut self.node, f);
self
}

/// Define a new child by using the `ChildBuilder` pattern.
fn apply_to_node(node: &mut Node<T>, f: impl FnOnce(ChildBuilder<T>) -> ChildBuilder<T>) {
let mut new_child = Child::default();
let offset = node.size;
new_child.offset = offset;
let builder = ChildBuilder::new(new_child);
let child = f(builder).child;
let offset_diff = child.offset - offset;
node.size = node.size + child.size + offset_diff;
node.children.push(child);
}

/// Add a new child and use the `ChildBuilder` pattern to define its properties. This function
/// accepts an explicit list of arguments and disables all automatic computation of spans and
/// offsets. It is useful for testing purposes.
pub fn add_child(
mut self,
offset: usize,
size: usize,
kind: Kind,
crumbs: impl IntoCrumbs,
f: impl FnOnce(Self) -> Self,
) -> Self {
let child: ChildBuilder<T> = ChildBuilder::new(default());
let child = f(child.offset(offset.into()).size(size.into()).kind(kind).crumbs(crumbs));
self.node.children.push(child.child);
self
}

/// Offset setter.
pub fn offset(mut self, offset: ByteDiff) -> Self {
self.offset = offset;
self
}

/// Crumbs setter.
pub fn crumbs(mut self, crumbs: impl IntoCrumbs) -> Self {
self.ast_crumbs = crumbs.into_crumbs();
self
}

/// Kind setter.
pub fn kind(mut self, kind: impl Into<Kind>) -> Self {
self.node.kind = kind.into();
self
}

/// Size setter.
pub fn size(mut self, size: ByteDiff) -> Self {
self.node.size = size;
self
}

/// Expression ID setter.
pub fn ast_id(mut self, id: ast::Id) -> Self {
self.node.ast_id = Some(id);
self
}

/// Expression ID generator.
pub fn new_ast_id(self) -> Self {
self.ast_id(ast::Id::new_v4())
}
}
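// A minimal usage sketch of the explicit interface (a hypothetical child;
// `default()` and the setters are defined above):
//
//     let child = ChildBuilder::<()>::new(default())
//         .offset(0.into())
//         .size(3.into())
//         .kind(Kind::Token);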



// ==============
// === Crumbs ===
// ==============
@ -545,7 +422,7 @@ impl<'a, T: Payload> Ref<'a, T> {
!ch.ast_crumbs.is_empty() && ast_crumbs.starts_with(&ch.ast_crumbs)
})
.or_else(|| {
// We try to find appriopriate node second time, this time expecting case of
// We try to find appropriate node second time, this time expecting case of
// "prefix-like" nodes with `InsertionPoint(ExpectedArgument(_))`. See also docs
// for `generate::generate_expected_argument`.
// TODO[ao]: As implementation of SpanTree will extend there may be some day
@ -561,7 +438,7 @@ impl<'a, T: Payload> Ref<'a, T> {
}
}

/// Get the node which exactly matches the given Span. If there many such node's, it pick first
/// Get the node which exactly matches the given Span. If there are many such nodes, pick first
/// found by DFS.
pub fn find_by_span(self, span: &text::Range<Byte>) -> Option<Ref<'a, T>> {
if self.span() == *span {
@ -31,6 +31,8 @@ pub enum Kind {
/// between AST tokens. For example, given expression `foo bar`, the span assigned to the
/// `InsertionPoint` between `foo` and `bar` should be set to 3.
InsertionPoint(InsertionPoint),
/// A parenthesized expression.
Group,
}


@ -197,6 +199,7 @@ impl Kind {
Self::Argument(_) => "Argument",
Self::Token => "Token",
Self::InsertionPoint(_) => "InsertionPoint",
Self::Group => "Group",
}
}
}
@ -23,7 +23,7 @@ use double_representation::node::MainLine;
use double_representation::node::NodeInfo;
use double_representation::node::NodeLocation;
use engine_protocol::language_server;
use parser_scala::Parser;
use parser::Parser;
use span_tree::action::Action;
use span_tree::action::Actions;
use span_tree::generate::context::CalledMethodInfo;

@ -334,15 +334,10 @@ impl Connections {
pub fn name_for_ast(ast: &Ast) -> String {
use ast::*;
match ast.shape() {
Shape::Tree(tree) if let Some(name) = &tree.descriptive_name => name.to_string(),
Shape::Var(ident) => ident.name.clone(),
Shape::Cons(ident) => ident.name.to_lowercase(),
Shape::Number(_) => "number".into(),
Shape::DanglingBase(_) => "number".into(),
Shape::TextLineRaw(_) => "text".into(),
Shape::TextLineFmt(_) => "text".into(),
Shape::TextBlockRaw(_) => "text".into(),
Shape::TextBlockFmt(_) => "text".into(),
Shape::TextUnclosed(_) => "text".into(),
Shape::Opr(opr) => match opr.name.as_ref() {
"+" => "sum",
"*" => "product",

@ -1041,8 +1036,7 @@ pub mod tests {
use double_representation::name::project;
use engine_protocol::language_server::MethodPointer;
use enso_text::index::*;
use parser_scala::Parser;
use wasm_bindgen_test::wasm_bindgen_test;
use parser::Parser;
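For orientation, a hedged sketch of what `name_for_ast` is expected to produce; the pairing below is inferred from the match arms above (and the operator table is exercised by the `suggested_names` test later in this diff), not asserted by the patch itself:

    // Hypothetical expectation mirroring the match arms: a Var yields its own
    // name, number-like shapes yield "number", text shapes yield "text".
    let parser = Parser::new();
    assert_eq!(name_for_ast(&parser.parse_line_ast("foo").unwrap()), "foo");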
@ -1097,7 +1091,7 @@ pub mod tests {

/// Create a graph controller from the current mock data.
pub fn graph(&self) -> Handle {
let parser = Parser::new().unwrap();
let parser = Parser::new();
let urm = Rc::new(model::undo_redo::Repository::new());
let module = self.module_data().plain(&parser, urm);
let id = self.graph_id.clone();

@ -1109,7 +1103,6 @@ pub mod tests {
self.module_path.method_pointer(self.project_name.clone(), self.graph_id.to_string())
}

#[profile(Debug)]
pub fn suggestion_db(&self) -> Rc<model::SuggestionDatabase> {
use model::suggestion_database::SuggestionDatabase;
let entries = self.suggestions.iter();
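The recurring edit in the test hunks below is the parser-construction API: the old Scala-parser binding could fail to initialize (hence `Parser::new().unwrap()` and `Parser::new_or_panic()`), while the new Rust parser is constructed infallibly. A before/after sketch, taken directly from the patterns in this diff:

    // Before (parser_scala): construction returned a Result.
    // let parser = parser_scala::Parser::new_or_panic();
    // After (parser): construction cannot fail; parse_line_ast still returns a Result.
    let parser = parser::Parser::new();
    let ast = parser.parse_line_ast("2 + 2").unwrap();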
@ -1147,7 +1140,7 @@ pub mod tests {
}
}

#[wasm_bindgen_test]
#[test]
fn node_operations() {
Fixture::set_up().run(|graph| async move {
let uid = graph.all_node_infos().unwrap()[0].id();

@ -1158,7 +1151,7 @@ pub mod tests {
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_notification_relay() {
Fixture::set_up().run(|graph| async move {
let mut sub = graph.subscribe();

@ -1168,7 +1161,7 @@ pub mod tests {
});
}

#[wasm_bindgen_test]
#[test]
fn suggestion_db_updates_graph_values() {
Fixture::set_up().run(|graph| async move {
let mut sub = graph.subscribe();

@ -1181,7 +1174,7 @@ pub mod tests {
});
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_inline_definition() {
let mut test = Fixture::set_up();
const EXPRESSION: &str = "2+2";

@ -1196,7 +1189,7 @@ pub mod tests {
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_block_definition() {
let mut test = Fixture::set_up();
test.data.code = r"

@ -1212,7 +1205,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_parse_expression() {
let mut test = Fixture::set_up();
test.run(|graph| async move {

@ -1227,7 +1220,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn span_tree_context_handling_metadata_and_name() {
let entry = crate::test::mock::data::suggestion_entry_foo();
let mut test = Fixture::set_up();

@ -1266,7 +1259,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_used_names_in_inline_def() {
let mut test = Fixture::set_up();
test.data.code = "main = foo".into();

@ -1277,7 +1270,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_nested_definition() {
let mut test = Fixture::set_up();
test.data.code = r"main =

@ -1298,7 +1291,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn collapsing_nodes_avoids_name_conflicts() {
// Checks that the generated name avoids collisions with other methods defined in the
// module and with used symbols that could be shadowed by the extracted method's name.

@ -1335,7 +1328,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn collapsing_nodes() {
let mut test = Fixture::set_up();
let code = r"

@ -1385,7 +1378,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_doubly_nested_definition() {
// Tests editing a nested definition that requires transforming an inline expression
// into a new block.

@ -1404,7 +1397,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_node_operations_node() {
let mut test = Fixture::set_up();
const PROGRAM: &str = r"

@ -1483,7 +1476,8 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
#[ignore] // FIXME (https://github.com/enso-org/enso/issues/5574)
fn graph_controller_connections_listing() {
let mut test = Fixture::set_up();
const PROGRAM: &str = r"

@ -1532,7 +1526,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_create_connection() {
/// A test case for creating a connection. The field names are short to keep the table of
/// cases readable without very long lines (see `let cases` below).
@ -1573,22 +1567,18 @@ main =
}
}

let cases = &[
Case { src: "x", dst: "foo", expected: "x", ports: (&[], &[]) },
Case { src: "x,y", dst: "foo a", expected: "foo y", ports: (&[4], &[2]) },
Case {
src: "Vec x y",
dst: "1 + 2 + 3",
expected: "x + 2 + 3",
ports: (&[0, 2], &[0, 1]),
},
];
let cases = &[Case { src: "x", dst: "foo", expected: "x", ports: (&[], &[]) }, Case {
src: "Vec x y",
dst: "1 + 2 + 3",
expected: "x + 2 + 3",
ports: (&[0, 2], &[0, 1]),
}];
for case in cases {
case.run()
}
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_create_connection_reordering() {
let mut test = Fixture::set_up();
const PROGRAM: &str = r"main =

@ -1621,7 +1611,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_create_connection_reordering_with_dependency() {
let mut test = Fixture::set_up();
const PROGRAM: &str = r"main =

@ -1660,7 +1650,7 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn graph_controller_create_connection_introducing_var() {
let mut test = Fixture::set_up();
const PROGRAM: &str = r"main =

@ -1697,9 +1687,9 @@ main =
})
}

#[wasm_bindgen_test]
#[test]
fn suggested_names() {
let parser = Parser::new_or_panic();
let parser = Parser::new();
let cases = [
("a+b", "sum"),
("a-b", "difference"),

@ -1722,7 +1712,7 @@ main =
}
}

#[wasm_bindgen_test]
#[test]
fn disconnect() {
#[derive(Clone, Debug)]
struct Case {

@ -1756,11 +1746,6 @@ main =
Case { dest_node_expr: "var + b + c", dest_node_expected: "_ + b + c" },
Case { dest_node_expr: "a + var + c", dest_node_expected: "a + _ + c" },
Case { dest_node_expr: "a + b + var", dest_node_expected: "a + b" },
Case { dest_node_expr: "var , a", dest_node_expected: "_ , a" },
Case { dest_node_expr: "a , var", dest_node_expected: "a , _" },
Case { dest_node_expr: "var , b , c", dest_node_expected: "_ , b , c" },
Case { dest_node_expr: "a , var , c", dest_node_expected: "a , _ , c" },
Case { dest_node_expr: "a , b , var", dest_node_expected: "a , b" },
Case {
dest_node_expr: "f\n bar a var",
dest_node_expected: "f\n bar a _",
@ -385,7 +385,7 @@ pub mod tests {

impl MockData {
pub fn controller(&self) -> Handle {
let parser = parser_scala::Parser::new_or_panic();
let parser = parser::Parser::new();
let repository = Rc::new(model::undo_redo::Repository::new());
let module = self.module.plain(&parser, repository);
let method = self.graph.method();

@ -400,9 +400,7 @@ impl QueryData {
/// Escape a string to be used as a visualization argument. Transforms the string into an
/// Enso expression with a string literal.
fn escape_visualization_argument(arg: &str) -> String {
let segment = ast::SegmentPlain { value: arg.into() };
let text = ast::TextLineRaw { text: vec![segment.into()] };
text.repr()
Ast::raw_text_literal(arg).repr()
}

/// Escape a list of strings to be used as a visualization argument. Transforms the strings into
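A hedged sketch of the new implementation's effect, assuming `Ast::raw_text_literal` builds an Enso raw text literal whose `repr` is the quoted source form (the exact quoting shown in the comment is an assumption, not taken from this diff):

    // Hypothetical: the argument reaches the visualization as a string
    // expression rather than as raw code.
    let escaped = escape_visualization_argument("column_name");
    // e.g. escaped == "\"column_name\"" (quoted form assumed)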
@ -7,7 +7,7 @@ use crate::prelude::*;

use double_representation::name::project;
use mockall::automock;
use parser_scala::Parser;
use parser::Parser;

// ==============

@ -15,7 +15,7 @@ use engine_protocol::project_manager;
use engine_protocol::project_manager::MissingComponentAction;
use engine_protocol::project_manager::ProjectMetadata;
use engine_protocol::project_manager::ProjectName;
use parser_scala::Parser;
use parser::Parser;

@ -69,7 +69,7 @@ impl Handle {
) -> Self {
let current_project = Rc::new(CloneCell::new(project));
let status_notifications = default();
let parser = Parser::new_or_panic();
let parser = Parser::new();
let notifications = default();
Self { current_project, project_manager, status_notifications, parser, notifications }
}
@ -11,7 +11,7 @@ use crate::model::project::synchronized::Properties;

use double_representation::name::project;
use engine_protocol::project_manager::ProjectName;
use parser_scala::Parser;
use parser::Parser;

@ -46,7 +46,7 @@ impl Handle {
/// Create IDE Controller for a given opened project.
pub fn new(project: model::Project) -> Self {
let status_notifications = default();
let parser = Parser::new_or_panic();
let parser = Parser::new();
Self { status_notifications, parser, project }
}

@ -73,7 +73,7 @@ impl Handle {
)
.await?;
let status_notifications = default();
let parser = Parser::new_or_panic();
let parser = Parser::new();
Ok(Self { status_notifications, parser, project })
}
}
@ -14,7 +14,7 @@ use double_representation::name::QualifiedName;
use double_representation::text::apply_code_change_to_id_map;
use engine_protocol::language_server;
use engine_protocol::types::Sha3_224;
use parser_scala::Parser;
use parser::Parser;

@ -89,7 +89,7 @@ impl Handle {
"The module controller ast was not synchronized with text editor \
content!\n >>> Module: {my_code}\n >>> Editor: {code}"
);
let actual_ast = self.parser.parse(code, default())?.try_into()?;
let actual_ast = self.parser.parse(code, default()).try_into()?;
self.model.update_ast(actual_ast)?;
}
Ok(())

@ -171,7 +171,7 @@ impl Handle {
parser: Parser,
repository: Rc<model::undo_redo::Repository>,
) -> FallibleResult<Self> {
let ast = parser.parse(code.to_string(), id_map)?.try_into()?;
let ast = parser.parse(code.to_string(), id_map).try_into()?;
let metadata = default();
let model = Rc::new(model::module::Plain::new(path, ast, metadata, repository));
Ok(Handle { model, language_server, parser })

@ -200,15 +200,14 @@ mod test {
use ast::Ast;
use ast::BlockLine;
use enso_text::index::*;
use parser_scala::Parser;
use parser::Parser;
use uuid::Uuid;
use wasm_bindgen_test::wasm_bindgen_test;

#[wasm_bindgen_test]
#[test]
fn update_ast_after_text_change() {
TestWithLocalPoolExecutor::set_up().run_task(async {
let ls = language_server::Connection::new_mock_rc(default());
let parser = Parser::new().unwrap();
let parser = Parser::new();
let location = Path::from_mock_module_name("Test");
let code = "2+2";
let uuid1 = Uuid::new_v4();

@ -236,7 +235,10 @@ mod test {
Some(uuid1),
),
loff: 0,
opr: Ast::new(ast::Opr { name: "+".to_string() }, Some(uuid2)),
opr: Ast::new(
ast::Opr { name: "+".to_string(), right_assoc: false },
Some(uuid2),
),
roff: 0,
rarg: Ast::new(
ast::Number { base: None, int: "2".to_string() },
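One AST change worth noting in the hunk above: the translated `ast::Opr` shape now records associativity, so constructing an operator requires the extra field. A minimal sketch grounded in the test code above:

    // "+" is left-associative, so right_assoc is false.
    let plus = ast::Opr { name: "+".to_string(), right_assoc: false };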
@ -12,7 +12,7 @@ use engine_protocol::language_server::MethodPointer;
use engine_protocol::language_server::Path;
use enso_frp::web::platform;
use enso_frp::web::platform::Platform;
use parser_scala::Parser;
use parser::Parser;

@ -276,7 +276,7 @@ mod tests {
#[wasm_bindgen_test]
fn adding_missing_main() {
let _ctx = TestWithLocalPoolExecutor::set_up();
let parser = parser_scala::Parser::new_or_panic();
let parser = parser::Parser::new();
let mut data = crate::test::mock::Unified::new();
let module_name = data.module_path.module_name().to_owned();
let main_ptr = main_method_ptr(data.project_name.clone(), &data.module_path);
@ -26,7 +26,7 @@ use enso_text::Byte;
use enso_text::Location;
use enso_text::Rope;
use flo_stream::Subscriber;
use parser_scala::Parser;
use parser::Parser;

// ==============

@ -1778,7 +1778,7 @@ pub mod test {
project.expect_qualified_name().returning_st(move || project_qname.clone());
project.expect_name().returning_st(move || project_name.clone());
let project = Rc::new(project);
ide.expect_parser().return_const(Parser::new_or_panic());
ide.expect_parser().return_const(Parser::new());
let current_project = project.clone_ref();
ide.expect_current_project().returning_st(move || Some(current_project.clone_ref()));
ide.expect_manage_projects()

@ -2123,7 +2123,7 @@ pub mod test {

#[wasm_bindgen_test]
fn parsed_input() {
let parser = Parser::new_or_panic();
let parser = Parser::new();

fn args_reprs(prefix: &ast::prefix::Chain) -> Vec<String> {
prefix.args.iter().map(|arg| arg.repr()).collect()

@ -2186,9 +2186,7 @@ pub mod test {
let expression = parsed.expression.unwrap();
assert_eq!(expression.off, 0);
assert_eq!(expression.func.repr(), "foo");
assert_eq!(args_reprs(&expression), vec![" bar".to_string()]);
assert_eq!(parsed.pattern_offset, 1);
assert_eq!(parsed.pattern.as_str(), "(baz ");
assert_eq!(args_reprs(&expression), vec![" bar".to_string(), " (baz".to_string()]);
}

fn are_same(

@ -2272,7 +2270,7 @@ pub mod test {
}

fn run(&self) {
let parser = Parser::new_or_panic();
let parser = Parser::new();
let ast = parser.parse_line_ast(self.before).unwrap();
let new_ast = apply_this_argument("foo", &ast);
assert_eq!(new_ast.repr(), self.after, "Case {self:?} failed: {ast:?}");

@ -2424,7 +2422,7 @@ pub mod test {

let module = searcher.graph.graph().module.clone_ref();
// Setup searcher.
let parser = Parser::new_or_panic();
let parser = Parser::new();
let picked_method = FragmentAddedByPickingSuggestion {
id: CompletedFragmentId::Function,
picked_suggestion: action::Suggestion::FromDatabase(entry4),

@ -2507,7 +2505,7 @@ pub mod test {

#[wasm_bindgen_test]
fn simple_function_call_parsing() {
let parser = Parser::new_or_panic();
let parser = Parser::new();

let ast = parser.parse_line_ast("foo").unwrap();
let call = SimpleFunctionCall::try_new(&ast).expect("Returned None for \"foo\"");
@ -152,7 +152,7 @@ mod test {
use crate::executor::test_utils::TestWithLocalPoolExecutor;

use enso_text::index::*;
use parser_scala::Parser;
use parser::Parser;
use wasm_bindgen_test::wasm_bindgen_test;

fn setup_mock_project(setup: impl FnOnce(&mut model::project::MockAPI)) -> model::Project {

@ -171,7 +171,7 @@ mod test {
test.run_task(async move {
let ls = language_server::Connection::new_mock_rc(default());
let path = model::module::Path::from_mock_module_name("Test");
let parser = Parser::new().unwrap();
let parser = Parser::new();
let module_res =
controller::Module::new_mock(path, "main = 2+2", default(), ls, parser, default());
let module = module_res.unwrap();

@ -204,7 +204,7 @@ mod test {

#[wasm_bindgen_test]
fn obtain_text_controller_for_module() {
let parser = parser_scala::Parser::new_or_panic();
let parser = parser::Parser::new();
TestWithLocalPoolExecutor::set_up().run_task(async move {
let code = "2 + 2".to_string();
let undo = default();
@ -43,6 +43,7 @@
#![feature(assert_matches)]
#![feature(hash_drain_filter)]
#![feature(unwrap_infallible)]
#![feature(if_let_guard)]
// === Standard Linter Configuration ===
#![deny(non_ascii_idents)]
#![warn(unsafe_code)]
@ -13,10 +13,10 @@ use double_representation::name::project;
use double_representation::name::QualifiedName;
use engine_protocol::language_server::MethodPointer;
use flo_stream::Subscriber;
use parser_scala::api::ParsedSourceFile;
use parser_scala::api::PruneUnusedIds;
use parser_scala::api::SourceFile;
use parser_scala::Parser;
use parser::api::ParsedSourceFile;
use parser::api::PruneUnusedIds;
use parser::api::SourceFile;
use parser::Parser;
use serde::Deserialize;
use serde::Serialize;

@ -340,7 +340,7 @@ impl PruneUnusedIds for Metadata {
}
}

impl parser_scala::api::Metadata for Metadata {}
impl parser::api::Metadata for Metadata {}

impl Default for Metadata {
fn default() -> Self {

@ -738,7 +738,7 @@ pub mod test {
parser: &Parser,
repository: Rc<model::undo_redo::Repository>,
) -> Module {
let ast = parser.parse_module(self.code.clone(), self.id_map.clone()).unwrap();
let ast = parser.parse_module(&self.code, self.id_map.clone()).unwrap();
let module = Plain::new(self.path.clone(), ast, self.metadata.clone(), repository);
Rc::new(module)
}

@ -746,8 +746,7 @@ pub mod test {

pub fn plain_from_code(code: impl Into<String>) -> Module {
let urm = default();
MockData { code: code.into(), ..default() }
.plain(&parser_scala::Parser::new_or_panic(), urm)
MockData { code: code.into(), ..default() }.plain(&parser::Parser::new(), urm)
}

#[test]

@ -783,7 +782,7 @@ pub mod test {
assert_eq!(qualified.to_string(), "n.P.Foo.Bar");
}

#[wasm_bindgen_test]
#[test]
fn outdated_metadata_parses() {
// Metadata here will fail to serialize because `File` is not a valid qualified name.
// Expected behavior is that invalid metadata parts will be filled with defaults.

@ -794,8 +793,7 @@ main = 5
#### METADATA ####
[[{"index":{"value":7},"size":{"value":8}},"04f2bbe4-6291-4bad-961c-146228f3aee4"],[{"index":{"value":15},"size":{"value":1}},"20f4e5e3-3ab4-4c68-ae7a-d261d3f23af0"],[{"index":{"value":16},"size":{"value":13}},"746b453a-3fed-4128-86ce-a3853ef684b0"],[{"index":{"value":0},"size":{"value":29}},"aead2cca-c429-47f2-85ef-fe090433990b"],[{"index":{"value":30},"size":{"value":4}},"063ab796-e79b-4037-bf94-1f24c9545b9a"],[{"index":{"value":35},"size":{"value":1}},"4b4992bd-7d8e-401b-aebf-42b30a4a5cae"],[{"index":{"value":37},"size":{"value":1}},"1d6660c6-a70b-4eeb-b5f7-82f05a51df25"],[{"index":{"value":30},"size":{"value":8}},"ad5b88bf-0cdb-4eba-90fe-07afc37e3953"],[{"index":{"value":0},"size":{"value":39}},"602dfcea-2321-48fa-95b1-1f58fb028099"]]
{"ide":{"node":{"1d6660c6-a70b-4eeb-b5f7-82f05a51df25":{"position":{"vector":[-75.5,52]},"intended_method":{"module":"Base.System.File","defined_on_type":"File","name":"read"}}}}}"#;
let result = Parser::new_or_panic().parse_with_metadata::<Metadata>(code.into());
let file = result.unwrap();
let file = Parser::new().parse_with_metadata::<Metadata>(code);
assert_eq!(file.ast.repr(), "import Standard.Visualization\nmain = 5");
assert_eq!(file.metadata.ide.node.len(), 1);
let id = ast::Id::from_str("1d6660c6-a70b-4eeb-b5f7-82f05a51df25").unwrap();
@ -19,9 +19,9 @@ use double_representation::definition::DefinitionInfo;
use double_representation::definition::DefinitionProvider;
use double_representation::import;
use flo_stream::Subscriber;
use parser_scala::api::ParsedSourceFile;
use parser_scala::api::SourceFile;
use parser_scala::Parser;
use parser::api::ParsedSourceFile;
use parser::api::SourceFile;
use parser::Parser;
use std::collections::hash_map::Entry;

@ -178,7 +178,7 @@ impl model::module::API for Module {
let replaced_end = code.offset_to_location_snapped(change.range.end);
let replaced_location = enso_text::Range::new(replaced_start, replaced_end);
code.apply_change(change.as_ref());
let new_ast = parser.parse(code.into(), new_id_map)?.try_into()?;
let new_ast = parser.parse(code.to_string(), new_id_map).try_into()?;
let notification = NotificationKind::CodeChanged { change, replaced_location };
self.update_content(notification, |content| content.ast = new_ast)
}

@ -318,7 +318,7 @@ fn restore_edited_node_in_graph(
"Restoring edited node {node_id} to original expression \
\"{previous_expression}\"."
);
graph.edit_node(node_id, Parser::new()?.parse_line_ast(previous_expression)?)?;
graph.edit_node(node_id, Parser::new().parse_line_ast(previous_expression)?)?;
md_entry.get_mut().intended_method = previous_intended_method;
}
None => {}

@ -360,7 +360,7 @@ mod test {
range: enso_text::Range::new(2.byte(), 5.byte()),
text: "- abc".to_string(),
};
module.apply_code_change(change, &Parser::new_or_panic(), default()).unwrap();
module.apply_code_change(change, &Parser::new(), default()).unwrap();
assert_eq!("2 - abc", module.ast().repr());
}

@ -391,7 +391,7 @@ mod test {
range: enso_text::Range::new(0.byte(), 1.byte()),
text: "foo".to_string(),
};
module.apply_code_change(change.clone(), &Parser::new_or_panic(), default()).unwrap();
module.apply_code_change(change.clone(), &Parser::new(), default()).unwrap();
let replaced_location = enso_text::Range {
start: enso_text::Location { line: 0.into(), offset: 0.byte() },
end: enso_text::Location { line: 0.into(), offset: 1.byte() },
@ -24,8 +24,8 @@ use enso_text::text;
use enso_text::Location;
use enso_text::Range;
use flo_stream::Subscriber;
use parser_scala::api::SourceFile;
use parser_scala::Parser;
use parser::api::SourceFile;
use parser::Parser;

@ -172,9 +172,7 @@ impl Module {
info!("Read content of the module {path}, digest is {:?}", opened.current_version);
let end_of_file_byte = content.last_line_end_location();
let end_of_file = content.utf16_code_unit_location_of_location(end_of_file_byte);
// TODO[ao] We should not fail here when metadata are malformed, but discard them and set
// default instead.
let source = parser.parse_with_metadata(opened.content)?;
let source = parser.parse_with_metadata(opened.content);
let digest = opened.current_version;
let summary = ContentSummary { digest, end_of_file };
let model = model::module::Plain::new(path, source.ast, source.metadata, repository);

@ -726,12 +724,12 @@ pub mod test {
let parser = data.parser.clone();
let module = fixture.synchronized_module();

let new_content = "main =\n println \"Test\"".to_string();
let new_content = "main =\n println \"Test\"";
let new_ast = parser.parse_module(new_content, default()).unwrap();
module.update_ast(new_ast).unwrap();
runner.perhaps_run_until_stalled(&mut fixture);
let change = TextChange { range: (20..24).into(), text: "Test 2".to_string() };
module.apply_code_change(change, &Parser::new_or_panic(), default()).unwrap();
module.apply_code_change(change, &Parser::new(), default()).unwrap();
runner.perhaps_run_until_stalled(&mut fixture);
};
@ -14,7 +14,7 @@ use engine_protocol::language_server;
use engine_protocol::language_server::ContentRoot;
use flo_stream::Subscriber;
use mockall::automock;
use parser_scala::Parser;
use parser::Parser;
use uuid::Uuid;

@ -25,7 +25,7 @@ use engine_protocol::project_manager::MissingComponentAction;
use engine_protocol::project_manager::ProjectName;
use flo_stream::Subscriber;
use json_rpc::error::RpcError;
use parser_scala::Parser;
use parser::Parser;

@ -287,9 +287,9 @@ impl Project {
let language_server = language_server_rpc.clone();
let module_registry = default();
let execution_contexts = default();
let parser = Parser::new();
let visualization =
controller::Visualization::new(language_server, embedded_visualizations);
let parser = Parser::new_or_panic();
let language_server = &*language_server_rpc;
let suggestion_db = SuggestionDatabase::create_synchronized(language_server);
let suggestion_db = Rc::new(suggestion_db.await.map_err(&wrap)?);
@ -152,7 +152,7 @@ impl Nodes {
removed_views
}

/// Remove node represented by given view (if any) and return it's AST ID.
/// Remove node represented by given view (if any) and return its AST ID.
pub fn remove_node(&mut self, node: ViewNodeId) -> Option<AstNodeId> {
let ast_id = self.ast_node_by_view_id.remove(&node)?;
self.nodes.remove(&ast_id);

@ -827,10 +827,10 @@ impl<'a> ViewChange<'a> {
mod tests {
use super::*;
use engine_protocol::language_server::MethodPointer;
use parser_scala::Parser;
use parser::Parser;

fn create_test_node(expression: &str) -> controller::graph::Node {
let parser = Parser::new_or_panic();
let parser = Parser::new();
let ast = parser.parse_line_ast(expression).unwrap();
controller::graph::Node {
info: double_representation::node::NodeInfo {

@ -969,7 +969,7 @@ mod tests {
fn refreshing_node_expression() {
let Fixture { state, nodes } = Fixture::setup_nodes(&["foo bar"]);
let node_id = nodes[0].node.id();
let new_ast = Parser::new_or_panic().parse_line_ast("foo baz").unwrap().with_id(node_id);
let new_ast = Parser::new().parse_line_ast("foo baz").unwrap().with_id(node_id);
let new_node = controller::graph::Node {
info: double_representation::node::NodeInfo {
documentation: None,
@ -134,7 +134,7 @@ pub mod mock {
pub module_path: model::module::Path,
pub suggestions: HashMap<suggestion_database::entry::Id, suggestion_database::Entry>,
pub context_id: model::execution_context::Id,
pub parser: parser_scala::Parser,
pub parser: parser::Parser,
code: String,
id_map: ast::IdMap,
metadata: crate::model::module::Metadata,

@ -171,7 +171,7 @@ pub mod mock {
metadata: default(),
context_id: CONTEXT_ID,
root_definition: definition_name(),
parser: parser_scala::Parser::new_or_panic(),
parser: parser::Parser::new(),
}
}

@ -180,7 +180,7 @@ pub mod mock {
}

pub fn module(&self, urm: Rc<undo_redo::Manager>) -> crate::model::Module {
let ast = self.parser.parse_module(self.code.clone(), self.id_map.clone()).unwrap();
let ast = self.parser.parse_module(&self.code, self.id_map.clone()).unwrap();
let path = self.module_path.clone();
let metadata = self.metadata.clone();
let repository = urm.repository.clone_ref();
@ -12,7 +12,8 @@ enso-prelude = { path = "../../../lib/rust/prelude" }
convert_case = { workspace = true }
span-tree = { path = "../language/span-tree" }
ast = { path = "../language/ast/impl" }
parser-scala = { path = "../language/parser" }
parser = { path = "../language/parser" }
parser-scala = { path = "../language/parser-scala" }
enso-text = { path = "../../../lib/rust/text" }
double-representation = { path = "../controller/double-representation" }
engine-protocol = { path = "../controller/engine-protocol" }

@ -5,7 +5,7 @@ use crate::prelude::*;

use double_representation::definition;
use double_representation::definition::DefinitionName;
use double_representation::module;
use parser_scala::Parser;
use parser::Parser;

@ -74,7 +74,7 @@ impl Example {
) -> FallibleResult<definition::ToAdd> {
let base_name = self.function_name();
let name = DefinitionName::new_plain(module.generate_name(&base_name)?);
let code_ast = parser.parse_module(self.code.clone(), default())?;
let code_ast = parser.parse_module(&self.code, default())?;
let body_block = code_ast.shape().as_block(0).ok_or(InvalidExample)?;
let body_ast = Ast::new(body_block, None);
Ok(definition::ToAdd::new_with_body(name, default(), body_ast))
@ -9,6 +9,7 @@ crate-type = ["cdylib", "rlib"]

[dependencies]
ast = { path = "../language/ast/impl" }
parser = { path = "../language/parser" }
enso-config = { path = "../config" }
enso-frp = { path = "../../../lib/rust/frp" }
enso-prelude = { path = "../../../lib/rust/prelude" }

@ -23,7 +24,6 @@ ensogl-hardcoded-theme = { path = "../../../lib/rust/ensogl/app/theme/hardcoded"
ide-view-component-browser = { path = "component-browser" }
ide-view-documentation = { path = "documentation" }
ide-view-graph-editor = { path = "graph-editor" }
parser-scala = { path = "../language/parser" }
span-tree = { path = "../language/span-tree" }
js-sys = { workspace = true }
multi-map = { workspace = true }

@ -9,12 +9,12 @@ crate-type = ["cdylib", "rlib"]

[dependencies]
ast = { path = "../../../language/ast/impl" }
parser = { path = "../../../language/parser" }
enso-frp = { path = "../../../../../lib/rust/frp" }
ensogl = { path = "../../../../../lib/rust/ensogl" }
ensogl-hardcoded-theme = { path = "../../../../../lib/rust/ensogl/app/theme/hardcoded" }
ensogl-text-msdf = { path = "../../../../../lib/rust/ensogl/component/text/src/font/msdf" }
ide-view = { path = "../.." }
parser-scala = { path = "../../../language/parser" }
span-tree = { path = "../../../language/span-tree" }
uuid = { version = "0.8", features = ["v4", "wasm-bindgen"] }
wasm-bindgen = { workspace = true }
@ -15,10 +15,7 @@
#![warn(unused_import_braces)]
#![warn(unused_qualifications)]

use ast::crumbs::PatternMatchCrumb::*;
use ast::crumbs::*;
use ensogl::prelude::*;
use span_tree::traits::*;

use enso_frp as frp;
use ensogl::application::Application;

@ -37,8 +34,7 @@ use ide_view::graph_editor::Type;
use ide_view::project;
use ide_view::root;
use ide_view::status_bar;
use parser_scala::Parser;
use uuid::Uuid;
use parser::Parser;

@ -323,7 +319,7 @@ fn init(app: &Application) {
pub fn expression_mock_string(label: &str) -> Expression {
let pattern = Some(label.to_string());
let code = format!("\"{label}\"");
let parser = Parser::new_or_panic();
let parser = Parser::new();
let parameters = vec![];
let ast = parser.parse_line_ast(&code).unwrap();
let invocation_info = span_tree::generate::context::CalledMethodInfo { parameters };

@ -338,7 +334,7 @@ pub fn expression_mock_string(label: &str) -> Expression {
pub fn expression_mock() -> Expression {
let pattern = Some("var1".to_string());
let code = "[1,2,3]".to_string();
let parser = Parser::new_or_panic();
let parser = Parser::new();
let this_param = span_tree::ArgumentInfo {
name: Some("self".to_owned()),
tp: Some("Text".to_owned()),

@ -355,52 +351,11 @@ pub fn expression_mock() -> Expression {
Expression { pattern, code, whole_expression_id, input_span_tree, output_span_tree }
}

// TODO[ao] This expression mocks results in panic. If you want to use it, please fix it first.
pub fn expression_mock2() -> Expression {
let pattern = Some("var1".to_string());
let pattern_cr = vec![Seq { right: false }, Or, Or, Build];
let val = ast::crumbs::SegmentMatchCrumb::Body { val: pattern_cr };
let parens_cr = ast::crumbs::MatchCrumb::Segs { val, index: 0 };
let code = "make_maps size (distribution normal)".to_string();
let output_span_tree = span_tree::SpanTree::default();
let input_span_tree = span_tree::builder::TreeBuilder::new(36)
.add_child(0, 14, span_tree::node::Kind::Chained, PrefixCrumb::Func)
.add_child(0, 9, span_tree::node::Kind::operation(), PrefixCrumb::Func)
.set_ast_id(Uuid::new_v4())
.done()
.add_empty_child(10, span_tree::node::InsertionPointType::BeforeTarget)
.add_child(10, 4, span_tree::node::Kind::this().removable(), PrefixCrumb::Arg)
.set_ast_id(Uuid::new_v4())
.done()
.add_empty_child(14, span_tree::node::InsertionPointType::Append)
.set_ast_id(Uuid::new_v4())
.done()
.add_child(15, 21, span_tree::node::Kind::argument().removable(), PrefixCrumb::Arg)
.set_ast_id(Uuid::new_v4())
.add_child(1, 19, span_tree::node::Kind::argument(), parens_cr)
.set_ast_id(Uuid::new_v4())
.add_child(0, 12, span_tree::node::Kind::operation(), PrefixCrumb::Func)
.set_ast_id(Uuid::new_v4())
.done()
.add_empty_child(13, span_tree::node::InsertionPointType::BeforeTarget)
.add_child(13, 6, span_tree::node::Kind::this(), PrefixCrumb::Arg)
.set_ast_id(Uuid::new_v4())
.done()
.add_empty_child(19, span_tree::node::InsertionPointType::Append)
.done()
.done()
.add_empty_child(36, span_tree::node::InsertionPointType::Append)
.build();
let whole_expression_id = default();
let code = code.into();
Expression { pattern, code, whole_expression_id, input_span_tree, output_span_tree }
}

pub fn expression_mock3() -> Expression {
let pattern = Some("Vector x y z".to_string());
// let code = "image.blur ((foo bar) baz)".to_string();
let code = "Vector x y z".to_string();
let parser = Parser::new_or_panic();
let parser = Parser::new();
let this_param = span_tree::ArgumentInfo {
name: Some("self".to_owned()),
tp: Some("Image".to_owned()),

@ -440,7 +395,7 @@ pub fn expression_mock3() -> Expression {
pub fn expression_mock_trim() -> Expression {
let pattern = Some("trim_node".to_string());
let code = "\" hello \".trim".to_string();
let parser = Parser::new_or_panic();
let parser = Parser::new();
let this_param = span_tree::ArgumentInfo {
name: Some("self".to_owned()),
tp: Some("Text".to_owned()),
@ -104,6 +104,7 @@ impl Debug for Expression {
}
}

// === Pretty printing debug adapter ===

/// Debug adapter used for pretty-printing the `Expression` span tree. Can be used to print the
@ -171,13 +171,13 @@ public class ErrorCompilerTest extends CompilerTest {
@Test
public void malformedImport7() throws Exception {
var ir = parse("import Foo hiding");
assertSingleSyntaxError(ir, IR$Error$Syntax$InvalidImport$.MODULE$, "Imports must have a valid module path.", 17, 17);
assertSingleSyntaxError(ir, IR$Error$Syntax$InvalidImport$.MODULE$, "Imports must have a valid module path.", 7, 17);
}

@Test
public void malformedImport8() throws Exception {
var ir = parse("import Foo hiding X,");
assertSingleSyntaxError(ir, IR$Error$Syntax$InvalidImport$.MODULE$, "Imports must have a valid module path.", 18, 20);
assertSingleSyntaxError(ir, IR$Error$Syntax$InvalidImport$.MODULE$, "Imports must have a valid module path.", 7, 20);
}

@Test
@ -840,6 +840,9 @@ fn export() {
fn metadata_raw() {
let code = [
"x",
"",
"",
"",
"#### METADATA ####",
r#"[[{"index":{"value":7},"size":{"value":8}},"5bad897e-099b-4b00-9348-64092636746d"]]"#,
];
@ -47,15 +47,10 @@ fn register_import_macros(macros: &mut resolver::SegmentMap<'_>) {
let defs = [
macro_definition! {("import", everything()) import_body},
macro_definition! {("import", everything(), "as", everything()) import_body},
macro_definition! {("import", everything(), "hiding", everything()) import_body},
macro_definition! {("polyglot", everything(), "import", everything()) import_body},
macro_definition! {
("polyglot", everything(), "import", everything(), "as", everything()) import_body},
macro_definition! {
("polyglot", everything(), "import", everything(), "hiding", everything()) import_body},
macro_definition! {
("from", everything(), "import", everything(), "hiding", everything()) import_body},
macro_definition! {
("from", everything(), "import", nothing(), "all", nothing()) import_body},
macro_definition! {
("from", everything(), "import", nothing(), "all", nothing(), "hiding", everything())
@ -9,7 +9,7 @@ use uuid::Uuid;

const MARKER: &str = "#### METADATA ####\n";
const MARKER: &str = "\n\n\n#### METADATA ####\n";

@ -47,6 +47,14 @@ impl From<MetadataFormat> for Metadata {
}
}

/// Split input source file into the code and the metadata section, if any was found.
pub fn extract(input: &str) -> (&str, Option<&str>) {
match input.rsplit_once(MARKER) {
Some((code, metadata)) => (code, Some(metadata)),
None => (input, None),
}
}

/// Given source code, if a metadata section is found: Attempt to parse it; return the result, and
/// the non-metadata portion of the input.
pub fn parse(input: &str) -> Option<(Result, &str)> {

@ -54,6 +62,18 @@ pub fn parse(input: &str) -> Option<(Result, &str)> {
Some((metadata.parse().map(|data: MetadataFormat| data.into()), code))
}

/// Parse just the metadata section.
pub fn parse_metadata(input: &str) -> Option<Vec<((usize, usize), Uuid)>> {
Some(
MetadataFormat::from_str(input)
.ok()?
.id_map
.into_iter()
.map(|(location, id)| ((location.index.value, location.size.value), id))
.collect(),
)
}

/// Result of parsing metadata.
pub type Result<T = Metadata> = std::result::Result<T, String>;
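A hedged sketch tying the new `MARKER` to `extract`: the marker now includes the three blank lines preceding the metadata header (which is also why the `metadata_raw` test above gained three empty strings), so splitting on it leaves the code section without those trailing blanks. The sample source string is illustrative:

    let src = "main = 5\n\n\n\n#### METADATA ####\n[]\n";
    let (code, metadata) = extract(src);
    assert_eq!(code, "main = 5\n");
    assert_eq!(metadata, Some("[]\n"));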
@ -498,6 +498,33 @@ pub struct DocComment<'s> {
pub newlines: Vec<token::Newline<'s>>,
}

impl<'s> DocComment<'s> {
/// Return the contents of the comment, with leading whitespace, the `##` token, and following
/// empty lines removed; newlines will be normalized.
pub fn content(&self) -> String {
let mut buf = String::new();
macro_rules! emit_token {
($buf:expr, $token:expr) => {{
$buf.push_str(&$token.left_offset.code.repr);
$buf.push_str(&$token.code.repr);
}};
}
for element in &self.elements {
match element {
TextElement::Section { text } => buf.push_str(&text.code.repr),
TextElement::Escape { token } => emit_token!(buf, token),
TextElement::Newline { newline } => {
buf.push_str(&newline.left_offset.code.repr);
buf.push('\n');
}
// Unreachable.
TextElement::Splice { .. } => continue,
}
}
buf
}
}
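To illustrate the normalization that `content` performs, here is a standalone hedged sketch operating on plain text rather than the parser's token types; it approximates the behavior described in the doc comment above and is not the parser's actual code path:

    // Approximate the normalization: drop the "##" opener and leading
    // whitespace, and normalize newlines.
    fn normalized_doc(raw: &str) -> String {
        raw.lines()
            .map(|line| line.trim_start().trim_start_matches("##").trim_start())
            .collect::<Vec<_>>()
            .join("\n")
    }
    assert_eq!(normalized_doc("## Frobnicates the bar."), "Frobnicates the bar.");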
impl<'s> span::Builder<'s> for DocComment<'s> {
fn add_to_span(&mut self, span: Span<'s>) -> Span<'s> {
span.add(&mut self.open).add(&mut self.elements).add(&mut self.newlines)

@ -1320,6 +1347,13 @@ impl<'s> Tree<'s> {
self.visit_item(&mut visitor);
visitor.code
}

/// Return source code of this AST, excluding initial whitespace.
pub fn trimmed_code(&self) -> String {
let mut visitor = CodePrinterVisitor::default();
self.variant.visit_item(&mut visitor);
visitor.code
}
}
@ -1391,3 +1425,61 @@ impl<'s> Tree<'s> {
self.visit_mut(&mut visitor);
}
}

// === ItemFnVisitor ===

impl<'s> Tree<'s> {
/// Apply the provided function to each [`Token`] or [`Tree`] that is a child of the node.
pub fn visit_items<F>(&self, f: F)
where F: for<'a> FnMut(item::Ref<'s, 'a>) {
struct ItemFnVisitor<F> {
f: F,
}
impl<F> Visitor for ItemFnVisitor<F> {}
impl<'a, 's: 'a, F> ItemVisitor<'s, 'a> for ItemFnVisitor<F>
where F: FnMut(item::Ref<'s, 'a>)
{
fn visit_item(&mut self, item: item::Ref<'s, 'a>) -> bool {
(self.f)(item);
false
}
}
self.variant.visit_item(&mut ItemFnVisitor { f });
}
}
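A hedged usage sketch of `visit_items` (the closure body is illustrative; the code above only guarantees that each direct child item is passed to the closure and that recursion stops there, since `visit_item` returns `false`):

    // Hypothetical: count the direct children of a node.
    let mut child_count = 0;
    tree.visit_items(|_item| child_count += 1);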
// =================
// === Traversal ===
// =================

impl<'s> Tree<'s> {
/// Return an iterator over the operands of the given left-associative operator, in reverse
/// order.
pub fn left_assoc_rev<'t, 'o>(&'t self, operator: &'o str) -> LeftAssocRev<'o, 't, 's> {
let tree = Some(self);
LeftAssocRev { operator, tree }
}
}

/// Iterator over the operands of a particular left-associative operator, in reverse order.
#[derive(Debug)]
pub struct LeftAssocRev<'o, 't, 's> {
operator: &'o str,
tree: Option<&'t Tree<'s>>,
}

impl<'o, 't, 's> Iterator for LeftAssocRev<'o, 't, 's> {
type Item = &'t Tree<'s>;
fn next(&mut self) -> Option<Self::Item> {
if let box Variant::OprApp(OprApp { lhs, opr: Ok(opr), rhs }) = &self.tree?.variant
&& opr.code == self.operator {
self.tree = lhs.into();
rhs.into()
} else {
self.tree.take()
}
}
}
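How the traversal behaves, as a hedged sketch (the operator and the expression are illustrative; the yield order follows directly from the `next` implementation above):

    // Hypothetical: for a left-associative chain `a , b , c`, i.e. `(a , b) , c`,
    // the iterator peels operands right-to-left: c, then b, then a.
    let operands: Vec<_> = tree.left_assoc_rev(",").collect();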
@ -284,7 +284,7 @@ where I: vec_indexed_by::Index
}

/// Get the tail reference.
pub fn tail(&mut self) -> &[T]
pub fn tail(&self) -> &[T]
where I: From<u8> {
&self.elems[I::from(1_u8)..]
}
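The only change in this last hunk is the receiver: a shared borrow suffices for a read-only tail slice, so callers no longer need a mutable binding. A standalone hedged illustration of the same relaxation on a plain slice:

    // Taking the tail needs only a shared reference.
    fn tail<T>(v: &[T]) -> &[T] {
        &v[1..]
    }
    assert_eq!(tail(&[1, 2, 3]), &[2, 3]);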