Merge branch 'nix-flake' of github.com:rtfeldman/roc into nix-flake

This commit is contained in:
Anton-4 2022-03-16 17:34:47 +01:00
commit 55761d950e
No known key found for this signature in database
GPG Key ID: C954D6E0F9C0ABFD
122 changed files with 6352 additions and 1367 deletions

49
Cargo.lock generated
View File

@ -564,6 +564,16 @@ dependencies = [
"serde_yaml",
]
[[package]]
name = "console_error_panic_hook"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc"
dependencies = [
"cfg-if 1.0.0",
"wasm-bindgen",
]
[[package]]
name = "const_format"
version = "0.2.22"
@ -2716,6 +2726,33 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
[[package]]
name = "peg"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af728fe826811af3b38c37e93de6d104485953ea373d656eebae53d6987fcd2c"
dependencies = [
"peg-macros",
"peg-runtime",
]
[[package]]
name = "peg-macros"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4536be147b770b824895cbad934fccce8e49f14b4c4946eaa46a6e4a12fcdc16"
dependencies = [
"peg-runtime",
"proc-macro2",
"quote",
]
[[package]]
name = "peg-runtime"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9b0efd3ba03c3a409d44d60425f279ec442bcf0b9e63ff4e410da31c8b0f69f"
[[package]]
name = "percent-encoding"
version = "2.1.0"
@ -3335,6 +3372,7 @@ name = "roc_builtins"
version = "0.1.0"
dependencies = [
"dunce",
"lazy_static",
"roc_collections",
"roc_module",
"roc_region",
@ -3444,6 +3482,7 @@ version = "0.1.0"
dependencies = [
"bumpalo",
"indoc",
"peg",
"pretty_assertions",
"pulldown-cmark",
"roc_ast",
@ -3451,6 +3490,7 @@ dependencies = [
"roc_can",
"roc_code_markup",
"roc_collections",
"roc_highlight",
"roc_load",
"roc_module",
"roc_parse",
@ -3596,6 +3636,14 @@ dependencies = [
"roc_target",
]
[[package]]
name = "roc_highlight"
version = "0.1.0"
dependencies = [
"peg",
"roc_code_markup",
]
[[package]]
name = "roc_ident"
version = "0.1.0"
@ -3772,6 +3820,7 @@ name = "roc_repl_wasm"
version = "0.1.0"
dependencies = [
"bumpalo",
"console_error_panic_hook",
"futures",
"js-sys",
"roc_builtins",

View File

@ -33,6 +33,7 @@ members = [
"ast",
"cli",
"code_markup",
"highlight",
"error_macros",
"reporting",
"repl_cli",

View File

@ -50,7 +50,7 @@ install-zig-llvm-valgrind-clippy-rustfmt:
copy-dirs:
FROM +install-zig-llvm-valgrind-clippy-rustfmt
COPY --dir cli cli_utils compiler docs editor ast code_markup error_macros utils test_utils reporting repl_cli repl_eval repl_test repl_wasm roc_std vendor examples linker Cargo.toml Cargo.lock version.txt ./
COPY --dir cli cli_utils compiler docs editor ast code_markup error_macros highlight utils test_utils reporting repl_cli repl_eval repl_test repl_wasm roc_std vendor examples linker Cargo.toml Cargo.lock version.txt ./
test-zig:
FROM +install-zig-llvm-valgrind-clippy-rustfmt
@ -70,7 +70,7 @@ check-rustfmt:
check-typos:
RUN cargo install typos-cli --version 1.0.11 # version set to prevent confusion if the version is updated automatically
COPY --dir .github ci cli cli_utils compiler docs editor examples ast code_markup utils linker nightly_benches packages roc_std www *.md LEGAL_DETAILS shell.nix version.txt ./
COPY --dir .github ci cli cli_utils compiler docs editor examples ast code_markup highlight utils linker nightly_benches packages roc_std www *.md LEGAL_DETAILS shell.nix version.txt ./
RUN typos
test-rust:

91
FAQ.md
View File

@ -39,6 +39,90 @@ whether the feature should be in the language at all. In the case of this featur
language doesn't have it; that way nobody has to learn (or spend time spreading the word) about the
performance-boosting advice not to use it.
## Why can't functions be compared for equality using the `==` operator?
Function equality has been proven to be undecidable in the general case because of the [halting problem](https://en.wikipedia.org/wiki/Halting_problem).
So while we as humans might be able to look at `\x -> x + 1` and `\x -> 1 + x` and know that they're equivalent,
in the general case it's not possible for a computer to do this reliably.
There are some other potential ways to define function equality, but they all have problems.
One way would be to have two functions be considered equal if their source code is equivalent. (Perhaps disregarding
comments and spaces.) This sounds reasonable, but it means that now revising a function to do
exactly the same thing as before (say, changing `\x -> x + 1` to `\x -> 1 + x`) can cause a bug in a
distant part of the code base. Defining function equality this way means that revising a function's internals
is no longer a safe, local operation - even if it gives all the same outputs for all the same inputs.
Another option would be to define it using "reference equality." This is what JavaScript does, for example.
However, Roc does not use reference equality anywhere else in the language, and it would mean that (for example)
passing `\x -> x + 1` to a function compared to defining `fn = \x -> x + 1` elsewhere and then passing `fn` into
the function might give different answers.
Both of these would make revising code riskier across the entire language, which is very undesirable.
Another option would be to define that function equality always returns `False`. So both of these would evaluate
to `False`:
* `(\x -> x + 1) == (\x -> 1 + x)`
* `(\x -> x + 1) == (\x -> x + 1)`
This makes function equality effectively useless, while still technically allowing it. It has some other downsides:
* Now if you put a function inside a record, using `==` on that record will still type-check, but it will then return `False`. This could lead to bugs if you didn't realize you had accidentally put a function in there - for example, because you were actually storing a different type (e.g. an opaque type) and didn't realize it had a function inside it.
* If you put a function (or a value containing a function) into a `Dict` or `Set`, you'll never be able to get it out again. This is a common problem with [NaN](https://en.wikipedia.org/wiki/NaN), which is also defined not to be equal to itself.
The first of these problems could be addressed by having function equality always return `True` instead of `False` (since that way it would not affect other fields' equality checks in a record), but that design has its own problems:
* Although function equality is still useless, `(\x -> x + 1) == (\x -> x)` returns `True`. Even if it didn't lead to bugs in practice, this would certainly be surprising and confusing to beginners.
* Now if you put several different functions into a `Dict` or `Set`, only one of them will be kept; the others will be discarded or overwritten. This could cause bugs if a value stored a function internally, and then other functions relied on that internal function for correctness.
Each of these designs makes Roc a language that's some combination of more error-prone, more confusing, and more
brittle to change. Disallowing function equality at compile time eliminates all of these drawbacks.
## Why doesn't Roc have a `Maybe` or `Option` or `Optional` type, or `null` or `nil` or `undefined`?
It's common for programming languages to have a [null reference](https://en.wikipedia.org/wiki/Null_pointer)
(e.g. `null` in C, `nil` in Ruby, `None` in Python, or `undefined` in JavaScript).
The inventor of the null reference refers to it as his "[billion dollar mistake](https://en.wikipedia.org/wiki/Null_pointer#History)" because it "has led to innumerable errors, vulnerabilities, and system crashes, which have probably caused a billion dollars of pain and damage in the last forty years."
For this and other reasons, many languages do not include a null reference, but instead have a standard library
data type which can be used in situations where a null reference would otherwise be used. Common names for this
null reference alternative type include `Maybe` (like in Haskell or Elm), `Option` (like in OCaml or Rust),
and `Optional` (like in Java).
By design, Roc does not have one of these. There are several reasons for this.
First, if a function returns a potential error, Roc has the convention to use `Result` with an error type that
has a single tag describing what went wrong. (For example, `List.first : List a -> Result a [ ListWasEmpty ]*`
instead of `List.first : List a -> Maybe a`.) This is not only more self-descriptive, it also composes better with
other operations that can fail; there's no need to have functions like `Result.toMaybe` or `Maybe.toResult`,
because in Roc, the convention is that operations that can fail always use `Result`.
Second, optional record fields can be handled using Roc's Optional Record Field language feature, so using a type like `Maybe` there would be less ergonomic.
To describe something that's neither an optional field nor an operation that can fail, an explicit tag union can be
more descriptive than something like `Maybe`. For example, if a record type has an `artist` field, but the artist
information may not be available, compare these three alternative ways to represent that:
* `artist : Maybe Artist`
* `artist : [ Loading, Loaded Artist ]`
* `artist : [ Unspecified, Specified Artist ]`
All three versions tell us that we might not have access to an `Artist`. However, the `Maybe` version doesn't
tell us why that might be. The `Loading`/`Loaded` version tells us we don't have one *yet*, because we're
still loading it, whereas the `Unspecified`/`Specified` version tells us we don't have one and shouldn't expect
to have one later if we wait, because it wasn't specified.
Naming aside, using explicit tag unions also makes it easier to transition to richer data models. For example,
after using `[ Loading, Loaded Artist ]` for a while, we might realize that there's another possible state: loading
failed due to an error. If we modify this to be `[ Loading, Loaded Artist, Errored LoadingErr ]`, all
of our code for the `Loading` and `Loaded` states will still work.
In contrast, if we'd had `Maybe Artist` and were using helper functions like `Maybe.isNone` (a common argument
for using `Maybe` even when it's less self-descriptive), we'd have to rewrite all the code which used those
helper functions. As such, a subtle downside of these helper functions is that they discourage any change to
the data model that would break their call sites, even if that change would improve the data model overall.
On a historical note, `Maybe` may have been thought of as a substitute for null references—as opposed to something that emerged organically based on specific motivating use cases after `Result` already existed. That said, in languages that do not have an equivalent of Roc's tag unions, it's much less ergonomic to write something like `Result a [ ListWasEmpty ]*`, so that design would not fit those languages as well as it fits Roc.
## Why doesn't Roc have higher-kinded polymorphism or arbitrary-rank types?
_Since this is a FAQ answer, I'm going to assume familiarity with higher-kinded types and higher-rank types instead of including a primer on them._
@ -159,12 +243,7 @@ Roc also has a different standard library from Elm. Some of the differences come
* No `Char`. What most people think of as a "character" is a rendered glyph. However, rendered glyphs are composed of [grapheme clusters](https://stackoverflow.com/a/27331885), which are a variable number of Unicode code points - and there's no upper bound on how many code points there can be in a single cluster. In a world of emoji, I think this makes `Char` error-prone and it's better to have `Str` be the only first-class unit. For convenience when working with unicode code points (e.g. for performance-critical tasks like parsing), the single-quote syntax is sugar for the corresponding `U32` code point - for example, writing `'鹏'` is exactly the same as writing `40527`. Like Rust, you get a compiler error if you put something in single quotes that's not a valid [Unicode scalar value](http://www.unicode.org/glossary/#unicode_scalar_value).
* No `Debug.log` - the editor can do a better job at this, or you can write `expect x != x` to see what `x` is when the expectation fails. Using the editor means your code doesn't change, and using `expect` gives a natural reminder to remove the debugging code before shipping: the build will fail.
* No `Debug.todo` - instead you can write a type annotation with no implementation below it; the type checker will treat it normally, but attempting to use the value will cause a runtime exception. This is a feature I've often wanted in Elm, because I like prototyping APIs by writing out the types only, but then when I want the compiler to type-check them for me, I end up having to add `Debug.todo` in various places.
* No `Maybe`. There are several reasons for this:
* If a function returns a potential error, I prefer `Result` with an error type that uses a no-payload tag to describe what went wrong. (For example, `List.first : List a -> Result a [ ListWasEmpty ]*` instead of `List.first : List a -> Maybe a`.) This is not only more self-descriptive, it also composes better with operations that have multiple ways to fail.
* Optional record fields can be handled using the explicit Optional Record Field language feature.
* To describe something that's neither an operation that can fail nor an optional field, I prefer using a more descriptive tag - e.g. for a nullable JSON decoder, instead of `nullable : Decoder a -> Decoder (Maybe a)`, making a self-documenting API like `nullable : Decoder a -> Decoder [ Null, NonNull a ]`.
* It's surprisingly easy to misuse - especially by overusing it when a different language feature (especially a custom tag union) would lead to nicer code. Joël's legendary [talk about Maybe](https://youtu.be/43eM4kNbb6c) is great, but the fact that a whole talk about such a simple type can be so useful speaks to how easy the type is to misuse. Imagine a 20-minute talk about `Result` - could it be anywhere near as helpful?
* On a historical note, it's conceivable that the creation of `Maybe` predated `Result`, and `Maybe` might have been thought of as a substitute for null pointers—as opposed to something that emerged organically based on specific motivating use cases after `Result` already existed.
* No `Maybe`. See the "Why doesn't Roc have a `Maybe`/`Option`/`Optional` type" FAQ question
## Why aren't Roc functions curried by default?

View File

@ -203,7 +203,7 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
* Zig - https://ziglang.org
This source code can be found in compiler/builtins/bitcode/src/hash.zig and is licensed under the following terms:
This source code can be found in compiler/builtins/bitcode/src/hash.zig, highlight/tests/peg_grammar.rs and highlight/src/highlight_parser.rs and is licensed under the following terms:
The MIT License (Expat)

View File

@ -13,7 +13,7 @@
// use crate::pattern::{bindings_from_patterns, canonicalize_pattern, Pattern};
// use crate::procedure::References;
use roc_collections::all::{default_hasher, ImMap, MutMap, MutSet, SendMap};
use roc_error_macros::todo_opaques;
use roc_error_macros::{todo_abilities, todo_opaques};
use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol;
use roc_parse::ast::{self, TypeHeader};
@ -262,6 +262,7 @@ fn to_pending_def<'a>(
}
Opaque { .. } => todo_opaques!(),
Ability { .. } => todo_abilities!(),
Expect(_) => todo!(),

View File

@ -3,6 +3,7 @@
#![allow(unused_imports)]
// use roc_can::expr::Output;
use roc_collections::all::{MutMap, MutSet};
use roc_error_macros::todo_abilities;
use roc_module::ident::{Ident, Lowercase, TagName};
use roc_module::symbol::Symbol;
use roc_region::all::{Loc, Region};
@ -570,6 +571,7 @@ pub fn to_type2<'a>(
// }
Type2::AsAlias(symbol, vars, alias.actual)
}
Where { .. } => todo_abilities!(),
SpaceBefore(nested, _) | SpaceAfter(nested, _) => {
to_type2(env, scope, references, nested, region)
}

View File

@ -1,12 +1,11 @@
use std::path::Path;
use bumpalo::Bump;
use roc_collections::all::MutMap;
use roc_load::file::LoadedModule;
use roc_target::TargetInfo;
pub fn load_module(src_file: &Path) -> LoadedModule {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let arena = Bump::new();
let loaded = roc_load::file::load_and_typecheck(

View File

@ -4,7 +4,6 @@ use roc_build::{
program,
};
use roc_builtins::bitcode;
use roc_collections::all::MutMap;
use roc_load::file::LoadingProblem;
use roc_mono::ir::OptLevel;
use roc_target::TargetInfo;
@ -61,7 +60,7 @@ pub fn build_file<'a>(
let target_info = TargetInfo::from(target);
// Step 1: compile the app and generate the .o file
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
// Release builds use uniqueness optimizations
let stdlib = arena.alloc(roc_builtins::std::standard_stdlib());
@ -366,7 +365,7 @@ pub fn check_file(
let target_info = TargetInfo::default_x86_64();
// Step 1: compile the app and generate the .o file
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
// Release builds use uniqueness optimizations
let stdlib = arena.alloc(roc_builtins::std::standard_stdlib());

View File

@ -9,8 +9,8 @@ use roc_fmt::module::fmt_module;
use roc_fmt::Buf;
use roc_module::called_via::{BinOp, UnaryOp};
use roc_parse::ast::{
AssignedField, Collection, Expr, Pattern, Spaced, StrLiteral, StrSegment, Tag, TypeAnnotation,
TypeHeader, WhenBranch,
AbilityDemand, AssignedField, Collection, Expr, Has, HasClause, Pattern, Spaced, StrLiteral,
StrSegment, Tag, TypeAnnotation, TypeHeader, WhenBranch,
};
use roc_parse::header::{
AppHeader, ExposedName, HostedHeader, ImportsEntry, InterfaceHeader, ModuleName, PackageEntry,
@ -482,6 +482,18 @@ impl<'a> RemoveSpaces<'a> for Def<'a> {
body_pattern: arena.alloc(body_pattern.remove_spaces(arena)),
body_expr: arena.alloc(body_expr.remove_spaces(arena)),
},
Def::Ability {
header: TypeHeader { name, vars },
loc_has,
demands,
} => Def::Ability {
header: TypeHeader {
name: name.remove_spaces(arena),
vars: vars.remove_spaces(arena),
},
loc_has: loc_has.remove_spaces(arena),
demands: demands.remove_spaces(arena),
},
Def::Expect(a) => Def::Expect(arena.alloc(a.remove_spaces(arena))),
Def::NotYetImplemented(a) => Def::NotYetImplemented(a),
Def::SpaceBefore(a, _) | Def::SpaceAfter(a, _) => a.remove_spaces(arena),
@ -489,6 +501,21 @@ impl<'a> RemoveSpaces<'a> for Def<'a> {
}
}
impl<'a> RemoveSpaces<'a> for Has<'a> {
    // Normalizing a `has` keyword always collapses to the bare `Has::Has`
    // variant, discarding any attached spacing information.
    // NOTE(review): assumes the other `Has` variants only wrap spacing
    // around the keyword — confirm against its definition in roc_parse::ast.
    fn remove_spaces(&self, _arena: &'a Bump) -> Self {
        Has::Has
    }
}
impl<'a> RemoveSpaces<'a> for AbilityDemand<'a> {
    // Recursively strips spacing from both parts of an ability demand:
    // the demanded member's name and its type annotation.
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        AbilityDemand {
            name: self.name.remove_spaces(arena),
            typ: self.typ.remove_spaces(arena),
        }
    }
}
impl<'a> RemoveSpaces<'a> for WhenBranch<'a> {
fn remove_spaces(&self, arena: &'a Bump) -> Self {
WhenBranch {
@ -679,12 +706,26 @@ impl<'a> RemoveSpaces<'a> for TypeAnnotation<'a> {
},
TypeAnnotation::Inferred => TypeAnnotation::Inferred,
TypeAnnotation::Wildcard => TypeAnnotation::Wildcard,
TypeAnnotation::Where(annot, has_clauses) => TypeAnnotation::Where(
arena.alloc(annot.remove_spaces(arena)),
arena.alloc(has_clauses.remove_spaces(arena)),
),
TypeAnnotation::SpaceBefore(a, _) => a.remove_spaces(arena),
TypeAnnotation::SpaceAfter(a, _) => a.remove_spaces(arena),
TypeAnnotation::Malformed(a) => TypeAnnotation::Malformed(a),
}
}
}
impl<'a> RemoveSpaces<'a> for HasClause<'a> {
    // Recursively strips spacing from both sides of a `has` clause:
    // the type variable and the ability it is required to have.
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        HasClause {
            var: self.var.remove_spaces(arena),
            ability: self.ability.remove_spaces(arena),
        }
    }
}
impl<'a> RemoveSpaces<'a> for Tag<'a> {
fn remove_spaces(&self, arena: &'a Bump) -> Self {
match *self {

View File

@ -4,7 +4,7 @@ version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
description = "Our own markup language for Roc code. Used by the editor and (soon) the docs."
description = "Our own markup language for Roc code. Used by the editor and the docs."
[dependencies]
roc_ast = { path = "../ast" }

View File

@ -1,164 +1,167 @@
use roc_ast::lang::core::{ast::ASTNodeId, expr::expr2::ExprId};
use crate::{
slow_pool::{MarkNodeId, SlowPool},
syntax_highlight::HighlightStyle,
};
use crate::{slow_pool::MarkNodeId, syntax_highlight::HighlightStyle};
use super::{
attribute::Attributes,
nodes::MarkupNode,
nodes::{self, make_nested_mn},
};
use super::{attribute::Attributes, nodes, nodes::MarkupNode};
pub fn new_equals_mn(ast_node_id: ASTNodeId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
MarkupNode::Text {
content: nodes::EQUALS.to_owned(),
ast_node_id,
syn_high_style: HighlightStyle::Operator,
attributes: Attributes::default(),
parent_id_opt,
newlines_at_end: 0,
}
pub fn new_equals_mn() -> MarkupNode {
common_text_node(nodes::EQUALS.to_owned(), HighlightStyle::Operator, 0)
}
pub fn new_comma_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
new_comma_mn_ast(ASTNodeId::AExprId(expr_id), parent_id_opt)
pub fn new_comma_mn() -> MarkupNode {
common_text_node(nodes::COMMA.to_owned(), HighlightStyle::Operator, 0)
}
pub fn new_comma_mn_ast(ast_node_id: ASTNodeId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
MarkupNode::Text {
content: nodes::COMMA.to_owned(),
ast_node_id,
syn_high_style: HighlightStyle::Comma,
attributes: Attributes::default(),
parent_id_opt,
newlines_at_end: 0,
}
pub fn new_dot_mn() -> MarkupNode {
common_text_node(nodes::DOT.to_owned(), HighlightStyle::Operator, 0)
}
pub fn new_blank_mn(ast_node_id: ASTNodeId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
pub fn new_blank_mn() -> MarkupNode {
MarkupNode::Blank {
ast_node_id,
attributes: Attributes::default(),
parent_id_opt,
parent_id_opt: None,
newlines_at_end: 0,
}
}
pub fn new_blank_mn_w_nls(
ast_node_id: ASTNodeId,
parent_id_opt: Option<MarkNodeId>,
nr_of_newlines: usize,
) -> MarkupNode {
pub fn new_blank_mn_w_nls(nr_of_newlines: usize) -> MarkupNode {
MarkupNode::Blank {
ast_node_id,
attributes: Attributes::default(),
parent_id_opt,
parent_id_opt: None,
newlines_at_end: nr_of_newlines,
}
}
pub fn new_colon_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
new_operator_mn(nodes::COLON.to_owned(), expr_id, parent_id_opt)
pub fn new_colon_mn() -> MarkupNode {
new_operator_mn(nodes::COLON.to_owned())
}
pub fn new_operator_mn(
content: String,
expr_id: ExprId,
parent_id_opt: Option<MarkNodeId>,
) -> MarkupNode {
MarkupNode::Text {
content,
ast_node_id: ASTNodeId::AExprId(expr_id),
syn_high_style: HighlightStyle::Operator,
attributes: Attributes::default(),
parent_id_opt,
newlines_at_end: 0,
}
pub fn new_operator_mn(content: String) -> MarkupNode {
common_text_node(content, HighlightStyle::Operator, 0)
}
pub fn new_left_accolade_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
MarkupNode::Text {
content: nodes::LEFT_ACCOLADE.to_owned(),
ast_node_id: ASTNodeId::AExprId(expr_id),
syn_high_style: HighlightStyle::Bracket,
attributes: Attributes::default(),
parent_id_opt,
newlines_at_end: 0,
}
pub fn new_left_accolade_mn() -> MarkupNode {
common_text_node(nodes::LEFT_ACCOLADE.to_owned(), HighlightStyle::Bracket, 0)
}
pub fn new_right_accolade_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
MarkupNode::Text {
content: nodes::RIGHT_ACCOLADE.to_owned(),
ast_node_id: ASTNodeId::AExprId(expr_id),
syn_high_style: HighlightStyle::Bracket,
attributes: Attributes::default(),
parent_id_opt,
newlines_at_end: 0,
}
pub fn new_right_accolade_mn() -> MarkupNode {
common_text_node(nodes::RIGHT_ACCOLADE.to_owned(), HighlightStyle::Bracket, 0)
}
pub fn new_left_square_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
MarkupNode::Text {
content: nodes::LEFT_SQUARE_BR.to_owned(),
ast_node_id: ASTNodeId::AExprId(expr_id),
syn_high_style: HighlightStyle::Bracket,
attributes: Attributes::default(),
parent_id_opt,
newlines_at_end: 0,
}
pub fn new_left_square_mn() -> MarkupNode {
common_text_node(nodes::LEFT_SQUARE_BR.to_owned(), HighlightStyle::Bracket, 0)
}
pub fn new_right_square_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
MarkupNode::Text {
content: nodes::RIGHT_SQUARE_BR.to_owned(),
ast_node_id: ASTNodeId::AExprId(expr_id),
syn_high_style: HighlightStyle::Bracket,
attributes: Attributes::default(),
parent_id_opt,
newlines_at_end: 0,
}
pub fn new_right_square_mn() -> MarkupNode {
common_text_node(
nodes::RIGHT_SQUARE_BR.to_owned(),
HighlightStyle::Bracket,
0,
)
}
pub fn new_func_name_mn(content: String, expr_id: ExprId) -> MarkupNode {
MarkupNode::Text {
content,
ast_node_id: ASTNodeId::AExprId(expr_id),
syn_high_style: HighlightStyle::FunctionName,
attributes: Attributes::default(),
parent_id_opt: None,
newlines_at_end: 0,
}
pub fn new_func_name_mn(content: String) -> MarkupNode {
common_text_node(content, HighlightStyle::FunctionName, 0)
}
pub fn new_arg_name_mn(content: String, expr_id: ExprId) -> MarkupNode {
MarkupNode::Text {
content,
ast_node_id: ASTNodeId::AExprId(expr_id),
syn_high_style: HighlightStyle::FunctionArgName,
attributes: Attributes::default(),
parent_id_opt: None,
newlines_at_end: 0,
}
pub fn new_arg_name_mn(content: String) -> MarkupNode {
common_text_node(content, HighlightStyle::FunctionArgName, 0)
}
pub fn new_arrow_mn(ast_node_id: ASTNodeId, newlines_at_end: usize) -> MarkupNode {
MarkupNode::Text {
content: nodes::ARROW.to_owned(),
ast_node_id,
syn_high_style: HighlightStyle::Operator,
attributes: Attributes::default(),
parent_id_opt: None,
pub fn new_arrow_mn(newlines_at_end: usize) -> MarkupNode {
common_text_node(
nodes::ARROW.to_owned(),
HighlightStyle::Operator,
newlines_at_end,
}
)
}
pub fn new_comments_mn(
comments: String,
ast_node_id: ASTNodeId,
pub fn new_comments_mn(comment: String, newlines_at_end: usize) -> MarkupNode {
common_text_node(comment, HighlightStyle::Comment, newlines_at_end)
}
fn common_text_node(
content: String,
highlight_style: HighlightStyle,
newlines_at_end: usize,
) -> MarkupNode {
MarkupNode::Text {
content: comments,
ast_node_id,
syn_high_style: HighlightStyle::Comment,
content,
syn_high_style: highlight_style,
attributes: Attributes::default(),
parent_id_opt: None,
newlines_at_end,
}
}
pub const NEW_LINES_AFTER_DEF: usize = 2;
/// Builds the nested markup node for an assignment (`name = expr`):
/// value name, equals sign, then the expression, in that order.
/// Trailing newlines use `NEW_LINES_AFTER_DEF` so definitions stay separated.
pub fn new_assign_mn(
    val_name_mn_id: MarkNodeId,
    equals_mn_id: MarkNodeId,
    expr_mark_node_id: MarkNodeId,
) -> MarkupNode {
    let children = vec![val_name_mn_id, equals_mn_id, expr_mark_node_id];
    make_nested_mn(children, NEW_LINES_AFTER_DEF)
}
/// Returns a mark node id representing a module name.
///
/// A single segment is returned as-is, with no extra nesting; multiple
/// segments are wrapped in one nested node that is added to the pool.
pub fn new_module_name_mn_id(mn_ids: Vec<MarkNodeId>, mark_node_pool: &mut SlowPool) -> MarkNodeId {
    if mn_ids.len() == 1 {
        // Direct indexing can't panic here (length checked above) and is
        // the idiomatic form — clippy's `get_first` flags `get(0).unwrap()`.
        mn_ids[0]
    } else {
        let nested_node = make_nested_mn(mn_ids, 0);
        mark_node_pool.add(nested_node)
    }
}
/// Builds the nested markup node for a qualified variable
/// (`Module.ident`): module name, dot, then identifier, with no
/// trailing newlines.
pub fn new_module_var_mn(
    module_name_id: MarkNodeId,
    dot_id: MarkNodeId,
    ident_id: MarkNodeId,
) -> MarkupNode {
    let children = vec![module_name_id, dot_id, ident_id];
    make_nested_mn(children, 0)
}
/// Markup node for the `if ` keyword (keyword highlight style).
pub fn if_mn() -> MarkupNode {
    keyword_mn("if ")
}
/// Markup node for the ` then ` keyword (keyword highlight style).
pub fn then_mn() -> MarkupNode {
    keyword_mn(" then ")
}
/// Markup node for the ` else ` keyword (keyword highlight style).
pub fn else_mn() -> MarkupNode {
    keyword_mn(" else ")
}
/// Builds a text markup node for a keyword literal, rendered with the
/// keyword highlight style and no trailing newlines.
fn keyword_mn(keyword: &str) -> MarkupNode {
    let content = String::from(keyword);
    common_text_node(content, HighlightStyle::Keyword, 0)
}
/// Builds the nested markup node for a complete if-expression:
/// `if <cond> then <then-branch> else <else-branch>`,
/// followed by a single newline.
pub fn new_if_expr_mn(
    if_mn_id: MarkNodeId,
    cond_expr_mn_id: MarkNodeId,
    then_mn_id: MarkNodeId,
    then_expr_mn_id: MarkNodeId,
    else_mn_id: MarkNodeId,
    else_expr_mn_id: MarkNodeId,
) -> MarkupNode {
    // Children are ordered exactly as they appear in source text.
    let children = vec![
        if_mn_id,
        cond_expr_mn_id,
        then_mn_id,
        then_expr_mn_id,
        else_mn_id,
        else_expr_mn_id,
    ];
    make_nested_mn(children, 1)
}

View File

@ -7,6 +7,7 @@ use roc_module::symbol::Interns;
use crate::{
markup::{
convert::{from_def2::def2_to_markup, from_header::header_to_markup},
mark_id_ast_id_map::MarkIdAstIdMap,
nodes::set_parent_for_all,
},
slow_pool::{MarkNodeId, SlowPool},
@ -17,8 +18,13 @@ pub fn ast_to_mark_nodes<'a>(
ast: &AST,
mark_node_pool: &mut SlowPool,
interns: &Interns,
) -> ASTResult<Vec<MarkNodeId>> {
let mut all_mark_node_ids = vec![header_to_markup(&ast.header, mark_node_pool)];
) -> ASTResult<(Vec<MarkNodeId>, MarkIdAstIdMap)> {
let mut mark_id_ast_id_map = MarkIdAstIdMap::default();
let mut all_mark_node_ids = vec![header_to_markup(
&ast.header,
mark_node_pool,
&mut mark_id_ast_id_map,
)];
for &def_id in ast.def_ids.iter() {
// for debugging
@ -26,12 +32,19 @@ pub fn ast_to_mark_nodes<'a>(
let def2 = env.pool.get(def_id);
let expr2_markup_id = def2_to_markup(env, def2, def_id, mark_node_pool, interns)?;
let expr2_markup_id = def2_to_markup(
env,
def2,
def_id,
mark_node_pool,
&mut mark_id_ast_id_map,
interns,
)?;
set_parent_for_all(expr2_markup_id, mark_node_pool);
all_mark_node_ids.push(expr2_markup_id);
}
Ok(all_mark_node_ids)
Ok((all_mark_node_ids, mark_id_ast_id_map))
}

View File

@ -1,7 +1,9 @@
use crate::{
markup::{
common_nodes::new_blank_mn_w_nls,
top_level_def::{tld_mark_node, tld_w_comments_mark_node},
mark_id_ast_id_map::MarkIdAstIdMap,
nodes::MarkupNode,
top_level_def::{assignment_mark_node, tld_w_comments_mark_node},
},
slow_pool::{MarkNodeId, SlowPool},
};
@ -20,11 +22,25 @@ use roc_ast::{
};
use roc_module::symbol::Interns;
/// Adds `mark_node` to the pool and records which AST node it was
/// generated from in `mark_id_ast_id_map`.
///
/// Returns the pool id of the newly added markup node.
pub fn add_node(
    mark_node: MarkupNode,
    ast_node_id: ASTNodeId,
    mark_node_pool: &mut SlowPool,
    mark_id_ast_id_map: &mut MarkIdAstIdMap,
) -> MarkNodeId {
    let new_id = mark_node_pool.add(mark_node);
    mark_id_ast_id_map.insert(new_id, ast_node_id);
    new_id
}
pub fn def2_to_markup<'a>(
env: &mut Env<'a>,
def2: &Def2,
def2_node_id: DefId,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
interns: &Interns,
) -> ASTResult<MarkNodeId> {
let ast_node_id = ASTNodeId::ADefId(def2_node_id);
@ -39,45 +55,81 @@ pub fn def2_to_markup<'a>(
env.pool.get(*expr_id),
*expr_id,
mark_node_pool,
mark_id_ast_id_map,
interns,
0,
)?;
let tld_mn =
tld_mark_node(*identifier_id, expr_mn_id, ast_node_id, mark_node_pool, env)?;
let tld_mn = assignment_mark_node(
*identifier_id,
expr_mn_id,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
env,
)?;
mark_node_pool.add(tld_mn)
add_node(tld_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map)
}
Def2::Blank => mark_node_pool.add(new_blank_mn_w_nls(ast_node_id, None, 2)),
Def2::Blank => add_node(
new_blank_mn_w_nls(2),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
),
Def2::CommentsBefore { comments, def_id } => {
let inner_def = env.pool.get(*def_id);
let inner_def_mark_node_id =
def2_to_markup(env, inner_def, *def_id, mark_node_pool, interns)?;
let inner_def_mark_node_id = def2_to_markup(
env,
inner_def,
*def_id,
mark_node_pool,
mark_id_ast_id_map,
interns,
)?;
let full_mark_node = tld_w_comments_mark_node(
comments.clone(),
inner_def_mark_node_id,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
true,
)?;
mark_node_pool.add(full_mark_node)
add_node(
full_mark_node,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
)
}
Def2::CommentsAfter { def_id, comments } => {
let inner_def = env.pool.get(*def_id);
let inner_def_mark_node_id =
def2_to_markup(env, inner_def, *def_id, mark_node_pool, interns)?;
let inner_def_mark_node_id = def2_to_markup(
env,
inner_def,
*def_id,
mark_node_pool,
mark_id_ast_id_map,
interns,
)?;
let full_mark_node = tld_w_comments_mark_node(
comments.clone(),
inner_def_mark_node_id,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
false,
)?;
mark_node_pool.add(full_mark_node)
add_node(
full_mark_node,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
)
}
};

View File

@ -6,6 +6,7 @@ use crate::{
new_left_accolade_mn, new_left_square_mn, new_operator_mn, new_right_accolade_mn,
new_right_square_mn,
},
mark_id_ast_id_map::MarkIdAstIdMap,
nodes::{
get_string, join_mark_nodes_commas, join_mark_nodes_spaces, new_markup_node, MarkupNode,
},
@ -32,12 +33,15 @@ use roc_ast::{
};
use roc_module::{module_err::ModuleResult, symbol::Interns};
use super::from_def2::add_node;
// make Markup Nodes: generate String representation, assign Highlighting Style
pub fn expr2_to_markup<'a>(
env: &Env<'a>,
expr2: &Expr2,
expr2_node_id: ExprId,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
interns: &Interns,
indent_level: usize,
) -> ASTResult<MarkNodeId> {
@ -58,30 +62,51 @@ pub fn expr2_to_markup<'a>(
ast_node_id,
HighlightStyle::Number,
mark_node_pool,
mark_id_ast_id_map,
indent_level,
)
}
Expr2::Str(text) => {
let content = format!("\"{}\"", text.as_str(env.pool));
string_mark_node(&content, indent_level, ast_node_id, mark_node_pool)
string_mark_node(
&content,
indent_level,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
)
}
Expr2::SmallStr(array_str) => {
let content = format!("\"{}\"", array_str.as_str());
string_mark_node(&content, indent_level, ast_node_id, mark_node_pool)
string_mark_node(
&content,
indent_level,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
)
}
Expr2::GlobalTag { name, .. } => new_markup_node(
with_indent(indent_level, &get_string(env, name)),
ast_node_id,
HighlightStyle::Type,
mark_node_pool,
mark_id_ast_id_map,
indent_level,
),
Expr2::Call { args, expr_id, .. } => {
let expr = env.pool.get(*expr_id);
let fun_call_mark_id =
expr2_to_markup(env, expr, *expr_id, mark_node_pool, interns, indent_level)?;
let fun_call_mark_id = expr2_to_markup(
env,
expr,
*expr_id,
mark_node_pool,
mark_id_ast_id_map,
interns,
indent_level,
)?;
let arg_expr_ids: Vec<ExprId> =
args.iter(env.pool).map(|(_, arg_id)| *arg_id).collect();
@ -91,24 +116,31 @@ pub fn expr2_to_markup<'a>(
.map(|arg_id| {
let arg_expr = env.pool.get(*arg_id);
expr2_to_markup(env, arg_expr, *arg_id, mark_node_pool, interns, 0)
expr2_to_markup(
env,
arg_expr,
*arg_id,
mark_node_pool,
mark_id_ast_id_map,
interns,
0,
)
})
.collect::<ASTResult<Vec<MarkNodeId>>>()?;
let mut args_with_sapces =
join_mark_nodes_spaces(arg_call_mark_ids, true, ast_node_id, mark_node_pool);
join_mark_nodes_spaces(arg_call_mark_ids, true, mark_node_pool);
let mut children_ids = vec![fun_call_mark_id];
children_ids.append(&mut args_with_sapces);
let call_node = MarkupNode::Nested {
ast_node_id,
children_ids,
parent_id_opt: None,
newlines_at_end: 0,
};
mark_node_pool.add(call_node)
add_node(call_node, ast_node_id, mark_node_pool, mark_id_ast_id_map)
}
Expr2::Var(symbol) => {
let text = symbol.fully_qualified(interns, env.home);
@ -118,12 +150,17 @@ pub fn expr2_to_markup<'a>(
ast_node_id,
HighlightStyle::Value,
mark_node_pool,
mark_id_ast_id_map,
indent_level,
)
}
Expr2::List { elems, .. } => {
let mut children_ids =
vec![mark_node_pool.add(new_left_square_mn(expr2_node_id, None))];
let mut children_ids = vec![add_node(
new_left_square_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
)];
let indexed_node_ids: Vec<(usize, ExprId)> =
elems.iter(env.pool).copied().enumerate().collect();
@ -136,43 +173,66 @@ pub fn expr2_to_markup<'a>(
sub_expr2,
*node_id,
mark_node_pool,
mark_id_ast_id_map,
interns,
indent_level,
)?);
if idx + 1 < elems.len() {
children_ids.push(mark_node_pool.add(new_comma_mn(expr2_node_id, None)));
children_ids.push(add_node(
new_comma_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
));
}
}
children_ids.push(mark_node_pool.add(new_right_square_mn(expr2_node_id, None)));
let list_node = MarkupNode::Nested {
children_ids.push(add_node(
new_right_square_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
));
let list_mn = MarkupNode::Nested {
children_ids,
parent_id_opt: None,
newlines_at_end: 0,
};
mark_node_pool.add(list_node)
add_node(list_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map)
}
Expr2::EmptyRecord => {
let children_ids = vec![
mark_node_pool.add(new_left_accolade_mn(expr2_node_id, None)),
mark_node_pool.add(new_right_accolade_mn(expr2_node_id, None)),
add_node(
new_left_accolade_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
),
add_node(
new_right_accolade_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
),
];
let record_node = MarkupNode::Nested {
ast_node_id,
let record_mn = MarkupNode::Nested {
children_ids,
parent_id_opt: None,
newlines_at_end: 0,
};
mark_node_pool.add(record_node)
add_node(record_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map)
}
Expr2::Record { fields, .. } => {
let mut children_ids =
vec![mark_node_pool.add(new_left_accolade_mn(expr2_node_id, None))];
let mut children_ids = vec![add_node(
new_left_accolade_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
)];
for (idx, field_node_id) in fields.iter_node_ids().enumerate() {
let record_field = env.pool.get(field_node_id);
@ -184,6 +244,7 @@ pub fn expr2_to_markup<'a>(
ast_node_id,
HighlightStyle::RecordField,
mark_node_pool,
mark_id_ast_id_map,
indent_level,
));
@ -191,7 +252,12 @@ pub fn expr2_to_markup<'a>(
RecordField::InvalidLabelOnly(_, _) => (),
RecordField::LabelOnly(_, _, _) => (),
RecordField::LabeledValue(_, _, sub_expr2_node_id) => {
children_ids.push(mark_node_pool.add(new_colon_mn(expr2_node_id, None)));
children_ids.push(add_node(
new_colon_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
));
let sub_expr2 = env.pool.get(*sub_expr2_node_id);
children_ids.push(expr2_to_markup(
@ -199,6 +265,7 @@ pub fn expr2_to_markup<'a>(
sub_expr2,
*sub_expr2_node_id,
mark_node_pool,
mark_id_ast_id_map,
interns,
indent_level,
)?);
@ -206,22 +273,36 @@ pub fn expr2_to_markup<'a>(
}
if idx + 1 < fields.len() {
children_ids.push(mark_node_pool.add(new_comma_mn(expr2_node_id, None)));
children_ids.push(add_node(
new_comma_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
));
}
}
children_ids.push(mark_node_pool.add(new_right_accolade_mn(expr2_node_id, None)));
let record_node = MarkupNode::Nested {
children_ids.push(add_node(
new_right_accolade_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
));
let record_mn = MarkupNode::Nested {
children_ids,
parent_id_opt: None,
newlines_at_end: 0,
};
mark_node_pool.add(record_node)
add_node(record_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map)
}
Expr2::Blank => mark_node_pool.add(new_blank_mn(ast_node_id, None)),
Expr2::Blank => add_node(
new_blank_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
),
Expr2::LetValue {
def_id,
body_id: _,
@ -235,16 +316,21 @@ pub fn expr2_to_markup<'a>(
let val_name_mn = MarkupNode::Text {
content: val_name,
ast_node_id,
syn_high_style: HighlightStyle::Value,
attributes: Attributes::default(),
parent_id_opt: None,
newlines_at_end: 0,
};
let val_name_mn_id = mark_node_pool.add(val_name_mn);
let val_name_mn_id =
add_node(val_name_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map);
let equals_mn_id = mark_node_pool.add(new_equals_mn(ast_node_id, None));
let equals_mn_id = add_node(
new_equals_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let value_def = env.pool.get(*def_id);
@ -259,6 +345,7 @@ pub fn expr2_to_markup<'a>(
env.pool.get(*expr_id),
*expr_id,
mark_node_pool,
mark_id_ast_id_map,
interns,
indent_level,
)?;
@ -266,14 +353,13 @@ pub fn expr2_to_markup<'a>(
let body_mn = mark_node_pool.get_mut(body_mn_id);
body_mn.add_newline_at_end();
let full_let_node = MarkupNode::Nested {
ast_node_id,
let full_let_mn = MarkupNode::Nested {
children_ids: vec![val_name_mn_id, equals_mn_id, body_mn_id],
parent_id_opt: None,
newlines_at_end: 1,
};
mark_node_pool.add(full_let_node)
add_node(full_let_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map)
}
other => {
unimplemented!(
@ -291,8 +377,13 @@ pub fn expr2_to_markup<'a>(
body_id,
extra: _,
} => {
let backslash_mn = new_operator_mn("\\".to_string(), expr2_node_id, None);
let backslash_mn_id = mark_node_pool.add(backslash_mn);
let backslash_mn = new_operator_mn("\\".to_string());
let backslash_mn_id = add_node(
backslash_mn,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let arg_names: Vec<&str> = args
.iter(env.pool)
@ -320,31 +411,31 @@ pub fn expr2_to_markup<'a>(
let arg_mark_nodes = arg_names
.iter()
.map(|arg_name| new_arg_name_mn(arg_name.to_string(), expr2_node_id))
.map(|arg_name| new_arg_name_mn(arg_name.to_string()))
.collect_vec();
let args_with_commas: Vec<MarkupNode> =
join_mark_nodes_commas(arg_mark_nodes, ASTNodeId::AExprId(expr2_node_id));
let args_with_commas: Vec<MarkupNode> = join_mark_nodes_commas(arg_mark_nodes);
let mut args_with_commas_ids: Vec<MarkNodeId> = args_with_commas
.into_iter()
.map(|mark_node| mark_node_pool.add(mark_node))
.map(|mark_node| {
add_node(mark_node, ast_node_id, mark_node_pool, mark_id_ast_id_map)
})
.collect();
let arrow_mn = new_arrow_mn(ASTNodeId::AExprId(expr2_node_id), 1);
let arrow_mn_id = mark_node_pool.add(arrow_mn);
let arrow_mn = new_arrow_mn(1);
let arrow_mn_id = add_node(arrow_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map);
let mut children_ids = vec![backslash_mn_id];
children_ids.append(&mut args_with_commas_ids);
children_ids.push(arrow_mn_id);
let args_mn = MarkupNode::Nested {
ast_node_id: ASTNodeId::AExprId(expr2_node_id),
children_ids,
parent_id_opt: None,
newlines_at_end: 0,
};
let args_mn_id = mark_node_pool.add(args_mn);
let args_mn_id = add_node(args_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map);
let body_expr = env.pool.get(*body_id);
let body_mn_id = expr2_to_markup(
@ -352,24 +443,25 @@ pub fn expr2_to_markup<'a>(
body_expr,
*body_id,
mark_node_pool,
mark_id_ast_id_map,
interns,
indent_level + 1,
)?;
let function_node = MarkupNode::Nested {
ast_node_id,
let function_mn = MarkupNode::Nested {
children_ids: vec![args_mn_id, body_mn_id],
parent_id_opt: None,
newlines_at_end: 0,
};
mark_node_pool.add(function_node)
add_node(function_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map)
}
Expr2::RuntimeError() => new_markup_node(
"RunTimeError".to_string(),
ast_node_id,
HighlightStyle::Blank,
mark_node_pool,
mark_id_ast_id_map,
indent_level,
),
rest => todo!("implement expr2_to_markup for {:?}", rest),
@ -392,12 +484,14 @@ fn string_mark_node(
indent_level: usize,
ast_node_id: ASTNodeId,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
) -> MarkNodeId {
new_markup_node(
with_indent(indent_level, content),
ast_node_id,
HighlightStyle::String,
mark_node_pool,
mark_id_ast_id_map,
indent_level,
)
}

View File

@ -1,4 +1,4 @@
use roc_ast::lang::core::{ast::ASTNodeId, expr::expr2::ExprId, header::AppHeader};
use roc_ast::lang::core::{ast::ASTNodeId, header::AppHeader};
use crate::{
markup::{
@ -7,54 +7,82 @@ use crate::{
new_comma_mn, new_left_accolade_mn, new_left_square_mn, new_right_accolade_mn,
new_right_square_mn,
},
mark_id_ast_id_map::MarkIdAstIdMap,
nodes::{set_parent_for_all, MarkupNode},
},
slow_pool::{MarkNodeId, SlowPool},
syntax_highlight::HighlightStyle,
};
pub fn header_to_markup(app_header: &AppHeader, mark_node_pool: &mut SlowPool) -> MarkNodeId {
use super::from_def2::add_node;
pub fn header_to_markup(
app_header: &AppHeader,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
) -> MarkNodeId {
let expr_id = app_header.ast_node_id;
let ast_node_id = ASTNodeId::AExprId(expr_id);
let app_node_id = header_mn("app ".to_owned(), expr_id, mark_node_pool);
let app_node_id = header_mn(
"app ".to_owned(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let app_name_node_id = header_val_mn(
app_header.app_name.clone(),
expr_id,
ast_node_id,
HighlightStyle::String,
mark_node_pool,
mark_id_ast_id_map,
);
let full_app_node = MarkupNode::Nested {
ast_node_id,
children_ids: vec![app_node_id, app_name_node_id],
parent_id_opt: None,
newlines_at_end: 1,
};
let packages_node_id = header_mn(" packages ".to_owned(), expr_id, mark_node_pool);
let packages_node_id = header_mn(
" packages ".to_owned(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let pack_left_acc_node_id = mark_node_pool.add(new_left_accolade_mn(expr_id, None));
let pack_left_acc_node_id = add_node(
new_left_accolade_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let pack_base_node_id = header_val_mn(
"base: ".to_owned(),
expr_id,
ast_node_id,
HighlightStyle::RecordField,
mark_node_pool,
mark_id_ast_id_map,
);
let pack_val_node_id = header_val_mn(
app_header.packages_base.clone(),
expr_id,
ast_node_id,
HighlightStyle::String,
mark_node_pool,
mark_id_ast_id_map,
);
let pack_right_acc_node_id = mark_node_pool.add(new_right_accolade_mn(expr_id, None));
let pack_right_acc_node_id = add_node(
new_right_accolade_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let full_packages_node = MarkupNode::Nested {
ast_node_id,
children_ids: vec![
packages_node_id,
pack_left_acc_node_id,
@ -66,18 +94,34 @@ pub fn header_to_markup(app_header: &AppHeader, mark_node_pool: &mut SlowPool) -
newlines_at_end: 1,
};
let imports_node_id = header_mn(" imports ".to_owned(), expr_id, mark_node_pool);
let imports_node_id = header_mn(
" imports ".to_owned(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let imports_left_square_node_id = mark_node_pool.add(new_left_square_mn(expr_id, None));
let imports_left_square_node_id = add_node(
new_left_square_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let mut import_child_ids: Vec<MarkNodeId> = add_header_mn_list(
&app_header.imports,
expr_id,
ast_node_id,
HighlightStyle::Import,
mark_node_pool,
mark_id_ast_id_map,
);
let imports_right_square_node_id = mark_node_pool.add(new_right_square_mn(expr_id, None));
let imports_right_square_node_id = add_node(
new_right_square_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let mut full_import_children = vec![imports_node_id, imports_left_square_node_id];
@ -85,26 +129,46 @@ pub fn header_to_markup(app_header: &AppHeader, mark_node_pool: &mut SlowPool) -
full_import_children.push(imports_right_square_node_id);
let full_import_node = MarkupNode::Nested {
ast_node_id,
children_ids: full_import_children,
parent_id_opt: None,
newlines_at_end: 1,
};
let provides_node_id = header_mn(" provides ".to_owned(), expr_id, mark_node_pool);
let provides_node_id = header_mn(
" provides ".to_owned(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let provides_left_square_node_id = mark_node_pool.add(new_left_square_mn(expr_id, None));
let provides_left_square_node_id = add_node(
new_left_square_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let mut provides_val_node_ids: Vec<MarkNodeId> = add_header_mn_list(
&app_header.provides,
expr_id,
ast_node_id,
HighlightStyle::Provides,
mark_node_pool,
mark_id_ast_id_map,
);
let provides_right_square_node_id = mark_node_pool.add(new_right_square_mn(expr_id, None));
let provides_right_square_node_id = add_node(
new_right_square_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let provides_end_node_id = header_mn(" to base".to_owned(), expr_id, mark_node_pool);
let provides_end_node_id = header_mn(
" to base".to_owned(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let mut full_provides_children = vec![provides_node_id, provides_left_square_node_id];
@ -113,19 +177,37 @@ pub fn header_to_markup(app_header: &AppHeader, mark_node_pool: &mut SlowPool) -
full_provides_children.push(provides_end_node_id);
let full_provides_node = MarkupNode::Nested {
ast_node_id,
children_ids: full_provides_children,
parent_id_opt: None,
newlines_at_end: 1,
};
let full_app_node_id = mark_node_pool.add(full_app_node);
let full_packages_node = mark_node_pool.add(full_packages_node);
let full_import_node_id = mark_node_pool.add(full_import_node);
let full_provides_node_id = mark_node_pool.add(full_provides_node);
let full_app_node_id = add_node(
full_app_node,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let full_packages_node = add_node(
full_packages_node,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let full_import_node_id = add_node(
full_import_node,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let full_provides_node_id = add_node(
full_provides_node,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let header_mark_node = MarkupNode::Nested {
ast_node_id,
children_ids: vec![
full_app_node_id,
full_packages_node,
@ -136,7 +218,12 @@ pub fn header_to_markup(app_header: &AppHeader, mark_node_pool: &mut SlowPool) -
newlines_at_end: 1,
};
let header_mn_id = mark_node_pool.add(header_mark_node);
let header_mn_id = add_node(
header_mark_node,
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
set_parent_for_all(header_mn_id, mark_node_pool);
@ -146,9 +233,10 @@ pub fn header_to_markup(app_header: &AppHeader, mark_node_pool: &mut SlowPool) -
// Used for provides and imports
fn add_header_mn_list(
str_vec: &[String],
expr_id: ExprId,
ast_node_id: ASTNodeId,
highlight_style: HighlightStyle,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
) -> Vec<MarkNodeId> {
let nr_of_elts = str_vec.len();
@ -158,13 +246,22 @@ fn add_header_mn_list(
.map(|(indx, provide_str)| {
let provide_str = header_val_mn(
provide_str.to_owned(),
expr_id,
ast_node_id,
highlight_style,
mark_node_pool,
mark_id_ast_id_map,
);
if indx != nr_of_elts - 1 {
vec![provide_str, mark_node_pool.add(new_comma_mn(expr_id, None))]
vec![
provide_str,
add_node(
new_comma_mn(),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
),
]
} else {
vec![provide_str]
}
@ -173,33 +270,37 @@ fn add_header_mn_list(
.collect()
}
fn header_mn(content: String, expr_id: ExprId, mark_node_pool: &mut SlowPool) -> MarkNodeId {
fn header_mn(
content: String,
ast_node_id: ASTNodeId,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
) -> MarkNodeId {
let mark_node = MarkupNode::Text {
content,
ast_node_id: ASTNodeId::AExprId(expr_id),
syn_high_style: HighlightStyle::PackageRelated,
attributes: Attributes::default(),
parent_id_opt: None,
newlines_at_end: 0,
};
mark_node_pool.add(mark_node)
add_node(mark_node, ast_node_id, mark_node_pool, mark_id_ast_id_map)
}
fn header_val_mn(
content: String,
expr_id: ExprId,
ast_node_id: ASTNodeId,
highlight_style: HighlightStyle,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
) -> MarkNodeId {
let mark_node = MarkupNode::Text {
content,
ast_node_id: ASTNodeId::AExprId(expr_id),
syn_high_style: highlight_style,
attributes: Attributes::default(),
parent_id_opt: None,
newlines_at_end: 0,
};
mark_node_pool.add(mark_node)
add_node(mark_node, ast_node_id, mark_node_pool, mark_id_ast_id_map)
}

View File

@ -0,0 +1,29 @@
use std::collections::HashMap;

use roc_ast::lang::core::ast::ASTNodeId;

use crate::markup_error::MarkNodeIdWithoutCorrespondingASTNodeId;
use crate::{markup_error::MarkResult, slow_pool::MarkNodeId};

/// Maps every `MarkNodeId` in the pool back to the id of the AST node it renders.
///
/// A plain `HashMap` is wrapped so the backing store can be swapped for a more
/// performant alternative later without touching call sites.
#[derive(Debug, Default)]
pub struct MarkIdAstIdMap {
    map: HashMap<MarkNodeId, ASTNodeId>,
}

impl MarkIdAstIdMap {
    /// Records that the markup node `mn_id` corresponds to the AST node `ast_id`.
    pub fn insert(&mut self, mn_id: MarkNodeId, ast_id: ASTNodeId) {
        self.map.insert(mn_id, ast_id);
    }

    /// Looks up the AST node id registered for `mn_id`.
    ///
    /// Fails with `MarkNodeIdWithoutCorrespondingASTNodeId` if `mn_id` was never
    /// inserted; the error carries a debug dump of the known keys to ease debugging.
    pub fn get(&self, mn_id: MarkNodeId) -> MarkResult<ASTNodeId> {
        if let Some(&ast_node_id) = self.map.get(&mn_id) {
            Ok(ast_node_id)
        } else {
            MarkNodeIdWithoutCorrespondingASTNodeId {
                node_id: mn_id,
                keys_str: format!("{:?}", self.map.keys()),
            }
            .fail()
        }
    }
}

View File

@ -1,5 +1,6 @@
pub mod attribute;
pub mod common_nodes;
pub mod convert;
pub mod mark_id_ast_id_map;
pub mod nodes;
pub mod top_level_def;

View File

@ -4,7 +4,10 @@ use crate::{
syntax_highlight::HighlightStyle,
};
use super::{attribute::Attributes, common_nodes::new_comma_mn_ast};
use super::{
attribute::Attributes, common_nodes::new_comma_mn, convert::from_def2::add_node,
mark_id_ast_id_map::MarkIdAstIdMap,
};
use crate::markup_error::{ExpectedTextNode, NestedNodeMissingChild, NestedNodeRequired};
use itertools::Itertools;
@ -18,42 +21,29 @@ use std::fmt;
#[derive(Debug)]
pub enum MarkupNode {
Nested {
ast_node_id: ASTNodeId,
children_ids: Vec<MarkNodeId>,
parent_id_opt: Option<MarkNodeId>,
newlines_at_end: usize,
},
Text {
content: String,
ast_node_id: ASTNodeId,
syn_high_style: HighlightStyle,
attributes: Attributes,
parent_id_opt: Option<MarkNodeId>,
newlines_at_end: usize,
},
Blank {
ast_node_id: ASTNodeId,
attributes: Attributes,
parent_id_opt: Option<MarkNodeId>,
newlines_at_end: usize,
},
Indent {
ast_node_id: ASTNodeId,
indent_level: usize,
parent_id_opt: Option<MarkNodeId>,
},
}
impl MarkupNode {
pub fn get_ast_node_id(&self) -> ASTNodeId {
match self {
MarkupNode::Nested { ast_node_id, .. } => *ast_node_id,
MarkupNode::Text { ast_node_id, .. } => *ast_node_id,
MarkupNode::Blank { ast_node_id, .. } => *ast_node_id,
MarkupNode::Indent { ast_node_id, .. } => *ast_node_id,
}
}
pub fn get_parent_id_opt(&self) -> Option<MarkNodeId> {
match self {
MarkupNode::Nested { parent_id_opt, .. } => *parent_id_opt,
@ -85,24 +75,24 @@ impl MarkupNode {
// return (index of child in list of children, closest ast index of child corresponding to ast node)
pub fn get_child_indices(
&self,
child_id: MarkNodeId,
mark_node_pool: &SlowPool,
mark_node_id: MarkNodeId,
ast_node_id: ASTNodeId,
mark_id_ast_id_map: &MarkIdAstIdMap,
) -> MarkResult<(usize, usize)> {
match self {
MarkupNode::Nested { children_ids, .. } => {
let mut mark_child_index_opt: Option<usize> = None;
let mut child_ids_with_ast: Vec<MarkNodeId> = Vec::new();
let self_ast_id = self.get_ast_node_id();
for (indx, &mark_child_id) in children_ids.iter().enumerate() {
if mark_child_id == child_id {
if mark_child_id == mark_node_id {
mark_child_index_opt = Some(indx);
}
let child_mark_node = mark_node_pool.get(mark_child_id);
let child_ast_node_id = mark_id_ast_id_map.get(mark_child_id)?;
// a node that points to the same ast_node as the parent is a ',', '[', ']'
// those are not "real" ast children
if child_mark_node.get_ast_node_id() != self_ast_id {
if child_ast_node_id != ast_node_id {
child_ids_with_ast.push(mark_child_id)
}
}
@ -145,7 +135,7 @@ impl MarkupNode {
}
} else {
NestedNodeMissingChild {
node_id: child_id,
node_id: mark_node_id,
children_ids: children_ids.clone(),
}
.fail()
@ -258,6 +248,14 @@ impl MarkupNode {
}
}
/// Builds a `Nested` markup node over `children_ids`.
///
/// The node starts without a parent; `parent_id_opt` is filled in afterwards
/// (e.g. by `set_parent_for_all`).
pub fn make_nested_mn(children_ids: Vec<MarkNodeId>, newlines_at_end: usize) -> MarkupNode {
    let parent_id_opt = None;

    MarkupNode::Nested {
        children_ids,
        parent_id_opt,
        newlines_at_end,
    }
}
/// Returns an owned copy of the text that `pool_str` references inside `env`'s pool.
pub fn get_string<'a>(env: &Env<'a>, pool_str: &PoolStr) -> String {
    String::from(pool_str.as_str(env.pool))
}
@ -269,6 +267,7 @@ pub const LEFT_SQUARE_BR: &str = "[ ";
pub const RIGHT_SQUARE_BR: &str = " ]";
pub const COLON: &str = ": ";
pub const COMMA: &str = ", ";
pub const DOT: &str = ".";
pub const STRING_QUOTES: &str = "\"\"";
pub const EQUALS: &str = " = ";
pub const ARROW: &str = " -> ";
@ -279,36 +278,34 @@ pub fn new_markup_node(
node_id: ASTNodeId,
highlight_style: HighlightStyle,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
indent_level: usize,
) -> MarkNodeId {
let content_node = MarkupNode::Text {
content: text,
ast_node_id: node_id,
syn_high_style: highlight_style,
attributes: Attributes::default(),
parent_id_opt: None,
newlines_at_end: 0,
};
let content_node_id = mark_node_pool.add(content_node);
let content_node_id = add_node(content_node, node_id, mark_node_pool, mark_id_ast_id_map);
if indent_level > 0 {
let indent_node = MarkupNode::Indent {
ast_node_id: node_id,
indent_level,
parent_id_opt: None,
};
let indent_node_id = mark_node_pool.add(indent_node);
let indent_node_id = add_node(indent_node, node_id, mark_node_pool, mark_id_ast_id_map);
let nested_node = MarkupNode::Nested {
ast_node_id: node_id,
children_ids: vec![indent_node_id, content_node_id],
parent_id_opt: None,
newlines_at_end: 0,
};
mark_node_pool.add(nested_node)
add_node(nested_node, node_id, mark_node_pool, mark_id_ast_id_map)
} else {
content_node_id
}
@ -318,7 +315,6 @@ pub fn set_parent_for_all(markup_node_id: MarkNodeId, mark_node_pool: &mut SlowP
let node = mark_node_pool.get(markup_node_id);
if let MarkupNode::Nested {
ast_node_id: _,
children_ids,
parent_id_opt: _,
newlines_at_end: _,
@ -426,7 +422,6 @@ pub fn get_root_mark_node_id(mark_node_id: MarkNodeId, mark_node_pool: &SlowPool
pub fn join_mark_nodes_spaces(
mark_nodes_ids: Vec<MarkNodeId>,
with_prepend: bool,
ast_node_id: ASTNodeId,
mark_node_pool: &mut SlowPool,
) -> Vec<MarkNodeId> {
let space_range_max = if with_prepend {
@ -439,7 +434,6 @@ pub fn join_mark_nodes_spaces(
.map(|_| {
let space_node = MarkupNode::Text {
content: " ".to_string(),
ast_node_id,
syn_high_style: HighlightStyle::Blank,
attributes: Attributes::default(),
parent_id_opt: None,
@ -458,12 +452,9 @@ pub fn join_mark_nodes_spaces(
}
// put comma mark nodes between each node in mark_nodes
pub fn join_mark_nodes_commas(
mark_nodes: Vec<MarkupNode>,
ast_node_id: ASTNodeId,
) -> Vec<MarkupNode> {
pub fn join_mark_nodes_commas(mark_nodes: Vec<MarkupNode>) -> Vec<MarkupNode> {
let join_nodes: Vec<MarkupNode> = (0..(mark_nodes.len() - 1))
.map(|_| new_comma_mn_ast(ast_node_id, None))
.map(|_| new_comma_mn())
.collect();
mark_nodes.into_iter().interleave(join_nodes).collect()

View File

@ -14,37 +14,43 @@ use crate::{
syntax_highlight::HighlightStyle,
};
// Top Level Defined Value. example: `main = "Hello, World!"`
pub fn tld_mark_node<'a>(
use super::{
common_nodes::new_assign_mn, convert::from_def2::add_node, mark_id_ast_id_map::MarkIdAstIdMap,
};
// represents for example: `main = "Hello, World!"`
pub fn assignment_mark_node<'a>(
identifier_id: IdentId,
expr_mark_node_id: MarkNodeId,
ast_node_id: ASTNodeId,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
env: &Env<'a>,
) -> ASTResult<MarkupNode> {
let val_name = env.ident_ids.get_name_str_res(identifier_id)?;
let val_name_mn = MarkupNode::Text {
content: val_name.to_owned(),
ast_node_id,
syn_high_style: HighlightStyle::Value,
attributes: Attributes::default(),
parent_id_opt: None,
newlines_at_end: 0,
};
let val_name_mn_id = mark_node_pool.add(val_name_mn);
let val_name_mn_id = add_node(val_name_mn, ast_node_id, mark_node_pool, mark_id_ast_id_map);
let equals_mn_id = mark_node_pool.add(new_equals_mn(ast_node_id, None));
let full_let_node = MarkupNode::Nested {
let equals_mn_id = add_node(
new_equals_mn(),
ast_node_id,
children_ids: vec![val_name_mn_id, equals_mn_id, expr_mark_node_id],
parent_id_opt: None,
newlines_at_end: 3,
};
mark_node_pool,
mark_id_ast_id_map,
);
Ok(full_let_node)
Ok(new_assign_mn(
val_name_mn_id,
equals_mn_id,
expr_mark_node_id,
))
}
pub fn tld_w_comments_mark_node(
@ -52,9 +58,15 @@ pub fn tld_w_comments_mark_node(
def_mark_node_id: MarkNodeId,
ast_node_id: ASTNodeId,
mark_node_pool: &mut SlowPool,
mark_id_ast_id_map: &mut MarkIdAstIdMap,
comments_before: bool,
) -> ASTResult<MarkupNode> {
let comment_mn_id = mark_node_pool.add(new_comments_mn(comments, ast_node_id, 1));
let comment_mn_id = add_node(
new_comments_mn(comments, 1),
ast_node_id,
mark_node_pool,
mark_id_ast_id_map,
);
let children_ids = if comments_before {
vec![comment_mn_id, def_mark_node_id]
@ -63,7 +75,6 @@ pub fn tld_w_comments_mark_node(
};
let tld_w_comment_node = MarkupNode::Nested {
ast_node_id,
children_ids,
parent_id_opt: None,
newlines_at_end: 2,

View File

@ -24,6 +24,16 @@ pub enum MarkError {
node_type: String,
backtrace: Backtrace,
},
#[snafu(display(
"MarkNodeIdWithoutCorrespondingASTNodeId: MarkupNode with id {} was not found in MarkIdAstIdMap, available keys are: {}.",
node_id,
keys_str
))]
MarkNodeIdWithoutCorrespondingASTNodeId {
node_id: MarkNodeId,
keys_str: String,
backtrace: Backtrace,
},
#[snafu(display("NestedNodeMissingChild: expected to find child with id {} in Nested MarkupNode, but it was missing. Id's of the children are {:?}.", node_id, children_ids))]
NestedNodeMissingChild {
node_id: MarkNodeId,

View File

@ -1,6 +1,4 @@
use std::fmt;
use crate::markup::nodes::MarkupNode;
use crate::markup::{mark_id_ast_id_map::MarkIdAstIdMap, nodes::MarkupNode};
pub type MarkNodeId = usize;
@ -34,14 +32,15 @@ impl SlowPool {
// TODO delete children of old node, this requires SlowPool to be changed to
// make sure the indexes still make sense after removal/compaction
}
}
impl fmt::Display for SlowPool {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "\n\n(mark_node_pool)\n")?;
pub fn debug_string(&self, mark_id_ast_id_map: &MarkIdAstIdMap) -> String {
let mut ret_str = String::new();
for (index, node) in self.nodes.iter().enumerate() {
let ast_node_id_str = format!("{:?}", node.get_ast_node_id());
for (mark_node_id, node) in self.nodes.iter().enumerate() {
let ast_node_id_str = match mark_id_ast_id_map.get(mark_node_id) {
Ok(ast_id) => format!("{:?}", ast_id),
Err(err) => format!("{:?}", err),
};
let ast_node_id: String = ast_node_id_str
.chars()
.filter(|c| c.is_ascii_digit())
@ -55,17 +54,16 @@ impl fmt::Display for SlowPool {
child_str = format!("children: {:?}", node_children);
}
writeln!(
f,
ret_str.push_str(&format!(
"{}: {} ({}) ast_id {:?} {}",
index,
mark_node_id,
node.node_type_as_string(),
node.get_content(),
ast_node_id.parse::<usize>().unwrap(),
child_str
)?;
));
}
Ok(())
ret_str
}
}

View File

@ -6,7 +6,6 @@ use crate::colors::{from_hsb, RgbaTup};
#[derive(Hash, Eq, PartialEq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum HighlightStyle {
Operator, // =+-<>...
Comma,
String,
FunctionName,
FunctionArgName,
@ -21,6 +20,9 @@ pub enum HighlightStyle {
Blank,
Comment,
DocsComment,
UppercaseIdent,
LowercaseIdent, // TODO we probably don't want all lowercase identifiers to have the same color?
Keyword, // if, else, when...
}
pub fn default_highlight_map() -> HashMap<HighlightStyle, RgbaTup> {
@ -31,7 +33,6 @@ pub fn default_highlight_map() -> HashMap<HighlightStyle, RgbaTup> {
let mut highlight_map = HashMap::new();
[
(Operator, from_hsb(185, 50, 75)),
(Comma, from_hsb(258, 50, 90)),
(String, from_hsb(346, 65, 97)),
(FunctionName, almost_white),
(FunctionArgName, from_hsb(225, 50, 100)),
@ -46,6 +47,9 @@ pub fn default_highlight_map() -> HashMap<HighlightStyle, RgbaTup> {
(Blank, from_hsb(258, 50, 90)),
(Comment, from_hsb(258, 50, 90)), // TODO check color
(DocsComment, from_hsb(258, 50, 90)), // TODO check color
(UppercaseIdent, almost_white),
(LowercaseIdent, from_hsb(225, 50, 100)),
(Keyword, almost_white),
]
.iter()
.for_each(|tup| {

View File

@ -11,6 +11,7 @@ roc_region = { path = "../region" }
roc_module = { path = "../module" }
roc_types = { path = "../types" }
roc_target = { path = "../roc_target" }
lazy_static = "1.4.0"
[build-dependencies]
# dunce can be removed once ziglang/zig#5109 is fixed

View File

@ -18,10 +18,118 @@ interface Dict
]
imports []
## A [dictionary](https://en.wikipedia.org/wiki/Associative_array) that lets you associate keys with values.
##
## ### Inserting
##
## The most basic way to use a dictionary is to start with an empty one and then:
## 1. Call [Dict.insert] passing a key and a value, to associate that key with that value in the dictionary.
## 2. Later, call [Dict.get] passing the same key as before, and it will return the value you stored.
##
## Here's an example of a dictionary which uses a city's name as the key, and its population as the associated value.
##
## populationByCity =
## Dict.empty
## |> Dict.insert "London" 8_961_989
## |> Dict.insert "Philadelphia" 1_603_797
## |> Dict.insert "Shanghai" 24_870_895
## |> Dict.insert "Delhi" 16_787_941
## |> Dict.insert "Amsterdam" 872_680
##
## ### Converting to a [List]
##
## We can call [Dict.toList] on `populationByCity` to turn it into a list of key-value pairs:
##
## Dict.toList populationByCity == [
## { k: "London", v: 8961989 },
## { k: "Philadelphia", v: 1603797 },
## { k: "Shanghai", v: 24870895 },
## { k: "Delhi", v: 16787941 },
## { k: "Amsterdam", v: 872680 },
## ]
##
## We can use the similar [Dict.keyList] and [Dict.values] functions to get only the keys or only the values,
## instead of getting these `{ k, v }` records that contain both.
##
## You may notice that these lists have the same order as the original insertion order. This will be true if
## all you ever do is [insert] and [get] operations on the dictionary, but [remove] operations can change this order.
## Let's see how that looks.
##
## ### Removing
##
## We can remove an element from the dictionary, like so:
##
## populationByCity
## |> Dict.remove "Philadelphia"
## |> Dict.toList
## ==
## [
## { k: "London", v: 8961989 },
## { k: "Amsterdam", v: 872680 },
## { k: "Shanghai", v: 24870895 },
## { k: "Delhi", v: 16787941 },
## ]
##
## Notice that the order changed! Philadelphia has been not only removed from the list, but Amsterdam - the last
## entry we inserted - has been moved into the spot where Philadelphia was previously. This is exactly what
## [Dict.remove] does: it removes an element and moves the most recent insertion into the vacated spot.
##
## This move is done as a performance optimization, and it lets [remove] have
## [constant time complexity](https://en.wikipedia.org/wiki/Time_complexity#Constant_time). If you need a removal
## operation which preserves ordering, [Dict.removeShift] will remove the element and then shift everything after it
## over one spot. Be aware that this shifting requires copying every single entry after the removed element, though,
## so it can be massively more costly than [remove]! This makes [remove] the recommended default choice;
## [removeShift] should only be used if maintaining original insertion order is absolutely necessary.
##
##
##
##
## ### Equality
##
## When comparing two dictionaries for equality, they are `==` only if both their contents and their
## orderings match. This preserves the property that if `dict1 == dict2`, you should be able to rely on
## `fn dict1 == fn dict2` also being `True`, even if `fn` relies on the dictionary's ordering (for example, if
## `fn` is `Dict.toList` or calls it internally.)
##
## The [Dict.hasSameContents] function gives an alternative to `==` which ignores ordering
## and returns `True` if both dictionaries have the same keys and associated values.
Dict k v : [ @Dict k v ] # TODO k should require a hashing and equating constraint
## An empty dictionary.
empty : Dict * *
size : Dict * * -> Nat
isEmpty : Dict * * -> Bool
## Returns a [List] of the dictionary's key/value pairs.
##
## See [walk] to walk over the key/value pairs without creating an intermediate data structure.
toList : Dict k v -> List { k, v }
## Returns a [List] of the dictionary's keys.
##
## See [keySet] to get a [Set] of keys instead, or [walkKeys] to walk over the keys without creating
## an intermediate data structure.
keyList : Dict key * -> List key
## Returns a [Set] of the dictionary's keys.
##
## See [keyList] to get a [List] of keys instead, or [walkKeys] to walk over the keys without creating
## an intermediate data structure.
keySet : Dict key * -> Set key
## Returns a [List] of the dictionary's values.
##
## See [walkValues] to walk over the values without creating an intermediate data structure.
values : Dict * value -> List value
walk : Dict k v, state, (state, k, v -> state) -> state
walkKeys : Dict key *, state, (state, key -> state) -> state
walkValues : Dict * value, state, (state, value -> state) -> state
## Convert each key and value in the #Dict to something new, by calling a conversion
## function on each of them. Then return a new #Map of the converted keys and values.
##
@ -32,9 +140,9 @@ isEmpty : Dict * * -> Bool
## `map` functions like this are common in Roc, and they all work similarly.
## See for example [List.map], [Result.map], and `Set.map`.
map :
Dict beforeKey beforeValue,
({ key: beforeKey, value: beforeValue } -> { key: afterKey, value: afterValue })
-> Dict afterKey afterValue
Dict beforeKey beforeVal,
({ k: beforeKey, v: beforeVal } -> { k: afterKey, v: afterVal })
-> Dict afterKey afterVal
# DESIGN NOTES: The reason for panicking when given NaN is that:
# * If we allowed NaN in, Dict.insert would no longer be idempotent.
@ -47,3 +155,56 @@ map :
## defined to be unequal to *NaN*, inserting a *NaN* key results in an entry
## that can never be retrieved or removed from the [Dict].
insert : Dict key val, key, val -> Dict key val
## Removes a key from the dictionary in [constant time](https://en.wikipedia.org/wiki/Time_complexity#Constant_time), without preserving insertion order.
##
## Since the internal [List] which determines the order of operations like [toList] and [walk] cannot have gaps in it,
## whenever an element is removed from the middle of that list, something must be done to eliminate the resulting gap.
##
## * [removeShift] eliminates the gap by shifting over every element after the removed one. This takes [linear time](https://en.wikipedia.org/wiki/Time_complexity#Linear_time),
## and preserves the original ordering.
## * [remove] eliminates the gap by replacing the removed element with the one at the end of the list - that is, the most recent insertion. This takes [constant time](https://en.wikipedia.org/wiki/Time_complexity#Constant_time), but does not preserve the original ordering.
##
## For example, suppose we have a `populationByCity` with these contents:
##
## Dict.toList populationByCity == [
## { k: "London", v: 8961989 },
## { k: "Philadelphia", v: 1603797 },
## { k: "Shanghai", v: 24870895 },
## { k: "Delhi", v: 16787941 },
## { k: "Amsterdam", v: 872680 },
## ]
##
## Using `Dict.remove "Philadelphia"` on this will replace the `"Philadelphia"` entry with the most recent insertion,
## which is `"Amsterdam"` in this case.
##
## populationByCity
## |> Dict.remove "Philadelphia"
## |> Dict.toList
## ==
## [
## { k: "London", v: 8961989 },
## { k: "Amsterdam", v: 872680 },
## { k: "Shanghai", v: 24870895 },
## { k: "Delhi", v: 16787941 },
## ]
##
## Both [remove] and [removeShift] leave the dictionary with the same contents; they only differ in ordering and in
## performance. Since ordering only affects operations like [toList] and [walk], [remove] is the better default
## choice because it has much better performance characteristics; [removeShift] should only be used when it's
## absolutely necessary for operations like [toList] and [walk] to preserve the exact original insertion order.
remove : Dict k v, k -> Dict k v
## Removes a key from the dictionary in [linear time](https://en.wikipedia.org/wiki/Time_complexity#Linear_time), while preserving insertion order.
##
## It's better to use [remove] than this by default, since [remove] has [constant time complexity](https://en.wikipedia.org/wiki/Time_complexity#Constant_time),
## which commonly leads [removeShift] to take many times as long to run as [remove] does. However, [remove] does not
## preserve insertion order, so the slower [removeShift] exists only for use cases where it's absolutely necessary for
## ordering-sensitive functions like [toList] and [walk] to preserve the exact original insertion order.
##
## See the [remove] documentation for more details about the differences between [remove] and [removeShift].
removeShift : Dict k v, k -> Dict k v
## Returns whether both dictionaries have the same keys, and the same values associated with those keys.
## This is different from `==` in that it disregards the ordering of the keys and values.
hasSameContents : Dict k v, Dict k v -> Bool

View File

@ -13,6 +13,15 @@ use roc_types::subs::VarId;
use roc_types::types::RecordField;
use std::collections::HashMap;
lazy_static::lazy_static! {
static ref STDLIB: StdLib = standard_stdlib();
}
/// A global static that stores our initialized standard library definitions
pub fn borrow_stdlib() -> &'static StdLib {
&STDLIB
}
/// Example:
///
/// let_tvars! { a, b, c }

View File

@ -1,6 +1,7 @@
use crate::env::Env;
use crate::scope::Scope;
use roc_collections::all::{ImMap, MutMap, MutSet, SendMap};
use roc_error_macros::todo_abilities;
use roc_module::ident::{Ident, Lowercase, TagName};
use roc_module::symbol::{IdentIds, ModuleId, Symbol};
use roc_parse::ast::{AssignedField, Pattern, Tag, TypeAnnotation, TypeHeader};
@ -242,6 +243,7 @@ pub fn find_type_def_symbols(
SpaceBefore(inner, _) | SpaceAfter(inner, _) => {
stack.push(inner);
}
Where(..) => todo_abilities!(),
Inferred | Wildcard | Malformed(_) => {}
}
}
@ -626,6 +628,7 @@ fn can_annotation_help(
Type::Variable(var)
}
Where(..) => todo_abilities!(),
Malformed(string) => {
malformed(env, region, string);

View File

@ -1,5 +1,5 @@
use crate::expected::{Expected, PExpected};
use roc_collections::soa::{Index, Slice};
use roc_collections::soa::{EitherIndex, Index, Slice};
use roc_module::ident::TagName;
use roc_module::symbol::Symbol;
use roc_region::all::{Loc, Region};
@ -120,14 +120,27 @@ impl Constraints {
pub const PCATEGORY_CHARACTER: Index<PatternCategory> = Index::new(10);
#[inline(always)]
pub fn push_type(&mut self, typ: Type) -> Index<Type> {
pub fn push_type(&mut self, typ: Type) -> EitherIndex<Type, Variable> {
match typ {
Type::EmptyRec => Self::EMPTY_RECORD,
Type::EmptyTagUnion => Self::EMPTY_TAG_UNION,
other => Index::push_new(&mut self.types, other),
Type::EmptyRec => EitherIndex::from_left(Self::EMPTY_RECORD),
Type::EmptyTagUnion => EitherIndex::from_left(Self::EMPTY_TAG_UNION),
Type::Variable(var) => Self::push_type_variable(var),
other => {
let index: Index<Type> = Index::push_new(&mut self.types, other);
EitherIndex::from_left(index)
}
}
}
#[inline(always)]
const fn push_type_variable(var: Variable) -> EitherIndex<Type, Variable> {
// that's right, we use the variable's integer value as the index
// that way, we don't need to push anything onto a vector
let index: Index<Variable> = Index::new(var.index());
EitherIndex::from_right(index)
}
#[inline(always)]
pub fn push_expected_type(&mut self, expected: Expected<Type>) -> Index<Expected<Type>> {
Index::push_new(&mut self.expectations, expected)
@ -180,13 +193,56 @@ impl Constraints {
category: Category,
region: Region,
) -> Constraint {
let type_index = Index::push_new(&mut self.types, typ);
let type_index = self.push_type(typ);
let expected_index = Index::push_new(&mut self.expectations, expected);
let category_index = Self::push_category(self, category);
Constraint::Eq(type_index, expected_index, category_index, region)
}
#[inline(always)]
pub fn equal_types_var(
&mut self,
var: Variable,
expected: Expected<Type>,
category: Category,
region: Region,
) -> Constraint {
let type_index = Self::push_type_variable(var);
let expected_index = Index::push_new(&mut self.expectations, expected);
let category_index = Self::push_category(self, category);
Constraint::Eq(type_index, expected_index, category_index, region)
}
#[inline(always)]
pub fn equal_types_with_storage(
&mut self,
typ: Type,
expected: Expected<Type>,
category: Category,
region: Region,
storage_var: Variable,
) -> Constraint {
let type_index = self.push_type(typ);
let expected_index = Index::push_new(&mut self.expectations, expected);
let category_index = Self::push_category(self, category);
let equal = Constraint::Eq(type_index, expected_index, category_index, region);
let storage_type_index = Self::push_type_variable(storage_var);
let storage_category = Category::Storage(std::file!(), std::line!());
let storage_category_index = Self::push_category(self, storage_category);
let storage = Constraint::Eq(
storage_type_index,
expected_index,
storage_category_index,
region,
);
self.and_constraint([equal, storage])
}
pub fn equal_pattern_types(
&mut self,
typ: Type,
@ -194,7 +250,7 @@ impl Constraints {
category: PatternCategory,
region: Region,
) -> Constraint {
let type_index = Index::push_new(&mut self.types, typ);
let type_index = self.push_type(typ);
let expected_index = Index::push_new(&mut self.pattern_expectations, expected);
let category_index = Self::push_pattern_category(self, category);
@ -208,7 +264,7 @@ impl Constraints {
category: PatternCategory,
region: Region,
) -> Constraint {
let type_index = Index::push_new(&mut self.types, typ);
let type_index = self.push_type(typ);
let expected_index = Index::push_new(&mut self.pattern_expectations, expected);
let category_index = Index::push_new(&mut self.pattern_categories, category);
@ -216,7 +272,7 @@ impl Constraints {
}
pub fn is_open_type(&mut self, typ: Type) -> Constraint {
let type_index = Index::push_new(&mut self.types, typ);
let type_index = self.push_type(typ);
Constraint::IsOpenType(type_index)
}
@ -309,7 +365,7 @@ impl Constraints {
let let_index = Index::new(self.let_constraints.len() as _);
self.let_constraints.push(let_contraint);
Constraint::Let(let_index)
Constraint::Let(let_index, Slice::default())
}
#[inline(always)]
@ -335,7 +391,7 @@ impl Constraints {
let let_index = Index::new(self.let_constraints.len() as _);
self.let_constraints.push(let_contraint);
Constraint::Let(let_index)
Constraint::Let(let_index, Slice::default())
}
#[inline(always)]
@ -353,6 +409,7 @@ impl Constraints {
I3: IntoIterator<Item = (Symbol, Loc<Type>)>,
I3::IntoIter: ExactSizeIterator,
{
// defs and ret constraint are stored consecutively, so we only need to store one index
let defs_and_ret_constraint = Index::new(self.constraints.len() as _);
self.constraints.push(defs_constraint);
@ -368,7 +425,55 @@ impl Constraints {
let let_index = Index::new(self.let_constraints.len() as _);
self.let_constraints.push(let_contraint);
Constraint::Let(let_index)
Constraint::Let(let_index, Slice::default())
}
/// A variant of `Let` used specifically for imports. When importing types from another module,
/// we use a StorageSubs to store the data, and copy over the relevant
/// variables/content/flattype/tagname etc.
///
/// The general idea is to let-generalize the imported types in the target module.
/// More concretely, we need to simulate what `type_to_var` (solve.rs) does to a `Type`.
/// While the copying puts all the data the right place, it misses that `type_to_var` puts
/// the variables that it creates (to store the nodes of a Type in Subs) in the pool of the
/// current rank (so they can be generalized).
///
/// So, during copying of an import (`copy_import_to`, subs.rs) we track the variables that
/// we need to put into the pool (simulating what `type_to_var` would do). Those variables
/// then need to find their way to the pool, and a convenient approach turned out to be to
/// tag them onto the `Let` that we used to add the imported values.
#[inline(always)]
pub fn let_import_constraint<I1, I2>(
&mut self,
rigid_vars: I1,
def_types: I2,
module_constraint: Constraint,
pool_variables: &[Variable],
) -> Constraint
where
I1: IntoIterator<Item = Variable>,
I2: IntoIterator<Item = (Symbol, Loc<Type>)>,
I2::IntoIter: ExactSizeIterator,
{
// defs and ret constraint are stored consecutively, so we only need to store one index
let defs_and_ret_constraint = Index::new(self.constraints.len() as _);
self.constraints.push(Constraint::True);
self.constraints.push(module_constraint);
let let_contraint = LetConstraint {
rigid_vars: self.variable_slice(rigid_vars),
flex_vars: Slice::default(),
def_types: self.def_types_slice(def_types),
defs_and_ret_constraint,
};
let let_index = Index::new(self.let_constraints.len() as _);
self.let_constraints.push(let_contraint);
let pool_slice = self.variable_slice(pool_variables.iter().copied());
Constraint::Let(let_index, pool_slice)
}
#[inline(always)]
@ -408,6 +513,7 @@ impl Constraints {
region,
)
}
pub fn contains_save_the_environment(&self, constraint: &Constraint) -> bool {
match constraint {
Constraint::Eq(..) => false,
@ -416,7 +522,7 @@ impl Constraints {
Constraint::Pattern(..) => false,
Constraint::True => false,
Constraint::SaveTheEnvironment => true,
Constraint::Let(index) => {
Constraint::Let(index, _) => {
let let_constraint = &self.let_constraints[index.index()];
let offset = let_constraint.defs_and_ret_constraint.index();
@ -446,35 +552,63 @@ impl Constraints {
filename: &'static str,
line_number: u32,
) -> Constraint {
let type_index = Index::push_new(&mut self.types, typ);
let type_index = self.push_type(typ);
let string_index = Index::push_new(&mut self.strings, filename);
Constraint::Store(type_index, variable, string_index, line_number)
}
pub fn store_index(
&mut self,
type_index: EitherIndex<Type, Variable>,
variable: Variable,
filename: &'static str,
line_number: u32,
) -> Constraint {
let string_index = Index::push_new(&mut self.strings, filename);
Constraint::Store(type_index, variable, string_index, line_number)
}
}
static_assertions::assert_eq_size!([u8; 3 * 8], Constraint);
roc_error_macros::assert_sizeof_default!(Constraint, 3 * 8);
#[derive(Debug, Clone, PartialEq)]
#[derive(Clone, PartialEq)]
pub enum Constraint {
Eq(Index<Type>, Index<Expected<Type>>, Index<Category>, Region),
Store(Index<Type>, Variable, Index<&'static str>, u32),
Eq(
EitherIndex<Type, Variable>,
Index<Expected<Type>>,
Index<Category>,
Region,
),
Store(
EitherIndex<Type, Variable>,
Variable,
Index<&'static str>,
u32,
),
Lookup(Symbol, Index<Expected<Type>>, Region),
Pattern(
Index<Type>,
EitherIndex<Type, Variable>,
Index<PExpected<Type>>,
Index<PatternCategory>,
Region,
),
True, // Used for things that always unify, e.g. blanks and runtime errors
/// Used for things that always unify, e.g. blanks and runtime errors
True,
SaveTheEnvironment,
Let(Index<LetConstraint>),
/// A Let constraint introduces symbols and their annotation at a certain level of nesting
///
/// The `Slice<Variable>` is used for imports where we manually put the Content into Subs
/// by copying from another module, but have to make sure that any variables we use to store
/// these contents are added to `Pool` at the correct rank
Let(Index<LetConstraint>, Slice<Variable>),
And(Slice<Constraint>),
/// Presence constraints
IsOpenType(Index<Type>), // Theory; always applied to a variable? if yes the use that
IsOpenType(EitherIndex<Type, Variable>), // Theory; always applied to a variable? if yes the use that
IncludesTag(Index<IncludesTag>),
PatternPresence(
Index<Type>,
EitherIndex<Type, Variable>,
Index<PExpected<Type>>,
Index<PatternCategory>,
Region,
@ -503,3 +637,36 @@ pub struct IncludesTag {
pub pattern_category: Index<PatternCategory>,
pub region: Region,
}
/// Custom impl to limit vertical space used by the debug output
impl std::fmt::Debug for Constraint {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Eq(arg0, arg1, arg2, arg3) => {
write!(f, "Eq({:?}, {:?}, {:?}, {:?})", arg0, arg1, arg2, arg3)
}
Self::Store(arg0, arg1, arg2, arg3) => {
write!(f, "Store({:?}, {:?}, {:?}, {:?})", arg0, arg1, arg2, arg3)
}
Self::Lookup(arg0, arg1, arg2) => {
write!(f, "Lookup({:?}, {:?}, {:?})", arg0, arg1, arg2)
}
Self::Pattern(arg0, arg1, arg2, arg3) => {
write!(f, "Pattern({:?}, {:?}, {:?}, {:?})", arg0, arg1, arg2, arg3)
}
Self::True => write!(f, "True"),
Self::SaveTheEnvironment => write!(f, "SaveTheEnvironment"),
Self::Let(arg0, arg1) => f.debug_tuple("Let").field(arg0).field(arg1).finish(),
Self::And(arg0) => f.debug_tuple("And").field(arg0).finish(),
Self::IsOpenType(arg0) => f.debug_tuple("IsOpenType").field(arg0).finish(),
Self::IncludesTag(arg0) => f.debug_tuple("IncludesTag").field(arg0).finish(),
Self::PatternPresence(arg0, arg1, arg2, arg3) => {
write!(
f,
"PatternPresence({:?}, {:?}, {:?}, {:?})",
arg0, arg1, arg2, arg3
)
}
}
}
}

View File

@ -12,6 +12,7 @@ use crate::procedure::References;
use crate::scope::create_alias;
use crate::scope::Scope;
use roc_collections::all::{default_hasher, ImMap, ImSet, MutMap, MutSet, SendMap};
use roc_error_macros::todo_abilities;
use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol;
use roc_parse::ast;
@ -286,7 +287,7 @@ pub fn canonicalize_defs<'a>(
// Record all the annotation's references in output.references.lookups
for symbol in can_ann.references {
output.references.lookups.insert(symbol);
output.references.type_lookups.insert(symbol);
output.references.referenced_type_defs.insert(symbol);
}
@ -410,7 +411,7 @@ pub fn sort_can_defs(
// Determine the full set of references by traversing the graph.
let mut visited_symbols = MutSet::default();
let returned_lookups = ImSet::clone(&output.references.lookups);
let returned_lookups = ImSet::clone(&output.references.value_lookups);
// Start with the return expression's referenced locals. They're the only ones that count!
//
@ -483,10 +484,10 @@ pub fn sort_can_defs(
let mut loc_succ = local_successors(references, &env.closures);
// if the current symbol is a closure, peek into its body
if let Some(References { lookups, .. }) = env.closures.get(symbol) {
if let Some(References { value_lookups, .. }) = env.closures.get(symbol) {
let home = env.home;
for lookup in lookups {
for lookup in value_lookups {
if lookup != symbol && lookup.module_id() == home {
// DO NOT register a self-call behind a lambda!
//
@ -533,8 +534,8 @@ pub fn sort_can_defs(
let mut loc_succ = local_successors(references, &env.closures);
// if the current symbol is a closure, peek into its body
if let Some(References { lookups, .. }) = env.closures.get(symbol) {
for lookup in lookups {
if let Some(References { value_lookups, .. }) = env.closures.get(symbol) {
for lookup in value_lookups {
loc_succ.insert(*lookup);
}
}
@ -920,7 +921,7 @@ fn canonicalize_pending_def<'a>(
// Record all the annotation's references in output.references.lookups
for symbol in type_annotation.references.iter() {
output.references.lookups.insert(*symbol);
output.references.type_lookups.insert(*symbol);
output.references.referenced_type_defs.insert(*symbol);
}
@ -1042,7 +1043,7 @@ fn canonicalize_pending_def<'a>(
// Record all the annotation's references in output.references.lookups
for symbol in type_annotation.references.iter() {
output.references.lookups.insert(*symbol);
output.references.type_lookups.insert(*symbol);
output.references.referenced_type_defs.insert(*symbol);
}
@ -1122,7 +1123,7 @@ fn canonicalize_pending_def<'a>(
// Recursion doesn't count as referencing. (If it did, all recursive functions
// would result in circular def errors!)
refs_by_symbol.entry(symbol).and_modify(|(_, refs)| {
refs.lookups = refs.lookups.without(&symbol);
refs.value_lookups = refs.value_lookups.without(&symbol);
});
// renamed_closure_def = Some(&symbol);
@ -1262,7 +1263,7 @@ fn canonicalize_pending_def<'a>(
// Recursion doesn't count as referencing. (If it did, all recursive functions
// would result in circular def errors!)
refs_by_symbol.entry(symbol).and_modify(|(_, refs)| {
refs.lookups = refs.lookups.without(&symbol);
refs.value_lookups = refs.value_lookups.without(&symbol);
});
loc_can_expr.value = Closure(ClosureData {
@ -1357,7 +1358,8 @@ pub fn can_defs_with_return<'a>(
// Now that we've collected all the references, check to see if any of the new idents
// we defined went unused by the return expression. If any were unused, report it.
for (symbol, region) in symbols_introduced {
if !output.references.has_lookup(symbol) {
if !output.references.has_value_lookup(symbol) && !output.references.has_type_lookup(symbol)
{
env.problem(Problem::UnusedDef(symbol, region));
}
}
@ -1588,6 +1590,8 @@ fn to_pending_def<'a>(
}
}
Ability { .. } => todo_abilities!(),
Expect(_condition) => todo!(),
SpaceBefore(sub_def, _) | SpaceAfter(sub_def, _) => {

View File

@ -27,8 +27,11 @@ pub struct Env<'a> {
/// current closure name (if any)
pub closure_name_symbol: Option<Symbol>,
/// Symbols which were referenced by qualified lookups.
pub qualified_lookups: MutSet<Symbol>,
/// Symbols of values/functions which were referenced by qualified lookups.
pub qualified_value_lookups: MutSet<Symbol>,
/// Symbols of types which were referenced by qualified lookups.
pub qualified_type_lookups: MutSet<Symbol>,
pub top_level_symbols: MutSet<Symbol>,
@ -51,7 +54,8 @@ impl<'a> Env<'a> {
exposed_ident_ids,
problems: Vec::new(),
closures: MutMap::default(),
qualified_lookups: MutSet::default(),
qualified_value_lookups: MutSet::default(),
qualified_type_lookups: MutSet::default(),
tailcallable_symbol: None,
closure_name_symbol: None,
top_level_symbols: MutSet::default(),
@ -71,6 +75,8 @@ impl<'a> Env<'a> {
ident
);
let is_type_name = ident.starts_with(|c: char| c.is_uppercase());
let module_name = ModuleName::from(module_name_str);
let ident = Ident::from(ident);
@ -83,7 +89,11 @@ impl<'a> Env<'a> {
Some(ident_id) => {
let symbol = Symbol::new(module_id, *ident_id);
self.qualified_lookups.insert(symbol);
if is_type_name {
self.qualified_type_lookups.insert(symbol);
} else {
self.qualified_value_lookups.insert(symbol);
}
Ok(symbol)
}
@ -104,7 +114,11 @@ impl<'a> Env<'a> {
Some(ident_id) => {
let symbol = Symbol::new(module_id, *ident_id);
self.qualified_lookups.insert(symbol);
if is_type_name {
self.qualified_type_lookups.insert(symbol);
} else {
self.qualified_value_lookups.insert(symbol);
}
Ok(symbol)
}

View File

@ -493,7 +493,7 @@ pub fn canonicalize_expr<'a>(
Ok((name, opaque_def)) => {
let argument = Box::new(args.pop().unwrap());
output.references.referenced_type_defs.insert(name);
output.references.lookups.insert(name);
output.references.type_lookups.insert(name);
let (type_arguments, lambda_set_variables, specialized_def_type) =
freshen_opaque_def(var_store, opaque_def);
@ -587,7 +587,7 @@ pub fn canonicalize_expr<'a>(
}
}
ast::Expr::Var { module_name, ident } => {
canonicalize_lookup(env, scope, module_name, ident, region)
canonicalize_var_lookup(env, scope, module_name, ident, region)
}
ast::Expr::Underscore(name) => {
// we parse underscores, but they are not valid expression syntax
@ -661,8 +661,12 @@ pub fn canonicalize_expr<'a>(
&loc_body_expr.value,
);
let mut captured_symbols: MutSet<Symbol> =
new_output.references.lookups.iter().copied().collect();
let mut captured_symbols: MutSet<Symbol> = new_output
.references
.value_lookups
.iter()
.copied()
.collect();
// filter out the closure's name itself
captured_symbols.remove(&symbol);
@ -684,7 +688,10 @@ pub fn canonicalize_expr<'a>(
output.union(new_output);
// filter out aliases
captured_symbols.retain(|s| !output.references.referenced_type_defs.contains(s));
debug_assert!(captured_symbols
.iter()
.all(|s| !output.references.referenced_type_defs.contains(s)));
// captured_symbols.retain(|s| !output.references.referenced_type_defs.contains(s));
// filter out functions that don't close over anything
captured_symbols.retain(|s| !output.non_closures.contains(s));
@ -693,7 +700,7 @@ pub fn canonicalize_expr<'a>(
// went unreferenced. If any did, report them as unused arguments.
for (sub_symbol, region) in scope.symbols() {
if !original_scope.contains_symbol(*sub_symbol) {
if !output.references.has_lookup(*sub_symbol) {
if !output.references.has_value_lookup(*sub_symbol) {
// The body never referenced this argument we declared. It's an unused argument!
env.problem(Problem::UnusedArgument(symbol, *sub_symbol, *region));
}
@ -701,7 +708,7 @@ pub fn canonicalize_expr<'a>(
// We shouldn't ultimately count arguments as referenced locals. Otherwise,
// we end up with weird conclusions like the expression (\x -> x + 1)
// references the (nonexistent) local variable x!
output.references.lookups.remove(sub_symbol);
output.references.value_lookups.remove(sub_symbol);
}
}
@ -1082,8 +1089,10 @@ fn canonicalize_when_branch<'a>(
for (symbol, region) in scope.symbols() {
let symbol = *symbol;
if !output.references.has_lookup(symbol)
&& !branch_output.references.has_lookup(symbol)
if !output.references.has_value_lookup(symbol)
&& !output.references.has_type_lookup(symbol)
&& !branch_output.references.has_value_lookup(symbol)
&& !branch_output.references.has_type_lookup(symbol)
&& !original_scope.contains_symbol(symbol)
{
env.problem(Problem::UnusedDef(symbol, *region));
@ -1107,7 +1116,7 @@ pub fn local_successors<'a>(
references: &'a References,
closures: &'a MutMap<Symbol, References>,
) -> ImSet<Symbol> {
let mut answer = references.lookups.clone();
let mut answer = references.value_lookups.clone();
for call_symbol in references.calls.iter() {
answer = answer.union(call_successors(*call_symbol, closures));
@ -1127,7 +1136,7 @@ fn call_successors(call_symbol: Symbol, closures: &MutMap<Symbol, References>) -
}
if let Some(references) = closures.get(&symbol) {
answer.extend(references.lookups.iter().copied());
answer.extend(references.value_lookups.iter().copied());
queue.extend(references.calls.iter().copied());
seen.insert(symbol);
@ -1152,7 +1161,7 @@ where
Some((_, refs)) => {
visited.insert(defined_symbol);
for local in refs.lookups.iter() {
for local in refs.value_lookups.iter() {
if !visited.contains(local) {
let other_refs: References =
references_from_local(*local, visited, refs_by_def, closures);
@ -1160,7 +1169,7 @@ where
answer = answer.union(other_refs);
}
answer.lookups.insert(*local);
answer.value_lookups.insert(*local);
}
for call in refs.calls.iter() {
@ -1194,7 +1203,7 @@ where
visited.insert(call_symbol);
for closed_over_local in references.lookups.iter() {
for closed_over_local in references.value_lookups.iter() {
if !visited.contains(closed_over_local) {
let other_refs =
references_from_local(*closed_over_local, visited, refs_by_def, closures);
@ -1202,7 +1211,7 @@ where
answer = answer.union(other_refs);
}
answer.lookups.insert(*closed_over_local);
answer.value_lookups.insert(*closed_over_local);
}
for call in references.calls.iter() {
@ -1335,7 +1344,7 @@ fn canonicalize_field<'a>(
}
}
fn canonicalize_lookup(
fn canonicalize_var_lookup(
env: &mut Env<'_>,
scope: &mut Scope,
module_name: &str,
@ -1350,7 +1359,7 @@ fn canonicalize_lookup(
// Look it up in scope!
match scope.lookup(&(*ident).into(), region) {
Ok(symbol) => {
output.references.lookups.insert(symbol);
output.references.value_lookups.insert(symbol);
Var(symbol)
}
@ -1365,7 +1374,7 @@ fn canonicalize_lookup(
// Look it up in the env!
match env.qualified_lookup(module_name, ident, region) {
Ok(symbol) => {
output.references.lookups.insert(symbol);
output.references.value_lookups.insert(symbol);
Var(symbol)
}

View File

@ -23,21 +23,29 @@ pub struct Module {
pub module_id: ModuleId,
pub exposed_imports: MutMap<Symbol, Variable>,
pub exposed_symbols: MutSet<Symbol>,
pub references: MutSet<Symbol>,
pub referenced_values: MutSet<Symbol>,
pub referenced_types: MutSet<Symbol>,
pub aliases: MutMap<Symbol, Alias>,
pub rigid_variables: MutMap<Variable, Lowercase>,
pub rigid_variables: RigidVariables,
}
#[derive(Debug, Default)]
pub struct RigidVariables {
pub named: MutMap<Variable, Lowercase>,
pub wildcards: MutSet<Variable>,
}
#[derive(Debug)]
pub struct ModuleOutput {
pub aliases: MutMap<Symbol, Alias>,
pub rigid_variables: MutMap<Variable, Lowercase>,
pub rigid_variables: RigidVariables,
pub declarations: Vec<Declaration>,
pub exposed_imports: MutMap<Symbol, Variable>,
pub lookups: Vec<(Symbol, Variable, Region)>,
pub problems: Vec<Problem>,
pub ident_ids: IdentIds,
pub references: MutSet<Symbol>,
pub referenced_values: MutSet<Symbol>,
pub referenced_types: MutSet<Symbol>,
pub scope: Scope,
}
@ -167,7 +175,7 @@ pub fn canonicalize_module_defs<'a>(
}
let mut lookups = Vec::with_capacity(num_deps);
let mut rigid_variables = MutMap::default();
let mut rigid_variables = RigidVariables::default();
// Exposed values are treated like defs that appear before any others, e.g.
//
@ -238,38 +246,38 @@ pub fn canonicalize_module_defs<'a>(
// See if any of the new idents we defined went unused.
// If any were unused and also not exposed, report it.
for (symbol, region) in symbols_introduced {
if !output.references.has_lookup(symbol) && !exposed_symbols.contains(&symbol) {
if !output.references.has_value_lookup(symbol)
&& !output.references.has_type_lookup(symbol)
&& !exposed_symbols.contains(&symbol)
{
env.problem(Problem::UnusedDef(symbol, region));
}
}
for (var, lowercase) in output.introduced_variables.name_by_var {
rigid_variables.insert(var, lowercase.clone());
rigid_variables.named.insert(var, lowercase.clone());
}
for var in output.introduced_variables.wildcards {
rigid_variables.insert(var, "*".into());
rigid_variables.wildcards.insert(var);
}
let mut references = MutSet::default();
let mut referenced_values = MutSet::default();
let mut referenced_types = MutSet::default();
// Gather up all the symbols that were referenced across all the defs' lookups.
for symbol in output.references.lookups.iter() {
references.insert(*symbol);
}
referenced_values.extend(output.references.value_lookups);
referenced_types.extend(output.references.type_lookups);
// Gather up all the symbols that were referenced across all the defs' calls.
for symbol in output.references.calls.iter() {
references.insert(*symbol);
}
referenced_values.extend(output.references.calls);
// Gather up all the symbols that were referenced from other modules.
for symbol in env.qualified_lookups.iter() {
references.insert(*symbol);
}
referenced_values.extend(env.qualified_value_lookups.iter().copied());
referenced_types.extend(env.qualified_type_lookups.iter().copied());
// add any builtins used by other builtins
let transitive_builtins: Vec<Symbol> = references
let transitive_builtins: Vec<Symbol> = referenced_values
.iter()
.filter(|s| s.is_builtin())
.map(|s| crate::builtins::builtin_dependencies(*s))
@ -277,7 +285,7 @@ pub fn canonicalize_module_defs<'a>(
.copied()
.collect();
references.extend(transitive_builtins);
referenced_values.extend(transitive_builtins);
// NOTE previously we inserted builtin defs into the list of defs here
// this is now done later, in file.rs.
@ -456,19 +464,15 @@ pub fn canonicalize_module_defs<'a>(
}
// Incorporate any remaining output.lookups entries into references.
for symbol in output.references.lookups {
references.insert(symbol);
}
referenced_values.extend(output.references.value_lookups);
referenced_types.extend(output.references.type_lookups);
// Incorporate any remaining output.calls entries into references.
for symbol in output.references.calls {
references.insert(symbol);
}
referenced_values.extend(output.references.calls);
// Gather up all the symbols that were referenced from other modules.
for symbol in env.qualified_lookups.iter() {
references.insert(*symbol);
}
referenced_values.extend(env.qualified_value_lookups.iter().copied());
referenced_types.extend(env.qualified_type_lookups.iter().copied());
for declaration in declarations.iter_mut() {
match declaration {
@ -482,7 +486,7 @@ pub fn canonicalize_module_defs<'a>(
// TODO this loops over all symbols in the module, we can speed it up by having an
// iterator over all builtin symbols
for symbol in references.iter() {
for symbol in referenced_values.iter() {
if symbol.is_builtin() {
// this can fail when the symbol is for builtin types, or has no implementation yet
if let Some(def) = crate::builtins::builtin_defs_map(*symbol, var_store) {
@ -496,7 +500,8 @@ pub fn canonicalize_module_defs<'a>(
aliases,
rigid_variables,
declarations,
references,
referenced_values,
referenced_types,
exposed_imports: can_exposed_imports,
problems: env.problems,
lookups,

View File

@ -96,6 +96,7 @@ pub fn desugar_def<'a>(arena: &'a Bump, def: &'a Def<'a>) -> Def<'a> {
SpaceBefore(def, _) | SpaceAfter(def, _) => desugar_def(arena, def),
alias @ Alias { .. } => *alias,
opaque @ Opaque { .. } => *opaque,
ability @ Ability { .. } => *ability,
ann @ Annotation(_, _) => *ann,
AnnotatedBody {
ann_pattern,

View File

@ -254,7 +254,7 @@ pub fn canonicalize_pattern<'a>(
freshen_opaque_def(var_store, opaque_def);
output.references.referenced_type_defs.insert(opaque);
output.references.lookups.insert(opaque);
output.references.type_lookups.insert(opaque);
Pattern::UnwrappedOpaque {
whole_var: var_store.fresh(),

View File

@ -45,7 +45,8 @@ impl Procedure {
#[derive(Clone, Debug, Default, PartialEq)]
pub struct References {
pub bound_symbols: ImSet<Symbol>,
pub lookups: ImSet<Symbol>,
pub type_lookups: ImSet<Symbol>,
pub value_lookups: ImSet<Symbol>,
/// Aliases or opaque types referenced
pub referenced_type_defs: ImSet<Symbol>,
pub calls: ImSet<Symbol>,
@ -57,7 +58,8 @@ impl References {
}
pub fn union(mut self, other: References) -> Self {
self.lookups = self.lookups.union(other.lookups);
self.value_lookups = self.value_lookups.union(other.value_lookups);
self.type_lookups = self.type_lookups.union(other.type_lookups);
self.calls = self.calls.union(other.calls);
self.bound_symbols = self.bound_symbols.union(other.bound_symbols);
self.referenced_type_defs = self.referenced_type_defs.union(other.referenced_type_defs);
@ -66,13 +68,18 @@ impl References {
}
pub fn union_mut(&mut self, other: References) {
self.lookups.extend(other.lookups);
self.value_lookups.extend(other.value_lookups);
self.type_lookups.extend(other.type_lookups);
self.calls.extend(other.calls);
self.bound_symbols.extend(other.bound_symbols);
self.referenced_type_defs.extend(other.referenced_type_defs);
}
pub fn has_lookup(&self, symbol: Symbol) -> bool {
self.lookups.contains(&symbol)
pub fn has_value_lookup(&self, symbol: Symbol) -> bool {
self.value_lookups.contains(&symbol)
}
pub fn has_type_lookup(&self, symbol: Symbol) -> bool {
self.type_lookups.contains(&symbol)
}
}

View File

@ -117,3 +117,56 @@ impl<T> Slice<T> {
self.indices().map(|i| Index::new(i as _))
}
}
/// An index that refers to either an `Index<T>` ("left") or an `Index<U>`
/// ("right"), packed into a single `u32`.
///
/// The high bit of `index` is the discriminant (0 = left, 1 = right), so the
/// wrapped index must fit in 31 bits; `from_left`/`from_right` assert this.
#[derive(PartialEq, Eq)]
pub struct EitherIndex<T, U> {
    index: u32,
    _marker: std::marker::PhantomData<(T, U)>,
}

// NOTE: Clone/Copy are implemented manually because `derive` would add
// `T: Clone` / `U: Clone` bounds, even though only `PhantomData` is stored.
impl<T, U> Clone for EitherIndex<T, U> {
    fn clone(&self) -> Self {
        // Canonical `Clone` for a `Copy` type (clippy: non_canonical_clone_impl).
        *self
    }
}

impl<T, U> Copy for EitherIndex<T, U> {}

impl<T, U> std::fmt::Debug for EitherIndex<T, U> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Index({})", self.index)
    }
}

impl<T, U> EitherIndex<T, U> {
    /// High bit, used to tag "right" indices.
    const MASK: u32 = 1 << 31;

    /// Wrap a left index. Asserts that the index does not use the tag bit.
    pub const fn from_left(input: Index<T>) -> Self {
        assert!(input.index & Self::MASK == 0);

        Self {
            index: input.index,
            _marker: std::marker::PhantomData,
        }
    }

    /// Wrap a right index, tagging it with the high bit.
    /// Asserts that the index does not already use the tag bit.
    pub const fn from_right(input: Index<U>) -> Self {
        assert!(input.index & Self::MASK == 0);

        Self {
            index: input.index | Self::MASK,
            _marker: std::marker::PhantomData,
        }
    }

    /// Recover the wrapped index: `Ok` for a left index, `Err` for a right one.
    pub const fn split(self) -> Result<Index<T>, Index<U>> {
        if self.index & Self::MASK == 0 {
            Ok(Index::new(self.index))
        } else {
            Err(Index::new(self.index ^ Self::MASK))
        }
    }
}

View File

@ -126,25 +126,17 @@ pub fn constrain_expr(
// lifetime parameter on `Type`
Box::new(Type::EmptyRec),
);
let record_con = constraints.equal_types(
let record_con = constraints.equal_types_with_storage(
record_type,
expected.clone(),
expected,
Category::Record,
region,
*record_var,
);
rec_constraints.push(record_con);
// variable to store in the AST
let stored_con = constraints.equal_types(
Type::Variable(*record_var),
expected,
Category::Storage(std::file!(), std::line!()),
region,
);
field_vars.push(*record_var);
rec_constraints.push(stored_con);
let and_constraint = constraints.and_constraint(rec_constraints);
constraints.exists(field_vars, and_constraint)
@ -177,14 +169,14 @@ pub fn constrain_expr(
let record_type = Type::Variable(*record_var);
// NOTE from elm compiler: fields_type is separate so that Error propagates better
let fields_con = constraints.equal_types(
record_type.clone(),
let fields_con = constraints.equal_types_var(
*record_var,
NoExpectation(fields_type),
Category::Record,
region,
);
let record_con =
constraints.equal_types(record_type.clone(), expected, Category::Record, region);
constraints.equal_types_var(*record_var, expected, Category::Record, region);
vars.push(*record_var);
vars.push(*ext_var);
@ -273,7 +265,7 @@ pub fn constrain_expr(
let fn_type = Variable(*fn_var);
let fn_region = loc_fn.region;
let fn_expected = NoExpectation(fn_type.clone());
let fn_expected = NoExpectation(fn_type);
let fn_reason = Reason::FnCall {
name: opt_symbol,
@ -323,11 +315,7 @@ pub fn constrain_expr(
let expected_fn_type = ForReason(
fn_reason,
Function(
arg_types,
Box::new(closure_type),
Box::new(ret_type.clone()),
),
Function(arg_types, Box::new(closure_type), Box::new(ret_type)),
region,
);
@ -335,9 +323,9 @@ pub fn constrain_expr(
let and_cons = [
fn_con,
constraints.equal_types(fn_type, expected_fn_type, category.clone(), fn_region),
constraints.equal_types_var(*fn_var, expected_fn_type, category.clone(), fn_region),
constraints.and_constraint(arg_cons),
constraints.equal_types(ret_type, expected, category, region),
constraints.equal_types_var(*ret_var, expected, category, region),
];
let and_constraint = constraints.and_constraint(and_cons);
@ -415,14 +403,12 @@ pub fn constrain_expr(
pattern_state_constraints,
ret_constraint,
),
// "the closure's type is equal to expected type"
constraints.equal_types(function_type.clone(), expected, Category::Lambda, region),
// "fn_var is equal to the closure's type" - fn_var is used in code gen
constraints.equal_types(
Type::Variable(*fn_var),
NoExpectation(function_type),
Category::Storage(std::file!(), std::line!()),
constraints.equal_types_with_storage(
function_type,
expected,
Category::Lambda,
region,
*fn_var,
),
closure_constraint,
];
@ -469,8 +455,8 @@ pub fn constrain_expr(
// TODO why does this cond var exist? is it for error messages?
let first_cond_region = branches[0].0.region;
let cond_var_is_bool_con = constraints.equal_types(
Type::Variable(*cond_var),
let cond_var_is_bool_con = constraints.equal_types_var(
*cond_var,
expect_bool(first_cond_region),
Category::If,
first_cond_region,
@ -528,8 +514,8 @@ pub fn constrain_expr(
),
);
let ast_con = constraints.equal_types(
Type::Variable(*branch_var),
let ast_con = constraints.equal_types_var(
*branch_var,
NoExpectation(tipe),
Category::Storage(std::file!(), std::line!()),
region,
@ -583,8 +569,8 @@ pub fn constrain_expr(
),
);
branch_cons.push(constraints.equal_types(
Type::Variable(*branch_var),
branch_cons.push(constraints.equal_types_var(
*branch_var,
expected,
Category::Storage(std::file!(), std::line!()),
region,
@ -654,8 +640,8 @@ pub fn constrain_expr(
branch_constraints.push(branch_con);
}
branch_constraints.push(constraints.equal_types(
typ,
branch_constraints.push(constraints.equal_types_var(
*expr_var,
expected,
Category::When,
region,
@ -665,7 +651,8 @@ pub fn constrain_expr(
}
_ => {
let branch_type = Variable(*expr_var);
let branch_var = *expr_var;
let branch_type = Variable(branch_var);
let mut branch_cons = Vec::with_capacity(branches.len());
for (index, when_branch) in branches.iter().enumerate() {
@ -703,8 +690,8 @@ pub fn constrain_expr(
//
// The return type of each branch must equal the return type of
// the entire when-expression.
branch_cons.push(constraints.equal_types(
branch_type,
branch_cons.push(constraints.equal_types_var(
branch_var,
expected,
Category::When,
region,
@ -731,15 +718,15 @@ pub fn constrain_expr(
let mut rec_field_types = SendMap::default();
let label = field.clone();
rec_field_types.insert(label, RecordField::Demanded(field_type.clone()));
rec_field_types.insert(label, RecordField::Demanded(field_type));
let record_type = Type::Record(rec_field_types, Box::new(ext_type));
let record_expected = Expected::NoExpectation(record_type);
let category = Category::Access(field.clone());
let record_con = constraints.equal_types(
Type::Variable(*record_var),
let record_con = constraints.equal_types_var(
*record_var,
record_expected.clone(),
category.clone(),
region,
@ -756,7 +743,7 @@ pub fn constrain_expr(
record_expected,
);
let eq = constraints.equal_types(field_type, expected, category, region);
let eq = constraints.equal_types_var(field_var, expected, category, region);
constraints.exists_many(
[*record_var, field_var, ext_var],
[constraint, eq, record_con],
@ -785,12 +772,8 @@ pub fn constrain_expr(
let category = Category::Accessor(field.clone());
let record_expected = Expected::NoExpectation(record_type.clone());
let record_con = constraints.equal_types(
Type::Variable(*record_var),
record_expected,
category.clone(),
region,
);
let record_con =
constraints.equal_types_var(*record_var, record_expected, category.clone(), region);
let lambda_set = Type::ClosureTag {
name: *closure_name,
@ -801,13 +784,13 @@ pub fn constrain_expr(
let function_type = Type::Function(
vec![record_type],
Box::new(closure_type.clone()),
Box::new(closure_type),
Box::new(field_type),
);
let cons = [
constraints.equal_types(
closure_type,
constraints.equal_types_var(
*closure_var,
NoExpectation(lambda_set),
category.clone(),
region,
@ -847,8 +830,8 @@ pub fn constrain_expr(
constrain_recursive_defs(constraints, env, defs, body_con),
// Record the type of tne entire def-expression in the variable.
// Code gen will need that later!
constraints.equal_types(
Type::Variable(*var),
constraints.equal_types_var(
*var,
expected,
Category::Storage(std::file!(), std::line!()),
loc_ret.region,
@ -882,8 +865,8 @@ pub fn constrain_expr(
constrain_def(constraints, env, def, body_con),
// Record the type of the entire def-expression in the variable.
// Code gen will need that later!
constraints.equal_types(
Type::Variable(*var),
constraints.equal_types_var(
*var,
expected.clone(),
Category::Storage(std::file!(), std::line!()),
ret_region,
@ -919,7 +902,7 @@ pub fn constrain_expr(
types.push(Type::Variable(*var));
}
let union_con = constraints.equal_types(
let union_con = constraints.equal_types_with_storage(
Type::TagUnion(
vec![(name.clone(), types)],
Box::new(Type::Variable(*ext_var)),
@ -930,18 +913,12 @@ pub fn constrain_expr(
args_count: arguments.len(),
},
region,
);
let ast_con = constraints.equal_types(
Type::Variable(*variant_var),
expected,
Category::Storage(std::file!(), std::line!()),
region,
*variant_var,
);
vars.push(*variant_var);
vars.push(*ext_var);
arg_cons.push(union_con);
arg_cons.push(ast_con);
constraints.exists_many(vars, arg_cons)
}
@ -970,7 +947,7 @@ pub fn constrain_expr(
types.push(Type::Variable(*var));
}
let union_con = constraints.equal_types(
let union_con = constraints.equal_types_with_storage(
Type::FunctionOrTagUnion(
name.clone(),
*closure_name,
@ -982,18 +959,12 @@ pub fn constrain_expr(
args_count: arguments.len(),
},
region,
);
let ast_con = constraints.equal_types(
Type::Variable(*variant_var),
expected,
Category::Storage(std::file!(), std::line!()),
region,
*variant_var,
);
vars.push(*variant_var);
vars.push(*ext_var);
arg_cons.push(union_con);
arg_cons.push(ast_con);
constraints.exists_many(vars, arg_cons)
}
@ -1028,11 +999,12 @@ pub fn constrain_expr(
// Link the entire wrapped opaque type (with the now-constrained argument) to the
// expected type
let opaque_con = constraints.equal_types(
let opaque_con = constraints.equal_types_with_storage(
opaque_type,
expected.clone(),
expected,
Category::OpaqueWrap(*name),
region,
*opaque_var,
);
// Link the entire wrapped opaque type (with the now-constrained argument) to the type
@ -1045,14 +1017,6 @@ pub fn constrain_expr(
arg_loc_expr.region,
);
// Store the entire wrapped opaque type in `opaque_var`
let storage_con = constraints.equal_types(
Type::Variable(*opaque_var),
expected,
Category::Storage(std::file!(), std::line!()),
region,
);
let mut vars = vec![*arg_var, *opaque_var];
// Also add the fresh variables we created for the type argument and lambda sets
vars.extend(type_arguments.iter().map(|(_, t)| {
@ -1062,18 +1026,12 @@ pub fn constrain_expr(
v.0.expect_variable("all lambda sets should be fresh variables here")
}));
constraints.exists_many(
vars,
[arg_con, opaque_con, link_type_variables_con, storage_con],
)
constraints.exists_many(vars, [arg_con, opaque_con, link_type_variables_con])
}
RunLowLevel { args, ret_var, op } => {
// This is a modified version of what we do for function calls.
// The operation's return type
let ret_type = Variable(*ret_var);
// This will be used in the occurs check
let mut vars = Vec::with_capacity(1 + args.len());
@ -1103,7 +1061,7 @@ pub fn constrain_expr(
let category = Category::LowLevelOpResult(*op);
// Deviation: elm uses an additional And here
let eq = constraints.equal_types(ret_type, expected, category, region);
let eq = constraints.equal_types_var(*ret_var, expected, category, region);
arg_cons.push(eq);
constraints.exists_many(vars, arg_cons)
}
@ -1114,9 +1072,6 @@ pub fn constrain_expr(
} => {
// This is a modified version of what we do for function calls.
// The operation's return type
let ret_type = Variable(*ret_var);
// This will be used in the occurs check
let mut vars = Vec::with_capacity(1 + args.len());
@ -1146,7 +1101,7 @@ pub fn constrain_expr(
let category = Category::ForeignCall;
// Deviation: elm uses an additional And here
let eq = constraints.equal_types(ret_type, expected, category, region);
let eq = constraints.equal_types_var(*ret_var, expected, category, region);
arg_cons.push(eq);
constraints.exists_many(vars, arg_cons)
}
@ -1248,14 +1203,7 @@ fn constrain_empty_record(
region: Region,
expected: Expected<Type>,
) -> Constraint {
let expected_index = constraints.push_expected_type(expected);
Constraint::Eq(
Constraints::EMPTY_RECORD,
expected_index,
Constraints::CATEGORY_RECORD,
region,
)
constraints.equal_types(Type::EmptyRec, expected, Category::Record, region)
}
/// Constrain top-level module declarations
@ -1455,8 +1403,8 @@ fn constrain_def(
def_pattern_state.vars.push(*pattern_var);
pattern_types.push(Type::Variable(*pattern_var));
let pattern_con = constraints.equal_types(
Type::Variable(*pattern_var),
let pattern_con = constraints.equal_types_var(
*pattern_var,
Expected::NoExpectation(loc_ann.clone()),
Category::Storage(std::file!(), std::line!()),
loc_pattern.region,
@ -1496,6 +1444,8 @@ fn constrain_def(
vars.push(*fn_var);
let defs_constraint = constraints.and_constraint(state.constraints);
let signature_closure_type = *signature_closure_type.clone();
let signature_index = constraints.push_type(signature);
let cons = [
constraints.let_constraint(
[],
@ -1504,21 +1454,31 @@ fn constrain_def(
defs_constraint,
ret_constraint,
),
constraints.equal_types(
Type::Variable(closure_var),
constraints.equal_types_var(
closure_var,
Expected::FromAnnotation(
def.loc_pattern.clone(),
arity,
AnnotationSource::TypedBody {
region: annotation.region,
},
*signature_closure_type.clone(),
signature_closure_type,
),
Category::ClosureSize,
region,
),
constraints.store(signature.clone(), *fn_var, std::file!(), std::line!()),
constraints.store(signature, expr_var, std::file!(), std::line!()),
constraints.store_index(
signature_index,
*fn_var,
std::file!(),
std::line!(),
),
constraints.store_index(
signature_index,
expr_var,
std::file!(),
std::line!(),
),
constraints.store(ret_type, ret_var, std::file!(), std::line!()),
closure_constraint,
];
@ -1665,8 +1625,8 @@ fn constrain_closure_size(
)
};
let finalizer = constraints.equal_types(
Type::Variable(closure_var),
let finalizer = constraints.equal_types_var(
closure_var,
NoExpectation(closure_type),
Category::ClosureSize,
region,
@ -1908,8 +1868,8 @@ pub fn rec_defs_help(
def_pattern_state.vars.push(*pattern_var);
pattern_types.push(Type::Variable(*pattern_var));
let pattern_con = constraints.equal_types(
Type::Variable(*pattern_var),
let pattern_con = constraints.equal_types_var(
*pattern_var,
Expected::NoExpectation(loc_ann.clone()),
Category::Storage(std::file!(), std::line!()),
loc_pattern.region,
@ -1945,6 +1905,7 @@ pub fn rec_defs_help(
vars.push(*fn_var);
let signature_index = constraints.push_type(signature);
let state_constraints = constraints.and_constraint(state.constraints);
let cons = [
constraints.let_constraint(
@ -1962,13 +1923,18 @@ pub fn rec_defs_help(
),
// "fn_var is equal to the closure's type" - fn_var is used in code gen
// Store type into AST vars. We use Store so errors aren't reported twice
constraints.store(
signature.clone(),
constraints.store_index(
signature_index,
*fn_var,
std::file!(),
std::line!(),
),
constraints.store(signature, expr_var, std::file!(), std::line!()),
constraints.store_index(
signature_index,
expr_var,
std::file!(),
std::line!(),
),
constraints.store(ret_type, ret_var, std::file!(), std::line!()),
closure_constraint,
];

View File

@ -1,19 +1,101 @@
use roc_builtins::std::StdLib;
use roc_can::constraint::{Constraint, Constraints};
use roc_can::def::Declaration;
use roc_collections::all::{MutMap, MutSet, SendMap};
use roc_collections::all::MutMap;
use roc_error_macros::internal_error;
use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Loc, Region};
use roc_region::all::Loc;
use roc_types::solved_types::{FreeVars, SolvedType};
use roc_types::subs::{VarStore, Variable};
use roc_types::types::{Alias, Problem};
pub type SubsByModule = MutMap<ModuleId, ExposedModuleTypes>;
/// The types of all exposed values/functions of a collection of modules
#[derive(Clone, Debug, Default)]
pub struct ExposedByModule {
exposed: MutMap<ModuleId, ExposedModuleTypes>,
}
impl ExposedByModule {
    /// Record the exposed types for `module_id`.
    pub fn insert(&mut self, module_id: ModuleId, exposed: ExposedModuleTypes) {
        self.exposed.insert(module_id, exposed);
    }

    /// Look up the exposed types a module registered, if any.
    pub fn get(&self, module_id: &ModuleId) -> Option<&ExposedModuleTypes> {
        self.exposed.get(module_id)
    }

    /// Convenient when you need mutable access to the StorageSubs in the ExposedModuleTypes
    pub fn get_mut(&mut self, module_id: &ModuleId) -> Option<&mut ExposedModuleTypes> {
        self.exposed.get_mut(module_id)
    }

    /// Create a clone of `self` that has just a subset of the modules
    ///
    /// Useful when we know what modules a particular module imports, and want just
    /// the exposed types for those exposed modules.
    pub fn retain_modules<'a>(&self, it: impl Iterator<Item = &'a ModuleId>) -> Self {
        let mut output = Self::default();

        for module_id in it {
            if let Some(exposed_types) = self.exposed.get(module_id) {
                output.exposed.insert(*module_id, exposed_types.clone());
            } else {
                internal_error!("Module {:?} did not register its exposed values", module_id)
            }
        }

        output
    }
}
/// For one module: the exposed types of the modules it imports, plus the
/// subset of imported value symbols it actually references.
#[derive(Clone, Debug, Default)]
pub struct ExposedForModule {
/// Exposed types of the imported modules.
pub exposed_by_module: ExposedByModule,
/// Imported value symbols whose defining module registered valid exposed
/// types (builtins are excluded; see `ExposedForModule::new`).
pub imported_values: Vec<Symbol>,
}
impl ExposedForModule {
    /// Collect the symbols from `it` whose defining module has registered
    /// valid exposed types in `exposed_by_module`, pairing the two together.
    ///
    /// Builtin symbols are skipped entirely, and symbols whose module has no
    /// `Valid` entry are silently dropped.
    pub fn new<'a>(
        it: impl Iterator<Item = &'a Symbol>,
        exposed_by_module: ExposedByModule,
    ) -> Self {
        let mut imported_values = Vec::new();

        for symbol in it {
            // Today, builtins are not actually imported,
            // but generated in each module that uses them
            //
            // This will change when we write builtins in roc
            if symbol.is_builtin() {
                continue;
            }

            // Keep only symbols whose module registered valid exposed types.
            // (The original `else { continue }` was redundant: the loop body
            // ends here anyway.)
            if let Some(ExposedModuleTypes::Valid { .. }) =
                exposed_by_module.exposed.get(&symbol.module_id())
            {
                imported_values.push(*symbol);
            }
        }

        Self {
            imported_values,
            exposed_by_module,
        }
    }
}
/// The types of all exposed values/functions of a module
#[derive(Clone, Debug)]
pub enum ExposedModuleTypes {
Invalid,
Valid(MutMap<Symbol, SolvedType>, MutMap<Symbol, Alias>),
Valid {
stored_vars_by_symbol: Vec<(Symbol, Variable)>,
storage_subs: roc_types::subs::StorageSubs,
},
}
pub fn constrain_module(
@ -30,17 +112,56 @@ pub struct Import {
pub solved_type: SolvedType,
}
pub fn constrain_imported_values(
pub fn introduce_builtin_imports(
constraints: &mut Constraints,
imports: Vec<Import>,
imports: Vec<Symbol>,
body_con: Constraint,
var_store: &mut VarStore,
) -> (Vec<Variable>, Constraint) {
let mut def_types = SendMap::default();
) -> Constraint {
let stdlib = roc_builtins::std::borrow_stdlib();
let (rigid_vars, def_types) = constrain_builtin_imports(stdlib, imports, var_store);
constraints.let_import_constraint(rigid_vars, def_types, body_con, &[])
}
pub fn constrain_builtin_imports(
stdlib: &StdLib,
imports: Vec<Symbol>,
var_store: &mut VarStore,
) -> (Vec<Variable>, Vec<(Symbol, Loc<roc_types::types::Type>)>) {
let mut def_types = Vec::new();
let mut rigid_vars = Vec::new();
for import in imports {
for symbol in imports {
let mut free_vars = FreeVars::default();
let import = match stdlib.types.get(&symbol) {
Some((solved_type, region)) => {
let loc_symbol = Loc {
value: symbol,
region: *region,
};
Import {
loc_symbol,
solved_type: solved_type.clone(),
}
}
None => {
let is_valid_alias = stdlib.applies.contains(&symbol)
// This wasn't a builtin value or Apply; maybe it was a builtin alias.
|| roc_types::builtin_aliases::aliases().contains_key(&symbol);
if !is_valid_alias {
panic!(
"Could not find {:?} in builtin types {:?} or builtin aliases",
symbol, stdlib.types,
);
}
continue;
}
};
let loc_symbol = import.loc_symbol;
// an imported symbol can be either an alias or a value
@ -55,13 +176,13 @@ pub fn constrain_imported_values(
var_store,
);
def_types.insert(
def_types.push((
loc_symbol.value,
Loc {
region: loc_symbol.region,
value: typ,
},
);
));
for (_, var) in free_vars.named_vars {
rigid_vars.push(var);
@ -80,137 +201,5 @@ pub fn constrain_imported_values(
}
}
(
rigid_vars.clone(),
constraints.let_constraint(rigid_vars, [], def_types, Constraint::True, body_con),
)
}
/// Run pre_constrain_imports to get imported_symbols and imported_aliases.
pub fn constrain_imports(
constraints: &mut Constraints,
imported_symbols: Vec<Import>,
constraint: Constraint,
var_store: &mut VarStore,
) -> Constraint {
let (_introduced_rigids, constraint) =
constrain_imported_values(constraints, imported_symbols, constraint, var_store);
// TODO determine what to do with those rigids
// for var in introduced_rigids {
// output.ftv.insert(var, format!("internal_{:?}", var).into());
// }
constraint
}
pub struct ConstrainableImports {
pub imported_symbols: Vec<Import>,
pub imported_aliases: MutMap<Symbol, Alias>,
pub unused_imports: MutMap<ModuleId, Region>,
}
/// Run this before constraining imports.
///
/// Constraining imports is split into two different functions, because this
/// part of the work needs to be done on the main thread, whereas the rest of it
/// can be done on a different thread.
pub fn pre_constrain_imports(
home: ModuleId,
references: &MutSet<Symbol>,
imported_modules: MutMap<ModuleId, Region>,
exposed_types: &mut SubsByModule,
stdlib: &StdLib,
) -> ConstrainableImports {
let mut imported_symbols = Vec::with_capacity(references.len());
let mut imported_aliases = MutMap::default();
let mut unused_imports = imported_modules; // We'll remove these as we encounter them.
// Translate referenced symbols into constraints. We do this on the main
// thread because we need exclusive access to the exposed_types map, in order
// to get the necessary constraint info for any aliases we imported. We also
// resolve builtin types now, so we can use a reference to stdlib instead of
// having to either clone it or recreate it from scratch on the other thread.
for &symbol in references.iter() {
let module_id = symbol.module_id();
// We used this module, so clearly it is not unused!
unused_imports.remove(&module_id);
if module_id.is_builtin() {
// For builtin modules, we create imports from the
// hardcoded builtin map.
match stdlib.types.get(&symbol) {
Some((solved_type, region)) => {
let loc_symbol = Loc {
value: symbol,
region: *region,
};
imported_symbols.push(Import {
loc_symbol,
solved_type: solved_type.clone(),
});
}
None => {
let is_valid_alias = stdlib.applies.contains(&symbol)
// This wasn't a builtin value or Apply; maybe it was a builtin alias.
|| roc_types::builtin_aliases::aliases().contains_key(&symbol);
if !is_valid_alias {
panic!(
"Could not find {:?} in builtin types {:?} or builtin aliases",
symbol, stdlib.types,
);
}
}
}
} else if module_id != home {
// We already have constraints for our own symbols.
let region = Region::zero(); // TODO this should be the region where this symbol was declared in its home module. Look that up!
let loc_symbol = Loc {
value: symbol,
region,
};
match exposed_types.get(&module_id) {
Some(ExposedModuleTypes::Valid(solved_types, new_aliases)) => {
// If the exposed value was invalid (e.g. it didn't have
// a corresponding definition), it won't have an entry
// in solved_types
if let Some(solved_type) = solved_types.get(&symbol) {
// TODO should this be a union?
for (k, v) in new_aliases.clone() {
imported_aliases.insert(k, v);
}
imported_symbols.push(Import {
loc_symbol,
solved_type: solved_type.clone(),
});
}
}
Some(ExposedModuleTypes::Invalid) => {
// If that module was invalid, use True constraints
// for everything imported from it.
imported_symbols.push(Import {
loc_symbol,
solved_type: SolvedType::Erroneous(Problem::InvalidModule),
});
}
None => {
panic!(
"Could not find module {:?} in exposed_types {:?}",
module_id, exposed_types
);
}
}
}
}
ConstrainableImports {
imported_symbols,
imported_aliases,
unused_imports,
}
(rigid_vars, def_types)
}

View File

@ -504,12 +504,22 @@ pub fn constrain_pattern(
);
// Link the entire wrapped opaque type (with the now-constrained argument) to the type
// variables of the opaque type
// TODO: better expectation here
let link_type_variables_con = constraints.equal_types(
(**specialized_def_type).clone(),
Expected::NoExpectation(arg_pattern_type),
Category::OpaqueWrap(*opaque),
// variables of the opaque type.
//
// For example, suppose we have `O k := [ A k, B k ]`, and the pattern `@O (A s) -> s == ""`.
// Previous constraints will have solved `typeof s ~ Str`, and we have the
// `specialized_def_type` being `[ A k1, B k1 ]`, specializing `k` as `k1` for this opaque
// usage.
// We now want to link `typeof s ~ k1`, so to capture this relationship, we link
// the type of `A s` (the arg type) to `[ A k1, B k1 ]` (the specialized opaque type).
//
// This must **always** be a presence constraint, that is enforcing
// `[ A k1, B k1 ] += typeof (A s)`, because we are in a destructure position and not
// all constructors are covered in this branch!
let link_type_variables_con = constraints.pattern_presence(
arg_pattern_type,
PExpected::NoExpectation((**specialized_def_type).clone()),
PatternCategory::Opaque(*opaque),
loc_arg_pattern.region,
);

View File

@ -3,7 +3,9 @@ use crate::{
spaces::{fmt_comments_only, fmt_spaces, NewlineAt, INDENT},
Buf,
};
use roc_parse::ast::{AssignedField, Collection, Expr, Tag, TypeAnnotation, TypeHeader};
use roc_parse::ast::{
AssignedField, Collection, Expr, ExtractSpaces, HasClause, Tag, TypeAnnotation, TypeHeader,
};
use roc_parse::ident::UppercaseIdent;
use roc_region::all::Loc;
@ -159,6 +161,10 @@ impl<'a> Formattable for TypeAnnotation<'a> {
Apply(_, _, args) => args.iter().any(|loc_arg| loc_arg.value.is_multiline()),
As(lhs, _, _) => lhs.value.is_multiline(),
Where(annot, has_clauses) => {
annot.is_multiline() || has_clauses.iter().any(|has| has.is_multiline())
}
Record { fields, ext } => {
match ext {
Some(ann) if ann.value.is_multiline() => return true,
@ -291,6 +297,15 @@ impl<'a> Formattable for TypeAnnotation<'a> {
}
}
Where(annot, has_clauses) => {
annot.format_with_options(buf, parens, newlines, indent);
buf.push_str(" ");
for (i, has) in has_clauses.iter().enumerate() {
buf.push_str(if i == 0 { "| " } else { ", " });
has.format_with_options(buf, parens, newlines, indent);
}
}
SpaceBefore(ann, spaces) => {
buf.newline();
@ -514,3 +529,22 @@ impl<'a> Formattable for Tag<'a> {
}
}
}
impl<'a> Formattable for HasClause<'a> {
    /// A has-clause is multiline if its type variable or its ability is.
    fn is_multiline(&self) -> bool {
        self.var.value.is_multiline() || self.ability.is_multiline()
    }

    /// Emit the clause as `<var> has <ability>`, delegating the ability's
    /// rendering to its own formatter with the caller's options.
    fn format_with_options<'buf>(
        &self,
        buf: &mut Buf<'buf>,
        parens: Parens,
        newlines: Newlines,
        indent: u16,
    ) {
        // Strip surrounding spaces/comments from the variable name before
        // printing it — presumably they were captured by the parser; the
        // `| `/`, ` separators are emitted by the caller (see `Where` above).
        buf.push_str(self.var.value.extract_spaces().item);
        buf.push_str(" has ");
        self.ability
            .format_with_options(buf, parens, newlines, indent);
    }
}

View File

@ -2,7 +2,7 @@ use crate::annotation::{Formattable, Newlines, Parens};
use crate::pattern::fmt_pattern;
use crate::spaces::{fmt_spaces, INDENT};
use crate::Buf;
use roc_parse::ast::{Def, Expr, Pattern, TypeHeader};
use roc_parse::ast::{AbilityDemand, Def, Expr, ExtractSpaces, Pattern, TypeHeader};
use roc_region::all::Loc;
/// A Located formattable value is also formattable
@ -22,6 +22,7 @@ impl<'a> Formattable for Def<'a> {
SpaceBefore(sub_def, spaces) | SpaceAfter(sub_def, spaces) => {
spaces.iter().any(|s| s.is_comment()) || sub_def.is_multiline()
}
Ability { demands, .. } => demands.iter().any(|d| d.is_multiline()),
NotYetImplemented(s) => todo!("{}", s),
}
}
@ -83,6 +84,32 @@ impl<'a> Formattable for Def<'a> {
ann.format(buf, indent + INDENT)
}
Ability {
header: TypeHeader { name, vars },
loc_has: _,
demands,
} => {
buf.indent(indent);
buf.push_str(name.value);
for var in *vars {
buf.spaces(1);
fmt_pattern(buf, &var.value, indent, Parens::NotNeeded);
}
buf.push_str(" has");
if !self.is_multiline() {
debug_assert_eq!(demands.len(), 1);
buf.push_str(" ");
demands[0].format(buf, indent + INDENT);
} else {
for demand in demands.iter() {
buf.newline();
buf.indent(indent + INDENT);
demand.format(buf, indent + INDENT);
}
}
}
Body(loc_pattern, loc_expr) => {
fmt_body(buf, &loc_pattern.value, &loc_expr.value, indent);
}
@ -167,3 +194,15 @@ pub fn fmt_body<'a, 'buf>(
body.format_with_options(buf, Parens::NotNeeded, Newlines::Yes, indent);
}
}
impl<'a> Formattable for AbilityDemand<'a> {
fn is_multiline(&self) -> bool {
self.name.value.is_multiline() || self.typ.is_multiline()
}
fn format<'buf>(&self, buf: &mut Buf<'buf>, indent: u16) {
buf.push_str(self.name.value.extract_spaces().item);
buf.push_str(" : ");
self.typ.value.format(buf, indent + INDENT);
}
}

View File

@ -6558,7 +6558,13 @@ fn build_int_binop<'a, 'ctx, 'env>(
NumGte => bd.build_int_compare(SGE, lhs, rhs, "int_gte").into(),
NumLt => bd.build_int_compare(SLT, lhs, rhs, "int_lt").into(),
NumLte => bd.build_int_compare(SLE, lhs, rhs, "int_lte").into(),
NumRemUnchecked => bd.build_int_signed_rem(lhs, rhs, "rem_int").into(),
NumRemUnchecked => {
if int_width.is_signed() {
bd.build_int_signed_rem(lhs, rhs, "rem_int").into()
} else {
bd.build_int_unsigned_rem(lhs, rhs, "rem_uint").into()
}
}
NumIsMultipleOf => {
// this builds the following construct
//
@ -6632,7 +6638,13 @@ fn build_int_binop<'a, 'ctx, 'env>(
&[lhs.into(), rhs.into()],
&bitcode::NUM_POW_INT[int_width],
),
NumDivUnchecked => bd.build_int_signed_div(lhs, rhs, "div_int").into(),
NumDivUnchecked => {
if int_width.is_signed() {
bd.build_int_signed_div(lhs, rhs, "div_int").into()
} else {
bd.build_int_unsigned_div(lhs, rhs, "div_uint").into()
}
}
NumDivCeilUnchecked => call_bitcode_fn(
env,
&[lhs.into(), rhs.into()],

View File

@ -936,11 +936,13 @@ impl<'a> WasmBackend<'a> {
}
_ => internal_error!("Cannot create struct {:?} with storage {:?}", sym, storage),
};
} else {
} else if !fields.is_empty() {
// Struct expression but not Struct layout => single element. Copy it.
let field_storage = self.storage.get(&fields[0]).to_owned();
self.storage
.clone_value(&mut self.code_builder, storage, &field_storage, fields[0]);
} else {
// Empty record. Nothing to do.
}
}

View File

@ -4,6 +4,7 @@ use crate::docs::TypeAnnotation::{
};
use crate::file::LoadedModule;
use roc_can::scope::Scope;
use roc_error_macros::todo_abilities;
use roc_module::ident::ModuleName;
use roc_module::symbol::IdentIds;
use roc_parse::ast::CommentOrNewline;
@ -251,6 +252,8 @@ fn generate_entry_doc<'a>(
(acc, None)
}
Def::Ability { .. } => todo_abilities!(),
Def::Body(_, _) => (acc, None),
Def::Expect(c) => todo!("documentation for tests {:?}", c),

View File

@ -4,15 +4,16 @@ use crossbeam::channel::{bounded, Sender};
use crossbeam::deque::{Injector, Stealer, Worker};
use crossbeam::thread;
use parking_lot::Mutex;
use roc_builtins::std::StdLib;
use roc_builtins::std::{borrow_stdlib, StdLib};
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints};
use roc_can::def::Declaration;
use roc_can::module::{canonicalize_module_defs, Module};
use roc_collections::all::{default_hasher, BumpMap, MutMap, MutSet};
use roc_constrain::module::{
constrain_imports, constrain_module, pre_constrain_imports, ConstrainableImports,
ExposedModuleTypes, Import, SubsByModule,
constrain_builtin_imports, constrain_module, ExposedByModule, ExposedForModule,
ExposedModuleTypes,
};
use roc_error_macros::internal_error;
use roc_module::ident::{Ident, ModuleName, QualifiedModuleName};
use roc_module::symbol::{
IdentIds, Interns, ModuleId, ModuleIds, PQModuleName, PackageModuleIds, PackageQualified,
@ -247,7 +248,6 @@ fn start_phase<'a>(
var_store,
imported_modules,
&mut state.exposed_types,
state.stdlib,
dep_idents,
declarations,
)
@ -501,13 +501,11 @@ enum Msg<'a> {
decls: Vec<Declaration>,
dep_idents: MutMap<ModuleId, IdentIds>,
module_timing: ModuleTiming,
unused_imports: MutMap<ModuleId, Region>,
},
FinishedAllTypeChecking {
solved_subs: Solved<Subs>,
exposed_vars_by_symbol: Vec<(Symbol, Variable)>,
exposed_aliases_by_symbol: MutMap<Symbol, Alias>,
exposed_values: Vec<Symbol>,
dep_idents: MutMap<ModuleId, IdentIds>,
documentation: MutMap<ModuleId, ModuleDocumentation>,
},
@ -566,8 +564,7 @@ struct State<'a> {
pub root_id: ModuleId,
pub platform_data: Option<PlatformData>,
pub goal_phase: Phase,
pub stdlib: &'a StdLib,
pub exposed_types: SubsByModule,
pub exposed_types: ExposedByModule,
pub output_path: Option<&'a str>,
pub platform_path: PlatformPath<'a>,
pub target_info: TargetInfo,
@ -606,8 +603,7 @@ impl<'a> State<'a> {
root_id: ModuleId,
target_info: TargetInfo,
goal_phase: Phase,
stdlib: &'a StdLib,
exposed_types: SubsByModule,
exposed_types: ExposedByModule,
arc_modules: Arc<Mutex<PackageModuleIds<'a>>>,
ident_ids_by_module: Arc<Mutex<MutMap<ModuleId, IdentIds>>>,
) -> Self {
@ -618,7 +614,6 @@ impl<'a> State<'a> {
target_info,
platform_data: None,
goal_phase,
stdlib,
output_path: None,
platform_path: PlatformPath::NotSpecified,
module_cache: ModuleCache::default(),
@ -729,14 +724,14 @@ enum BuildTask<'a> {
Solve {
module: Module,
ident_ids: IdentIds,
imported_symbols: Vec<Import>,
imported_builtins: Vec<Symbol>,
exposed_for_module: ExposedForModule,
module_timing: ModuleTiming,
constraints: Constraints,
constraint: ConstraintSoa,
var_store: VarStore,
declarations: Vec<Declaration>,
dep_idents: MutMap<ModuleId, IdentIds>,
unused_imports: MutMap<ModuleId, Region>,
},
BuildPendingSpecializations {
module_timing: ModuleTiming,
@ -812,7 +807,7 @@ pub fn load_and_typecheck<'a>(
filename: PathBuf,
stdlib: &'a StdLib,
src_dir: &Path,
exposed_types: SubsByModule,
exposed_types: ExposedByModule,
target_info: TargetInfo,
) -> Result<LoadedModule, LoadingProblem<'a>> {
use LoadResult::*;
@ -839,7 +834,7 @@ pub fn load_and_monomorphize<'a>(
filename: PathBuf,
stdlib: &'a StdLib,
src_dir: &Path,
exposed_types: SubsByModule,
exposed_types: ExposedByModule,
target_info: TargetInfo,
) -> Result<MonomorphizedModule<'a>, LoadingProblem<'a>> {
use LoadResult::*;
@ -867,7 +862,7 @@ pub fn load_and_monomorphize_from_str<'a>(
src: &'a str,
stdlib: &'a StdLib,
src_dir: &Path,
exposed_types: SubsByModule,
exposed_types: ExposedByModule,
target_info: TargetInfo,
) -> Result<MonomorphizedModule<'a>, LoadingProblem<'a>> {
use LoadResult::*;
@ -1031,9 +1026,9 @@ enum LoadResult<'a> {
fn load<'a>(
arena: &'a Bump,
load_start: LoadStart<'a>,
stdlib: &'a StdLib,
_stdlib: &'a StdLib,
src_dir: &Path,
exposed_types: SubsByModule,
exposed_types: ExposedByModule,
goal_phase: Phase,
target_info: TargetInfo,
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
@ -1043,7 +1038,6 @@ fn load<'a>(
load_single_threaded(
arena,
load_start,
stdlib,
src_dir,
exposed_types,
goal_phase,
@ -1053,7 +1047,6 @@ fn load<'a>(
load_multi_threaded(
arena,
load_start,
stdlib,
src_dir,
exposed_types,
goal_phase,
@ -1067,9 +1060,8 @@ fn load<'a>(
fn load_single_threaded<'a>(
arena: &'a Bump,
load_start: LoadStart<'a>,
stdlib: &'a StdLib,
src_dir: &Path,
exposed_types: SubsByModule,
exposed_types: ExposedByModule,
goal_phase: Phase,
target_info: TargetInfo,
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
@ -1090,7 +1082,6 @@ fn load_single_threaded<'a>(
root_id,
target_info,
goal_phase,
stdlib,
exposed_types,
arc_modules,
ident_ids_by_module,
@ -1154,7 +1145,6 @@ fn state_thread_step<'a>(
solved_subs,
exposed_vars_by_symbol,
exposed_aliases_by_symbol,
exposed_values,
dep_idents,
documentation,
} => {
@ -1164,7 +1154,6 @@ fn state_thread_step<'a>(
let typechecked = finish(
state,
solved_subs,
exposed_values,
exposed_aliases_by_symbol,
exposed_vars_by_symbol,
dep_idents,
@ -1243,9 +1232,8 @@ fn state_thread_step<'a>(
fn load_multi_threaded<'a>(
arena: &'a Bump,
load_start: LoadStart<'a>,
stdlib: &'a StdLib,
src_dir: &Path,
exposed_types: SubsByModule,
exposed_types: ExposedByModule,
goal_phase: Phase,
target_info: TargetInfo,
) -> Result<LoadResult<'a>, LoadingProblem<'a>> {
@ -1260,7 +1248,6 @@ fn load_multi_threaded<'a>(
root_id,
target_info,
goal_phase,
stdlib,
exposed_types,
arc_modules,
ident_ids_by_module,
@ -1553,6 +1540,32 @@ fn debug_print_ir(state: &State, flag: &str) {
println!("{}", result);
}
/// Report modules that are imported, but from which nothing is used
fn report_unused_imported_modules<'a>(
    state: &mut State<'a>,
    module_id: ModuleId,
    constrained_module: &ConstrainedModule,
) {
    // Start by assuming every imported module is unused, then cross off each
    // module that owns at least one referenced value or referenced type.
    let mut unused = constrained_module.imported_modules.clone();

    let referenced_symbols = constrained_module
        .module
        .referenced_values
        .iter()
        .chain(constrained_module.module.referenced_types.iter());

    for symbol in referenced_symbols {
        unused.remove(&symbol.module_id());
    }

    // Append an UnusedImport problem for every module that survived the pass,
    // creating this module's problem list if it doesn't exist yet.
    let problems = state
        .module_cache
        .can_problems
        .entry(module_id)
        .or_insert_with(std::vec::Vec::new);

    for (unused_module, region) in unused.drain() {
        problems.push(roc_problem::can::Problem::UnusedImport(unused_module, region));
    }
}
fn update<'a>(
mut state: State<'a>,
msg: Msg<'a>,
@ -1730,6 +1743,8 @@ fn update<'a>(
state.module_cache.documentation.insert(module_id, docs);
}
report_unused_imported_modules(&mut state, module_id, &constrained_module);
state
.module_cache
.aliases
@ -1756,7 +1771,6 @@ fn update<'a>(
decls,
dep_idents,
mut module_timing,
mut unused_imports,
} => {
log!("solved types for {:?}", module_id);
module_timing.end_time = SystemTime::now();
@ -1766,15 +1780,6 @@ fn update<'a>(
.type_problems
.insert(module_id, solved_module.problems);
let existing = match state.module_cache.can_problems.entry(module_id) {
Vacant(entry) => entry.insert(std::vec::Vec::new()),
Occupied(entry) => entry.into_mut(),
};
for (unused, region) in unused_imports.drain() {
existing.push(roc_problem::can::Problem::UnusedImport(unused, region));
}
let work = state.dependencies.notify(module_id, Phase::SolveTypes);
// if there is a platform, the Package-Config module provides host-exposed,
@ -1815,7 +1820,6 @@ fn update<'a>(
.send(Msg::FinishedAllTypeChecking {
solved_subs,
exposed_vars_by_symbol: solved_module.exposed_vars_by_symbol,
exposed_values: solved_module.exposed_symbols,
exposed_aliases_by_symbol: solved_module.aliases,
dep_idents,
documentation,
@ -1832,7 +1836,10 @@ fn update<'a>(
} else {
state.exposed_types.insert(
module_id,
ExposedModuleTypes::Valid(solved_module.solved_types, solved_module.aliases),
ExposedModuleTypes::Valid {
stored_vars_by_symbol: solved_module.stored_vars_by_symbol,
storage_subs: solved_module.storage_subs,
},
);
if state.goal_phase > Phase::SolveTypes {
@ -2133,7 +2140,6 @@ fn finish_specialization(
fn finish(
state: State,
solved: Solved<Subs>,
exposed_values: Vec<Symbol>,
exposed_aliases_by_symbol: MutMap<Symbol, Alias>,
exposed_vars_by_symbol: Vec<(Symbol, Variable)>,
dep_idents: MutMap<ModuleId, IdentIds>,
@ -2156,6 +2162,8 @@ fn finish(
.map(|(id, (path, src))| (id, (path, src.into())))
.collect();
let exposed_values = exposed_vars_by_symbol.iter().map(|x| x.0).collect();
LoadedModule {
module_id: state.root_id,
interns,
@ -3027,7 +3035,7 @@ fn send_header_two<'a>(
impl<'a> BuildTask<'a> {
// TODO trim down these arguments - possibly by moving Constraint into Module
#[allow(clippy::too_many_arguments)]
pub fn solve_module(
fn solve_module(
module: Module,
ident_ids: IdentIds,
module_timing: ModuleTiming,
@ -3035,40 +3043,33 @@ impl<'a> BuildTask<'a> {
constraint: ConstraintSoa,
var_store: VarStore,
imported_modules: MutMap<ModuleId, Region>,
exposed_types: &mut SubsByModule,
stdlib: &StdLib,
exposed_types: &mut ExposedByModule,
dep_idents: MutMap<ModuleId, IdentIds>,
declarations: Vec<Declaration>,
) -> Self {
let home = module.module_id;
let exposed_by_module = exposed_types.retain_modules(imported_modules.keys());
let exposed_for_module =
ExposedForModule::new(module.referenced_values.iter(), exposed_by_module);
// Get the constraints for this module's imports. We do this on the main thread
// to avoid having to lock the map of exposed types, or to clone it
// (which would be more expensive for the main thread).
let ConstrainableImports {
imported_symbols,
imported_aliases: _,
unused_imports,
} = pre_constrain_imports(
home,
&module.references,
imported_modules,
exposed_types,
stdlib,
);
let imported_builtins = module
.referenced_values
.iter()
.filter(|s| s.is_builtin())
.copied()
.collect();
// Next, solve this module in the background.
Self::Solve {
module,
ident_ids,
imported_symbols,
imported_builtins,
exposed_for_module,
constraints,
constraint,
var_store,
declarations,
dep_idents,
module_timing,
unused_imports,
}
}
}
@ -3078,25 +3079,19 @@ fn run_solve<'a>(
module: Module,
ident_ids: IdentIds,
mut module_timing: ModuleTiming,
imported_symbols: Vec<Import>,
imported_builtins: Vec<Symbol>,
mut exposed_for_module: ExposedForModule,
mut constraints: Constraints,
constraint: ConstraintSoa,
mut var_store: VarStore,
decls: Vec<Declaration>,
dep_idents: MutMap<ModuleId, IdentIds>,
unused_imports: MutMap<ModuleId, Region>,
) -> Msg<'a> {
// We have more constraining work to do now, so we'll add it to our timings.
let constrain_start = SystemTime::now();
// Finish constraining the module by wrapping the existing Constraint
// in the ones we just computed. We can do this off the main thread.
let constraint = constrain_imports(
&mut constraints,
imported_symbols,
constraint,
&mut var_store,
);
let (mut rigid_vars, mut def_types) =
constrain_builtin_imports(borrow_stdlib(), imported_builtins, &mut var_store);
let constrain_end = SystemTime::now();
@ -3109,25 +3104,81 @@ fn run_solve<'a>(
..
} = module;
// TODO
// if false { debug_assert!(constraint.validate(), "{:?}", &constraint); }
let mut subs = Subs::new_from_varstore(var_store);
let mut import_variables = Vec::new();
for symbol in exposed_for_module.imported_values {
let module_id = symbol.module_id();
match exposed_for_module.exposed_by_module.get_mut(&module_id) {
Some(t) => match t {
ExposedModuleTypes::Invalid => {
// make the type a flex var, so it unifies with anything
// this way the error is only reported in the module it originates in
let variable = subs.fresh_unnamed_flex_var();
def_types.push((
symbol,
Loc::at_zero(roc_types::types::Type::Variable(variable)),
));
}
ExposedModuleTypes::Valid {
stored_vars_by_symbol,
storage_subs,
} => {
let variable = match stored_vars_by_symbol.iter().find(|(s, _)| *s == symbol) {
None => {
// Today we define builtins in each module that uses them
// so even though they have a different module name from
// the surrounding module, they are not technically imported
debug_assert!(symbol.is_builtin());
continue;
}
Some((_, x)) => *x,
};
let copied_import = storage_subs.export_variable_to(&mut subs, variable);
// not a typo; rigids are turned into flex during type inference, but when imported we must
// consider them rigid variables
rigid_vars.extend(copied_import.rigid);
rigid_vars.extend(copied_import.flex);
import_variables.extend(copied_import.registered);
def_types.push((
symbol,
Loc::at_zero(roc_types::types::Type::Variable(copied_import.variable)),
));
}
},
None => {
internal_error!("Imported module {:?} is not available", module_id)
}
}
}
let actual_constraint =
constraints.let_import_constraint(rigid_vars, def_types, constraint, &import_variables);
let (solved_subs, solved_env, problems) =
roc_solve::module::run_solve(&constraints, constraint, rigid_variables, var_store);
roc_solve::module::run_solve(&constraints, actual_constraint, rigid_variables, subs);
let exposed_vars_by_symbol: Vec<_> = solved_env
.vars_by_symbol()
.filter(|(k, _)| exposed_symbols.contains(k))
.collect();
let solved_types = roc_solve::module::make_solved_types(&solved_subs, &exposed_vars_by_symbol);
let mut solved_subs = solved_subs;
let (storage_subs, stored_vars_by_symbol) =
roc_solve::module::exposed_types_storage_subs(&mut solved_subs, &exposed_vars_by_symbol);
let solved_module = SolvedModule {
exposed_vars_by_symbol,
exposed_symbols: exposed_symbols.into_iter().collect::<Vec<_>>(),
solved_types,
problems,
aliases,
stored_vars_by_symbol,
storage_subs,
};
// Record the final timings
@ -3146,7 +3197,6 @@ fn run_solve<'a>(
dep_idents,
solved_module,
module_timing,
unused_imports,
}
}
@ -3266,7 +3316,8 @@ fn canonicalize_and_constrain<'a>(
module_id,
exposed_imports: module_output.exposed_imports,
exposed_symbols,
references: module_output.references,
referenced_values: module_output.referenced_values,
referenced_types: module_output.referenced_types,
aliases: module_output.aliases,
rigid_variables: module_output.rigid_variables,
};
@ -3758,25 +3809,25 @@ fn run_task<'a>(
Solve {
module,
module_timing,
imported_symbols,
imported_builtins,
exposed_for_module,
constraints,
constraint,
var_store,
ident_ids,
declarations,
dep_idents,
unused_imports,
} => Ok(run_solve(
module,
ident_ids,
module_timing,
imported_symbols,
imported_builtins,
exposed_for_module,
constraints,
constraint,
var_store,
declarations,
dep_idents,
unused_imports,
)),
BuildPendingSpecializations {
module_id,

View File

@ -18,8 +18,7 @@ mod test_load {
use bumpalo::Bump;
use roc_can::def::Declaration::*;
use roc_can::def::Def;
use roc_collections::all::MutMap;
use roc_constrain::module::SubsByModule;
use roc_constrain::module::ExposedByModule;
use roc_load::file::LoadedModule;
use roc_module::ident::ModuleName;
use roc_module::symbol::{Interns, ModuleId};
@ -111,7 +110,6 @@ mod test_load {
let stdlib = roc_builtins::std::standard_stdlib();
let mut file_handles: Vec<_> = Vec::new();
let exposed_types = MutMap::default();
// create a temporary directory
let dir = tempdir()?;
@ -146,7 +144,7 @@ mod test_load {
full_file_path,
arena.alloc(stdlib),
dir.path(),
exposed_types,
Default::default(),
TARGET_INFO,
)
};
@ -159,7 +157,7 @@ mod test_load {
fn load_fixture(
dir_name: &str,
module_name: &str,
subs_by_module: SubsByModule,
subs_by_module: ExposedByModule,
) -> LoadedModule {
let src_dir = fixtures_dir().join(dir_name);
let filename = src_dir.join(format!("{}.roc", module_name));
@ -325,7 +323,7 @@ mod test_load {
#[test]
fn interface_with_deps() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let src_dir = fixtures_dir().join("interface_with_deps");
let filename = src_dir.join("Primary.roc");
let arena = Bump::new();
@ -373,7 +371,7 @@ mod test_load {
#[test]
fn load_unit() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("no_deps", "Unit", subs_by_module);
expect_types(
@ -386,7 +384,7 @@ mod test_load {
#[test]
fn import_alias() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("interface_with_deps", "ImportAlias", subs_by_module);
expect_types(
@ -399,7 +397,7 @@ mod test_load {
#[test]
fn load_and_typecheck() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("interface_with_deps", "WithBuiltins", subs_by_module);
expect_types(
@ -419,7 +417,7 @@ mod test_load {
#[test]
fn iface_quicksort() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("interface_with_deps", "Quicksort", subs_by_module);
expect_types(
@ -435,7 +433,7 @@ mod test_load {
#[test]
fn quicksort_one_def() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("app_with_deps", "QuicksortOneDef", subs_by_module);
expect_types(
@ -448,7 +446,7 @@ mod test_load {
#[test]
fn app_quicksort() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("app_with_deps", "Quicksort", subs_by_module);
expect_types(
@ -464,7 +462,7 @@ mod test_load {
#[test]
fn load_astar() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("interface_with_deps", "AStar", subs_by_module);
expect_types(
@ -482,7 +480,7 @@ mod test_load {
#[test]
fn load_principal_types() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("no_deps", "Principal", subs_by_module);
expect_types(
@ -496,7 +494,7 @@ mod test_load {
#[test]
fn iface_dep_types() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("interface_with_deps", "Primary", subs_by_module);
expect_types(
@ -511,14 +509,14 @@ mod test_load {
"w" => "Dep1.Identity {}",
"succeed" => "a -> Dep1.Identity a",
"yay" => "Res.Res {} err",
"withDefault" => "Res.Res a *, a -> a",
"withDefault" => "Res.Res a err, a -> a",
},
);
}
#[test]
fn app_dep_types() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("app_with_deps", "Primary", subs_by_module);
expect_types(
@ -533,14 +531,14 @@ mod test_load {
"w" => "Dep1.Identity {}",
"succeed" => "a -> Dep1.Identity a",
"yay" => "Res.Res {} err",
"withDefault" => "Res.Res a *, a -> a",
"withDefault" => "Res.Res a err, a -> a",
},
);
}
#[test]
fn imported_dep_regression() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("interface_with_deps", "OneDep", subs_by_module);
expect_types(
@ -590,7 +588,7 @@ mod test_load {
#[test]
#[should_panic(expected = "FILE NOT FOUND")]
fn file_not_found() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("interface_with_deps", "invalid$name", subs_by_module);
expect_types(
@ -604,7 +602,7 @@ mod test_load {
#[test]
#[should_panic(expected = "FILE NOT FOUND")]
fn imported_file_not_found() {
let subs_by_module = MutMap::default();
let subs_by_module = Default::default();
let loaded_module = load_fixture("no_deps", "MissingDep", subs_by_module);
expect_types(

View File

@ -23,6 +23,20 @@ pub enum Spaced<'a, T> {
SpaceAfter(&'a Spaced<'a, T>, &'a [CommentOrNewline<'a>]),
}
impl<'a, T> Spaced<'a, T> {
    /// A `Spaced` is multiline if it has newlines or comments before or after the item, since
    /// comments induce newlines!
    pub fn is_multiline(&self) -> bool {
        match self {
            Spaced::SpaceBefore(_, spaces) | Spaced::SpaceAfter(_, spaces) => {
                // An empty spaces list should never be constructed; the
                // presence of the variant alone implies at least one newline.
                debug_assert!(!spaces.is_empty());
                true
            }
            Spaced::Item(_) => false,
        }
    }
}
impl<'a, T: Debug> Debug for Spaced<'a, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
@ -248,6 +262,22 @@ impl<'a> TypeHeader<'a> {
}
}
/// The `has` keyword associated with ability definitions.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Has<'a> {
    // The bare `has` keyword itself.
    Has,
    // `has` with comments/newlines immediately before it; preserved for the formatter.
    SpaceBefore(&'a Has<'a>, &'a [CommentOrNewline<'a>]),
    // `has` with comments/newlines immediately after it; preserved for the formatter.
    SpaceAfter(&'a Has<'a>, &'a [CommentOrNewline<'a>]),
}
/// An ability demand is a value defining the ability; for example `hash : a -> U64 | a has Hash`
/// for a `Hash` ability.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct AbilityDemand<'a> {
    // The name of the demanded value, e.g. `hash`.
    pub name: Loc<Spaced<'a, &'a str>>,
    // The demanded type, e.g. `a -> U64 | a has Hash`.
    pub typ: Loc<TypeAnnotation<'a>>,
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Def<'a> {
// TODO in canonicalization, validate the pattern; only certain patterns
@ -269,6 +299,15 @@ pub enum Def<'a> {
typ: Loc<TypeAnnotation<'a>>,
},
/// An ability definition. E.g.
/// Hash has
/// hash : a -> U64 | a has Hash
Ability {
header: TypeHeader<'a>,
loc_has: Loc<Has<'a>>,
demands: &'a [AbilityDemand<'a>],
},
// TODO in canonicalization, check to see if there are any newlines after the
// annotation; if not, and if it's followed by a Body, then the annotation
// applies to that expr! (TODO: verify that the pattern for both annotation and body match.)
@ -304,6 +343,13 @@ impl<'a> Def<'a> {
}
}
/// One clause of a `where`-style constraint, e.g. the `a has Hash` in
/// `a -> U64 | a has Hash`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct HasClause<'a> {
    // The constrained type variable, e.g. `a`.
    pub var: Loc<Spaced<'a, &'a str>>,
    // Should always be a zero-argument `Apply`; we'll check this in canonicalization
    pub ability: Loc<TypeAnnotation<'a>>,
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum TypeAnnotation<'a> {
/// A function. The types of its arguments, then the type of its return value.
@ -343,6 +389,9 @@ pub enum TypeAnnotation<'a> {
/// The `*` type variable, e.g. in (List *)
Wildcard,
/// A "where" clause demanding abilities designated by a `|`, e.g. `a -> U64 | a has Hash`
Where(&'a Loc<TypeAnnotation<'a>>, &'a [Loc<HasClause<'a>>]),
// We preserve this for the formatter; canonicalization ignores it.
SpaceBefore(&'a TypeAnnotation<'a>, &'a [CommentOrNewline<'a>]),
SpaceAfter(&'a TypeAnnotation<'a>, &'a [CommentOrNewline<'a>]),
@ -814,6 +863,15 @@ impl<'a> Spaceable<'a> for Def<'a> {
}
}
impl<'a> Spaceable<'a> for Has<'a> {
    // Attach leading comments/newlines to the `has` keyword.
    fn before(&'a self, spaces: &'a [CommentOrNewline<'a>]) -> Self {
        Has::SpaceBefore(self, spaces)
    }
    // Attach trailing comments/newlines to the `has` keyword.
    fn after(&'a self, spaces: &'a [CommentOrNewline<'a>]) -> Self {
        Has::SpaceAfter(self, spaces)
    }
}
impl<'a> Expr<'a> {
pub fn loc_ref(&'a self, region: Region) -> Loc<&'a Self> {
Loc {

View File

@ -1,5 +1,5 @@
use crate::ast::{
AssignedField, Collection, CommentOrNewline, Def, Expr, ExtractSpaces, Pattern, Spaceable,
AssignedField, Collection, CommentOrNewline, Def, Expr, ExtractSpaces, Has, Pattern, Spaceable,
TypeAnnotation, TypeHeader,
};
use crate::blankspace::{space0_after_e, space0_around_ee, space0_before_e, space0_e};
@ -1071,6 +1071,187 @@ fn finish_parsing_alias_or_opaque<'a>(
parse_defs_expr(options, start_column, def_state, arena, state)
}
/// Parsers for the demand lines inside an ability definition
/// (`Hash has` followed by demands like `hash : a -> U64 | a has Hash`).
mod ability {
    use super::*;
    use crate::{
        ast::{AbilityDemand, Spaceable, Spaced},
        parser::EAbility,
    };

    /// Parses a single ability demand line; see `parse_demand`.
    fn parse_demand_help<'a>(
        start_column: u32,
    ) -> impl Parser<'a, AbilityDemand<'a>, EAbility<'a>> {
        map!(
            and!(
                // The demand name must be a lowercase identifier.
                specialize(|_, pos| EAbility::DemandName(pos), loc!(lowercase_ident())),
                skip_first!(
                    and!(
                        // TODO: do we get anything from picking up spaces here?
                        space0_e(start_column, EAbility::DemandName),
                        word1(b':', EAbility::DemandColon)
                    ),
                    specialize(
                        EAbility::Type,
                        // Require the type to be more indented than the name
                        type_annotation::located_help(start_column + 1, true)
                    )
                )
            ),
            |(name, typ): (Loc<&'a str>, Loc<TypeAnnotation<'a>>)| {
                AbilityDemand {
                    name: name.map_owned(Spaced::Item),
                    typ,
                }
            }
        )
    }

    /// How a demand's indentation is constrained.
    pub enum IndentLevel {
        // Any column at or past this minimum is acceptable (used for the
        // first demand, which fixes the column for the rest).
        PendingMin(u32),
        // The demand must start at exactly this column.
        Exact(u32),
    }

    /// Parses an ability demand like `hash : a -> U64 | a has Hash`, in the context of a larger
    /// ability definition.
    /// This is basically the same as parsing a free-floating annotation, but with stricter rules.
    pub fn parse_demand<'a>(
        indent: IndentLevel,
    ) -> impl Parser<'a, (u32, AbilityDemand<'a>), EAbility<'a>> {
        move |arena, state: State<'a>| {
            // Keep the pre-parse state so indentation failures can roll back.
            let initial = state.clone();

            // Put no restrictions on the indent after the spaces; we'll check it manually.
            match space0_e(0, EAbility::DemandName).parse(arena, state) {
                Err((MadeProgress, fail, _)) => Err((NoProgress, fail, initial)),
                Err((NoProgress, fail, _)) => Err((NoProgress, fail, initial)),
                Ok((_progress, spaces, state)) => {
                    match indent {
                        IndentLevel::PendingMin(min_indent) if state.column() < min_indent => {
                            // Under-indented relative to the required minimum.
                            let indent_difference = state.column() as i32 - min_indent as i32;
                            Err((
                                MadeProgress,
                                EAbility::DemandAlignment(indent_difference, state.pos()),
                                initial,
                            ))
                        }
                        IndentLevel::Exact(wanted) if state.column() < wanted => {
                            // This demand is not indented correctly
                            let indent_difference = state.column() as i32 - wanted as i32;
                            Err((
                                // Rollback because the deindent may be because there is a next
                                // expression
                                NoProgress,
                                EAbility::DemandAlignment(indent_difference, state.pos()),
                                initial,
                            ))
                        }
                        IndentLevel::Exact(wanted) if state.column() > wanted => {
                            // This demand is not indented correctly
                            let indent_difference = state.column() as i32 - wanted as i32;
                            Err((
                                MadeProgress,
                                EAbility::DemandAlignment(indent_difference, state.pos()),
                                initial,
                            ))
                        }
                        _ => {
                            // Indentation is acceptable; remember the column so the
                            // caller can pin later demands to it.
                            let indent_column = state.column();

                            let parser = parse_demand_help(indent_column);

                            match parser.parse(arena, state) {
                                Err((MadeProgress, fail, state)) => {
                                    Err((MadeProgress, fail, state))
                                }
                                Err((NoProgress, fail, _)) => {
                                    // We made progress relative to the entire ability definition,
                                    // so this is an error.
                                    Err((MadeProgress, fail, initial))
                                }
                                Ok((_, mut demand, state)) => {
                                    // Tag spaces onto the parsed demand name
                                    if !spaces.is_empty() {
                                        demand.name = arena
                                            .alloc(demand.name.value)
                                            .with_spaces_before(spaces, demand.name.region);
                                    }

                                    Ok((MadeProgress, (indent_column, demand), state))
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/// Finishes parsing an ability definition (`Name args has <demands>`) once the
/// header and the `has` keyword have already been consumed, then continues
/// parsing the remaining defs in the surrounding block.
fn finish_parsing_ability<'a>(
    start_column: u32,
    options: ExprParseOptions,
    name: Loc<&'a str>,
    args: &'a [Loc<Pattern<'a>>],
    loc_has: Loc<Has<'a>>,
    arena: &'a Bump,
    state: State<'a>,
) -> ParseResult<'a, Expr<'a>, EExpr<'a>> {
    let mut demands = Vec::with_capacity_in(2, arena);

    // Demands must be indented past the ability's own start column.
    let min_indent_for_demand = start_column + 1;

    // Parse the first demand. This will determine the indentation level all the
    // other demands must observe.
    let (_, (demand_indent_level, first_demand), mut state) =
        ability::parse_demand(ability::IndentLevel::PendingMin(min_indent_for_demand))
            .parse(arena, state)
            .map_err(|(progress, err, state)| {
                (progress, EExpr::Ability(err, state.pos()), state)
            })?;
    demands.push(first_demand);

    // Every subsequent demand must sit at exactly the first demand's column.
    let demand_indent = ability::IndentLevel::Exact(demand_indent_level);
    let demand_parser = ability::parse_demand(demand_indent);

    loop {
        match demand_parser.parse(arena, state.clone()) {
            Ok((_, (_indent, demand), next_state)) => {
                state = next_state;
                demands.push(demand);
            }
            Err((MadeProgress, problem, old_state)) => {
                // A partially-parsed demand is a hard error.
                return Err((
                    MadeProgress,
                    EExpr::Ability(problem, old_state.pos()),
                    old_state,
                ));
            }
            Err((NoProgress, _, old_state)) => {
                // No further demand here; fall through to the rest of the defs.
                state = old_state;
                break;
            }
        }
    }

    // The def spans from the ability name through the last demand's type.
    let def_region = Region::span_across(&name.region, &demands.last().unwrap().typ.region);

    let def = Def::Ability {
        header: TypeHeader { name, vars: args },
        loc_has,
        demands: demands.into_bump_slice(),
    };

    let loc_def = &*(arena.alloc(Loc::at(def_region, def)));

    let def_state = DefState {
        defs: bumpalo::vec![in arena; loc_def],
        spaces_after: &[],
    };

    // Continue parsing the remaining defs/final expression of the block.
    parse_defs_expr(options, start_column, def_state, arena, state)
}
fn parse_expr_operator<'a>(
min_indent: u32,
options: ExprParseOptions,
@ -1290,6 +1471,62 @@ fn parse_expr_end<'a>(
match parser.parse(arena, state.clone()) {
Err((MadeProgress, f, s)) => Err((MadeProgress, f, s)),
Ok((
_,
has @ Loc {
value:
Expr::Var {
module_name: "",
ident: "has",
},
..
},
state,
)) if matches!(expr_state.expr.value, Expr::GlobalTag(..)) => {
// This is an ability definition, `Ability arg1 ... has ...`.
let name = expr_state.expr.map_owned(|e| match e {
Expr::GlobalTag(name) => name,
_ => unreachable!(),
});
let mut arguments = Vec::with_capacity_in(expr_state.arguments.len(), arena);
for argument in expr_state.arguments {
match expr_to_pattern_help(arena, &argument.value) {
Ok(good) => {
arguments.push(Loc::at(argument.region, good));
}
Err(_) => {
let start = argument.region.start();
let err = &*arena.alloc(EPattern::Start(start));
return Err((
MadeProgress,
EExpr::Pattern(err, argument.region.start()),
state,
));
}
}
}
// Attach any spaces to the `has` keyword
let has = if !expr_state.spaces_after.is_empty() {
arena
.alloc(Has::Has)
.with_spaces_before(expr_state.spaces_after, has.region)
} else {
Loc::at(has.region, Has::Has)
};
finish_parsing_ability(
start_column,
options,
name,
arguments.into_bump_slice(),
has,
arena,
state,
)
}
Ok((_, mut arg, state)) => {
let new_end = state.pos();
@ -1762,6 +1999,7 @@ mod when {
((_, _), _),
State<'a>,
) = branch_alternatives(min_indent, options, None).parse(arena, state)?;
let original_indent = pattern_indent_level;
state.indent_column = pattern_indent_level;

View File

@ -104,6 +104,7 @@ impl_space_problem! {
ETypeTagUnion<'a>,
ETypedIdent<'a>,
EWhen<'a>,
EAbility<'a>,
PInParens<'a>,
PRecord<'a>
}
@ -331,6 +332,7 @@ pub enum EExpr<'a> {
DefMissingFinalExpr2(&'a EExpr<'a>, Position),
Type(EType<'a>, Position),
Pattern(&'a EPattern<'a>, Position),
Ability(EAbility<'a>, Position),
IndentDefBody(Position),
IndentEquals(Position),
IndentAnnotation(Position),
@ -472,6 +474,16 @@ pub enum EWhen<'a> {
PatternAlignment(u32, Position),
}
/// Errors that can occur while parsing an ability definition,
/// e.g. `Hash has hash : a -> U64`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum EAbility<'a> {
/// Bad whitespace inside the ability definition.
Space(BadInputError, Position),
/// A demand's type annotation failed to parse.
Type(EType<'a>, Position),
/// A demand was not at the expected indentation; the `i32` encodes the
/// indentation mismatch (sign convention defined by the demand parser —
/// TODO confirm against `ability::parse_demand`).
DemandAlignment(i32, Position),
/// Expected a demand name (a lowercase identifier).
DemandName(Position),
/// Expected `:` after a demand name.
DemandColon(Position),
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum EIf<'a> {
Space(BadInputError, Position),
@ -564,6 +576,8 @@ pub enum EType<'a> {
TStart(Position),
TEnd(Position),
TFunctionArgument(Position),
TWhereBar(Position),
THasClause(Position),
///
TIndentStart(Position),
TIndentEnd(Position),
@ -1406,6 +1420,32 @@ where
}
}
/// Parser matching exactly the three bytes `word_1`, `word_2`, `word_3`
/// at the current position, producing `()`.
///
/// On a match the state advances past the three bytes and the parser
/// reports `MadeProgress`; otherwise the error built by `to_error` is
/// returned with `NoProgress` and the state left untouched.
pub fn word3<'a, ToError, E>(
    word_1: u8,
    word_2: u8,
    word_3: u8,
    to_error: ToError,
) -> impl Parser<'a, (), E>
where
    ToError: Fn(Position) -> E,
    E: 'a,
{
    // Matching a literal newline is disallowed here (same invariant as the
    // other `wordN` parsers) — newline handling lives elsewhere.
    debug_assert_ne!(word_1, b'\n');
    debug_assert_ne!(word_2, b'\n');
    debug_assert_ne!(word_3, b'\n');

    move |_arena: &'a Bump, state: State<'a>| match state.bytes() {
        // Subslice pattern: at least three bytes remain and they all match.
        [b1, b2, b3, ..] if *b1 == word_1 && *b2 == word_2 && *b3 == word_3 => {
            Ok((MadeProgress, (), state.advance(3)))
        }
        _ => Err((NoProgress, to_error(state.pos()), state)),
    }
}
#[macro_export]
macro_rules! word1_check_indent {
($word:expr, $word_problem:expr, $min_indent:expr, $indent_problem:expr) => {

View File

@ -1,8 +1,12 @@
use crate::ast::{AssignedField, Pattern, Tag, TypeAnnotation, TypeHeader};
use crate::ast::{
AssignedField, CommentOrNewline, HasClause, Pattern, Spaced, Tag, TypeAnnotation, TypeHeader,
};
use crate::blankspace::{space0_around_ee, space0_before_e, space0_e};
use crate::ident::lowercase_ident;
use crate::keyword;
use crate::parser::then;
use crate::parser::{
allocated, backtrackable, optional, specialize, specialize_ref, word1, word2, EType,
allocated, backtrackable, optional, specialize, specialize_ref, word1, word2, word3, EType,
ETypeApply, ETypeInParens, ETypeInlineAlias, ETypeRecord, ETypeTagUnion, ParseResult, Parser,
Progress::{self, *},
};
@ -240,7 +244,6 @@ where
fn record_type_field<'a>(
min_indent: u32,
) -> impl Parser<'a, AssignedField<'a, TypeAnnotation<'a>>, ETypeRecord<'a>> {
use crate::ident::lowercase_ident;
use crate::parser::Either::*;
use AssignedField::*;
@ -368,6 +371,75 @@ fn loc_applied_args_e<'a>(
zero_or_more!(loc_applied_arg(min_indent))
}
/// Parse one `has` clause: a type variable followed by the ability it must
/// implement, e.g. the `a has Hash` in `f : a -> U64 | a has Hash`.
fn has_clause<'a>(min_indent: u32) -> impl Parser<'a, Loc<HasClause<'a>>, EType<'a>> {
map!(
// Suppose we are trying to parse "a has Hash"
and!(
space0_around_ee(
// Parse "a", with appropriate spaces
specialize(
|_, pos| EType::TBadTypeVariable(pos),
loc!(map!(lowercase_ident(), Spaced::Item)),
),
min_indent,
EType::TIndentStart,
EType::TIndentEnd
),
then(
// Parse "has"; we don't care about this keyword
word3(b'h', b'a', b's', EType::THasClause),
// Parse "Hash"; this may be qualified from another module like "Hash.Hash"
|arena, state, _progress, _output| {
// The ability type must begin at least one column past the
// current position (i.e. past the `has` just consumed).
space0_before_e(
specialize(EType::TApply, loc!(parse_concrete_type)),
state.column() + 1,
EType::TIndentStart,
)
.parse(arena, state)
}
)
),
// Combine the variable and the ability into a single located
// `HasClause` whose region spans both.
|(var, ability): (Loc<Spaced<'a, &'a str>>, Loc<TypeAnnotation<'a>>)| {
let region = Region::span_across(&var.region, &ability.region);
let has_clause = HasClause { var, ability };
Loc::at(region, has_clause)
}
)
}
/// Parse a chain of `has` clauses, e.g. " | a has Hash, b has Eq".
/// Returns the clauses and spaces before the starting "|", if there were any.
/// Parse a chain of `has` clauses, e.g. " | a has Hash, b has Eq".
/// Returns the clauses and spaces before the starting "|", if there were any.
fn has_clause_chain<'a>(
min_indent: u32,
) -> impl Parser<'a, (&'a [CommentOrNewline<'a>], &'a [Loc<HasClause<'a>>]), EType<'a>> {
move |arena, state: State<'a>| {
// Consume leading whitespace, then the `|` that introduces the chain.
let (_, (spaces_before, ()), state) = and!(
space0_e(min_indent, EType::TIndentStart),
word1(b'|', EType::TWhereBar)
)
.parse(arena, state)?;
// Each clause must start at least one column past the current
// position (just after the `|`).
let min_demand_indent = state.column() + 1;
// Parse the first clause (there must be one), then the rest
let (_, first_clause, state) = has_clause(min_demand_indent).parse(arena, state)?;
// Remaining clauses are comma-separated.
let (_, mut clauses, state) = zero_or_more!(skip_first!(
word1(b',', EType::THasClause),
has_clause(min_demand_indent)
))
.parse(arena, state)?;
// Usually the number of clauses shouldn't be too large, so this is okay
// (insert at the front is O(n) in the clause count).
clauses.insert(0, first_clause);
Ok((
MadeProgress,
(spaces_before, clauses.into_bump_slice()),
state,
))
}
}
fn expression<'a>(
min_indent: u32,
is_trailing_comma_valid: bool,
@ -404,7 +476,7 @@ fn expression<'a>(
]
.parse(arena, state.clone());
match result {
let (progress, annot, state) = match result {
Ok((p2, (rest, _dropped_spaces), state)) => {
let (p3, return_type, state) =
space0_before_e(term(min_indent), min_indent, EType::TIndentStart)
@ -421,7 +493,7 @@ fn expression<'a>(
value: TypeAnnotation::Function(output, arena.alloc(return_type)),
};
let progress = p1.or(p2).or(p3);
Ok((progress, result, state))
(progress, result, state)
}
Err(err) => {
if !is_trailing_comma_valid {
@ -442,7 +514,36 @@ fn expression<'a>(
}
// We ran into trouble parsing the function bits; just return the single term
Ok((p1, first, state))
(p1, first, state)
}
};
// Finally, try to parse a where clause if there is one.
// The where clause must be at least as deep as where the type annotation started.
let min_where_clause_indent = min_indent;
match has_clause_chain(min_where_clause_indent).parse(arena, state.clone()) {
Ok((where_progress, (spaces_before, has_chain), state)) => {
use crate::ast::Spaceable;
let region = Region::span_across(&annot.region, &has_chain.last().unwrap().region);
let type_annot = if !spaces_before.is_empty() {
let spaced = arena
.alloc(annot.value)
.with_spaces_before(spaces_before, annot.region);
&*arena.alloc(spaced)
} else {
&*arena.alloc(annot)
};
let where_annot = TypeAnnotation::Where(type_annot, has_chain);
Ok((
where_progress.or(progress),
Loc::at(region, where_annot),
state,
))
}
Err(_) => {
// Ran into a problem parsing a where clause; don't suppose there is one.
Ok((progress, annot, state))
}
}
})

View File

@ -0,0 +1,42 @@
Defs(
[
@0-36 Ability {
header: TypeHeader {
name: @0-4 "Hash",
vars: [],
},
loc_has: @5-8 Has,
demands: [
AbilityDemand {
name: @11-15 SpaceBefore(
"hash",
[
Newline,
],
),
typ: @33-36 Function(
[
@18-19 BoundVariable(
"a",
),
],
@33-36 Apply(
"",
"U64",
[],
),
),
},
],
},
],
@38-39 SpaceBefore(
Num(
"1",
),
[
Newline,
Newline,
],
),
)

View File

@ -0,0 +1,5 @@
Hash has
hash : a
-> U64
1

View File

@ -0,0 +1,62 @@
Defs(
[
@0-45 Ability {
header: TypeHeader {
name: @0-4 "Hash",
vars: [],
},
loc_has: @5-8 Has,
demands: [
AbilityDemand {
name: @11-15 SpaceBefore(
"hash",
[
Newline,
],
),
typ: @23-26 Function(
[
@18-19 BoundVariable(
"a",
),
],
@23-26 Apply(
"",
"U64",
[],
),
),
},
AbilityDemand {
name: @29-34 SpaceBefore(
"hash2",
[
Newline,
],
),
typ: @42-45 Function(
[
@37-38 BoundVariable(
"a",
),
],
@42-45 Apply(
"",
"U64",
[],
),
),
},
],
},
],
@47-48 SpaceBefore(
Num(
"1",
),
[
Newline,
Newline,
],
),
)

View File

@ -0,0 +1,5 @@
Hash has
hash : a -> U64
hash2 : a -> U64
1

View File

@ -0,0 +1,37 @@
Defs(
[
@0-24 Ability {
header: TypeHeader {
name: @0-4 "Hash",
vars: [],
},
loc_has: @5-8 Has,
demands: [
AbilityDemand {
name: @9-13 "hash",
typ: @21-24 Function(
[
@16-17 BoundVariable(
"a",
),
],
@21-24 Apply(
"",
"U64",
[],
),
),
},
],
},
],
@26-27 SpaceBefore(
Num(
"1",
),
[
Newline,
Newline,
],
),
)

View File

@ -0,0 +1,3 @@
Hash has hash : a -> U64
1

View File

@ -0,0 +1,48 @@
Defs(
[
@0-27 Annotation(
@0-1 Identifier(
"f",
),
@15-27 Where(
@15-16 Function(
[
@4-5 BoundVariable(
"a",
),
],
@15-16 Function(
[
@10-11 BoundVariable(
"b",
),
],
@15-16 BoundVariable(
"c",
),
),
),
[
@20-27 HasClause {
var: @20-21 "a",
ability: @26-27 Apply(
"",
"A",
[],
),
},
],
),
),
],
@29-30 SpaceBefore(
Var {
module_name: "",
ident: "f",
},
[
Newline,
Newline,
],
),
)

View File

@ -0,0 +1,3 @@
f : a -> (b -> c) | a has A
f

View File

@ -0,0 +1,64 @@
Defs(
[
@0-48 Annotation(
@0-1 Identifier(
"f",
),
@15-48 Where(
@15-16 Function(
[
@4-5 BoundVariable(
"a",
),
],
@15-16 Function(
[
@10-11 BoundVariable(
"b",
),
],
@15-16 BoundVariable(
"c",
),
),
),
[
@20-27 HasClause {
var: @20-21 "a",
ability: @26-27 Apply(
"",
"A",
[],
),
},
@29-37 HasClause {
var: @29-30 "b",
ability: @35-37 Apply(
"",
"Eq",
[],
),
},
@39-48 HasClause {
var: @39-40 "c",
ability: @45-48 Apply(
"",
"Ord",
[],
),
},
],
),
),
],
@50-51 SpaceBefore(
Var {
module_name: "",
ident: "f",
},
[
Newline,
Newline,
],
),
)

View File

@ -0,0 +1,3 @@
f : a -> (b -> c) | a has A, b has Eq, c has Ord
f

View File

@ -0,0 +1,79 @@
Defs(
[
@0-67 Annotation(
@0-1 Identifier(
"f",
),
@15-67 Where(
@15-16 SpaceBefore(
Function(
[
@4-5 BoundVariable(
"a",
),
],
@15-16 Function(
[
@10-11 BoundVariable(
"b",
),
],
@15-16 BoundVariable(
"c",
),
),
),
[
Newline,
],
),
[
@24-34 HasClause {
var: @24-25 "a",
ability: @30-34 Apply(
"",
"Hash",
[],
),
},
@42-50 HasClause {
var: @42-43 SpaceBefore(
"b",
[
Newline,
],
),
ability: @48-50 Apply(
"",
"Eq",
[],
),
},
@58-67 HasClause {
var: @58-59 SpaceBefore(
"c",
[
Newline,
],
),
ability: @64-67 Apply(
"",
"Ord",
[],
),
},
],
),
),
],
@69-70 SpaceBefore(
Var {
module_name: "",
ident: "f",
},
[
Newline,
Newline,
],
),
)

View File

@ -0,0 +1,6 @@
f : a -> (b -> c)
| a has Hash,
b has Eq,
c has Ord
f

View File

@ -0,0 +1,34 @@
Defs(
[
@0-15 Annotation(
@0-1 Identifier(
"f",
),
@4-15 Where(
@4-5 BoundVariable(
"a",
),
[
@8-15 HasClause {
var: @8-9 "a",
ability: @14-15 Apply(
"",
"A",
[],
),
},
],
),
),
],
@17-18 SpaceBefore(
Var {
module_name: "",
ident: "f",
},
[
Newline,
Newline,
],
),
)

View File

@ -0,0 +1,3 @@
f : a | a has A
f

View File

@ -0,0 +1,48 @@
Defs(
[
@0-29 Annotation(
@0-1 Identifier(
"f",
),
@9-29 Where(
@9-12 SpaceBefore(
Function(
[
@4-5 BoundVariable(
"a",
),
],
@9-12 Apply(
"",
"U64",
[],
),
),
[
Newline,
],
),
[
@19-29 HasClause {
var: @19-20 "a",
ability: @25-29 Apply(
"",
"Hash",
[],
),
},
],
),
),
],
@31-32 SpaceBefore(
Var {
module_name: "",
ident: "f",
},
[
Newline,
Newline,
],
),
)

View File

@ -0,0 +1,4 @@
f : a -> U64
| a has Hash
f

View File

@ -123,6 +123,9 @@ mod test_parse {
fail/type_argument_no_arrow.expr,
fail/type_double_comma.expr,
pass/list_closing_indent_not_enough.expr,
pass/ability_single_line.expr,
pass/ability_multi_line.expr,
pass/ability_demand_signature_is_multiline.expr,
pass/add_var_with_spaces.expr,
pass/add_with_spaces.expr,
pass/annotated_record_destructure.expr,
@ -273,6 +276,11 @@ mod test_parse {
pass/when_with_negative_numbers.expr,
pass/when_with_numbers.expr,
pass/when_with_records.expr,
pass/where_clause_function.expr,
pass/where_clause_non_function.expr,
pass/where_clause_multiple_has.expr,
pass/where_clause_multiple_has_across_newlines.expr,
pass/where_clause_on_newline.expr,
pass/zero_float.expr,
pass/zero_int.expr,
}

View File

@ -1,35 +1,47 @@
use crate::solve;
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints};
use roc_can::module::RigidVariables;
use roc_collections::all::MutMap;
use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol;
use roc_types::solved_types::{Solved, SolvedType};
use roc_types::subs::{Subs, VarStore, Variable};
use roc_types::subs::{StorageSubs, Subs, Variable};
use roc_types::types::Alias;
#[derive(Debug)]
pub struct SolvedModule {
pub solved_types: MutMap<Symbol, SolvedType>,
pub aliases: MutMap<Symbol, Alias>,
pub exposed_symbols: Vec<Symbol>,
pub exposed_vars_by_symbol: Vec<(Symbol, Variable)>,
pub problems: Vec<solve::TypeError>,
/// all aliases and their definitions. this has to include non-exposed aliases
/// because exposed aliases can depend on non-exposed ones)
pub aliases: MutMap<Symbol, Alias>,
/// Used when the goal phase is TypeChecking, and
/// to create the types for HostExposed. This
/// has some overlap with the StorageSubs fields,
/// so maybe we can get rid of this at some point
pub exposed_vars_by_symbol: Vec<(Symbol, Variable)>,
/// Used when importing this module into another module
pub stored_vars_by_symbol: Vec<(Symbol, Variable)>,
pub storage_subs: StorageSubs,
}
pub fn run_solve(
constraints: &Constraints,
constraint: ConstraintSoa,
rigid_variables: MutMap<Variable, Lowercase>,
var_store: VarStore,
rigid_variables: RigidVariables,
mut subs: Subs,
) -> (Solved<Subs>, solve::Env, Vec<solve::TypeError>) {
let env = solve::Env::default();
let mut subs = Subs::new_from_varstore(var_store);
for (var, name) in rigid_variables {
for (var, name) in rigid_variables.named {
subs.rigid_var(var, name);
}
for var in rigid_variables.wildcards {
subs.rigid_var(var, "*".into());
}
// Now that the module is parsed, canonicalized, and constrained,
// we need to type check it.
let mut problems = Vec::new();
@ -59,3 +71,19 @@ pub fn make_solved_types(
solved_types
}
/// Copy every exposed variable out of `solved_subs` into a fresh
/// `StorageSubs`, returning that storage together with each symbol paired
/// with its imported (storage-local) variable.
pub fn exposed_types_storage_subs(
    solved_subs: &mut Solved<Subs>,
    exposed_vars_by_symbol: &[(Symbol, Variable)],
) -> (StorageSubs, Vec<(Symbol, Variable)>) {
    let source_subs = solved_subs.inner_mut();
    let mut storage = StorageSubs::new(Subs::new());

    // Import each exposed variable; the returned variable lives in `storage`.
    let stored_vars_by_symbol: Vec<(Symbol, Variable)> = exposed_vars_by_symbol
        .iter()
        .map(|(symbol, var)| {
            let imported = storage.import_variable_from(source_subs, *var).variable;
            (*symbol, imported)
        })
        .collect();

    (storage, stored_vars_by_symbol)
}

View File

@ -189,6 +189,7 @@ pub fn run_in_place(
constraint: &Constraint,
) -> Env {
let mut pools = Pools::default();
let state = State {
env: env.clone(),
mark: Mark::NONE.next(),
@ -225,6 +226,12 @@ enum Work<'a> {
env: &'a Env,
rank: Rank,
let_con: &'a LetConstraint,
/// The variables used to store imported types in the Subs.
/// The `Contents` are copied from the source module, but to
/// mimic `type_to_var`, we must add these variables to `Pools`
/// at the correct rank
pool_variables: &'a [Variable],
},
/// The ret_con part of a let constraint that introduces rigid and/or flex variables
///
@ -234,6 +241,12 @@ enum Work<'a> {
env: &'a Env,
rank: Rank,
let_con: &'a LetConstraint,
/// The variables used to store imported types in the Subs.
/// The `Contents` are copied from the source module, but to
/// mimic `type_to_var`, we must add these variables to `Pools`
/// at the correct rank
pool_variables: &'a [Variable],
},
}
@ -277,7 +290,12 @@ fn solve(
continue;
}
Work::LetConNoVariables { env, rank, let_con } => {
Work::LetConNoVariables {
env,
rank,
let_con,
pool_variables,
} => {
// NOTE be extremely careful with shadowing here
let offset = let_con.defs_and_ret_constraint.index();
let ret_constraint = &constraints.constraints[offset + 1];
@ -292,6 +310,8 @@ fn solve(
let_con.def_types,
);
pools.get_mut(rank).extend(pool_variables);
let mut new_env = env.clone();
for (symbol, loc_var) in local_def_vars.iter() {
new_env.insert_symbol_var_if_vacant(*symbol, loc_var.value);
@ -306,7 +326,12 @@ fn solve(
continue;
}
Work::LetConIntroducesVariables { env, rank, let_con } => {
Work::LetConIntroducesVariables {
env,
rank,
let_con,
pool_variables,
} => {
// NOTE be extremely careful with shadowing here
let offset = let_con.defs_and_ret_constraint.index();
let ret_constraint = &constraints.constraints[offset + 1];
@ -330,6 +355,8 @@ fn solve(
let_con.def_types,
);
pools.get_mut(next_rank).extend(pool_variables);
debug_assert_eq!(
{
let offenders = pools
@ -414,11 +441,18 @@ fn solve(
copy
}
Eq(type_index, expectation_index, category_index, region) => {
let typ = &constraints.types[type_index.index()];
let expectation = &constraints.expectations[expectation_index.index()];
let category = &constraints.categories[category_index.index()];
let actual = type_to_var(subs, rank, pools, cached_aliases, typ);
let actual = either_type_index_to_var(
constraints,
subs,
rank,
pools,
cached_aliases,
*type_index,
);
let expectation = &constraints.expectations[expectation_index.index()];
let expected = type_to_var(
subs,
rank,
@ -457,11 +491,16 @@ fn solve(
}
}
Store(source_index, target, _filename, _linenr) => {
let source = &constraints.types[source_index.index()];
// a special version of Eq that is used to store types in the AST.
// IT DOES NOT REPORT ERRORS!
let actual = type_to_var(subs, rank, pools, cached_aliases, source);
let actual = either_type_index_to_var(
constraints,
subs,
rank,
pools,
cached_aliases,
*source_index,
);
let target = *target;
match unify(subs, actual, target, Mode::EQ) {
@ -572,11 +611,18 @@ fn solve(
}
Pattern(type_index, expectation_index, category_index, region)
| PatternPresence(type_index, expectation_index, category_index, region) => {
let typ = &constraints.types[type_index.index()];
let expectation = &constraints.pattern_expectations[expectation_index.index()];
let category = &constraints.pattern_categories[category_index.index()];
let actual = type_to_var(subs, rank, pools, cached_aliases, typ);
let actual = either_type_index_to_var(
constraints,
subs,
rank,
pools,
cached_aliases,
*type_index,
);
let expectation = &constraints.pattern_expectations[expectation_index.index()];
let expected = type_to_var(
subs,
rank,
@ -619,7 +665,7 @@ fn solve(
}
}
}
Let(index) => {
Let(index, pool_slice) => {
let let_con = &constraints.let_constraints[index.index()];
let offset = let_con.defs_and_ret_constraint.index();
@ -629,7 +675,11 @@ fn solve(
let flex_vars = &constraints.variables[let_con.flex_vars.indices()];
let rigid_vars = &constraints.variables[let_con.rigid_vars.indices()];
let pool_variables = &constraints.variables[pool_slice.indices()];
if matches!(&ret_constraint, True) && let_con.rigid_vars.is_empty() {
debug_assert!(pool_variables.is_empty());
introduce(subs, rank, pools, flex_vars);
// If the return expression is guaranteed to solve,
@ -647,7 +697,12 @@ fn solve(
//
// Note that the LetConSimple gets the current env and rank,
// and not the env/rank from after solving the defs_constraint
stack.push(Work::LetConNoVariables { env, rank, let_con });
stack.push(Work::LetConNoVariables {
env,
rank,
let_con,
pool_variables,
});
stack.push(Work::Constraint {
env,
rank,
@ -689,7 +744,12 @@ fn solve(
//
// Note that the LetConSimple gets the current env and rank,
// and not the env/rank from after solving the defs_constraint
stack.push(Work::LetConIntroducesVariables { env, rank, let_con });
stack.push(Work::LetConIntroducesVariables {
env,
rank,
let_con,
pool_variables,
});
stack.push(Work::Constraint {
env,
rank: next_rank,
@ -700,9 +760,15 @@ fn solve(
}
}
IsOpenType(type_index) => {
let typ = &constraints.types[type_index.index()];
let actual = either_type_index_to_var(
constraints,
subs,
rank,
pools,
cached_aliases,
*type_index,
);
let actual = type_to_var(subs, rank, pools, cached_aliases, typ);
let mut new_desc = subs.get(actual);
match new_desc.content {
Content::Structure(FlatType::TagUnion(tags, _)) => {
@ -850,6 +916,27 @@ fn put_scratchpad(scratchpad: bumpalo::Bump) {
});
}
/// Resolve an `EitherIndex<Type, Variable>` to a `Variable` in `subs`.
///
/// A constraint may reference either a full `Type` (which must be converted
/// into a fresh variable at the given rank) or an already-existing
/// `Variable`, whose raw index is stored directly in the `Err` side of the
/// index as an optimization.
fn either_type_index_to_var(
    constraints: &Constraints,
    subs: &mut Subs,
    rank: Rank,
    pools: &mut Pools,
    // NOTE: this was previously named `_alias_map`, but it IS used — the
    // underscore prefix was misleading.
    alias_map: &mut MutMap<Symbol, Variable>,
    either_type_index: roc_collections::soa::EitherIndex<Type, Variable>,
) -> Variable {
    match either_type_index.split() {
        Ok(type_index) => {
            // A real type: look it up and convert it to a variable.
            let typ = &constraints.types[type_index.index()];
            type_to_var(subs, rank, pools, alias_map, typ)
        }
        Err(var_index) => {
            // We cheat, and store the variable directly in the index.
            // SAFETY: the `Err` side of this `EitherIndex` is only ever
            // constructed from a `Variable`'s raw index, so converting the
            // index back reproduces the original variable.
            unsafe { Variable::from_index(var_index.index() as _) }
        }
    }
}
fn type_to_var(
subs: &mut Subs,
rank: Rank,

View File

@ -10,7 +10,6 @@ mod helpers;
#[cfg(test)]
mod solve_expr {
use crate::helpers::with_larger_debug_stack;
use roc_collections::all::MutMap;
use roc_types::pretty_print::{content_to_string, name_all_type_vars};
// HELPERS
@ -47,7 +46,7 @@ mod solve_expr {
module_src = &temp;
}
let exposed_types = MutMap::default();
let exposed_types = Default::default();
let loaded = {
let dir = tempdir()?;
let filename = PathBuf::from("Test.roc");
@ -5574,4 +5573,57 @@ mod solve_expr {
r#"[ A, B, C ]"#,
)
}
#[test]
// https://github.com/rtfeldman/roc/issues/2702
// Regression test: matching on a value wrapped in an opaque type (`$Outer`)
// must still infer the full tag union of the payload.
fn tag_inclusion_behind_opaque() {
infer_eq_without_problem(
indoc!(
r#"
Outer k := [ Empty, Wrapped k ]
insert : Outer k, k -> Outer k
insert = \m, var ->
when m is
$Outer Empty -> $Outer (Wrapped var)
$Outer (Wrapped _) -> $Outer (Wrapped var)
insert
"#
),
r#"Outer k, k -> Outer k"#,
)
}
#[test]
// Same as `tag_inclusion_behind_opaque`, but with the type argument of the
// opaque inferred (here as `Str`) rather than annotated.
fn tag_inclusion_behind_opaque_infer() {
infer_eq_without_problem(
indoc!(
r#"
Outer k := [ Empty, Wrapped k ]
when ($Outer Empty) is
$Outer Empty -> $Outer (Wrapped "")
$Outer (Wrapped k) -> $Outer (Wrapped k)
"#
),
r#"Outer Str"#,
)
}
#[test]
// Variant of the opaque-inference test where the opaque takes no type
// argument; inference should produce plain `Outer`.
fn tag_inclusion_behind_opaque_infer_single_ctor() {
infer_eq_without_problem(
indoc!(
r#"
Outer := [ A, B ]
when ($Outer A) is
$Outer A -> $Outer A
$Outer B -> $Outer B
"#
),
r#"Outer"#,
)
}
}

View File

@ -2831,3 +2831,31 @@ fn upcast_of_int_checked_is_zext() {
u16
)
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
// Remainder on an unsigned int: 0b1111_1111u8 is 255, and 255 % 64 == 63.
// Would produce a different result if lowered as a signed remainder.
fn modulo_of_unsigned() {
assert_evals_to!(
indoc!(
r#"
0b1111_1111u8 % 64
"#
),
63,
u8
)
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
// Floor division on an unsigned int: 0b1111_1111u8 is 255, and
// 255 // 2 == 127. Would differ if lowered as a signed division.
fn div_of_unsigned() {
assert_evals_to!(
indoc!(
r#"
0b1111_1111u8 // 2
"#
),
127,
u8
)
}

View File

@ -48,14 +48,13 @@ pub fn helper(
module_src = &temp;
}
let exposed_types = MutMap::default();
let loaded = roc_load::file::load_and_monomorphize_from_str(
arena,
filename,
module_src,
&stdlib,
src_dir,
exposed_types,
Default::default(),
roc_target::TargetInfo::default_x86_64(),
);

View File

@ -3,7 +3,7 @@ use inkwell::module::Module;
use libloading::Library;
use roc_build::link::module_to_dylib;
use roc_build::program::FunctionIterator;
use roc_collections::all::{MutMap, MutSet};
use roc_collections::all::MutSet;
use roc_gen_llvm::llvm::externs::add_default_roc_externs;
use roc_mono::ir::OptLevel;
use roc_region::all::LineInfo;
@ -51,14 +51,13 @@ fn create_llvm_module<'a>(
module_src = &temp;
}
let exposed_types = MutMap::default();
let loaded = roc_load::file::load_and_monomorphize_from_str(
arena,
filename,
module_src,
stdlib,
src_dir,
exposed_types,
Default::default(),
target_info,
);

View File

@ -86,14 +86,13 @@ fn compile_roc_to_wasm_bytes<'a, T: Wasm32Result>(
module_src = &temp;
}
let exposed_types = MutMap::default();
let loaded = roc_load::file::load_and_monomorphize_from_str(
arena,
filename,
module_src,
stdlib,
src_dir,
exposed_types,
Default::default(),
roc_target::TargetInfo::default_wasm32(),
);

View File

@ -1,12 +1,25 @@
procedure List.4 (#Attr.2, #Attr.3, #Attr.4):
let Test.19 : U64 = lowlevel ListLen #Attr.2;
let Test.17 : Int1 = lowlevel NumLt #Attr.3 Test.19;
let Test.24 : U64 = lowlevel ListLen #Attr.2;
let Test.17 : Int1 = lowlevel NumLt #Attr.3 Test.24;
if Test.17 then
let Test.18 : List I64 = lowlevel ListSet #Attr.2 #Attr.3 #Attr.4;
let Test.19 : {List I64, I64} = CallByName List.58 #Attr.2 #Attr.3 #Attr.4;
let Test.18 : List I64 = StructAtIndex 0 Test.19;
inc Test.18;
dec Test.19;
ret Test.18;
else
ret #Attr.2;
procedure List.58 (#Attr.2, #Attr.3, #Attr.4):
let Test.23 : U64 = lowlevel ListLen #Attr.2;
let Test.21 : Int1 = lowlevel NumLt #Attr.3 Test.23;
if Test.21 then
let Test.22 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4;
ret Test.22;
else
let Test.20 : {List I64, I64} = Struct {#Attr.2, #Attr.4};
ret Test.20;
procedure List.7 (#Attr.2):
let Test.9 : U64 = lowlevel ListLen #Attr.2;
ret Test.9;

View File

@ -1,12 +1,25 @@
procedure List.4 (#Attr.2, #Attr.3, #Attr.4):
let Test.11 : U64 = lowlevel ListLen #Attr.2;
let Test.9 : Int1 = lowlevel NumLt #Attr.3 Test.11;
let Test.16 : U64 = lowlevel ListLen #Attr.2;
let Test.9 : Int1 = lowlevel NumLt #Attr.3 Test.16;
if Test.9 then
let Test.10 : List I64 = lowlevel ListSet #Attr.2 #Attr.3 #Attr.4;
let Test.11 : {List I64, I64} = CallByName List.58 #Attr.2 #Attr.3 #Attr.4;
let Test.10 : List I64 = StructAtIndex 0 Test.11;
inc Test.10;
dec Test.11;
ret Test.10;
else
ret #Attr.2;
procedure List.58 (#Attr.2, #Attr.3, #Attr.4):
let Test.15 : U64 = lowlevel ListLen #Attr.2;
let Test.13 : Int1 = lowlevel NumLt #Attr.3 Test.15;
if Test.13 then
let Test.14 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4;
ret Test.14;
else
let Test.12 : {List I64, I64} = Struct {#Attr.2, #Attr.4};
ret Test.12;
procedure Test.2 (Test.3):
let Test.6 : U64 = 0i64;
let Test.7 : I64 = 0i64;

View File

@ -1,59 +1,72 @@
procedure List.3 (#Attr.2, #Attr.3):
let Test.37 : U64 = lowlevel ListLen #Attr.2;
let Test.34 : Int1 = lowlevel NumLt #Attr.3 Test.37;
if Test.34 then
let Test.36 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3;
let Test.35 : [C {}, C I64] = Ok Test.36;
ret Test.35;
let Test.42 : U64 = lowlevel ListLen #Attr.2;
let Test.39 : Int1 = lowlevel NumLt #Attr.3 Test.42;
if Test.39 then
let Test.41 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3;
let Test.40 : [C {}, C I64] = Ok Test.41;
ret Test.40;
else
let Test.33 : {} = Struct {};
let Test.32 : [C {}, C I64] = Err Test.33;
ret Test.32;
let Test.38 : {} = Struct {};
let Test.37 : [C {}, C I64] = Err Test.38;
ret Test.37;
procedure List.4 (#Attr.2, #Attr.3, #Attr.4):
let Test.15 : U64 = lowlevel ListLen #Attr.2;
let Test.13 : Int1 = lowlevel NumLt #Attr.3 Test.15;
let Test.20 : U64 = lowlevel ListLen #Attr.2;
let Test.13 : Int1 = lowlevel NumLt #Attr.3 Test.20;
if Test.13 then
let Test.14 : List I64 = lowlevel ListSet #Attr.2 #Attr.3 #Attr.4;
let Test.15 : {List I64, I64} = CallByName List.58 #Attr.2 #Attr.3 #Attr.4;
let Test.14 : List I64 = StructAtIndex 0 Test.15;
inc Test.14;
dec Test.15;
ret Test.14;
else
ret #Attr.2;
procedure List.58 (#Attr.2, #Attr.3, #Attr.4):
let Test.19 : U64 = lowlevel ListLen #Attr.2;
let Test.17 : Int1 = lowlevel NumLt #Attr.3 Test.19;
if Test.17 then
let Test.18 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4;
ret Test.18;
else
let Test.16 : {List I64, I64} = Struct {#Attr.2, #Attr.4};
ret Test.16;
procedure Test.1 (Test.2):
let Test.38 : U64 = 0i64;
let Test.30 : [C {}, C I64] = CallByName List.3 Test.2 Test.38;
let Test.31 : U64 = 0i64;
let Test.29 : [C {}, C I64] = CallByName List.3 Test.2 Test.31;
let Test.8 : {[C {}, C I64], [C {}, C I64]} = Struct {Test.29, Test.30};
joinpoint Test.26:
let Test.17 : List I64 = Array [];
ret Test.17;
let Test.43 : U64 = 0i64;
let Test.35 : [C {}, C I64] = CallByName List.3 Test.2 Test.43;
let Test.36 : U64 = 0i64;
let Test.34 : [C {}, C I64] = CallByName List.3 Test.2 Test.36;
let Test.8 : {[C {}, C I64], [C {}, C I64]} = Struct {Test.34, Test.35};
joinpoint Test.31:
let Test.22 : List I64 = Array [];
ret Test.22;
in
let Test.23 : [C {}, C I64] = StructAtIndex 1 Test.8;
let Test.24 : U8 = 1i64;
let Test.25 : U8 = GetTagId Test.23;
let Test.28 : Int1 = lowlevel Eq Test.24 Test.25;
if Test.28 then
let Test.20 : [C {}, C I64] = StructAtIndex 0 Test.8;
let Test.21 : U8 = 1i64;
let Test.22 : U8 = GetTagId Test.20;
let Test.27 : Int1 = lowlevel Eq Test.21 Test.22;
if Test.27 then
let Test.19 : [C {}, C I64] = StructAtIndex 0 Test.8;
let Test.4 : I64 = UnionAtIndex (Id 1) (Index 0) Test.19;
let Test.18 : [C {}, C I64] = StructAtIndex 1 Test.8;
let Test.5 : I64 = UnionAtIndex (Id 1) (Index 0) Test.18;
let Test.16 : U64 = 0i64;
let Test.10 : List I64 = CallByName List.4 Test.2 Test.16 Test.5;
let Test.28 : [C {}, C I64] = StructAtIndex 1 Test.8;
let Test.29 : U8 = 1i64;
let Test.30 : U8 = GetTagId Test.28;
let Test.33 : Int1 = lowlevel Eq Test.29 Test.30;
if Test.33 then
let Test.25 : [C {}, C I64] = StructAtIndex 0 Test.8;
let Test.26 : U8 = 1i64;
let Test.27 : U8 = GetTagId Test.25;
let Test.32 : Int1 = lowlevel Eq Test.26 Test.27;
if Test.32 then
let Test.24 : [C {}, C I64] = StructAtIndex 0 Test.8;
let Test.4 : I64 = UnionAtIndex (Id 1) (Index 0) Test.24;
let Test.23 : [C {}, C I64] = StructAtIndex 1 Test.8;
let Test.5 : I64 = UnionAtIndex (Id 1) (Index 0) Test.23;
let Test.21 : U64 = 0i64;
let Test.10 : List I64 = CallByName List.4 Test.2 Test.21 Test.5;
let Test.11 : U64 = 0i64;
let Test.9 : List I64 = CallByName List.4 Test.10 Test.11 Test.4;
ret Test.9;
else
dec Test.2;
jump Test.26;
jump Test.31;
else
dec Test.2;
jump Test.26;
jump Test.31;
procedure Test.0 ():
let Test.7 : List I64 = Array [1i64, 2i64];

View File

@ -1,55 +1,68 @@
procedure List.3 (#Attr.2, #Attr.3):
let Test.39 : U64 = lowlevel ListLen #Attr.2;
let Test.36 : Int1 = lowlevel NumLt #Attr.3 Test.39;
if Test.36 then
let Test.38 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3;
let Test.37 : [C {}, C I64] = Ok Test.38;
ret Test.37;
let Test.44 : U64 = lowlevel ListLen #Attr.2;
let Test.41 : Int1 = lowlevel NumLt #Attr.3 Test.44;
if Test.41 then
let Test.43 : I64 = lowlevel ListGetUnsafe #Attr.2 #Attr.3;
let Test.42 : [C {}, C I64] = Ok Test.43;
ret Test.42;
else
let Test.35 : {} = Struct {};
let Test.34 : [C {}, C I64] = Err Test.35;
ret Test.34;
let Test.40 : {} = Struct {};
let Test.39 : [C {}, C I64] = Err Test.40;
ret Test.39;
procedure List.4 (#Attr.2, #Attr.3, #Attr.4):
let Test.19 : U64 = lowlevel ListLen #Attr.2;
let Test.17 : Int1 = lowlevel NumLt #Attr.3 Test.19;
let Test.24 : U64 = lowlevel ListLen #Attr.2;
let Test.17 : Int1 = lowlevel NumLt #Attr.3 Test.24;
if Test.17 then
let Test.18 : List I64 = lowlevel ListSet #Attr.2 #Attr.3 #Attr.4;
let Test.19 : {List I64, I64} = CallByName List.58 #Attr.2 #Attr.3 #Attr.4;
let Test.18 : List I64 = StructAtIndex 0 Test.19;
inc Test.18;
dec Test.19;
ret Test.18;
else
ret #Attr.2;
procedure Test.1 (Test.2, Test.3, Test.4):
let Test.33 : [C {}, C I64] = CallByName List.3 Test.4 Test.3;
let Test.32 : [C {}, C I64] = CallByName List.3 Test.4 Test.2;
let Test.13 : {[C {}, C I64], [C {}, C I64]} = Struct {Test.32, Test.33};
joinpoint Test.29:
let Test.20 : List I64 = Array [];
procedure List.58 (#Attr.2, #Attr.3, #Attr.4):
let Test.23 : U64 = lowlevel ListLen #Attr.2;
let Test.21 : Int1 = lowlevel NumLt #Attr.3 Test.23;
if Test.21 then
let Test.22 : {List I64, I64} = lowlevel ListReplaceUnsafe #Attr.2 #Attr.3 #Attr.4;
ret Test.22;
else
let Test.20 : {List I64, I64} = Struct {#Attr.2, #Attr.4};
ret Test.20;
procedure Test.1 (Test.2, Test.3, Test.4):
let Test.38 : [C {}, C I64] = CallByName List.3 Test.4 Test.3;
let Test.37 : [C {}, C I64] = CallByName List.3 Test.4 Test.2;
let Test.13 : {[C {}, C I64], [C {}, C I64]} = Struct {Test.37, Test.38};
joinpoint Test.34:
let Test.25 : List I64 = Array [];
ret Test.25;
in
let Test.26 : [C {}, C I64] = StructAtIndex 1 Test.13;
let Test.27 : U8 = 1i64;
let Test.28 : U8 = GetTagId Test.26;
let Test.31 : Int1 = lowlevel Eq Test.27 Test.28;
if Test.31 then
let Test.23 : [C {}, C I64] = StructAtIndex 0 Test.13;
let Test.24 : U8 = 1i64;
let Test.25 : U8 = GetTagId Test.23;
let Test.30 : Int1 = lowlevel Eq Test.24 Test.25;
if Test.30 then
let Test.22 : [C {}, C I64] = StructAtIndex 0 Test.13;
let Test.6 : I64 = UnionAtIndex (Id 1) (Index 0) Test.22;
let Test.21 : [C {}, C I64] = StructAtIndex 1 Test.13;
let Test.7 : I64 = UnionAtIndex (Id 1) (Index 0) Test.21;
let Test.31 : [C {}, C I64] = StructAtIndex 1 Test.13;
let Test.32 : U8 = 1i64;
let Test.33 : U8 = GetTagId Test.31;
let Test.36 : Int1 = lowlevel Eq Test.32 Test.33;
if Test.36 then
let Test.28 : [C {}, C I64] = StructAtIndex 0 Test.13;
let Test.29 : U8 = 1i64;
let Test.30 : U8 = GetTagId Test.28;
let Test.35 : Int1 = lowlevel Eq Test.29 Test.30;
if Test.35 then
let Test.27 : [C {}, C I64] = StructAtIndex 0 Test.13;
let Test.6 : I64 = UnionAtIndex (Id 1) (Index 0) Test.27;
let Test.26 : [C {}, C I64] = StructAtIndex 1 Test.13;
let Test.7 : I64 = UnionAtIndex (Id 1) (Index 0) Test.26;
let Test.15 : List I64 = CallByName List.4 Test.4 Test.2 Test.7;
let Test.14 : List I64 = CallByName List.4 Test.15 Test.3 Test.6;
ret Test.14;
else
dec Test.4;
jump Test.29;
jump Test.34;
else
dec Test.4;
jump Test.29;
jump Test.34;
procedure Test.0 ():
let Test.10 : U64 = 0i64;

View File

@ -96,15 +96,13 @@ fn compiles_to_ir(test_name: &str, src: &str) {
module_src = &temp;
}
let exposed_types = MutMap::default();
let loaded = roc_load::file::load_and_monomorphize_from_str(
arena,
filename,
module_src,
&stdlib,
src_dir,
exposed_types,
Default::default(),
TARGET_INFO,
);

View File

@ -789,7 +789,6 @@ impl Variable {
///
/// It is not guaranteed that the variable is in bounds.
pub unsafe fn from_index(v: u32) -> Self {
debug_assert!(v >= Self::NUM_RESERVED_VARS as u32);
Variable(v)
}
@ -3326,6 +3325,14 @@ impl StorageSubs {
deep_copy_var_to(source, &mut self.subs, variable)
}
pub fn import_variable_from(&mut self, source: &mut Subs, variable: Variable) -> CopiedImport {
copy_import_to(source, &mut self.subs, variable)
}
pub fn export_variable_to(&mut self, target: &mut Subs, variable: Variable) -> CopiedImport {
copy_import_to(&mut self.subs, target, variable)
}
pub fn merge_into(self, target: &mut Subs) -> impl Fn(Variable) -> Variable {
let self_offsets = StorageSubsOffsets {
utable: self.subs.utable.len() as u32,
@ -3538,21 +3545,16 @@ impl StorageSubs {
use std::cell::RefCell;
std::thread_local! {
/// Scratchpad arena so we don't need to allocate a new one all the time
static SCRATCHPAD: RefCell<bumpalo::Bump> = RefCell::new(bumpalo::Bump::with_capacity(4 * 1024));
static SCRATCHPAD: RefCell<Option<bumpalo::Bump>> = RefCell::new(Some(bumpalo::Bump::with_capacity(4 * 1024)));
}
fn take_scratchpad() -> bumpalo::Bump {
let mut result = bumpalo::Bump::new();
SCRATCHPAD.with(|f| {
result = f.replace(bumpalo::Bump::new());
});
result
SCRATCHPAD.with(|f| f.take().unwrap())
}
fn put_scratchpad(scratchpad: bumpalo::Bump) {
SCRATCHPAD.with(|f| {
f.replace(scratchpad);
f.replace(Some(scratchpad));
});
}
@ -3610,7 +3612,8 @@ fn deep_copy_var_to_help<'a>(
// return var;
//
// but we cannot, because this `var` is in the source, not the target, and we
// should only return variables in the target
// should only return variables in the target. so, we have to create a new
// variable in the target.
}
visited.push(var);
@ -3818,7 +3821,17 @@ fn deep_copy_var_to_help<'a>(
copy
}
FlexVar(_) | Error => copy,
FlexVar(Some(name_index)) => {
let name = source.field_names[name_index.index as usize].clone();
let new_name_index = SubsIndex::push_new(&mut target.field_names, name);
let content = FlexVar(Some(new_name_index));
target.set_content(copy, content);
copy
}
FlexVar(None) | Error => copy,
RecursionVar {
opt_name,
@ -3840,8 +3853,10 @@ fn deep_copy_var_to_help<'a>(
copy
}
RigidVar(name) => {
target.set(copy, make_descriptor(FlexVar(Some(name))));
RigidVar(name_index) => {
let name = source.field_names[name_index.index as usize].clone();
let new_name_index = SubsIndex::push_new(&mut target.field_names, name);
target.set(copy, make_descriptor(FlexVar(Some(new_name_index))));
copy
}
@ -3888,6 +3903,425 @@ fn deep_copy_var_to_help<'a>(
}
}
/// Bookkeeping to correctly move these types into the target subs
///
/// We track the rigid/flex variables because they need to be part of a `Let`
/// constraint, introducing these variables at the right rank
///
/// We also track `registered` variables. An import should be equivalent to
/// a call to `type_to_var` (solve.rs). The `copy_import_to` function puts
/// the right `Contents` into the target `Subs` at the right locations,
/// but `type_to_var` furthermore adds the variables used to store those `Content`s
/// to `Pools` at the right rank. Here we remember the variables used to store `Content`s
/// so that we can later add them to `Pools`
#[derive(Debug)]
pub struct CopiedImport {
pub variable: Variable,
pub flex: Vec<Variable>,
pub rigid: Vec<Variable>,
pub registered: Vec<Variable>,
}
struct CopyImportEnv<'a> {
visited: bumpalo::collections::Vec<'a, Variable>,
source: &'a mut Subs,
target: &'a mut Subs,
flex: Vec<Variable>,
rigid: Vec<Variable>,
registered: Vec<Variable>,
}
pub fn copy_import_to(
source: &mut Subs, // mut to set the copy
target: &mut Subs,
var: Variable,
) -> CopiedImport {
// the rank at which we introduce imports.
//
// Type checking starts at rank 1 aka toplevel. When there are rigid/flex variables introduced by a
// constraint, then these must be generalized relative to toplevel, and hence are introduced at
// rank 2.
//
// We always use: even if there are no rigids imported, introducing at rank 2 is correct
// (if slightly inefficient) because there are no rigids anyway so generalization is trivial
let rank = Rank::toplevel().next();
let mut arena = take_scratchpad();
let copied_import = {
let visited = bumpalo::collections::Vec::with_capacity_in(256, &arena);
let mut env = CopyImportEnv {
visited,
source,
target,
flex: Vec::new(),
rigid: Vec::new(),
registered: Vec::new(),
};
let copy = copy_import_to_help(&mut env, rank, var);
let CopyImportEnv {
visited,
source,
flex,
rigid,
registered,
target: _,
} = env;
// we have tracked all visited variables, and can now traverse them
// in one go (without looking at the UnificationTable) and clear the copy field
for var in visited {
let descriptor = source.get_ref_mut(var);
if descriptor.copy.is_some() {
descriptor.rank = Rank::NONE;
descriptor.mark = Mark::NONE;
descriptor.copy = OptVariable::NONE;
}
}
CopiedImport {
variable: copy,
flex,
rigid,
registered,
}
};
arena.reset();
put_scratchpad(arena);
copied_import
}
/// is this content registered (in the current pool) by type_to_variable?
/// TypeToVar skips registering for flex and rigid variables, and
/// also for the empty records and tag unions (they used the Variable::EMPTY_RECORD/...)
/// standard variables
fn is_registered(content: &Content) -> bool {
match content {
Content::FlexVar(_) | Content::RigidVar(_) => false,
Content::Structure(FlatType::EmptyRecord | FlatType::EmptyTagUnion) => false,
Content::Structure(_)
| Content::RecursionVar { .. }
| Content::Alias(_, _, _, _)
| Content::RangedNumber(_, _)
| Content::Error => true,
}
}
fn copy_import_to_help(env: &mut CopyImportEnv<'_>, max_rank: Rank, var: Variable) -> Variable {
use Content::*;
use FlatType::*;
let desc = env.source.get_without_compacting(var);
if let Some(copy) = desc.copy.into_variable() {
debug_assert!(env.target.contains(copy));
return copy;
} else if desc.rank != Rank::NONE {
// DO NOTHING, Fall through
//
// The original copy_import can do
// return var;
//
// but we cannot, because this `var` is in the source, not the target, and we
// should only return variables in the target. so, we have to create a new
// variable in the target.
}
env.visited.push(var);
let make_descriptor = |content| Descriptor {
content,
rank: max_rank,
mark: Mark::NONE,
copy: OptVariable::NONE,
};
// let copy = env.target.fresh_unnamed_flex_var();
let copy = env.target.fresh(make_descriptor(unnamed_flex_var()));
// is this content registered (in the current pool) by type_to_variable?
if is_registered(&desc.content) {
env.registered.push(copy);
}
// Link the original variable to the new variable. This lets us
// avoid making multiple copies of the variable we are instantiating.
//
// Need to do this before recursively copying to avoid looping.
env.source.modify(var, |descriptor| {
descriptor.mark = Mark::NONE;
descriptor.copy = copy.into();
});
// Now we recursively copy the content of the variable.
// We have already marked the variable as copied, so we
// will not repeat this work or crawl this variable again.
match desc.content {
Structure(flat_type) => {
let new_flat_type = match flat_type {
Apply(symbol, arguments) => {
let new_arguments = SubsSlice::reserve_into_subs(env.target, arguments.len());
for (target_index, var_index) in (new_arguments.indices()).zip(arguments) {
let var = env.source[var_index];
let copy_var = copy_import_to_help(env, max_rank, var);
env.target.variables[target_index] = copy_var;
}
Apply(symbol, new_arguments)
}
Func(arguments, closure_var, ret_var) => {
let new_ret_var = copy_import_to_help(env, max_rank, ret_var);
let new_closure_var = copy_import_to_help(env, max_rank, closure_var);
let new_arguments = SubsSlice::reserve_into_subs(env.target, arguments.len());
for (target_index, var_index) in (new_arguments.indices()).zip(arguments) {
let var = env.source[var_index];
let copy_var = copy_import_to_help(env, max_rank, var);
env.target.variables[target_index] = copy_var;
}
Func(new_arguments, new_closure_var, new_ret_var)
}
same @ EmptyRecord | same @ EmptyTagUnion | same @ Erroneous(_) => same,
Record(fields, ext_var) => {
let record_fields = {
let new_variables =
VariableSubsSlice::reserve_into_subs(env.target, fields.len());
let it = (new_variables.indices()).zip(fields.iter_variables());
for (target_index, var_index) in it {
let var = env.source[var_index];
let copy_var = copy_import_to_help(env, max_rank, var);
env.target.variables[target_index] = copy_var;
}
let field_names_start = env.target.field_names.len() as u32;
let field_types_start = env.target.record_fields.len() as u32;
let field_names = &env.source.field_names[fields.field_names().indices()];
env.target.field_names.extend(field_names.iter().cloned());
let record_fields =
&env.source.record_fields[fields.record_fields().indices()];
env.target
.record_fields
.extend(record_fields.iter().copied());
RecordFields {
length: fields.len() as _,
field_names_start,
variables_start: new_variables.start,
field_types_start,
}
};
Record(record_fields, copy_import_to_help(env, max_rank, ext_var))
}
TagUnion(tags, ext_var) => {
let new_ext = copy_import_to_help(env, max_rank, ext_var);
let new_variable_slices =
SubsSlice::reserve_variable_slices(env.target, tags.len());
let it = (new_variable_slices.indices()).zip(tags.variables());
for (target_index, index) in it {
let slice = env.source[index];
let new_variables = SubsSlice::reserve_into_subs(env.target, slice.len());
let it = (new_variables.indices()).zip(slice);
for (target_index, var_index) in it {
let var = env.source[var_index];
let copy_var = copy_import_to_help(env, max_rank, var);
env.target.variables[target_index] = copy_var;
}
env.target.variable_slices[target_index] = new_variables;
}
let new_tag_names = {
let tag_names = tags.tag_names();
let slice = &env.source.tag_names[tag_names.indices()];
let start = env.target.tag_names.len() as u32;
let length = tag_names.len() as u16;
env.target.tag_names.extend(slice.iter().cloned());
SubsSlice::new(start, length)
};
let union_tags = UnionTags::from_slices(new_tag_names, new_variable_slices);
TagUnion(union_tags, new_ext)
}
FunctionOrTagUnion(tag_name, symbol, ext_var) => {
let new_tag_name = SubsIndex::new(env.target.tag_names.len() as u32);
env.target.tag_names.push(env.source[tag_name].clone());
FunctionOrTagUnion(
new_tag_name,
symbol,
copy_import_to_help(env, max_rank, ext_var),
)
}
RecursiveTagUnion(rec_var, tags, ext_var) => {
let new_variable_slices =
SubsSlice::reserve_variable_slices(env.target, tags.len());
let it = (new_variable_slices.indices()).zip(tags.variables());
for (target_index, index) in it {
let slice = env.source[index];
let new_variables = SubsSlice::reserve_into_subs(env.target, slice.len());
let it = (new_variables.indices()).zip(slice);
for (target_index, var_index) in it {
let var = env.source[var_index];
let copy_var = copy_import_to_help(env, max_rank, var);
env.target.variables[target_index] = copy_var;
}
env.target.variable_slices[target_index] = new_variables;
}
let new_tag_names = {
let tag_names = tags.tag_names();
let slice = &env.source.tag_names[tag_names.indices()];
let start = env.target.tag_names.len() as u32;
let length = tag_names.len() as u16;
env.target.tag_names.extend(slice.iter().cloned());
SubsSlice::new(start, length)
};
let union_tags = UnionTags::from_slices(new_tag_names, new_variable_slices);
let new_ext = copy_import_to_help(env, max_rank, ext_var);
let new_rec_var = copy_import_to_help(env, max_rank, rec_var);
RecursiveTagUnion(new_rec_var, union_tags, new_ext)
}
};
env.target
.set(copy, make_descriptor(Structure(new_flat_type)));
copy
}
FlexVar(opt_name_index) => {
if let Some(name_index) = opt_name_index {
let name = env.source.field_names[name_index.index as usize].clone();
let new_name_index = SubsIndex::push_new(&mut env.target.field_names, name);
let content = FlexVar(Some(new_name_index));
env.target.set_content(copy, content);
}
env.flex.push(copy);
copy
}
Error => {
// Open question: should this return Error, or a Flex var?
env.target.set(copy, make_descriptor(Error));
copy
}
RigidVar(name_index) => {
let name = env.source.field_names[name_index.index as usize].clone();
let new_name_index = SubsIndex::push_new(&mut env.target.field_names, name);
env.target
.set(copy, make_descriptor(RigidVar(new_name_index)));
env.rigid.push(copy);
copy
}
RecursionVar {
opt_name,
structure,
} => {
let new_structure = copy_import_to_help(env, max_rank, structure);
debug_assert!((new_structure.index() as usize) < env.target.len());
env.target.set(
copy,
make_descriptor(RecursionVar {
opt_name,
structure: new_structure,
}),
);
copy
}
Alias(symbol, arguments, real_type_var, kind) => {
let new_variables =
SubsSlice::reserve_into_subs(env.target, arguments.all_variables_len as _);
for (target_index, var_index) in (new_variables.indices()).zip(arguments.variables()) {
let var = env.source[var_index];
let copy_var = copy_import_to_help(env, max_rank, var);
env.target.variables[target_index] = copy_var;
}
let new_arguments = AliasVariables {
variables_start: new_variables.start,
..arguments
};
let new_real_type_var = copy_import_to_help(env, max_rank, real_type_var);
let new_content = Alias(symbol, new_arguments, new_real_type_var, kind);
env.target.set(copy, make_descriptor(new_content));
copy
}
RangedNumber(typ, vars) => {
let new_typ = copy_import_to_help(env, max_rank, typ);
let new_vars = SubsSlice::reserve_into_subs(env.target, vars.len());
for (target_index, var_index) in (new_vars.indices()).zip(vars) {
let var = env.source[var_index];
let copy_var = copy_import_to_help(env, max_rank, var);
env.target.variables[target_index] = copy_var;
}
let new_content = RangedNumber(new_typ, new_vars);
env.target.set(copy, make_descriptor(new_content));
copy
}
}
}
fn var_contains_content_help<P>(
subs: &Subs,
var: Variable,

View File

@ -180,10 +180,12 @@ fn unify_context(subs: &mut Subs, pool: &mut Pool, ctx: Context) -> Outcome {
// println!("\n --------------- \n");
let content_1 = subs.get(ctx.first).content;
let content_2 = subs.get(ctx.second).content;
let mode = if ctx.mode.is_eq() { "~" } else { "+=" };
println!(
"{:?} {:?} ~ {:?} {:?}",
"{:?} {:?} {} {:?} {:?}",
ctx.first,
roc_types::subs::SubsFmtContent(&content_1, subs),
mode,
ctx.second,
roc_types::subs::SubsFmtContent(&content_2, subs),
);
@ -334,7 +336,13 @@ fn unify_alias(
problems.extend(merge(subs, ctx, *other_content));
}
// if problems.is_empty() { problems.extend(unify_pool(subs, pool, real_var, *other_real_var)); }
// THEORY: if two aliases or opaques have the same name and arguments, their
// real_var is the same and we don't need to check it.
// See https://github.com/rtfeldman/roc/pull/1510
//
// if problems.is_empty() && either_is_opaque {
// problems.extend(unify_pool(subs, pool, real_var, *other_real_var, ctx.mode));
// }
problems
} else {

View File

@ -20,8 +20,10 @@ roc_types = { path = "../compiler/types" }
roc_parse = { path = "../compiler/parse" }
roc_target = { path = "../compiler/roc_target" }
roc_collections = { path = "../compiler/collections" }
roc_highlight = { path = "../highlight"}
bumpalo = { version = "3.8.0", features = ["collections"] }
snafu = { version = "0.6.10", features = ["backtraces"] }
peg = "0.8.0"
[dev-dependencies]
pretty_assertions = "1.0.0"

View File

@ -1,80 +0,0 @@
use bumpalo::{collections::String as BumpString, Bump};
use roc_ast::{
ast_error::ASTResult,
lang::{self, core::def::def_to_def2::def_to_def2},
mem_pool::pool::Pool,
};
use roc_code_markup::{markup::convert::from_def2::def2_to_markup, slow_pool::SlowPool};
use roc_module::symbol::{IdentIds, Interns, ModuleId};
use roc_region::all::Region;
use roc_types::subs::VarStore;
use crate::{docs_error::DocsResult, html::mark_node_to_html};
// html is written to buf
pub fn defs_to_html<'a>(
buf: &mut BumpString<'a>,
defs: Vec<roc_parse::ast::Def<'a>>,
env_module_id: ModuleId,
interns: &mut Interns,
) -> DocsResult<()> {
let mut env_pool = Pool::with_capacity(1024);
let env_arena = Bump::new();
let mut var_store = VarStore::default();
let dep_idents = IdentIds::exposed_builtins(8);
let exposed_ident_ids = IdentIds::default();
let def_arena = Bump::new();
let mut env = lang::env::Env::new(
env_module_id,
&env_arena,
&mut env_pool,
&mut var_store,
dep_idents,
&interns.module_ids,
exposed_ident_ids,
);
let mut scope = lang::scope::Scope::new(env.home, env.pool, env.var_store);
scope.fill_scope(&env, &mut interns.all_ident_ids)?;
let region = Region::zero();
for def in defs.iter() {
write_def_to_bump_str_html(&def_arena, &mut env, &mut scope, region, def, interns, buf)?;
}
Ok(())
}
fn write_def_to_bump_str_html<'a, 'b>(
arena: &'a Bump,
env: &mut lang::env::Env<'a>,
scope: &mut lang::scope::Scope,
region: Region,
def: &'a roc_parse::ast::Def<'a>,
interns: &Interns,
buf: &mut BumpString<'b>,
) -> ASTResult<()> {
let def2 = def_to_def2(arena, env, scope, def, region);
let def2_id = env.pool.add(def2);
let mut mark_node_pool = SlowPool::default();
let def2_markup_id = def2_to_markup(
env,
env.pool.get(def2_id),
def2_id,
&mut mark_node_pool,
interns,
)?;
let def2_markup_node = mark_node_pool.get(def2_markup_id);
mark_node_to_html(def2_markup_node, &mark_node_pool, buf);
Ok(())
}

View File

@ -1,7 +1,8 @@
use peg::error::ParseError;
use roc_ast::ast_error::ASTError;
use roc_module::module_err::ModuleError;
use roc_parse::parser::SyntaxError;
use snafu::{NoneError, ResultExt, Snafu};
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
@ -18,17 +19,18 @@ pub enum DocsError {
WrapSyntaxError {
msg: String,
},
WrapPegParseError {
source: ParseError<usize>,
},
}
pub type DocsResult<T, E = DocsError> = std::result::Result<T, E>;
impl<'a> From<SyntaxError<'a>> for DocsError {
fn from(syntax_err: SyntaxError) -> Self {
let msg = format!("{:?}", syntax_err);
// hack to handle MarkError derive
let dummy_res: Result<(), NoneError> = Err(NoneError {});
dummy_res.context(WrapSyntaxError { msg }).unwrap_err()
Self::WrapSyntaxError {
msg: format!("{:?}", syntax_err),
}
}
}
@ -43,3 +45,11 @@ impl From<ModuleError> for DocsError {
Self::WrapModuleError { source: module_err }
}
}
impl From<ParseError<usize>> for DocsError {
fn from(peg_parse_err: ParseError<usize>) -> Self {
Self::WrapPegParseError {
source: peg_parse_err,
}
}
}

View File

@ -1,74 +0,0 @@
use crate::html::mark_node_to_html;
use bumpalo::{collections::String as BumpString, Bump};
use roc_ast::{
ast_error::ASTResult,
lang::{self, core::expr::expr_to_expr2::expr_to_expr2},
mem_pool::pool::Pool,
};
use roc_code_markup::{markup::convert::from_expr2::expr2_to_markup, slow_pool::SlowPool};
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds};
use roc_parse::ast::Expr;
use roc_region::all::Region;
use roc_types::subs::VarStore;
// html is written to buf
pub fn expr_to_html<'a>(
buf: &mut BumpString<'a>,
expr: Expr<'a>,
env_module_id: ModuleId,
env_module_ids: &'a ModuleIds,
interns: &Interns,
) {
let mut env_pool = Pool::with_capacity(1024);
let env_arena = Bump::new();
let mut var_store = VarStore::default();
let dep_idents = IdentIds::exposed_builtins(8);
let exposed_ident_ids = IdentIds::default();
let mut env = lang::env::Env::new(
env_module_id,
&env_arena,
&mut env_pool,
&mut var_store,
dep_idents,
env_module_ids,
exposed_ident_ids,
);
let mut scope = lang::scope::Scope::new(env.home, env.pool, env.var_store);
let region = Region::zero();
// TODO remove unwrap
write_expr_to_bump_str_html(&mut env, &mut scope, region, &expr, interns, buf).unwrap();
}
fn write_expr_to_bump_str_html<'a, 'b>(
env: &mut lang::env::Env<'a>,
scope: &mut lang::scope::Scope,
region: Region,
expr: &'a Expr,
interns: &Interns,
buf: &mut BumpString<'b>,
) -> ASTResult<()> {
let (expr2, _) = expr_to_expr2(env, scope, expr, region);
let expr2_id = env.pool.add(expr2);
let mut mark_node_pool = SlowPool::default();
let expr2_markup_id = expr2_to_markup(
env,
env.pool.get(expr2_id),
expr2_id,
&mut mark_node_pool,
interns,
0,
)?;
let expr2_markup_node = mark_node_pool.get(expr2_markup_id);
mark_node_to_html(expr2_markup_node, &mark_node_pool, buf);
Ok(())
}

View File

@ -1,12 +1,7 @@
use bumpalo::collections::String as BumpString;
use roc_code_markup::{markup::nodes::MarkupNode, slow_pool::SlowPool};
// determine appropriate css class for MarkupNode
pub fn mark_node_to_html<'a>(
mark_node: &MarkupNode,
mark_node_pool: &SlowPool,
buf: &mut BumpString<'a>,
) {
pub fn mark_node_to_html(mark_node: &MarkupNode, mark_node_pool: &SlowPool, buf: &mut String) {
let mut additional_newlines = 0;
match mark_node {
@ -31,7 +26,6 @@ pub fn mark_node_to_html<'a>(
let css_class = match syn_high_style {
Operator => "operator",
Comma => "comma",
String => "string",
FunctionName => "function-name",
FunctionArgName => "function-arg-name",
@ -46,6 +40,9 @@ pub fn mark_node_to_html<'a>(
Blank => "blank",
Comment => "comment",
DocsComment => "docs-comment",
UppercaseIdent => "uppercase-ident",
LowercaseIdent => "lowercase-ident",
Keyword => "keyword-ident",
};
write_html_to_buf(content, css_class, buf);
@ -77,7 +74,7 @@ pub fn mark_node_to_html<'a>(
}
}
fn write_html_to_buf<'a>(content: &str, css_class: &'static str, buf: &mut BumpString<'a>) {
fn write_html_to_buf(content: &str, css_class: &'static str, buf: &mut String) {
let opening_tag: String = ["<span class=\"syntax-", css_class, "\">"].concat();
buf.push_str(opening_tag.as_str());

View File

@ -1,27 +1,26 @@
extern crate pulldown_cmark;
extern crate roc_load;
use bumpalo::{collections::String as BumpString, Bump};
use def::defs_to_html;
use docs_error::DocsResult;
use expr::expr_to_html;
use bumpalo::Bump;
use docs_error::{DocsError, DocsResult};
use html::mark_node_to_html;
use roc_builtins::std::StdLib;
use roc_can::scope::Scope;
use roc_code_markup::markup::nodes::MarkupNode;
use roc_code_markup::slow_pool::SlowPool;
use roc_collections::all::MutMap;
use roc_highlight::highlight_parser::{highlight_defs, highlight_expr};
use roc_load::docs::DocEntry::DocDef;
use roc_load::docs::{DocEntry, TypeAnnotation};
use roc_load::docs::{ModuleDocumentation, RecordField};
use roc_load::file::{LoadedModule, LoadingProblem};
use roc_module::symbol::{IdentIds, Interns, ModuleId, ModuleIds};
use roc_module::symbol::{IdentIds, Interns, ModuleId};
use roc_parse::ident::{parse_ident, Ident};
use roc_parse::parser::SyntaxError;
use roc_parse::state::State;
use roc_region::all::{Position, Region};
use roc_region::all::Region;
use std::fs;
use std::path::{Path, PathBuf};
mod def;
mod docs_error;
mod expr;
mod html;
pub fn generate_docs_html(filenames: Vec<PathBuf>, std_lib: StdLib, build_dir: &Path) {
@ -110,46 +109,45 @@ pub fn generate_docs_html(filenames: Vec<PathBuf>, std_lib: StdLib, build_dir: &
}
// converts plain-text code to highlighted html
pub fn syntax_highlight_expr<'a>(
arena: &'a Bump,
buf: &mut BumpString<'a>,
code_str: &'a str,
env_module_id: ModuleId,
env_module_ids: &'a ModuleIds,
interns: &Interns,
) -> Result<String, SyntaxError<'a>> {
pub fn syntax_highlight_expr(code_str: &str) -> DocsResult<String> {
let trimmed_code_str = code_str.trim_end().trim();
let state = State::new(trimmed_code_str.as_bytes());
let mut mark_node_pool = SlowPool::default();
match roc_parse::expr::test_parse_expr(0, arena, state) {
Ok(loc_expr) => {
expr_to_html(buf, loc_expr.value, env_module_id, env_module_ids, interns);
let mut highlighted_html_str = String::new();
Ok(buf.to_string())
match highlight_expr(trimmed_code_str, &mut mark_node_pool) {
Ok(root_mark_node_id) => {
let root_mark_node = mark_node_pool.get(root_mark_node_id);
mark_node_to_html(root_mark_node, &mark_node_pool, &mut highlighted_html_str);
Ok(highlighted_html_str)
}
Err(fail) => Err(SyntaxError::Expr(fail, Position::default())),
Err(err) => Err(DocsError::from(err)),
}
}
// converts plain-text code to highlighted html
pub fn syntax_highlight_top_level_defs<'a>(
arena: &'a Bump,
buf: &mut BumpString<'a>,
code_str: &'a str,
env_module_id: ModuleId,
interns: &mut Interns,
) -> DocsResult<String> {
pub fn syntax_highlight_top_level_defs(code_str: &str) -> DocsResult<String> {
let trimmed_code_str = code_str.trim_end().trim();
match roc_parse::test_helpers::parse_defs_with(arena, trimmed_code_str) {
Ok(vec_loc_def) => {
let vec_def = vec_loc_def.iter().map(|loc| loc.value).collect();
let mut mark_node_pool = SlowPool::default();
defs_to_html(buf, vec_def, env_module_id, interns)?;
let mut highlighted_html_str = String::new();
Ok(buf.to_string())
match highlight_defs(trimmed_code_str, &mut mark_node_pool) {
Ok(mark_node_id_vec) => {
let def_mark_nodes: Vec<&MarkupNode> = mark_node_id_vec
.iter()
.map(|mn_id| mark_node_pool.get(*mn_id))
.collect();
for mn in def_mark_nodes {
mark_node_to_html(mn, &mark_node_pool, &mut highlighted_html_str)
}
Ok(highlighted_html_str)
}
Err(err) => Err(err.into()),
Err(err) => Err(DocsError::from(err)),
}
}
@ -426,7 +424,7 @@ pub fn load_modules_for_files(filenames: Vec<PathBuf>, std_lib: StdLib) -> Vec<L
filename,
&std_lib,
src_dir.as_path(),
MutMap::default(),
Default::default(),
roc_target::TargetInfo::default_x86_64(), // This is just type-checking for docs, so "target" doesn't matter
) {
Ok(loaded) => modules.push(loaded),
@ -957,17 +955,9 @@ fn markdown_to_html(
(0, 0)
}
Event::Text(CowStr::Borrowed(code_str)) if expecting_code_block => {
let code_block_arena = Bump::new();
let mut code_block_buf = BumpString::new_in(&code_block_arena);
match syntax_highlight_expr(
&code_block_arena,
&mut code_block_buf,
code_str,
loaded_module.module_id,
&loaded_module.interns.module_ids,
&loaded_module.interns
code_str
)
{
Ok(highlighted_code_str) => {

View File

@ -1,38 +1,17 @@
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
extern crate indoc;
// Keep this around until the commented out tests can be enabled again.
/*#[macro_use]
extern crate indoc;*/
#[cfg(test)]
mod insert_doc_syntax_highlighting {
use std::{fs::File, io::Write, path::PathBuf};
use bumpalo::{collections::String as BumpString, Bump};
use roc_ast::module::load_module;
use roc_docs::{syntax_highlight_expr, syntax_highlight_top_level_defs};
use roc_load::file::LoadedModule;
use tempfile::tempdir;
use uuid::Uuid;
fn expect_html(code_str: &str, want: &str, use_expr: bool) {
let mut loaded_module = if use_expr {
make_mock_module("")
} else {
make_mock_module(code_str)
};
let code_block_arena = Bump::new();
let mut code_block_buf = BumpString::new_in(&code_block_arena);
if use_expr {
match syntax_highlight_expr(
&code_block_arena,
&mut code_block_buf,
code_str,
loaded_module.module_id,
&loaded_module.interns.module_ids,
&loaded_module.interns,
) {
match syntax_highlight_expr(code_str) {
Ok(highlighted_code_str) => {
assert_eq!(highlighted_code_str, want);
}
@ -41,13 +20,7 @@ mod insert_doc_syntax_highlighting {
}
};
} else {
match syntax_highlight_top_level_defs(
&code_block_arena,
&mut code_block_buf,
code_str,
loaded_module.module_id,
&mut loaded_module.interns,
) {
match syntax_highlight_top_level_defs(code_str) {
Ok(highlighted_code_str) => {
assert_eq!(highlighted_code_str, want);
}
@ -58,36 +31,6 @@ mod insert_doc_syntax_highlighting {
}
}
pub const HELLO_WORLD: &str = r#"interface Test exposes [ ] imports [ ]
main = "Hello, world!"
"#;
fn make_mock_module(code_str: &str) -> LoadedModule {
let temp_dir = tempdir().expect("Failed to create temporary directory for test.");
let temp_file_path_buf =
PathBuf::from([Uuid::new_v4().to_string(), ".roc".to_string()].join(""));
let temp_file_full_path = temp_dir.path().join(temp_file_path_buf);
let mut file = File::create(temp_file_full_path.clone()).unwrap_or_else(|_| {
panic!(
"Failed to create temporary file for path {:?}",
temp_file_full_path
)
});
let mut full_code_str = HELLO_WORLD.to_owned();
full_code_str.push_str("\n\n");
full_code_str.push_str(code_str);
writeln!(file, "{}", full_code_str)
.unwrap_or_else(|_| panic!("Failed to write {:?} to file: {:?}", HELLO_WORLD, file));
load_module(&temp_file_full_path)
}
fn expect_html_expr(code_str: &str, want: &str) {
expect_html(code_str, want, true)
}
@ -101,7 +44,9 @@ main = "Hello, world!"
expect_html_expr("2", r#"<span class="syntax-number">2</span>"#);
}
#[test]
// These tests have been commented out due to introduction of a new syntax highlighting approach.
// You can make these tests work by following the instructions at the top of this file here: roc/highlight/src/highlight_parser.rs
/*#[test]
fn string_expr() {
expect_html_expr(r#""abc""#, r#"<span class="syntax-string">"abc"</span>"#);
}
@ -144,10 +89,18 @@ main = "Hello, world!"
r#"{ a: { bB: "WoRlD" } }"#,
"<span class=\"syntax-bracket\">{ </span><span class=\"syntax-recordfield\">a</span><span class=\"syntax-operator\">: </span><span class=\"syntax-bracket\">{ </span><span class=\"syntax-recordfield\">bB</span><span class=\"syntax-operator\">: </span><span class=\"syntax-string\">\"WoRlD\"</span><span class=\"syntax-bracket\"> }</span><span class=\"syntax-bracket\"> }</span>",
);
}
}*/
#[test]
fn top_level_def_value() {
fn top_level_def_val_num() {
expect_html_def(
r#"myVal = 0"#,
"<span class=\"syntax-lowercase-ident\">myVal</span><span class=\"syntax-operator\"> = </span><span class=\"syntax-number\">0</span>\n\n",
);
}
/*#[test]
fn top_level_def_val_str() {
expect_html_def(
r#"myVal = "Hello, World!""#,
"<span class=\"syntax-value\">myVal</span><span class=\"syntax-operator\"> = </span><span class=\"syntax-string\">\"Hello, World!\"</span>\n\n\n",
@ -198,7 +151,7 @@ main = "Hello, world!"
),
"<span class=\"syntax-comment\"># COMMENT</span>\n<span class=\"syntax-value\">myVal</span><span class=\"syntax-operator\"> = </span><span class=\"syntax-string\">\"Hello, World!\"</span>\n\n\n\n\n",
);
}
}*/
// TODO see issue #2134
/*#[test]

View File

@ -9,6 +9,7 @@ use crate::ui::text::text_pos::TextPos;
use crate::ui::ui_error::{LineInsertionFailed, OutOfBounds, UIResult};
use crate::ui::util::{slice_get, slice_get_mut};
use roc_ast::lang::core::ast::ASTNodeId;
use roc_code_markup::markup::mark_id_ast_id_map::MarkIdAstIdMap;
use roc_code_markup::markup::nodes::get_root_mark_node_id;
use roc_code_markup::slow_pool::MarkNodeId;
use roc_code_markup::slow_pool::SlowPool;
@ -210,18 +211,23 @@ impl GridNodeMap {
ed_model: &EdModel,
) -> EdResult<(TextPos, TextPos, ASTNodeId, MarkNodeId)> {
let line = slice_get(caret_pos.line, &self.lines)?;
let node_id = slice_get(caret_pos.column, line)?;
let node = ed_model.mark_node_pool.get(*node_id);
let node_id = *slice_get(caret_pos.column, line)?;
let node = ed_model.mark_node_pool.get(node_id);
if node.is_nested() {
let (start_pos, end_pos) = self.get_nested_start_end_pos(*node_id, ed_model)?;
let (start_pos, end_pos) = self.get_nested_start_end_pos(node_id, ed_model)?;
Ok((start_pos, end_pos, node.get_ast_node_id(), *node_id))
Ok((
start_pos,
end_pos,
ed_model.mark_id_ast_id_map.get(node_id)?,
node_id,
))
} else {
let (first_node_index, last_node_index) = first_last_index_of(*node_id, line)?;
let (first_node_index, last_node_index) = first_last_index_of(node_id, line)?;
let curr_node_id = slice_get(first_node_index, line)?;
let curr_ast_node_id = ed_model.mark_node_pool.get(*curr_node_id).get_ast_node_id();
let curr_node_id = *slice_get(first_node_index, line)?;
let curr_ast_node_id = ed_model.mark_id_ast_id_map.get(curr_node_id)?;
let mut expr_start_index = first_node_index;
let mut expr_end_index = last_node_index;
@ -230,11 +236,8 @@ impl GridNodeMap {
let mut pos_extra_subtract = 0;
for i in (0..first_node_index).rev() {
let prev_pos_node_id = slice_get(i, line)?;
let prev_ast_node_id = ed_model
.mark_node_pool
.get(*prev_pos_node_id)
.get_ast_node_id();
let prev_pos_node_id = *slice_get(i, line)?;
let prev_ast_node_id = ed_model.mark_id_ast_id_map.get(prev_pos_node_id)?;
if prev_ast_node_id == curr_ast_node_id {
if pos_extra_subtract > 0 {
@ -253,10 +256,7 @@ impl GridNodeMap {
for i in last_node_index..line.len() {
let next_pos_node_id = slice_get(i, line)?;
let next_ast_node_id = ed_model
.mark_node_pool
.get(*next_pos_node_id)
.get_ast_node_id();
let next_ast_node_id = ed_model.mark_id_ast_id_map.get(*next_pos_node_id)?;
if next_ast_node_id == curr_ast_node_id {
if pos_extra_add > 0 {
@ -270,8 +270,11 @@ impl GridNodeMap {
}
}
let correct_mark_node_id =
GridNodeMap::get_top_node_with_expr_id(*curr_node_id, &ed_model.mark_node_pool);
let correct_mark_node_id = GridNodeMap::get_top_node_with_expr_id(
curr_node_id,
&ed_model.mark_node_pool,
&ed_model.mark_id_ast_id_map,
)?;
Ok((
TextPos {
@ -293,19 +296,18 @@ impl GridNodeMap {
fn get_top_node_with_expr_id(
curr_node_id: MarkNodeId,
mark_node_pool: &SlowPool,
) -> MarkNodeId {
mark_id_ast_id_map: &MarkIdAstIdMap,
) -> EdResult<MarkNodeId> {
let curr_node = mark_node_pool.get(curr_node_id);
if let Some(parent_id) = curr_node.get_parent_id_opt() {
let parent = mark_node_pool.get(parent_id);
if parent.get_ast_node_id() == curr_node.get_ast_node_id() {
parent_id
if mark_id_ast_id_map.get(parent_id)? == mark_id_ast_id_map.get(curr_node_id)? {
Ok(parent_id)
} else {
curr_node_id
Ok(curr_node_id)
}
} else {
curr_node_id
Ok(curr_node_id)
}
}
@ -388,6 +390,7 @@ impl GridNodeMap {
&self,
line_nr: usize,
mark_node_pool: &SlowPool,
mark_id_ast_id_map: &MarkIdAstIdMap,
) -> EdResult<MarkNodeId> {
for curr_line_nr in (0..line_nr).rev() {
let first_col_pos = TextPos {
@ -399,7 +402,7 @@ impl GridNodeMap {
let mark_node_id = self.get_id_at_row_col(first_col_pos)?;
let root_mark_node_id = get_root_mark_node_id(mark_node_id, mark_node_pool);
let ast_node_id = mark_node_pool.get(root_mark_node_id).get_ast_node_id();
let ast_node_id = mark_id_ast_id_map.get(root_mark_node_id)?;
if let ASTNodeId::ADefId(_) = ast_node_id {
return Ok(root_mark_node_id);

View File

@ -1,4 +1,5 @@
use roc_ast::lang::core::def::def2::Def2;
use roc_code_markup::markup::common_nodes::NEW_LINES_AFTER_DEF;
use crate::editor::ed_error::EdResult;
use crate::editor::mvc::app_update::InputOutcome;
@ -21,38 +22,33 @@ pub fn break_line(ed_model: &mut EdModel) -> EdResult<InputOutcome> {
column: caret_pos.column - 1,
})
{
let new_blank_line_nr = caret_line_nr + 3;
let new_blank_line_nr = caret_line_nr + NEW_LINES_AFTER_DEF;
// if there already is a blank line at new_blank_line_nr just move the caret there, don't add extra lines
// safe unwrap, we already checked the nr_of_lines
if !(ed_model.code_lines.nr_of_lines() >= new_blank_line_nr
&& ed_model.code_lines.line_len(new_blank_line_nr).unwrap() == 0)
{
// two blank lines between top level definitions
EdModel::insert_empty_line(caret_line_nr + 1, &mut ed_model.grid_node_map)?;
EdModel::insert_empty_line(caret_line_nr + 2, &mut ed_model.grid_node_map)?;
// third "empty" line will be filled by the blank
EdModel::insert_empty_line(caret_line_nr + 3, &mut ed_model.grid_node_map)?;
for i in 1..=NEW_LINES_AFTER_DEF {
EdModel::insert_empty_line(caret_line_nr + i, &mut ed_model.grid_node_map)?;
}
insert_new_blank(ed_model, caret_pos.line + 3)?;
insert_new_blank(ed_model, caret_pos.line + NEW_LINES_AFTER_DEF + 1)?;
}
}
}
ed_model.simple_move_carets_down(3); // two blank lines between top level definitions
ed_model.simple_move_carets_down(NEW_LINES_AFTER_DEF); // one blank lines between top level definitions
Ok(InputOutcome::Accepted)
}
pub fn insert_new_blank(ed_model: &mut EdModel, insert_on_line_nr: usize) -> EdResult<()> {
println!(
"{}",
ed_model.module.ast.ast_to_string(ed_model.module.env.pool)
);
// find position of the previous ASTNode to figure out where to add this new Blank ASTNode
let def_mark_node_id = ed_model
.grid_node_map
.get_def_mark_node_id_before_line(insert_on_line_nr, &ed_model.mark_node_pool)?;
let def_mark_node_id = ed_model.grid_node_map.get_def_mark_node_id_before_line(
insert_on_line_nr,
&ed_model.mark_node_pool,
&ed_model.mark_id_ast_id_map,
)?;
let new_line_blank = Def2::Blank;
let new_line_blank_id = ed_model.module.env.pool.add(new_line_blank);

View File

@ -16,6 +16,7 @@ use roc_ast::lang::env::Env;
use roc_ast::mem_pool::pool_str::PoolStr;
use roc_ast::parse::parse_ast;
use roc_code_markup::markup::convert::from_ast::ast_to_mark_nodes;
use roc_code_markup::markup::mark_id_ast_id_map::MarkIdAstIdMap;
use roc_code_markup::markup::nodes;
use roc_code_markup::slow_pool::{MarkNodeId, SlowPool};
use roc_load::file::LoadedModule;
@ -31,6 +32,7 @@ pub struct EdModel<'a> {
pub grid_node_map: GridNodeMap, // allows us to map window coordinates to MarkNodeId's
pub markup_ids: Vec<MarkNodeId>, // one root node for every top level definition
pub mark_node_pool: SlowPool, // all MarkupNodes for this file are saved into this pool and can be retrieved using their MarkNodeId
pub mark_id_ast_id_map: MarkIdAstIdMap, // To find the ASTNode that is represented by a MarkNode
pub glyph_dim_rect_opt: Option<Rect>, // represents the width and height of single monospace glyph(char)
pub has_focus: bool,
pub caret_w_select_vec: NonEmpty<(CaretWSelect, Option<MarkNodeId>)>, // the editor supports multiple carets/cursors and multiple selections
@ -64,7 +66,7 @@ pub fn init_model<'a>(
let mut mark_node_pool = SlowPool::default();
let markup_ids = if code_str.is_empty() {
let (markup_ids, mark_id_ast_id_map) = if code_str.is_empty() {
EmptyCodeString {}.fail()
} else {
Ok(ast_to_mark_nodes(
@ -107,6 +109,7 @@ pub fn init_model<'a>(
grid_node_map,
markup_ids,
mark_node_pool,
mark_id_ast_id_map,
glyph_dim_rect_opt: None,
has_focus: true,
caret_w_select_vec: NonEmpty::new((caret, None)),
@ -160,7 +163,12 @@ impl<'a> EdModel<'a> {
if let Some(parent_id) = curr_mark_node.get_parent_id_opt() {
let parent = self.mark_node_pool.get(parent_id);
Ok(parent.get_child_indices(curr_mark_node_id, &self.mark_node_pool)?)
let ast_node_id = self.mark_id_ast_id_map.get(curr_mark_node_id)?;
Ok(parent.get_child_indices(
curr_mark_node_id,
ast_node_id,
&self.mark_id_ast_id_map,
)?)
} else {
MissingParent {
node_id: curr_mark_node_id,
@ -212,7 +220,7 @@ impl<'a> EdModule<'a> {
pub mod test_ed_model {
use crate::editor::ed_error::EdResult;
use crate::editor::mvc::ed_model;
use crate::editor::resources::strings::{HELLO_WORLD, PLATFORM_STR};
use crate::editor::resources::strings::{nr_hello_world_lines, HELLO_WORLD, PLATFORM_STR};
use crate::ui::text::caret_w_select::test_caret_w_select::convert_dsl_to_selection;
use crate::ui::text::caret_w_select::test_caret_w_select::convert_selection_to_dsl;
use crate::ui::text::caret_w_select::CaretPos;
@ -331,10 +339,9 @@ pub mod test_ed_model {
)?;
// adjust caret for header and main function
let nr_hello_world_lines = HELLO_WORLD.matches('\n').count() - 1;
let caret_w_select = convert_dsl_to_selection(&code_lines)?;
let adjusted_caret_pos = TextPos {
line: caret_w_select.caret_pos.line + nr_hello_world_lines,
line: caret_w_select.caret_pos.line + nr_hello_world_lines(),
column: caret_w_select.caret_pos.column,
};

View File

@ -377,8 +377,7 @@ impl<'a> EdModel<'a> {
let expr2_level_mark_node = self.mark_node_pool.get(selected_block.mark_node_id);
if let Some(parent_id) = expr2_level_mark_node.get_parent_id_opt() {
let parent_mark_node = self.mark_node_pool.get(parent_id);
let ast_node_id = parent_mark_node.get_ast_node_id();
let ast_node_id = self.mark_id_ast_id_map.get(parent_id)?;
let (expr_start_pos, expr_end_pos) = self
.grid_node_map
@ -568,7 +567,6 @@ impl<'a> EdModel<'a> {
let newlines_at_end = expr2_level_mark_node.get_newlines_at_end();
let blank_replacement = MarkupNode::Blank {
ast_node_id: sel_block.ast_node_id,
attributes: Attributes::default(),
parent_id_opt: expr2_level_mark_node.get_parent_id_opt(),
newlines_at_end,
@ -658,13 +656,16 @@ impl<'a> EdModel<'a> {
fn post_process_ast_update(&mut self) -> EdResult<()> {
//dbg!("{}",self.module.ast.ast_to_string(self.module.env.pool));
self.markup_ids = ast_to_mark_nodes(
let markup_ids_tup = ast_to_mark_nodes(
&mut self.module.env,
&self.module.ast,
&mut self.mark_node_pool,
&self.loaded_module.interns,
)?;
self.markup_ids = markup_ids_tup.0;
self.mark_id_ast_id_map = markup_ids_tup.1;
self.code_lines = CodeLines::from_str(&nodes::mark_nodes_to_string(
&self.markup_ids,
&self.mark_node_pool,
@ -839,7 +840,7 @@ pub fn get_node_context<'a>(ed_model: &'a EdModel) -> EdResult<NodeContext<'a>>
.get_id_at_row_col(ed_model.get_caret())?;
let curr_mark_node = ed_model.mark_node_pool.get(curr_mark_node_id);
let parent_id_opt = curr_mark_node.get_parent_id_opt();
let ast_node_id = curr_mark_node.get_ast_node_id();
let ast_node_id = ed_model.mark_id_ast_id_map.get(curr_mark_node_id)?;
Ok(NodeContext {
old_caret_pos,
@ -1002,10 +1003,7 @@ pub fn handle_new_char_expr(
match expr_ref {
Expr2::SmallInt { .. } => update_int(ed_model, curr_mark_node_id, ch)?,
_ => {
let prev_ast_node_id = ed_model
.mark_node_pool
.get(prev_mark_node_id)
.get_ast_node_id();
let prev_ast_node_id = ed_model.mark_id_ast_id_map.get(prev_mark_node_id)?;
match prev_ast_node_id {
ASTNodeId::ADefId(_) => InputOutcome::Ignored,
@ -1026,10 +1024,7 @@ pub fn handle_new_char_expr(
let mark_parent_id_opt = curr_mark_node.get_parent_id_opt();
if let Some(mark_parent_id) = mark_parent_id_opt {
let parent_ast_id = ed_model
.mark_node_pool
.get(mark_parent_id)
.get_ast_node_id();
let parent_ast_id = ed_model.mark_id_ast_id_map.get(mark_parent_id)?;
match parent_ast_id {
ASTNodeId::ADefId(_) => InputOutcome::Ignored,
@ -1047,10 +1042,7 @@ pub fn handle_new_char_expr(
let mark_parent_id_opt = curr_mark_node.get_parent_id_opt();
if let Some(mark_parent_id) = mark_parent_id_opt {
let parent_ast_id = ed_model
.mark_node_pool
.get(mark_parent_id)
.get_ast_node_id();
let parent_ast_id = ed_model.mark_id_ast_id_map.get(mark_parent_id)?;
match parent_ast_id {
ASTNodeId::ADefId(_) => InputOutcome::Ignored,
@ -1215,8 +1207,7 @@ pub fn handle_new_char(received_char: &char, ed_model: &mut EdModel) -> EdResult
let outcome =
if ed_model.node_exists_at_caret() {
let curr_mark_node_id = ed_model.get_curr_mark_node_id()?;
let curr_mark_node = ed_model.mark_node_pool.get(curr_mark_node_id);
let ast_node_id = curr_mark_node.get_ast_node_id();
let ast_node_id = ed_model.mark_id_ast_id_map.get(curr_mark_node_id)?;
match ast_node_id {
ASTNodeId::ADefId(def_id) => {
@ -1233,9 +1224,9 @@ pub fn handle_new_char(received_char: &char, ed_model: &mut EdModel) -> EdResult
} else {
let prev_mark_node_id_opt = ed_model.get_prev_mark_node_id()?;
if let Some(prev_mark_node_id) = prev_mark_node_id_opt {
let prev_mark_node = ed_model.mark_node_pool.get(prev_mark_node_id);
let prev_ast_node = ed_model.module.env.pool.get(prev_mark_node.get_ast_node_id().to_expr_id()?);
let prev_ast_node_id = ed_model.mark_id_ast_id_map.get(prev_mark_node_id)?.to_expr_id()?;
let prev_ast_node = ed_model.module.env.pool.get(prev_ast_node_id);
match prev_ast_node {
Expr2::SmallInt{ .. } => {
@ -1284,6 +1275,8 @@ pub fn handle_new_char(received_char: &char, ed_model: &mut EdModel) -> EdResult
#[cfg(test)]
pub mod test_ed_update {
use std::iter;
use crate::editor::ed_error::print_err;
use crate::editor::mvc::ed_model::test_ed_model::ed_model_from_dsl;
use crate::editor::mvc::ed_model::test_ed_model::ed_model_to_dsl;
@ -1291,13 +1284,14 @@ pub mod test_ed_update {
use crate::editor::mvc::ed_update::handle_new_char;
use crate::editor::mvc::ed_update::EdModel;
use crate::editor::mvc::ed_update::EdResult;
use crate::editor::resources::strings::HELLO_WORLD;
use crate::editor::resources::strings::nr_hello_world_lines;
use crate::ui::text::lines::SelectableLines;
use crate::ui::ui_error::UIResult;
use crate::window::keyboard_input::no_mods;
use crate::window::keyboard_input::test_modifiers::ctrl_cmd_shift;
use crate::window::keyboard_input::Modifiers;
use bumpalo::Bump;
use roc_code_markup::markup::common_nodes::NEW_LINES_AFTER_DEF;
use roc_module::symbol::ModuleIds;
use threadpool::ThreadPool;
use winit::event::VirtualKeyCode::*;
@ -1432,8 +1426,7 @@ pub mod test_ed_update {
}
fn strip_header(lines: &mut Vec<String>) {
let nr_hello_world_lines = HELLO_WORLD.matches('\n').count() - 1;
lines.drain(0..nr_hello_world_lines);
lines.drain(0..nr_hello_world_lines());
}
pub fn assert_insert_seq_nls(
@ -1492,8 +1485,11 @@ pub mod test_ed_update {
// add newlines like the editor's formatting would add them
fn add_nls(lines: Vec<String>) -> Vec<String> {
let mut new_lines = lines;
//Two lines between TLD's, extra newline so the user can go to third line add new def there
new_lines.append(&mut vec!["".to_owned(), "".to_owned(), "".to_owned()]);
//line(s) between TLD's, extra newline so the user can go to last line add new def there
let mut extra_empty_lines = iter::repeat("".to_owned())
.take(NEW_LINES_AFTER_DEF)
.collect();
new_lines.append(&mut extra_empty_lines);
new_lines
}
@ -2585,7 +2581,7 @@ pub mod test_ed_update {
fn test_enter() -> Result<(), String> {
assert_insert_seq(
ovec![""],
ovec!["ab = 5", "", "", "cd = \"good┃\"", "", "", ""],
add_nls(ovec!["ab = 5", "", "cd = \"good┃\""]),
"ab🡲🡲🡲5\rcd🡲🡲🡲\"good",
)?;

View File

@ -65,7 +65,7 @@ pub fn update_int(
.get_offset_to_node_id(old_caret_pos, int_mark_node_id)?;
let int_mark_node = ed_model.mark_node_pool.get_mut(int_mark_node_id);
let int_ast_node_id = int_mark_node.get_ast_node_id();
let int_ast_node_id = ed_model.mark_id_ast_id_map.get(int_mark_node_id)?;
let content_str_mut = int_mark_node.get_content_mut()?;

View File

@ -58,9 +58,7 @@ pub fn add_blank_child(
let trip_result: EdResult<(ExprId, ExprId, MarkNodeId)> = if let Some(parent_id) = parent_id_opt
{
let parent = ed_model.mark_node_pool.get(parent_id);
let list_ast_node_id = parent.get_ast_node_id();
let list_ast_node_id = ed_model.mark_id_ast_id_map.get(parent_id)?;
let list_ast_node = ed_model.module.env.pool.get(list_ast_node_id.to_expr_id()?);
match list_ast_node {

View File

@ -91,7 +91,6 @@ pub fn update_empty_record(
let record_field_node = MarkupNode::Text {
content: new_input.to_owned(),
ast_node_id,
syn_high_style: HighlightStyle::RecordField,
attributes: Attributes::default(),
parent_id_opt,
@ -149,9 +148,9 @@ pub fn update_record_colon(
let prev_mark_node_id_opt = ed_model.get_prev_mark_node_id()?;
if let Some(prev_mark_node_id) = prev_mark_node_id_opt {
let prev_mark_node = ed_model.mark_node_pool.get(prev_mark_node_id);
let prev_mn_ast_node_id = ed_model.mark_id_ast_id_map.get(prev_mark_node_id)?;
match prev_mark_node.get_ast_node_id() {
match prev_mn_ast_node_id {
ASTNodeId::ADefId(_) => Ok(InputOutcome::Ignored),
ASTNodeId::AExprId(prev_expr_id) => {
let prev_expr = ed_model.module.env.pool.get(prev_expr_id);

View File

@ -105,7 +105,6 @@ fn markup_to_wgpu_helper<'a>(
match markup_node {
MarkupNode::Nested {
ast_node_id: _,
children_ids,
parent_id_opt: _,
newlines_at_end,
@ -131,7 +130,6 @@ fn markup_to_wgpu_helper<'a>(
}
MarkupNode::Text {
content,
ast_node_id: _,
syn_high_style,
attributes,
parent_id_opt: _,
@ -183,7 +181,6 @@ fn markup_to_wgpu_helper<'a>(
wgpu_texts.push(glyph_text);
}
MarkupNode::Blank {
ast_node_id: _,
attributes: _,
parent_id_opt: _,
newlines_at_end,

View File

@ -45,9 +45,13 @@ pub fn build_debug_graphics(
.with_color(colors::to_slice(from_hsb(266, 31, 96)))
.with_scale(config.debug_font_size);
let mark_node_pool_text = glyph_brush::OwnedText::new(format!("{}", ed_model.mark_node_pool))
.with_color(colors::to_slice(from_hsb(110, 45, 82)))
.with_scale(config.debug_font_size);
let mark_node_pool_text = glyph_brush::OwnedText::new(
ed_model
.mark_node_pool
.debug_string(&ed_model.mark_id_ast_id_map),
)
.with_color(colors::to_slice(from_hsb(110, 45, 82)))
.with_scale(config.debug_font_size);
let mut ast_node_text_str = "AST:\n".to_owned();

View File

@ -23,9 +23,12 @@ app "test-app"
main = "Hello, world!"
"#;
pub fn nr_hello_world_lines() -> usize {
HELLO_WORLD.matches('\n').count() - 1
}
pub const PLATFORM_STR: &str = r#"
platform "test-platform"
requires {} { main : Str }

View File

@ -1,5 +1,5 @@
// Adapted from https://github.com/sotrh/learn-wgpu
// by Benjamin Hansen - license information can be found in the COPYRIGHT
// by Benjamin Hansen - license information can be found in the LEGAL_DETAILS
// file in the root directory of this distribution.
//
// Thank you, Benjamin!
@ -159,7 +159,7 @@ impl StagingBuffer {
}
// Taken from https://github.com/sotrh/learn-wgpu
// by Benjamin Hansen - license information can be found in the COPYRIGHT
// by Benjamin Hansen - license information can be found in the LEGAL_DETAILS
// file in the root directory of this distribution.
//
// Thank you, Benjamin!

View File

@ -1,5 +1,5 @@
// Taken from https://github.com/sotrh/learn-wgpu
// by Benjamin Hansen - license information can be found in the COPYRIGHT
// by Benjamin Hansen - license information can be found in the LEGAL_DETAILS
// file in the root directory of this distribution.
//
// Thank you, Benjamin!

Some files were not shown because too many files have changed in this diff Show More