Merge remote-tracking branch 'origin/trunk' into roc-run-from-ram

Folkert 2022-05-22 13:41:00 +02:00
commit 95fce168a4
No known key found for this signature in database
GPG Key ID: 1F17F6FFD112B97C
245 changed files with 10269 additions and 3657 deletions


@ -10,6 +10,11 @@ test-gen-wasm = "test -p roc_gen_wasm -p test_gen --no-default-features --featur
rustflags = ["-Copt-level=s", "-Clto=fat"]
[env]
# Gives us the path of the workspace root for use in cargo tests without having
# to compute it per-package.
# https://github.com/rust-lang/cargo/issues/3946#issuecomment-973132993
ROC_WORKSPACE_DIR = { value = "", relative = true }
# Debug flags. Keep this up-to-date with compiler/debug_flags/src/lib.rs.
# Set = "1" to turn a debug flag on.
ROC_PRETTY_PRINT_ALIAS_CONTENTS = "0"
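As a side note on the `ROC_WORKSPACE_DIR` entry above: Cargo's `[env]` table injects the variable into the processes it spawns, and `relative = true` resolves the empty value against the config directory, so a test can read the workspace root at runtime. A minimal sketch (illustrative only, not part of this commit):

```
// Minimal sketch, assuming the [env] entry above is in effect when
// `cargo test` runs; the test body is illustrative and hypothetical.
#[test]
fn workspace_dir_points_at_repo_root() {
    let root = std::env::var("ROC_WORKSPACE_DIR").expect("set via .cargo/config.toml [env]");
    // The workspace root should contain the top-level Cargo.toml.
    assert!(std::path::Path::new(&root).join("Cargo.toml").exists());
}
```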


@ -80,3 +80,4 @@ Kas Buunk <kasbuunk@icloud.com>
Oskar Hahn <mail@oshahn.de>
Nuno Ferreira <nunogcferreira@gmail.com>
Mfon Eti-mfon <mfonetimfon@gmail.com>
Drake Bennion <drake.bennion@gmail.com>


@ -50,14 +50,47 @@ If you plan on using `nix-shell` regularly, check out [direnv](https://direnv.ne
The editor is a :construction:WIP:construction: and not yet ready to replace your favorite editor, although if you want to try it out on nix, read on.
`cargo run edit` should work from NixOS, if you use a nix-shell from inside another OS, follow the instructions below.
#### Nvidia GPU
#### from nix flake
Running the editor may fail when using the classic nix-shell; we recommend using the nix flake instead, see [enabling nix flakes](https://nixos.wiki/wiki/Flakes).
Start a nix shell using `nix develop` and follow the instructions below for your graphics configuration.
##### Nvidia GPU
```
nix run --override-input nixpkgs nixpkgs/nixos-21.11 --impure github:guibou/nixGL#nixVulkanNvidia -- cargo run edit
```
If you get an error like:
```
error: unable to execute '/nix/store/qk6...wjla-nixVulkanNvidia-470.103.01/bin/nixVulkanNvidia': No such file or directory
```
The Intel command below should work instead:
```
nix run --override-input nixpkgs nixpkgs/nixos-21.11 --impure github:guibou/nixGL#nixVulkanIntel -- cargo run edit
```
##### Integrated Intel Graphics
```
nix run --override-input nixpkgs nixpkgs/nixos-21.11 --impure github:guibou/nixGL#nixVulkanIntel -- cargo run edit
```
##### Other configs
Check the [nixGL repo](https://github.com/guibou/nixGL) for other graphics configurations. Feel free to ask us for help if you get stuck.
#### using a classic nix-shell
##### Nvidia GPU
Outside of a nix shell, execute the following:
```
nix-channel --add https://github.com/guibou/nixGL/archive/main.tar.gz nixgl && nix-channel --update
nix-env -iA nixgl.auto.nixVulkanNvidia
```
Running the editor does not work with `nix-shell --pure`.
Running the editor does not work with `nix-shell --pure`, instead run:
```
nix-shell
```
@ -66,25 +99,11 @@ nix-shell
nixVulkanNvidia-460.91.03 cargo run edit
```
#### Integrated Intel Graphics
##### Integrated Intel Graphics
:exclamation: ** Our Nix setup currently cannot run the editor with integrated intel graphics, see #1856 ** :exclamation:
nix-shell does not work here, use the flake instead; check the section "Integrated Intel Graphics" under "from nix flake".
Outside of a nix shell, run:
```bash
git clone https://github.com/guibou/nixGL
cd nixGL
nix-env -f ./ -iA nixVulkanIntel
```
cd to the roc repo, and run (without --pure):
```
nix-shell
nixVulkanIntel cargo run edit
```
#### Other configs
##### Other configs
Check the [nixGL repo](https://github.com/guibou/nixGL) for other graphics configurations.


@ -14,7 +14,7 @@ Most contributors execute the following commands before pushing their code:
```
cargo test
cargo fmt --all -- --check
cargo clippy --workspace --tests -- -D warnings
cargo clippy --workspace --tests -- --deny warnings
```
Execute `cargo fmt --all` to fix the formatting.

Cargo.lock (generated)

@ -523,6 +523,7 @@ dependencies = [
"roc_collections",
"roc_load",
"roc_module",
"roc_reporting",
"serde",
"serde-xml-rs",
"strip-ansi-escapes",
@ -1165,6 +1166,17 @@ dependencies = [
"generic-array 0.14.5",
]
[[package]]
name = "dircpy"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70b2666334bac0698c34f849a823a049800f9fe86a950cfd192e2d2a817da920"
dependencies = [
"jwalk",
"log",
"walkdir",
]
[[package]]
name = "directories-next"
version = "2.0.0"
@ -1992,6 +2004,16 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "jwalk"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "172752e853a067cbce46427de8470ddf308af7fd8ceaf9b682ef31a5021b6bb9"
dependencies = [
"crossbeam",
"rayon",
]
[[package]]
name = "khronos-egl"
version = "4.1.0"
@ -3434,6 +3456,9 @@ version = "0.1.0"
dependencies = [
"bumpalo",
"clap 3.1.17",
"cli_utils",
"ctor",
"dircpy",
"indoc",
"pretty_assertions",
"roc_builtins",
@ -3446,10 +3471,10 @@ dependencies = [
"roc_reporting",
"roc_std",
"roc_target",
"roc_test_utils",
"roc_types",
"target-lexicon",
"tempfile",
"ven_graph",
]
[[package]]
@ -4041,6 +4066,7 @@ dependencies = [
"roc_problem",
"roc_region",
"roc_solve",
"roc_std",
"roc_target",
"roc_test_utils",
"roc_types",


@ -69,7 +69,9 @@ check-clippy:
FROM +build-rust-test
RUN cargo clippy -V
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo clippy --workspace --tests -- -D warnings
cargo clippy --workspace --tests -- --deny warnings
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo clippy --workspace --tests --release -- --deny warnings
check-rustfmt:
FROM +build-rust-test
@ -83,6 +85,7 @@ check-typos:
test-rust:
FROM +build-rust-test
ENV ROC_WORKSPACE_DIR=/earthbuild
ENV RUST_BACKTRACE=1
# for race condition problem with cli test
ENV ROC_NUM_WORKERS=1


@ -3,7 +3,7 @@ name = "roc_ast"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
edition = "2021"
description = "AST as used by the editor and (soon) docs. In contrast to the compiler, these types do not keep track of a location in a file."
[dependencies]


@ -77,25 +77,28 @@ pub fn expr_to_expr2<'a>(
}
Num(string) => {
match finish_parsing_num(string) {
Ok(ParsedNumResult::UnknownNum(int, _) | ParsedNumResult::Int(int, _)) => {
Ok((
parsed,
ParsedNumResult::UnknownNum(int, _) | ParsedNumResult::Int(int, _),
)) => {
let expr = Expr2::SmallInt {
number: IntVal::I64(match int {
IntValue::U128(_) => todo!(),
IntValue::I128(n) => n as i64, // FIXME
IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
}),
var: env.var_store.fresh(),
// TODO non-hardcode
style: IntStyle::Decimal,
text: PoolStr::new(string, env.pool),
text: PoolStr::new(parsed, env.pool),
};
(expr, Output::default())
}
Ok(ParsedNumResult::Float(float, _)) => {
Ok((parsed, ParsedNumResult::Float(float, _))) => {
let expr = Expr2::Float {
number: FloatVal::F64(float),
var: env.var_store.fresh(),
text: PoolStr::new(string, env.pool),
text: PoolStr::new(parsed, env.pool),
};
(expr, Output::default())
@ -126,7 +129,7 @@ pub fn expr_to_expr2<'a>(
let expr = Expr2::SmallInt {
number: IntVal::I64(match int {
IntValue::U128(_) => todo!(),
IntValue::I128(n) => n as i64, // FIXME
IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
}),
var: env.var_store.fresh(),
// TODO non-hardcode
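The change from `n as i64` to `i128::from_ne_bytes(n) as i64` in the hunks above suggests that `IntValue::I128` now carries the integer's native-endian bytes (`[u8; 16]`) rather than an `i128` value. A standalone sketch of that conversion (the function name is made up for illustration):

```
// Illustrative only: reconstruct an i128 from its native-endian bytes,
// then truncate to i64 (the FIXME comments above note the possible overflow).
fn i128_bytes_to_i64(bytes: [u8; 16]) -> i64 {
    i128::from_ne_bytes(bytes) as i64
}
```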


@ -193,22 +193,22 @@ pub fn to_pattern2<'a>(
let problem = MalformedPatternProblem::MalformedInt;
malformed_pattern(env, problem, region)
}
Ok(ParsedNumResult::UnknownNum(int, _bound)) => {
Ok((_, ParsedNumResult::UnknownNum(int, _bound))) => {
Pattern2::NumLiteral(
env.var_store.fresh(),
match int {
IntValue::U128(_) => todo!(),
IntValue::I128(n) => n as i64, // FIXME
IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
},
)
}
Ok(ParsedNumResult::Int(int, _bound)) => {
Ok((_, ParsedNumResult::Int(int, _bound))) => {
Pattern2::IntLiteral(IntVal::I64(match int {
IntValue::U128(_) => todo!(),
IntValue::I128(n) => n as i64, // FIXME
IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
}))
}
Ok(ParsedNumResult::Float(int, _bound)) => {
Ok((_, ParsedNumResult::Float(int, _bound))) => {
Pattern2::FloatLiteral(FloatVal::F64(int))
}
},
@ -228,7 +228,7 @@ pub fn to_pattern2<'a>(
Ok((int, _bound)) => {
let int = match int {
IntValue::U128(_) => todo!(),
IntValue::I128(n) => n as i64, // FIXME
IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
};
if *is_negative {
Pattern2::IntLiteral(IntVal::I64(-int))


@ -4,7 +4,7 @@ version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/rtfeldman/roc"
edition = "2018"
edition = "2021"
description = "A CLI for roc-bindgen"
[[bin]]
@ -26,11 +26,14 @@ roc_collections = { path = "../compiler/collections" }
roc_target = { path = "../compiler/roc_target" }
roc_error_macros = { path = "../error_macros" }
bumpalo = { version = "3.8.0", features = ["collections"] }
ven_graph = { path = "../vendor/pathfinding" }
target-lexicon = "0.12.3"
clap = { version = "3.1.15", default-features = false, features = ["std", "color", "suggestions", "derive"] }
[dev-dependencies]
pretty_assertions = "1.0.0"
indoc = "1.0.3"
tempfile = "3.2.0"
indoc = "1.0.3"
cli_utils = { path = "../cli_utils" }
roc_test_utils = { path = "../test_utils" }
dircpy = "0.3.9"
ctor = "0.1.22"


@ -1,13 +1,13 @@
use std::convert::TryInto;
use crate::structs::Structs;
use crate::types::{TypeId, Types};
use crate::types::{RocTagUnion, TypeId, Types};
use crate::{enums::Enums, types::RocType};
use bumpalo::Bump;
use roc_builtins::bitcode::{FloatWidth::*, IntWidth::*};
use roc_module::ident::{Lowercase, TagName};
use roc_module::symbol::{Interns, Symbol};
use roc_mono::layout::{cmp_fields, ext_var_is_empty_tag_union, Builtin, Layout, LayoutCache};
use roc_mono::layout::{
cmp_fields, ext_var_is_empty_tag_union, Builtin, Layout, LayoutCache, UnionLayout,
};
use roc_types::subs::UnionTags;
use roc_types::{
subs::{Content, FlatType, Subs, Variable},
@ -45,8 +45,7 @@ pub fn add_type_help<'a>(
Content::FlexVar(_)
| Content::RigidVar(_)
| Content::FlexAbleVar(_, _)
| Content::RigidAbleVar(_, _)
| Content::RecursionVar { .. } => {
| Content::RigidAbleVar(_, _) => {
todo!("TODO give a nice error message for a non-concrete type being passed to the host")
}
Content::Structure(FlatType::Record(fields, ext)) => {
@ -77,29 +76,23 @@ pub fn add_type_help<'a>(
add_tag_union(env, opt_name, tags, var, types)
}
Content::Structure(FlatType::Apply(symbol, _)) => {
if symbol.is_builtin() {
match layout {
Layout::Builtin(builtin) => {
add_builtin_type(env, builtin, var, opt_name, types)
}
_ => {
unreachable!()
}
}
} else {
Content::Structure(FlatType::RecursiveTagUnion(_rec_var, tag_vars, ext_var)) => {
debug_assert!(ext_var_is_empty_tag_union(subs, *ext_var));
add_tag_union(env, opt_name, tag_vars, var, types)
}
Content::Structure(FlatType::Apply(_symbol, _)) => match layout {
Layout::Builtin(builtin) => add_builtin_type(env, builtin, var, opt_name, types),
_ => {
todo!("Handle non-builtin Apply")
}
}
},
Content::Structure(FlatType::Func(_, _, _)) => {
todo!()
}
Content::Structure(FlatType::FunctionOrTagUnion(_, _, _)) => {
todo!()
}
Content::Structure(FlatType::RecursiveTagUnion(_, _, _)) => {
todo!()
}
Content::Structure(FlatType::Erroneous(_)) => todo!(),
Content::Structure(FlatType::EmptyRecord) => todo!(),
Content::Structure(FlatType::EmptyTagUnion) => {
@ -124,6 +117,10 @@ pub fn add_type_help<'a>(
}
Content::RangedNumber(_, _) => todo!(),
Content::Error => todo!(),
Content::RecursionVar { .. } => {
// We should always skip over RecursionVars before we get here.
unreachable!()
}
}
}
@ -192,13 +189,33 @@ fn add_struct<I: IntoIterator<Item = (Lowercase, Variable)>>(
types: &mut Types,
) -> TypeId {
let subs = env.subs;
let fields_iter = fields.into_iter();
let mut sortables = bumpalo::collections::Vec::with_capacity_in(
fields_iter.size_hint().1.unwrap_or_default(),
env.arena,
);
let fields_iter = &mut fields.into_iter();
let first_field = match fields_iter.next() {
Some(field) => field,
None => {
// This is an empty record; there's no more work to do!
return types.add(RocType::Struct {
name,
fields: Vec::new(),
});
}
};
let second_field = match fields_iter.next() {
Some(field) => field,
None => {
// This is a single-field record; put it in a transparent wrapper.
let content = add_type(env, first_field.1, types);
for (label, field_var) in fields_iter {
return types.add(RocType::TransparentWrapper { name, content });
}
};
let mut sortables =
bumpalo::collections::Vec::with_capacity_in(2 + fields_iter.size_hint().0, env.arena);
for (label, field_var) in std::iter::once(first_field)
.chain(std::iter::once(second_field))
.chain(fields_iter)
{
sortables.push((
label,
field_var,
@ -220,11 +237,19 @@ fn add_struct<I: IntoIterator<Item = (Lowercase, Variable)>>(
let fields = sortables
.into_iter()
.map(|(label, field_var, field_layout)| {
(
label.to_string(),
add_type_help(env, field_layout, field_var, None, types),
)
.filter_map(|(label, field_var, field_layout)| {
let content = subs.get_content_without_compacting(field_var);
// Discard RecursionVar nodes. If we try to follow them,
// we'll end up right back here and recurse forever!
if matches!(content, Content::RecursionVar { .. }) {
None
} else {
Some((
label.to_string(),
add_type_help(env, field_layout, field_var, None, types),
))
}
})
.collect();
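For the single-field case above, `RocType::TransparentWrapper` presumably ends up as a newtype in the generated bindings. One plausible shape for a record like `{ answer : U64 }`, purely as an illustration (the attribute and derive list are assumptions, since the generated Rust code itself is not shown in this view):

```
// Hypothetical generated wrapper for a single-field record; not taken
// from this commit's generated bindings, just a sketch of the idea.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, Default, PartialEq, PartialOrd)]
pub struct MyWrapper(pub u64);
```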
@ -291,6 +316,7 @@ fn add_tag_union(
};
}
let layout = env.layout_cache.from_var(env.arena, var, subs).unwrap();
let name = match opt_name {
Some(sym) => sym.as_str(env.interns).to_string(),
None => env.enum_names.get_name(var),
@ -299,30 +325,115 @@ fn add_tag_union(
// Sort tags alphabetically by tag name
tags.sort_by(|(name1, _), (name2, _)| name1.cmp(name2));
let tags = tags
let mut tags: Vec<_> = tags
.into_iter()
.map(|(tag_name, payload_vars)| {
let payloads = payload_vars
.iter()
.map(|payload_var| add_type(env, *payload_var, types))
.collect::<Vec<TypeId>>();
match struct_fields_needed(env, payload_vars.iter().copied()) {
0 => {
// no payload
(tag_name, None)
}
1 => {
// there's 1 payload item, so it doesn't need its own
// struct - e.g. for `[ Foo Str, Bar Str ]` both of them
// can have payloads of plain old Str, no struct wrapper needed.
let payload_var = payload_vars.get(0).unwrap();
let payload_id = add_type(env, *payload_var, types);
(tag_name, payloads)
(tag_name, Some(payload_id))
}
_ => {
if matches!(layout, Layout::Union(UnionLayout::NullableUnwrapped { .. })) {
// In the specific case of a NullableUnwrapped layout, we always
// know the payload has 1 element, and we always want to
// unwrap it even though there's technically 2 vars in there (the
// other being the recursion pointer, which we store implicitly
// in the nullable pointer instead of explicitly in the struct)
debug_assert_eq!(payload_vars.len(), 2);
let payload_var = payload_vars.get(0).unwrap();
let payload_id = add_type(env, *payload_var, types);
(tag_name, Some(payload_id))
} else {
// create a struct type for the payload and save it
let struct_name = format!("{}_{}", name, tag_name); // e.g. "MyUnion_MyVariant"
let fields = payload_vars.iter().enumerate().map(|(index, payload_var)| {
(format!("f{}", index).into(), *payload_var)
});
let struct_id = add_struct(env, struct_name, fields, types);
(tag_name, Some(struct_id))
}
}
}
})
.collect();
let typ = match env.layout_cache.from_var(env.arena, var, subs).unwrap() {
Layout::Struct { .. } => {
// a single-tag union with multiple payload values, e.g. [ Foo Str Str ]
unreachable!()
let typ = match layout {
Layout::Union(union_layout) => {
use roc_mono::layout::UnionLayout::*;
match union_layout {
// A non-recursive tag union
// e.g. `Result ok err : [ Ok ok, Err err ]`
NonRecursive(_) => RocType::TagUnion(RocTagUnion::NonRecursive { name, tags }),
// A recursive tag union (general case)
// e.g. `Expr : [ Sym Str, Add Expr Expr ]`
Recursive(_) => {
todo!()
}
// A recursive tag union with just one constructor
// Optimization: No need to store a tag ID (the payload is "unwrapped")
// e.g. `RoseTree a : [ Tree a (List (RoseTree a)) ]`
NonNullableUnwrapped(_) => {
todo!()
}
// A recursive tag union that has an empty variant
// Optimization: Represent the empty variant as null pointer => no memory usage & fast comparison
// It has more than one other variant, so they need tag IDs (payloads are "wrapped")
// e.g. `FingerTree a : [ Empty, Single a, More (Some a) (FingerTree (Tuple a)) (Some a) ]`
// see also: https://youtu.be/ip92VMpf_-A?t=164
NullableWrapped { .. } => {
todo!()
}
// A recursive tag union with only two variants, where one is empty.
// Optimizations: Use null for the empty variant AND don't store a tag ID for the other variant.
// e.g. `ConsList a : [ Nil, Cons a (ConsList a) ]`
NullableUnwrapped { nullable_id, .. } => {
// NullableUnwrapped tag unions should always have exactly 2 tags.
debug_assert_eq!(tags.len(), 2);
let null_tag;
let non_null;
if nullable_id {
// If nullable_id is true, then the null tag is second, which means
// pop() will return it because it's at the end of the vec.
null_tag = tags.pop().unwrap().0;
non_null = tags.pop().unwrap();
} else {
// The null tag is first, which means the tag with the payload is second.
non_null = tags.pop().unwrap();
null_tag = tags.pop().unwrap().0;
}
let (non_null_tag, non_null_payload) = non_null;
RocType::TagUnion(RocTagUnion::NullableUnwrapped {
name,
null_tag,
non_null_tag,
non_null_payload: non_null_payload.unwrap(),
})
}
}
}
Layout::Union(_) => todo!(),
Layout::Builtin(builtin) => match builtin {
Builtin::Int(int_width) => RocType::TagUnion {
tag_bytes: int_width.stack_size().try_into().unwrap(),
Builtin::Int(_) => RocType::TagUnion(RocTagUnion::Enumeration {
name,
tags,
},
tags: tags.into_iter().map(|(tag_name, _)| tag_name).collect(),
}),
Builtin::Bool => RocType::Bool,
Builtin::Float(_)
| Builtin::Decimal
@ -331,10 +442,28 @@ fn add_tag_union(
| Builtin::Set(_)
| Builtin::List(_) => unreachable!(),
},
Layout::Boxed(_) | Layout::LambdaSet(_) | Layout::RecursivePointer => {
Layout::Struct { .. }
| Layout::Boxed(_)
| Layout::LambdaSet(_)
| Layout::RecursivePointer => {
unreachable!()
}
};
types.add(typ)
}
fn struct_fields_needed<I: IntoIterator<Item = Variable>>(env: &mut Env<'_>, vars: I) -> usize {
let subs = env.subs;
let arena = env.arena;
vars.into_iter().fold(0, |count, var| {
let layout = env.layout_cache.from_var(arena, var, subs).unwrap();
if layout.is_dropped_because_empty() {
count
} else {
count + 1
}
})
}

File diff suppressed because it is too large.


@ -73,7 +73,13 @@ pub fn load_types(
Declaration::Declare(def) => {
vec![def]
}
Declaration::DeclareRec(defs) => defs,
Declaration::DeclareRec(defs, cycle_mark) => {
if cycle_mark.is_illegal(subs) {
vec![]
} else {
defs
}
}
Declaration::Builtin(..) => {
unreachable!("Builtin decl in userspace module?")
}


@ -1,9 +1,10 @@
use core::mem::align_of;
use roc_builtins::bitcode::{FloatWidth, IntWidth};
use roc_collections::VecMap;
use roc_mono::layout::UnionLayout;
use roc_std::RocDec;
use roc_target::TargetInfo;
use ven_graph::topological_sort;
use std::convert::TryInto;
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct TypeId(usize);
@ -50,16 +51,28 @@ impl Types {
}
pub fn sorted_ids(&self) -> Vec<TypeId> {
// TODO: instead use the bitvec matrix type we use in the Roc compiler -
// it's more efficient and also would bring us one step closer to dropping
// the dependency on this topological_sort implementation!
topological_sort(self.ids(), |id| match self.deps.get(id) {
Some(dep_ids) => dep_ids.to_vec(),
None => Vec::new(),
})
.unwrap_or_else(|err| {
unreachable!("Cyclic type definitions: {:?}", err);
})
use roc_collections::{ReferenceMatrix, TopologicalSort};
let mut matrix = ReferenceMatrix::new(self.by_id.len());
for type_id in self.ids() {
for dep in self.deps.get(&type_id).iter().flat_map(|x| x.iter()) {
matrix.set_row_col(type_id.0, dep.0, true);
}
}
match matrix.topological_sort_into_groups() {
TopologicalSort::Groups { groups } => groups
.into_iter()
.flatten()
.rev()
.map(|n| TypeId(n as usize))
.collect(),
TopologicalSort::HasCycles {
groups: _,
nodes_in_cycle,
} => unreachable!("Cyclic type definitions: {:?}", nodes_in_cycle),
}
}
pub fn iter(&self) -> impl ExactSizeIterator<Item = &RocType> {
@ -116,15 +129,7 @@ pub enum RocType {
RocDict(TypeId, TypeId),
RocSet(TypeId),
RocBox(TypeId),
RecursiveTagUnion {
name: String,
tags: Vec<(String, Vec<TypeId>)>,
},
TagUnion {
tag_bytes: u8,
name: String,
tags: Vec<(String, Vec<TypeId>)>,
},
TagUnion(RocTagUnion),
Struct {
name: String,
fields: Vec<(String, TypeId)>,
@ -154,14 +159,18 @@ impl RocType {
| RocType::F32
| RocType::F64
| RocType::F128
| RocType::TagUnion(RocTagUnion::Enumeration { .. })
| RocType::RocDec => false,
RocType::RocStr
| RocType::RocList(_)
| RocType::RocDict(_, _)
| RocType::RocSet(_)
| RocType::RocBox(_)
| RocType::RecursiveTagUnion { .. } => true,
RocType::TagUnion { tags, .. } => tags
| RocType::TagUnion(RocTagUnion::NonNullableUnwrapped { .. })
| RocType::TagUnion(RocTagUnion::NullableUnwrapped { .. })
| RocType::TagUnion(RocTagUnion::NullableWrapped { .. })
| RocType::TagUnion(RocTagUnion::Recursive { .. }) => true,
RocType::TagUnion(RocTagUnion::NonRecursive { tags, .. }) => tags
.iter()
.any(|(_, payloads)| payloads.iter().any(|id| types.get(*id).has_pointer(types))),
RocType::Struct { fields, .. } => fields
@ -187,27 +196,37 @@ impl RocType {
| RocType::U64
| RocType::I128
| RocType::U128
| RocType::RocDec => false,
| RocType::RocDec
| RocType::TagUnion(RocTagUnion::Enumeration { .. }) => false,
RocType::RocList(id) | RocType::RocSet(id) | RocType::RocBox(id) => {
types.get(*id).has_float(types)
}
RocType::RocDict(key_id, val_id) => {
types.get(*key_id).has_float(types) || types.get(*val_id).has_float(types)
}
RocType::RecursiveTagUnion { tags, .. } | RocType::TagUnion { tags, .. } => tags
.iter()
.any(|(_, payloads)| payloads.iter().any(|id| types.get(*id).has_float(types))),
RocType::Struct { fields, .. } => {
fields.iter().any(|(_, id)| types.get(*id).has_float(types))
}
RocType::TransparentWrapper { content, .. } => types.get(*content).has_float(types),
RocType::TagUnion(RocTagUnion::Recursive { tags, .. })
| RocType::TagUnion(RocTagUnion::NonRecursive { tags, .. }) => tags
.iter()
.any(|(_, payloads)| payloads.iter().any(|id| types.get(*id).has_float(types))),
RocType::TagUnion(RocTagUnion::NullableWrapped { non_null_tags, .. }) => non_null_tags
.iter()
.any(|(_, _, payloads)| payloads.iter().any(|id| types.get(*id).has_float(types))),
RocType::TagUnion(RocTagUnion::NullableUnwrapped {
non_null_payload: content,
..
})
| RocType::TagUnion(RocTagUnion::NonNullableUnwrapped { content, .. })
| RocType::TransparentWrapper { content, .. } => types.get(*content).has_float(types),
}
}
/// Useful when determining whether to derive Default in a Rust type.
pub fn has_tag_union(&self, types: &Types) -> bool {
pub fn has_enumeration(&self, types: &Types) -> bool {
match self {
RocType::RecursiveTagUnion { .. } | RocType::TagUnion { .. } => true,
RocType::TagUnion { .. } => true,
RocType::RocStr
| RocType::Bool
| RocType::I8
@ -225,15 +244,107 @@ impl RocType {
| RocType::F128
| RocType::RocDec => false,
RocType::RocList(id) | RocType::RocSet(id) | RocType::RocBox(id) => {
types.get(*id).has_tag_union(types)
types.get(*id).has_enumeration(types)
}
RocType::RocDict(key_id, val_id) => {
types.get(*key_id).has_tag_union(types) || types.get(*val_id).has_tag_union(types)
types.get(*key_id).has_enumeration(types)
|| types.get(*val_id).has_enumeration(types)
}
RocType::Struct { fields, .. } => fields
.iter()
.any(|(_, id)| types.get(*id).has_tag_union(types)),
RocType::TransparentWrapper { content, .. } => types.get(*content).has_tag_union(types),
.any(|(_, id)| types.get(*id).has_enumeration(types)),
RocType::TransparentWrapper { content, .. } => {
types.get(*content).has_enumeration(types)
}
}
}
pub fn size(&self, types: &Types, target_info: TargetInfo) -> usize {
use std::mem::size_of;
match self {
RocType::Bool => size_of::<bool>(),
RocType::I8 => size_of::<i8>(),
RocType::U8 => size_of::<u8>(),
RocType::I16 => size_of::<i16>(),
RocType::U16 => size_of::<u16>(),
RocType::I32 => size_of::<i32>(),
RocType::U32 => size_of::<u32>(),
RocType::I64 => size_of::<i64>(),
RocType::U64 => size_of::<u64>(),
RocType::I128 => size_of::<roc_std::I128>(),
RocType::U128 => size_of::<roc_std::U128>(),
RocType::F32 => size_of::<f32>(),
RocType::F64 => size_of::<f64>(),
RocType::F128 => todo!(),
RocType::RocDec => size_of::<roc_std::RocDec>(),
RocType::RocStr | RocType::RocList(_) | RocType::RocDict(_, _) | RocType::RocSet(_) => {
3 * target_info.ptr_size()
}
RocType::RocBox(_) => target_info.ptr_size(),
RocType::TagUnion(tag_union) => match tag_union {
RocTagUnion::Enumeration { tags, .. } => size_for_tag_count(tags.len()),
RocTagUnion::NonRecursive { tags, .. } => {
// The "unpadded" size (without taking alignment into account)
// is the sum of all the sizes of the fields.
let size_unpadded = tags.iter().fold(0, |total, (_, opt_payload_id)| {
if let Some(payload_id) = opt_payload_id {
let payload = types.get(*payload_id);
total + payload.size(types, target_info)
} else {
total
}
});
// Round up to the next multiple of alignment, to incorporate
// any necessary alignment padding.
//
// e.g. if we have a record with a Str and a U8, that would be a
// size_unpadded of 25, because Str is three 8-byte pointers and U8 is 1 byte,
// but the 8-byte alignment of the pointers means we'll round 25 up to 32.
let discriminant_align = align_for_tag_count(tags.len(), target_info);
let align = self.alignment(types, target_info).max(discriminant_align);
let size_padded = (size_unpadded / align) * align;
if size_unpadded == size_padded {
// We don't have any alignment padding, which means we can't
// put the discriminant in the padding and the compiler will
// add extra space for it.
let discriminant_size = size_for_tag_count(tags.len());
size_padded + discriminant_size.max(align)
} else {
size_padded
}
}
RocTagUnion::Recursive { .. } => todo!(),
RocTagUnion::NonNullableUnwrapped { .. } => todo!(),
RocTagUnion::NullableWrapped { .. } => todo!(),
RocTagUnion::NullableUnwrapped { .. } => todo!(),
},
RocType::Struct { fields, .. } => {
// The "unpadded" size (without taking alignment into account)
// is the sum of all the sizes of the fields.
let size_unpadded = fields.iter().fold(0, |total, (_, field_id)| {
let field = types.get(*field_id);
total + field.size(types, target_info)
});
// Round up to the next multiple of alignment, to incorporate
// any necessary alignment padding.
//
// e.g. if we have a record with a Str and a U8, that would be a
// size_unpadded of 25, because Str is three 8-byte pointers and U8 is 1 byte,
// but the 8-byte alignment of the pointers means we'll round 25 up to 32.
let align = self.alignment(types, target_info);
(size_unpadded / align) * align
}
RocType::TransparentWrapper { content, .. } => {
types.get(*content).size(types, target_info)
}
}
}
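To make the padding logic above concrete: a fixture later in this commit describes a `[ Foo Str, Bar U128, Blah I32, Baz ]` union where `Str` is the largest payload (24 bytes on a 64-bit target) and `U128` forces 16-byte alignment, so the payload rounds up to 32 bytes and the discriminant fits in the padding. A rough sketch of that arithmetic (the concrete numbers assume a 64-bit target and mirror the fixture's comment, not this function's output):

```
// Rough illustration only; mirrors the fixture's comment, not this code path.
fn padded_union_size_example() -> usize {
    let largest_payload: usize = 24; // Str: three 8-byte pointers
    let align: usize = 16; // U128 forces 16-byte alignment
    let padded = (largest_payload + align - 1) / align * align; // rounds 24 up to 32
    // 24 < 32, so the one-byte discriminant lives in the 8 bytes of padding
    // and no extra space is needed beyond the padded size.
    padded
}
```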
@ -246,10 +357,10 @@ impl RocType {
| RocType::RocBox(_) => target_info.ptr_alignment_bytes(),
RocType::RocDec => align_of::<RocDec>(),
RocType::Bool => align_of::<bool>(),
RocType::TagUnion { tags, .. } => {
RocType::TagUnion(RocTagUnion::NonRecursive { tags, .. }) => {
// The smallest alignment this could possibly have is based on the number of tags - e.g.
// 0 tags is an empty union (so, alignment 0), 1-255 tags has a u8 tag (so, alignment 1), etc.
let mut align = align_for_tag_count(tags.len());
let mut align = align_for_tag_count(tags.len(), target_info);
for (_, payloads) in tags {
for id in payloads {
@ -259,13 +370,13 @@ impl RocType {
align
}
RocType::RecursiveTagUnion { tags, .. } => {
RocType::TagUnion(RocTagUnion::Recursive { tags, .. }) => {
// The smallest alignment this could possibly have is based on the number of tags - e.g.
// 0 tags is an empty union (so, alignment 0), 1-255 tags has a u8 tag (so, alignment 1), etc.
//
// Unlike a regular tag union, a recursive one also includes a pointer.
let ptr_align = target_info.ptr_alignment_bytes();
let mut align = ptr_align.max(align_for_tag_count(tags.len()));
let mut align = ptr_align.max(align_for_tag_count(tags.len(), target_info));
for (_, payloads) in tags {
for id in payloads {
@ -275,6 +386,23 @@ impl RocType {
align
}
RocType::TagUnion(RocTagUnion::NullableWrapped { non_null_tags, .. }) => {
// The smallest alignment this could possibly have is based on the number of tags - e.g.
// 0 tags is an empty union (so, alignment 0), 1-255 tags has a u8 tag (so, alignment 1), etc.
//
// Unlike a regular tag union, a recursive one also includes a pointer.
let ptr_align = target_info.ptr_alignment_bytes();
let mut align =
ptr_align.max(align_for_tag_count(non_null_tags.len(), target_info));
for (_, _, payloads) in non_null_tags {
for id in payloads {
align = align.max(types.get(*id).alignment(types, target_info));
}
}
align
}
RocType::Struct { fields, .. } => fields.iter().fold(0, |align, (_, id)| {
align.max(types.get(*id).alignment(types, target_info))
}),
@ -291,25 +419,36 @@ impl RocType {
RocType::F32 => FloatWidth::F32.alignment_bytes(target_info) as usize,
RocType::F64 => FloatWidth::F64.alignment_bytes(target_info) as usize,
RocType::F128 => FloatWidth::F128.alignment_bytes(target_info) as usize,
RocType::TransparentWrapper { content, .. } => {
RocType::TransparentWrapper { content, .. }
| RocType::TagUnion(RocTagUnion::NullableUnwrapped {
non_null_payload: content,
..
})
| RocType::TagUnion(RocTagUnion::NonNullableUnwrapped { content, .. }) => {
types.get(*content).alignment(types, target_info)
}
RocType::TagUnion(RocTagUnion::Enumeration { tags, .. }) => {
UnionLayout::discriminant_size(tags.len())
.stack_size()
.try_into()
.unwrap()
}
}
}
}
fn align_for_tag_count(num_tags: usize) -> usize {
fn size_for_tag_count(num_tags: usize) -> usize {
if num_tags == 0 {
// empty tag union
0
} else if num_tags < u8::MAX as usize {
align_of::<u8>()
IntWidth::U8.stack_size() as usize
} else if num_tags < u16::MAX as usize {
align_of::<u16>()
IntWidth::U16.stack_size() as usize
} else if num_tags < u32::MAX as usize {
align_of::<u32>()
IntWidth::U32.stack_size() as usize
} else if num_tags < u64::MAX as usize {
align_of::<u64>()
IntWidth::U64.stack_size() as usize
} else {
panic!(
"Too many tags. You can't have more than {} tags in a tag union!",
@ -317,3 +456,140 @@ fn align_for_tag_count(num_tags: usize) -> usize {
);
}
}
/// Returns the alignment of the discriminant based on the target
/// (e.g. on wasm, these are always 4)
fn align_for_tag_count(num_tags: usize, target_info: TargetInfo) -> usize {
if num_tags == 0 {
// empty tag union
0
} else if num_tags < u8::MAX as usize {
IntWidth::U8.alignment_bytes(target_info) as usize
} else if num_tags < u16::MAX as usize {
IntWidth::U16.alignment_bytes(target_info) as usize
} else if num_tags < u32::MAX as usize {
IntWidth::U32.alignment_bytes(target_info) as usize
} else if num_tags < u64::MAX as usize {
IntWidth::U64.alignment_bytes(target_info) as usize
} else {
panic!(
"Too many tags. You can't have more than {} tags in a tag union!",
u64::MAX
);
}
}
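As a quick check of the width rules in `size_for_tag_count` and `align_for_tag_count` above: assuming the usual 1-, 2- and 4-byte stack sizes for `U8`, `U16` and `U32`, and assuming the test sits next to those functions, the discriminant grows with the tag count like this illustrative test expects:

```
// Illustrative only; relies on IntWidth::U8/U16/U32 having stack sizes 1/2/4.
#[test]
fn discriminant_size_grows_with_tag_count() {
    assert_eq!(size_for_tag_count(3), 1); // fits in a u8
    assert_eq!(size_for_tag_count(300), 2); // needs a u16
    assert_eq!(size_for_tag_count(70_000), 4); // needs a u32
}
```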
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum RocTagUnion {
Enumeration {
name: String,
tags: Vec<String>,
},
/// A non-recursive tag union
/// e.g. `Result a e : [ Ok a, Err e ]`
NonRecursive {
name: String,
tags: Vec<(String, Option<TypeId>)>,
},
/// A recursive tag union (general case)
/// e.g. `Expr : [ Sym Str, Add Expr Expr ]`
Recursive {
name: String,
tags: Vec<(String, Option<TypeId>)>,
},
/// A recursive tag union with just one constructor
/// Optimization: No need to store a tag ID (the payload is "unwrapped")
/// e.g. `RoseTree a : [ Tree a (List (RoseTree a)) ]`
NonNullableUnwrapped {
name: String,
content: TypeId,
},
/// A recursive tag union that has an empty variant
/// Optimization: Represent the empty variant as null pointer => no memory usage & fast comparison
/// It has more than one other variant, so they need tag IDs (payloads are "wrapped")
/// e.g. `FingerTree a : [ Empty, Single a, More (Some a) (FingerTree (Tuple a)) (Some a) ]`
/// see also: https://youtu.be/ip92VMpf_-A?t=164
NullableWrapped {
name: String,
null_tag: String,
non_null_tags: Vec<(u16, String, Option<TypeId>)>,
},
/// A recursive tag union with only two variants, where one is empty.
/// Optimizations: Use null for the empty variant AND don't store a tag ID for the other variant.
/// e.g. `ConsList a : [ Nil, Cons a (ConsList a) ]`
NullableUnwrapped {
name: String,
/// e.g. Nil in `StrConsList : [ Nil, Cons Str (ConsList Str) ]`
null_tag: String,
/// e.g. Cons in `StrConsList : [ Nil, Cons Str (ConsList Str) ]`
non_null_tag: String,
/// There must be a payload associated with the non-null tag.
/// Otherwise, this would have been an Enumeration!
non_null_payload: TypeId,
},
}
impl RocTagUnion {
/// The byte offset where the discriminant is located within the tag union's
/// in-memory representation. So if you take a pointer to the tag union itself,
/// and add discriminant_offset to it, you'll have a pointer to the discriminant.
///
/// This is only useful when given tags from RocTagUnion::Recursive or
/// RocTagUnion::NonRecursive - other tag types do not store their discriminants
/// as plain numbers at a fixed offset!
pub fn discriminant_offset(
tags: &[(String, Option<TypeId>)],
types: &Types,
target_info: TargetInfo,
) -> usize {
tags.iter()
.fold(0, |max_size, (_, opt_tag_id)| match opt_tag_id {
Some(tag_id) => {
let size_unpadded = match types.get(*tag_id) {
// For structs (that is, payloads), we actually want
// to get the size *before* alignment padding is taken
// into account, since the discriminant is
// stored after those bytes.
RocType::Struct { fields, .. } => {
fields.iter().fold(0, |total, (_, field_id)| {
let field = types.get(*field_id);
total + field.size(types, target_info)
})
}
typ => max_size.max(typ.size(types, target_info)),
};
max_size.max(size_unpadded)
}
None => max_size,
})
}
}
#[test]
fn sizes_agree_with_roc_std() {
use std::mem::size_of;
let target_info = TargetInfo::from(&target_lexicon::Triple::host());
let mut types = Types::default();
assert_eq!(
RocType::RocStr.size(&types, target_info),
size_of::<roc_std::RocStr>(),
);
assert_eq!(
RocType::RocList(types.add(RocType::RocStr)).size(&types, target_info),
size_of::<roc_std::RocList<()>>(),
);
// TODO enable this once we have RocDict in roc_std
// assert_eq!(
// RocType::RocDict.size(&types, target_info),
// size_of::<roc_std::RocDict>(),
// );
}


@ -4,3 +4,5 @@
#![allow(unused_imports)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
#![allow(clippy::undocumented_unsafe_blocks)]


@ -16,15 +16,15 @@ comptime {
}
const Align = 2 * @alignOf(usize);
extern fn malloc(size: usize) callconv(.C) ?*align(Align) c_void;
extern fn realloc(c_ptr: [*]align(Align) u8, size: usize) callconv(.C) ?*c_void;
extern fn malloc(size: usize) callconv(.C) ?*align(Align) anyopaque;
extern fn realloc(c_ptr: [*]align(Align) u8, size: usize) callconv(.C) ?*anyopaque;
extern fn free(c_ptr: [*]align(Align) u8) callconv(.C) void;
extern fn memcpy(dst: [*]u8, src: [*]u8, size: usize) callconv(.C) void;
extern fn memset(dst: [*]u8, value: i32, size: usize) callconv(.C) void;
const DEBUG: bool = false;
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*anyopaque {
if (DEBUG) {
var ptr = malloc(size);
const stdout = std.io.getStdOut().writer();
@ -35,7 +35,7 @@ export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*c_void {
}
}
export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*c_void {
export fn roc_realloc(c_ptr: *anyopaque, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*anyopaque {
if (DEBUG) {
const stdout = std.io.getStdOut().writer();
stdout.print("realloc: {d} (alignment {d}, old_size {d})\n", .{ c_ptr, alignment, old_size }) catch unreachable;
@ -44,7 +44,7 @@ export fn roc_realloc(c_ptr: *c_void, new_size: usize, old_size: usize, alignmen
return realloc(@alignCast(Align, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
export fn roc_dealloc(c_ptr: *anyopaque, alignment: u32) callconv(.C) void {
if (DEBUG) {
const stdout = std.io.getStdOut().writer();
stdout.print("dealloc: {d} (alignment {d})\n", .{ c_ptr, alignment }) catch unreachable;
@ -53,7 +53,7 @@ export fn roc_dealloc(c_ptr: *c_void, alignment: u32) callconv(.C) void {
free(@alignCast(Align, @ptrCast([*]u8, c_ptr)));
}
export fn roc_panic(c_ptr: *c_void, tag_id: u32) callconv(.C) void {
export fn roc_panic(c_ptr: *anyopaque, tag_id: u32) callconv(.C) void {
_ = tag_id;
const stderr = std.io.getStdErr().writer();


@ -0,0 +1,30 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "host"
version = "0.1.0"
dependencies = [
"libc",
"roc_std",
]
[[package]]
name = "libc"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56d855069fafbb9b344c0f962150cd2c1187975cb1c22c1522c240d8c4986714"
[[package]]
name = "roc_std"
version = "0.1.0"
dependencies = [
"static_assertions",
]
[[package]]
name = "static_assertions"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f406d6ee68db6796e11ffd7b4d171864c58b7451e79ef9460ea33c287a1f89a7"


@ -0,0 +1,33 @@
# ⚠️ READ THIS BEFORE MODIFYING THIS FILE! ⚠️
#
# This file is a fixture template. If the file you're looking at is
# in the fixture-templates/ directory, then you're all set - go ahead
# and modify it, and it will modify all the fixture tests.
#
# If this file is in the fixtures/ directory, on the other hand, then
# it is gitignored and will be overwritten the next time tests run.
# So you probably don't want to modify it by hand! Instead, modify the
# file with the same name in the fixture-templates/ directory.
[package]
name = "host"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
links = "app"
[lib]
name = "host"
path = "src/lib.rs"
crate-type = ["staticlib", "rlib"]
[[bin]]
name = "host"
path = "src/main.rs"
[dependencies]
roc_std = { path = "../../../../roc_std" }
libc = "0.2"
[workspace]


@ -0,0 +1,15 @@
// ⚠️ READ THIS BEFORE MODIFYING THIS FILE! ⚠️
//
// This file is a fixture template. If the file you're looking at is
// in the fixture-templates/ directory, then you're all set - go ahead
// and modify it, and it will modify all the fixture tests.
//
// If this file is in the fixtures/ directory, on the other hand, then
// it is gitignored and will be overwritten the next time tests run.
// So you probably don't want to modify it by hand! Instead, modify the
// file with the same name in the fixture-templates/ directory.
fn main() {
println!("cargo:rustc-link-lib=dylib=app");
println!("cargo:rustc-link-search=.");
}


@ -0,0 +1,14 @@
// ⚠️ READ THIS BEFORE MODIFYING THIS FILE! ⚠️
//
// This file is a fixture template. If the file you're looking at is
// in the fixture-templates/ directory, then you're all set - go ahead
// and modify it, and it will modify all the fixture tests.
//
// If this file is in the fixtures/ directory, on the other hand, then
// it is gitignored and will be overwritten the next time tests run.
// So you probably don't want to modify it by hand! Instead, modify the
// file with the same name in the fixture-templates/ directory.
extern int rust_main();
int main() { return rust_main(); }


@ -0,0 +1,3 @@
fn main() {
std::process::exit(host::rust_main());
}

bindgen/tests/fixtures/.gitignore (vendored)

@ -0,0 +1,12 @@
Cargo.lock
Cargo.toml
build.rs
host.c
bindings.rs
roc_externs.rs
main.rs
app
dynhost
libapp.so
metadata
preprocessedhost


@ -0,0 +1,11 @@
platform "test-platform"
requires {} { main : _ }
exposes []
packages {}
imports []
provides [ mainForHost ]
MyRcd : { a : U64, b : U128 }
mainForHost : MyRcd
mainForHost = main


@ -0,0 +1,6 @@
app "app"
packages { pf: "." }
imports []
provides [ main ] to pf
main = { a: 1995, b: 42 }


@ -0,0 +1,93 @@
mod bindings;
extern "C" {
#[link_name = "roc__mainForHost_1_exposed_generic"]
fn roc_main(_: *mut bindings::MyRcd);
}
#[no_mangle]
pub extern "C" fn rust_main() -> i32 {
use std::cmp::Ordering;
use std::collections::hash_set::HashSet;
let record = unsafe {
let mut ret: core::mem::MaybeUninit<bindings::MyRcd> = core::mem::MaybeUninit::uninit();
roc_main(ret.as_mut_ptr());
ret.assume_init()
};
// Verify that the record has all the expected traits.
assert!(record == record); // PartialEq
assert!(record.clone() == record.clone()); // Clone
// Since this is a move, later uses of `record` will fail unless `record` has Copy
let rec2 = record; // Copy
assert!(rec2 != Default::default()); // Default
assert!(record.partial_cmp(&record) == Some(Ordering::Equal)); // PartialOrd
assert!(record.cmp(&record) == Ordering::Equal); // Ord
let mut set = HashSet::new();
set.insert(record); // Eq, Hash
set.insert(rec2);
assert_eq!(set.len(), 1);
println!("Record was: {:?}", record); // Debug
// Exit code
0
}
// Externs required by roc_std and by the Roc app
use core::ffi::c_void;
use std::ffi::CStr;
use std::os::raw::c_char;
#[no_mangle]
pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return libc::malloc(size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return libc::realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return libc::free(c_ptr);
}
#[no_mangle]
pub unsafe extern "C" fn roc_panic(c_ptr: *mut c_void, tag_id: u32) {
match tag_id {
0 => {
let slice = CStr::from_ptr(c_ptr as *const c_char);
let string = slice.to_str().unwrap();
eprintln!("Roc hit a panic: {}", string);
std::process::exit(1);
}
_ => todo!(),
}
}
#[no_mangle]
pub unsafe extern "C" fn roc_memcpy(dst: *mut c_void, src: *mut c_void, n: usize) -> *mut c_void {
libc::memcpy(dst, src, n)
}
#[no_mangle]
pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut c_void {
libc::memset(dst, c, n)
}


@ -0,0 +1,11 @@
platform "test-platform"
requires {} { main : _ }
exposes []
packages {}
imports []
provides [ mainForHost ]
MyEnum : [ Foo, Bar, Baz ]
mainForHost : MyEnum
mainForHost = main


@ -0,0 +1,6 @@
app "app"
packages { pf: "." }
imports []
provides [ main ] to pf
main = Foo


@ -0,0 +1,97 @@
mod bindings;
extern "C" {
#[link_name = "roc__mainForHost_1_exposed_generic"]
fn roc_main(_: *mut bindings::MyEnum);
}
#[no_mangle]
pub extern "C" fn rust_main() -> i32 {
use std::cmp::Ordering;
use std::collections::hash_set::HashSet;
let tag_union = unsafe {
let mut ret: core::mem::MaybeUninit<bindings::MyEnum> = core::mem::MaybeUninit::uninit();
roc_main(ret.as_mut_ptr());
ret.assume_init()
};
// Verify that it has all the expected traits.
assert!(tag_union == tag_union); // PartialEq
assert!(tag_union.clone() == tag_union.clone()); // Clone
// Since this is a move, later uses of `tag_union` will fail unless `tag_union` has Copy
let union2 = tag_union; // Copy
assert!(tag_union.partial_cmp(&tag_union) == Some(Ordering::Equal)); // PartialOrd
assert!(tag_union.cmp(&tag_union) == Ordering::Equal); // Ord
let mut set = HashSet::new();
set.insert(tag_union); // Eq, Hash
set.insert(union2);
assert_eq!(set.len(), 1);
println!(
"tag_union was: {:?}, Bar is: {:?}, Baz is: {:?}",
tag_union,
bindings::MyEnum::Bar,
bindings::MyEnum::Baz,
); // Debug
// Exit code
0
}
// Externs required by roc_std and by the Roc app
use core::ffi::c_void;
use std::ffi::CStr;
use std::os::raw::c_char;
#[no_mangle]
pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return libc::malloc(size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return libc::realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return libc::free(c_ptr);
}
#[no_mangle]
pub unsafe extern "C" fn roc_panic(c_ptr: *mut c_void, tag_id: u32) {
match tag_id {
0 => {
let slice = CStr::from_ptr(c_ptr as *const c_char);
let string = slice.to_str().unwrap();
eprintln!("Roc hit a panic: {}", string);
std::process::exit(1);
}
_ => todo!(),
}
}
#[no_mangle]
pub unsafe extern "C" fn roc_memcpy(dst: *mut c_void, src: *mut c_void, n: usize) -> *mut c_void {
libc::memcpy(dst, src, n)
}
#[no_mangle]
pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut c_void {
libc::memset(dst, c, n)
}


@ -0,0 +1,13 @@
platform "test-platform"
requires {} { main : _ }
exposes []
packages {}
imports []
provides [ mainForHost ]
Outer : { x : Inner, y : Str, z : List U8 }
Inner : { a : U16, b : F32 }
mainForHost : Outer
mainForHost = main


@ -0,0 +1,6 @@
app "app"
packages { pf: "." }
imports []
provides [ main ] to pf
main = { x: { a: 5, b: 24 }, y: "foo", z: [ 1, 2 ] }


@ -0,0 +1,95 @@
mod bindings;
extern "C" {
#[link_name = "roc__mainForHost_1_exposed_generic"]
fn roc_main(_: *mut bindings::Outer);
}
#[no_mangle]
pub extern "C" fn rust_main() -> i32 {
use std::cmp::Ordering;
let outer = unsafe {
let mut ret: core::mem::MaybeUninit<bindings::Outer> = core::mem::MaybeUninit::uninit();
roc_main(ret.as_mut_ptr());
ret.assume_init()
};
// Verify that `inner` has all the expected traits.
{
let inner = outer.x;
assert!(inner == inner); // PartialEq
assert!(inner.clone() == inner.clone()); // Clone
// Since this is a move, later uses of `inner` will fail unless `inner` has Copy
let inner2 = inner; // Copy
assert!(inner2 != Default::default()); // Default
assert!(inner.partial_cmp(&inner) == Some(Ordering::Equal)); // PartialOrd
}
// Verify that `outer` has all the expected traits.
{
assert!(outer == outer); // PartialEq
assert!(outer.clone() == outer.clone()); // Clone
assert!(outer != Default::default()); // Default
assert!(outer.partial_cmp(&outer) == Some(Ordering::Equal)); // PartialOrd
}
println!("Record was: {:?}", outer);
// Exit code
0
}
// Externs required by roc_std and by the Roc app
use core::ffi::c_void;
use std::ffi::CStr;
use std::os::raw::c_char;
#[no_mangle]
pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return libc::malloc(size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return libc::realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return libc::free(c_ptr);
}
#[no_mangle]
pub unsafe extern "C" fn roc_panic(c_ptr: *mut c_void, tag_id: u32) {
match tag_id {
0 => {
let slice = CStr::from_ptr(c_ptr as *const c_char);
let string = slice.to_str().unwrap();
eprintln!("Roc hit a panic: {}", string);
std::process::exit(1);
}
_ => todo!(),
}
}
#[no_mangle]
pub unsafe extern "C" fn roc_memcpy(dst: *mut c_void, src: *mut c_void, n: usize) -> *mut c_void {
libc::memcpy(dst, src, n)
}
#[no_mangle]
pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut c_void {
libc::memset(dst, c, n)
}


@ -0,0 +1,17 @@
platform "test-platform"
requires {} { main : _ }
exposes []
packages {}
imports []
provides [ mainForHost ]
# This case is important to test because the U128
# gives the whole struct an alignment of 16, but the
# Str is the largest variant, so the whole union has
# a size of 32 (due to alignment, rounded up from Str's 24),
# and the discriminant is stored in the 8+ bytes of padding
# that all variants have.
NonRecursive : [ Foo Str, Bar U128, Blah I32, Baz ]
mainForHost : NonRecursive
mainForHost = main


@ -0,0 +1,6 @@
app "app"
packages { pf: "." }
imports []
provides [ main ] to pf
main = Foo "This is a test"


@ -0,0 +1,98 @@
mod bindings;
extern "C" {
#[link_name = "roc__mainForHost_1_exposed_generic"]
fn roc_main(_: *mut bindings::NonRecursive);
}
#[no_mangle]
pub extern "C" fn rust_main() -> i32 {
use std::cmp::Ordering;
use std::collections::hash_set::HashSet;
let tag_union = unsafe {
let mut ret: core::mem::MaybeUninit<bindings::NonRecursive> =
core::mem::MaybeUninit::uninit();
roc_main(ret.as_mut_ptr());
ret.assume_init()
};
// Verify that it has all the expected traits.
assert!(tag_union == tag_union); // PartialEq
assert!(tag_union.clone() == tag_union.clone()); // Clone
assert!(tag_union.partial_cmp(&tag_union) == Some(Ordering::Equal)); // PartialOrd
assert!(tag_union.cmp(&tag_union) == Ordering::Equal); // Ord
println!(
"tag_union was: {:?}\n`Foo \"small str\"` is: {:?}\n`Foo \"A long enough string to not be small\"` is: {:?}\n`Bar 123` is: {:?}\n`Baz` is: {:?}\n`Blah 456` is: {:?}",
tag_union,
bindings::NonRecursive::Foo("small str".into()),
bindings::NonRecursive::Foo("A long enough string to not be small".into()),
bindings::NonRecursive::Bar(123.into()),
bindings::NonRecursive::Baz,
bindings::NonRecursive::Blah(456),
); // Debug
let mut set = HashSet::new();
set.insert(tag_union.clone()); // Eq, Hash
set.insert(tag_union);
assert_eq!(set.len(), 1);
// Exit code
0
}
// Externs required by roc_std and by the Roc app
use core::ffi::c_void;
use std::ffi::CStr;
use std::os::raw::c_char;
#[no_mangle]
pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return libc::malloc(size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return libc::realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return libc::free(c_ptr);
}
#[no_mangle]
pub unsafe extern "C" fn roc_panic(c_ptr: *mut c_void, tag_id: u32) {
match tag_id {
0 => {
let slice = CStr::from_ptr(c_ptr as *const c_char);
let string = slice.to_str().unwrap();
eprintln!("Roc hit a panic: {}", string);
std::process::exit(1);
}
_ => todo!(),
}
}
#[no_mangle]
pub unsafe extern "C" fn roc_memcpy(dst: *mut c_void, src: *mut c_void, n: usize) -> *mut c_void {
libc::memcpy(dst, src, n)
}
#[no_mangle]
pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut c_void {
libc::memset(dst, c, n)
}


@ -0,0 +1,15 @@
platform "test-platform"
requires {} { main : _ }
exposes []
packages {}
imports []
provides [ mainForHost ]
# This case is important to test because there's no padding
# after the largest variant, so the compiler adds an extra u8
# (rounded up to alignment, so an extra 8 bytes) in which
# to store the discriminant. We have to bindgen accordingly!
NonRecursive : [ Foo Str, Bar I64, Blah I32, Baz ]
mainForHost : NonRecursive
mainForHost = main
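A rough sketch of the arithmetic the comment above describes, assuming a 64-bit target (illustrative numbers, not produced by this commit's code):

```
// Str (24 bytes) is the largest payload and the union's alignment is 8,
// so there is no padding to reuse; the discriminant gets its own byte,
// which alignment rounds up to an extra 8 bytes, for 32 bytes in total.
fn unpadded_union_size_example() -> usize {
    let largest_payload: usize = 24; // Str: three 8-byte pointers
    let align: usize = 8; // I64 / pointer alignment
    let padded = (largest_payload + align - 1) / align * align; // stays at 24
    let discriminant: usize = 1; // fits in a u8
    padded + discriminant.max(align) // 24 + 8 = 32
}
```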


@ -0,0 +1,6 @@
app "app"
packages { pf: "." }
imports []
provides [ main ] to pf
main = Foo "This is a test"


@ -0,0 +1,98 @@
mod bindings;
extern "C" {
#[link_name = "roc__mainForHost_1_exposed_generic"]
fn roc_main(_: *mut bindings::NonRecursive);
}
#[no_mangle]
pub extern "C" fn rust_main() -> i32 {
use std::cmp::Ordering;
use std::collections::hash_set::HashSet;
let tag_union = unsafe {
let mut ret: core::mem::MaybeUninit<bindings::NonRecursive> =
core::mem::MaybeUninit::uninit();
roc_main(ret.as_mut_ptr());
ret.assume_init()
};
// Verify that it has all the expected traits.
assert!(tag_union == tag_union); // PartialEq
assert!(tag_union.clone() == tag_union.clone()); // Clone
assert!(tag_union.partial_cmp(&tag_union) == Some(Ordering::Equal)); // PartialOrd
assert!(tag_union.cmp(&tag_union) == Ordering::Equal); // Ord
println!(
"tag_union was: {:?}\n`Foo \"small str\"` is: {:?}\n`Foo \"A long enough string to not be small\"` is: {:?}\n`Bar 123` is: {:?}\n`Baz` is: {:?}\n`Blah 456` is: {:?}",
tag_union,
bindings::NonRecursive::Foo("small str".into()),
bindings::NonRecursive::Foo("A long enough string to not be small".into()),
bindings::NonRecursive::Bar(123),
bindings::NonRecursive::Baz,
bindings::NonRecursive::Blah(456),
); // Debug
let mut set = HashSet::new();
set.insert(tag_union.clone()); // Eq, Hash
set.insert(tag_union);
assert_eq!(set.len(), 1);
// Exit code
0
}
// Externs required by roc_std and by the Roc app
use core::ffi::c_void;
use std::ffi::CStr;
use std::os::raw::c_char;
#[no_mangle]
pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return libc::malloc(size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return libc::realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return libc::free(c_ptr);
}
#[no_mangle]
pub unsafe extern "C" fn roc_panic(c_ptr: *mut c_void, tag_id: u32) {
match tag_id {
0 => {
let slice = CStr::from_ptr(c_ptr as *const c_char);
let string = slice.to_str().unwrap();
eprintln!("Roc hit a panic: {}", string);
std::process::exit(1);
}
_ => todo!(),
}
}
#[no_mangle]
pub unsafe extern "C" fn roc_memcpy(dst: *mut c_void, src: *mut c_void, n: usize) -> *mut c_void {
libc::memcpy(dst, src, n)
}
#[no_mangle]
pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut c_void {
libc::memset(dst, c, n)
}

File diff suppressed because it is too large.


@ -0,0 +1,82 @@
use roc_bindgen::bindgen_rs;
use roc_bindgen::load::load_types;
use roc_load::Threading;
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
#[allow(dead_code)]
pub fn generate_bindings(decl_src: &str) -> String {
use tempfile::tempdir;
let mut src = indoc!(
r#"
platform "main"
requires {} { nothing : {} }
exposes []
packages {}
imports []
provides [ main ]
"#
)
.to_string();
src.push_str(decl_src);
let types = {
let dir = tempdir().expect("Unable to create tempdir");
let filename = PathBuf::from("Package-Config.roc");
let file_path = dir.path().join(filename);
let full_file_path = file_path.clone();
let mut file = File::create(file_path).unwrap();
writeln!(file, "{}", &src).unwrap();
let result = load_types(full_file_path, dir.path(), Threading::Single);
dir.close().expect("Unable to close tempdir");
result.expect("had problems loading")
};
// Reuse the `src` allocation since we're done with it.
let mut buf = src;
buf.clear();
bindgen_rs::write_types(&types, &mut buf).expect("I/O error when writing bindgen string");
buf
}
#[allow(dead_code)]
pub fn fixtures_dir(dir_name: &str) -> PathBuf {
let mut path = root_dir();
// Descend into bindgen/tests/fixtures/{dir_name}
path.push("bindgen");
path.push("tests");
path.push("fixtures");
path.push(dir_name);
path
}
#[allow(dead_code)]
pub fn root_dir() -> PathBuf {
let mut path = env::current_exe().ok().unwrap();
// Get rid of the filename in target/debug/deps/cli_run-99c65e4e9a1fbd06
path.pop();
// If we're in deps/ get rid of deps/ in target/debug/deps/
if path.ends_with("deps") {
path.pop();
}
// Get rid of target/debug/ so we're back at the project root
path.pop();
path.pop();
path
}


@ -0,0 +1,227 @@
#[macro_use]
extern crate pretty_assertions;
#[macro_use]
extern crate indoc;
extern crate dircpy;
extern crate roc_collections;
mod helpers;
#[cfg(test)]
mod bindgen_cli_run {
use crate::helpers::{fixtures_dir, root_dir};
use cli_utils::helpers::{run_bindgen, run_roc, Out};
use std::fs;
use std::path::Path;
use std::process::Command;
// All of these tests rely on `target/` for the `cli` crate being up-to-date,
// so do a `cargo build` on it first!
#[ctor::ctor]
fn init() {
let args = if cfg!(debug_assertions) {
vec!["build"]
} else {
vec!["build", "--release"]
};
println!(
"Running `cargo {}` on the `cli` crate before running the tests. This may take a bit!",
args.join(" ")
);
let output = Command::new("cargo")
.args(args)
.current_dir(root_dir().join("cli"))
.output()
.unwrap_or_else(|err| {
panic!(
"Failed to `cargo build` roc CLI for bindgen CLI tests - error was: {:?}",
err
)
});
assert!(output.status.success());
}
/// This macro does two things.
///
/// First, it generates and runs a separate test for each of the given
/// expected stdout endings. Each of these should test a particular .roc file
/// in the fixtures/ directory. The fixtures themselves run assertions too, but
/// the stdout check verifies that we're actually running the code we think we are;
/// without it, it would be possible that the fixtures are just exiting without running
/// any assertions, and we would have no way to find out!
///
/// Second, this generates an extra test which (non-recursively) traverses the
/// fixtures/ directory and verifies that each of the .roc files in there
/// has had a corresponding test generated in the previous step. This test
/// will fail if we ever add a new .roc file to fixtures/ and forget to
/// add a test for it here!
macro_rules! fixtures {
($($test_name:ident:$fixture_dir:expr => $ends_with:expr,)+) => {
$(
#[test]
#[allow(non_snake_case)]
fn $test_name() {
let dir = fixtures_dir($fixture_dir);
generate_bindings_for(&dir, std::iter::empty());
let out = run_app(&dir.join("app.roc"), std::iter::empty());
assert!(out.status.success());
assert_eq!(out.stderr, "");
assert!(
out.stdout.ends_with($ends_with),
"Unexpected stdout ending - expected {:?} but stdout was: {:?}",
$ends_with,
out.stdout
);
}
)*
#[test]
fn all_fixtures_have_tests() {
use roc_collections::VecSet;
let mut all_fixtures: VecSet<String> = VecSet::default();
$(
all_fixtures.insert($fixture_dir.to_string());
)*
check_for_tests(&mut all_fixtures);
}
}
}
fixtures! {
basic_record:"basic-record" => "Record was: MyRcd { b: 42, a: 1995 }\n",
nested_record:"nested-record" => "Record was: Outer { y: \"foo\", z: [1, 2], x: Inner { b: 24.0, a: 5 } }\n",
enumeration:"enumeration" => "tag_union was: MyEnum::Foo, Bar is: MyEnum::Bar, Baz is: MyEnum::Baz\n",
union_with_padding:"union-with-padding" => indoc!(r#"
tag_union was: NonRecursive::Foo("This is a test")
`Foo "small str"` is: NonRecursive::Foo("small str")
`Foo "A long enough string to not be small"` is: NonRecursive::Foo("A long enough string to not be small")
`Bar 123` is: NonRecursive::Bar(123)
`Baz` is: NonRecursive::Baz
`Blah 456` is: NonRecursive::Blah(456)
"#),
union_without_padding:"union-without-padding" => indoc!(r#"
tag_union was: NonRecursive::Foo("This is a test")
`Foo "small str"` is: NonRecursive::Foo("small str")
`Foo "A long enough string to not be small"` is: NonRecursive::Foo("A long enough string to not be small")
`Bar 123` is: NonRecursive::Bar(123)
`Baz` is: NonRecursive::Baz
`Blah 456` is: NonRecursive::Blah(456)
"#),
}
fn check_for_tests(all_fixtures: &mut roc_collections::VecSet<String>) {
use roc_collections::VecSet;
// todo!("Remove a bunch of duplication - don't have a ton of files in there.");
let fixtures = fixtures_dir("");
let entries = std::fs::read_dir(fixtures.as_path()).unwrap_or_else(|err| {
panic!(
"Error trying to read {} as a fixtures directory: {}",
fixtures.to_string_lossy(),
err
);
});
for entry in entries {
let entry = entry.unwrap();
if entry.file_type().unwrap().is_dir() {
let fixture_dir_name = entry.file_name().into_string().unwrap();
if !all_fixtures.remove(&fixture_dir_name) {
panic!(
"The bindgen fixture directory {} does not have any corresponding tests in cli_run. Please add one, so if it ever stops working, we'll know about it right away!",
entry.path().to_string_lossy()
);
}
}
}
assert_eq!(all_fixtures, &mut VecSet::default());
}
fn generate_bindings_for<'a, I: IntoIterator<Item = &'a str>>(
platform_dir: &'a Path,
args: I,
) -> Out {
let package_config = platform_dir.join("Package-Config.roc");
let bindings_file = platform_dir.join("src").join("bindings.rs");
let fixture_templates_dir = platform_dir
.parent()
.unwrap()
.parent()
.unwrap()
.join("fixture-templates");
// Copy the rust template from the templates directory into the fixture dir.
dircpy::CopyBuilder::new(fixture_templates_dir.join("rust"), platform_dir)
.overwrite(true) // overwrite any files that were already present
.run()
.unwrap();
// Delete the bindings file to make sure we're actually regenerating it!
if bindings_file.exists() {
fs::remove_file(&bindings_file)
.expect("Unable to remove bindings.rs in order to regenerate it in the test");
}
// Generate a fresh bindings.rs for this platform
let bindgen_out = run_bindgen(
// converting these all to String avoids lifetime issues
args.into_iter().map(|arg| arg.to_string()).chain([
package_config.to_str().unwrap().to_string(),
bindings_file.to_str().unwrap().to_string(),
]),
);
// If there is any stderr, it should be reporting the runtime and that's it!
if !(bindgen_out.stderr.is_empty()
|| bindgen_out.stderr.starts_with("runtime: ") && bindgen_out.stderr.ends_with("ms\n"))
{
panic!(
"`roc-bindgen` command had unexpected stderr: {}",
bindgen_out.stderr
);
}
assert!(bindgen_out.status.success(), "bad status {:?}", bindgen_out);
bindgen_out
}
fn run_app<'a, I: IntoIterator<Item = &'a str>>(app_file: &'a Path, args: I) -> Out {
// Generate bindings.rs for this platform
let compile_out = run_roc(
// converting these all to String avoids lifetime issues
args.into_iter()
.map(|arg| arg.to_string())
.chain([app_file.to_str().unwrap().to_string()]),
&[],
);
// If there is any stderr, it should be reporting the runtime and that's it!
if !(compile_out.stderr.is_empty()
|| compile_out.stderr.starts_with("runtime: ") && compile_out.stderr.ends_with("ms\n"))
{
panic!(
"`roc` command had unexpected stderr: {}",
compile_out.stderr
);
}
assert!(compile_out.status.success(), "bad status {:?}", compile_out);
compile_out
}
}

View File

@ -1,7 +1,7 @@
[package]
name = "bench-runner"
version = "0.1.0"
edition = "2018"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@ -4,7 +4,7 @@ version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/rtfeldman/roc"
edition = "2018"
edition = "2021"
description = "A CLI for Roc"
default-run = "roc"
@ -59,7 +59,7 @@ roc_editor = { path = "../editor", optional = true }
roc_linker = { path = "../linker" }
roc_repl_cli = { path = "../repl_cli", optional = true }
clap = { version = "3.1.15", default-features = false, features = ["std", "color", "suggestions"] }
const_format = "0.2.22"
const_format = { version = "0.2.23", features = ["const_generics"] }
bumpalo = { version = "3.8.0", features = ["collections"] }
mimalloc = { version = "0.1.26", default-features = false }
libc = "0.2.106"

View File

@ -24,7 +24,7 @@ fn bench_group_wall_time(c: &mut Criterion) {
group.sample_size(nr_of_runs);
let bench_funcs: Vec<fn(Option<&mut BenchmarkGroup<WallTime>>) -> ()> = vec![
let bench_funcs: Vec<fn(Option<&mut BenchmarkGroup<WallTime>>)> = vec![
bench_nqueens, // queens 11
bench_cfold, // e = mkExpr 17 1
bench_deriv, // nest deriv 8 f

View File

@ -1,6 +1,6 @@
use bumpalo::Bump;
use roc_build::{
link::{link, rebuild_host, LinkType},
link::{link, preprocess_host_wasm32, rebuild_host, LinkType, LinkingStrategy},
program::{self, Problems},
};
use roc_builtins::bitcode;
@ -8,8 +8,8 @@ use roc_load::{LoadingProblem, Threading};
use roc_mono::ir::OptLevel;
use roc_reporting::report::RenderTarget;
use roc_target::TargetInfo;
use std::path::PathBuf;
use std::time::{Duration, SystemTime};
use std::{path::PathBuf, thread::JoinHandle};
use target_lexicon::Triple;
use tempfile::Builder;
@ -37,7 +37,7 @@ pub fn build_file<'a>(
emit_debug_info: bool,
emit_timings: bool,
link_type: LinkType,
surgically_link: bool,
linking_strategy: LinkingStrategy,
precompiled: bool,
target_valgrind: bool,
threading: Threading,
@ -68,8 +68,24 @@ pub fn build_file<'a>(
// > Non-Emscripten WebAssembly hasn't implemented __builtin_return_address
//
// and zig does not currently emit `.a` webassembly static libraries
let host_extension = if emit_wasm { "zig" } else { "o" };
let app_extension = if emit_wasm { "bc" } else { "o" };
let host_extension = if emit_wasm {
if matches!(opt_level, OptLevel::Development) {
"wasm"
} else {
"zig"
}
} else {
"o"
};
let app_extension = if emit_wasm {
if matches!(opt_level, OptLevel::Development) {
"wasm"
} else {
"bc"
}
} else {
"o"
};
let cwd = roc_file_path.parent().unwrap();
let mut binary_path = cwd.join(&*loaded.output_path); // TODO should join ".exe" on Windows
@ -103,11 +119,18 @@ pub fn build_file<'a>(
.map(|x| x.as_str(&loaded.interns).to_string())
.collect();
let preprocessed_host_path = if emit_wasm {
host_input_path.with_file_name("preprocessedhost.o")
} else {
host_input_path.with_file_name("preprocessedhost")
};
let rebuild_thread = spawn_rebuild_thread(
opt_level,
surgically_link,
linking_strategy,
precompiled,
host_input_path.clone(),
preprocessed_host_path.clone(),
binary_path.clone(),
target,
exposed_values,
@ -173,6 +196,24 @@ pub fn build_file<'a>(
let problems = program::report_problems_monomorphized(&mut loaded);
let loaded = loaded;
enum HostRebuildTiming {
BeforeApp(u128),
ConcurrentWithApp(JoinHandle<u128>),
}
let rebuild_timing = if linking_strategy == LinkingStrategy::Additive {
let rebuild_duration = rebuild_thread.join().unwrap();
if emit_timings && !precompiled {
println!(
"Finished rebuilding and preprocessing the host in {} ms\n",
rebuild_duration
);
}
HostRebuildTiming::BeforeApp(rebuild_duration)
} else {
HostRebuildTiming::ConcurrentWithApp(rebuild_thread)
};
let code_gen_timing = program::gen_from_mono_module(
arena,
loaded,
@ -181,6 +222,7 @@ pub fn build_file<'a>(
app_o_file,
opt_level,
emit_debug_info,
&preprocessed_host_path,
);
buf.push('\n');
@ -219,65 +261,72 @@ pub fn build_file<'a>(
);
}
let rebuild_duration = rebuild_thread.join().unwrap();
if emit_timings && !precompiled {
println!(
"Finished rebuilding and preprocessing the host in {} ms\n",
rebuild_duration
);
if let HostRebuildTiming::ConcurrentWithApp(thread) = rebuild_timing {
let rebuild_duration = thread.join().unwrap();
if emit_timings && !precompiled {
println!(
"Finished rebuilding and preprocessing the host in {} ms\n",
rebuild_duration
);
}
}
// Step 2: link the precompiled host and compiled app
let link_start = SystemTime::now();
let problems = if surgically_link {
roc_linker::link_preprocessed_host(target, &host_input_path, app_o_file, &binary_path)
.map_err(|err| {
todo!(
"gracefully handle failing to surgically link with error: {:?}",
err
);
})?;
problems
} else if matches!(link_type, LinkType::None) {
// Just copy the object file to the output folder.
binary_path.set_extension(app_extension);
std::fs::copy(app_o_file, &binary_path).unwrap();
problems
} else {
let mut inputs = vec![
host_input_path.as_path().to_str().unwrap(),
app_o_file.to_str().unwrap(),
];
if matches!(opt_level, OptLevel::Development) {
inputs.push(bitcode::BUILTINS_HOST_OBJ_PATH);
let problems = match (linking_strategy, link_type) {
(LinkingStrategy::Surgical, _) => {
roc_linker::link_preprocessed_host(target, &host_input_path, app_o_file, &binary_path)
.map_err(|err| {
todo!(
"gracefully handle failing to surgically link with error: {:?}",
err
);
})?;
problems
}
(LinkingStrategy::Additive, _) | (LinkingStrategy::Legacy, LinkType::None) => {
// Just copy the object file to the output folder.
binary_path.set_extension(app_extension);
std::fs::copy(app_o_file, &binary_path).unwrap();
problems
}
(LinkingStrategy::Legacy, _) => {
let mut inputs = vec![
host_input_path.as_path().to_str().unwrap(),
app_o_file.to_str().unwrap(),
];
if matches!(opt_level, OptLevel::Development) {
inputs.push(bitcode::BUILTINS_HOST_OBJ_PATH);
}
let (mut child, _) = // TODO use lld
link(
target,
binary_path.clone(),
&inputs,
link_type
)
.map_err(|_| {
todo!("gracefully handle `ld` failing to spawn.");
let (mut child, _) = // TODO use lld
link(
target,
binary_path.clone(),
&inputs,
link_type
)
.map_err(|_| {
todo!("gracefully handle `ld` failing to spawn.");
})?;
let exit_status = child.wait().map_err(|_| {
todo!("gracefully handle error after `ld` spawned");
})?;
let exit_status = child.wait().map_err(|_| {
todo!("gracefully handle error after `ld` spawned");
})?;
if exit_status.success() {
problems
} else {
let mut problems = problems;
if exit_status.success() {
problems
} else {
let mut problems = problems;
// Add an error for `ld` failing
problems.errors += 1;
// Add an error for `ld` failing
problems.errors += 1;
problems
problems
}
}
};
let linking_time = link_start.elapsed().unwrap();
if emit_timings {
@ -296,9 +345,10 @@ pub fn build_file<'a>(
#[allow(clippy::too_many_arguments)]
fn spawn_rebuild_thread(
opt_level: OptLevel,
surgically_link: bool,
linking_strategy: LinkingStrategy,
precompiled: bool,
host_input_path: PathBuf,
preprocessed_host_path: PathBuf,
binary_path: PathBuf,
target: &Triple,
exported_symbols: Vec<String>,
@ -312,31 +362,46 @@ fn spawn_rebuild_thread(
}
let rebuild_host_start = SystemTime::now();
if !precompiled {
if surgically_link {
roc_linker::build_and_preprocess_host(
opt_level,
&thread_local_target,
host_input_path.as_path(),
exported_symbols,
exported_closure_types,
target_valgrind,
)
.unwrap();
} else {
rebuild_host(
opt_level,
&thread_local_target,
host_input_path.as_path(),
None,
target_valgrind,
);
match linking_strategy {
LinkingStrategy::Additive => {
let host_dest = rebuild_host(
opt_level,
&thread_local_target,
host_input_path.as_path(),
None,
target_valgrind,
);
preprocess_host_wasm32(host_dest.as_path(), &preprocessed_host_path);
}
LinkingStrategy::Surgical => {
roc_linker::build_and_preprocess_host(
opt_level,
&thread_local_target,
host_input_path.as_path(),
preprocessed_host_path.as_path(),
exported_symbols,
exported_closure_types,
target_valgrind,
)
.unwrap();
}
LinkingStrategy::Legacy => {
rebuild_host(
opt_level,
&thread_local_target,
host_input_path.as_path(),
None,
target_valgrind,
);
}
}
}
if surgically_link {
if linking_strategy == LinkingStrategy::Surgical {
// Copy preprocessed host to executable location.
let prehost = host_input_path.with_file_name("preprocessedhost");
std::fs::copy(prehost, binary_path.as_path()).unwrap();
std::fs::copy(preprocessed_host_path, binary_path.as_path()).unwrap();
}
let rebuild_host_end = rebuild_host_start.elapsed().unwrap();

View File

@ -4,14 +4,13 @@ extern crate const_format;
use build::BuiltFile;
use bumpalo::Bump;
use clap::{Arg, ArgMatches, Command};
use roc_build::link::LinkType;
use roc_build::link::{LinkType, LinkingStrategy};
use roc_error_macros::{internal_error, user_error};
use roc_load::{LoadingProblem, Threading};
use roc_mono::ir::OptLevel;
use std::env;
use std::ffi::{CString, OsStr};
use std::io::{self, Write};
use std::os::unix::prelude::FromRawFd;
use std::io;
use std::path::{Path, PathBuf};
use std::process;
use target_lexicon::BinaryFormat;
@ -161,14 +160,6 @@ pub fn build_app<'a>() -> Command<'a> {
)
.subcommand(Command::new(CMD_RUN)
.about("Run a .roc file even if it has build errors")
.arg(
Arg::new(FLAG_TARGET)
.long(FLAG_TARGET)
.help("Choose a different target")
.default_value(Target::default().as_str())
.possible_values(Target::OPTIONS)
.required(false),
)
.arg(flag_optimize.clone())
.arg(flag_max_threads.clone())
.arg(flag_opt_size.clone())
@ -297,19 +288,26 @@ pub fn build(
Some(n) => Threading::AtMost(n),
};
// Use surgical linking when supported, or when explicitly requested with --linker surgical
let surgically_link = if matches.is_present(FLAG_LINKER) {
matches.value_of(FLAG_LINKER) == Some("surgical")
let wasm_dev_backend = matches!(opt_level, OptLevel::Development)
&& matches!(triple.architecture, Architecture::Wasm32);
let linking_strategy = if wasm_dev_backend {
LinkingStrategy::Additive
} else if !roc_linker::supported(&link_type, &triple)
|| matches.value_of(FLAG_LINKER) == Some("legacy")
{
LinkingStrategy::Legacy
} else {
roc_linker::supported(&link_type, &triple)
LinkingStrategy::Surgical
};
let precompiled = if matches.is_present(FLAG_PRECOMPILED) {
matches.value_of(FLAG_PRECOMPILED) == Some("true")
} else {
// When compiling for a different target, default to assuming a precompiled host.
// Otherwise compilation would most likely fail!
triple != Triple::host()
// Otherwise compilation would most likely fail because many toolchains assume you're compiling for the host
// We make an exception for Wasm, because cross-compiling is the norm in that case.
triple != Triple::host() && !matches!(triple.architecture, Architecture::Wasm32)
};
let path = Path::new(filename);
@ -343,7 +341,7 @@ pub fn build(
emit_debug_info,
emit_timings,
link_type,
surgically_link,
linking_strategy,
precompiled,
target_valgrind,
threading,
@ -600,15 +598,17 @@ fn roc_run_executable_file_path(cwd: &Path, binary_bytes: &mut [u8]) -> std::io:
}
// NOTE: this `fd` is special, using the rust `std::fs::File` functions does not work
let write_result =
unsafe { libc::write(fd, binary_bytes.as_ptr().cast(), binary_bytes.len()) };
let written = unsafe { libc::write(fd, binary_bytes.as_ptr().cast(), binary_bytes.len()) };
if write_result != 0 {
if written == -1 {
internal_error!("libc::write() failed: {:?}", errno::errno());
}
if written != binary_bytes.len() as isize {
internal_error!(
"libc::write({:?}, ..., {}) failed: {:?}",
fd,
"libc::write() did not write the correct number of bytes: reported {}, should be {}",
written,
binary_bytes.len(),
errno::errno()
);
}
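
The new check treats `libc::write` returning `-1` or a short write as an internal error. For illustration only, a retry loop that tolerates short writes could look like the sketch below; `write_all` is a hypothetical helper, not code from this commit:

```rust
use std::io;

// Hypothetical helper: keep writing until the whole buffer is flushed or the
// OS reports an error, instead of treating a short write as fatal.
unsafe fn write_all(fd: i32, mut bytes: &[u8]) -> io::Result<()> {
    while !bytes.is_empty() {
        let written = libc::write(fd, bytes.as_ptr().cast(), bytes.len());
        if written == -1 {
            return Err(io::Error::last_os_error());
        }
        // Drop the bytes that were successfully written and retry the rest.
        bytes = &bytes[written as usize..];
    }
    Ok(())
}
```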

View File

@ -11,7 +11,7 @@ extern crate roc_module;
mod cli_run {
use cli_utils::helpers::{
example_file, examples_dir, extract_valgrind_errors, fixture_file, fixtures_dir,
known_bad_file, run_cmd, run_roc, run_with_valgrind, Out, ValgrindError,
known_bad_file, run_cmd, run_roc, run_with_valgrind, strip_colors, Out, ValgrindError,
ValgrindErrorXWhat,
};
use const_format::concatcp;
@ -68,21 +68,6 @@ mod cli_run {
use_valgrind: bool,
}
fn strip_colors(str: &str) -> String {
use roc_reporting::report::ANSI_STYLE_CODES;
str.replace(ANSI_STYLE_CODES.red, "")
.replace(ANSI_STYLE_CODES.green, "")
.replace(ANSI_STYLE_CODES.yellow, "")
.replace(ANSI_STYLE_CODES.blue, "")
.replace(ANSI_STYLE_CODES.magenta, "")
.replace(ANSI_STYLE_CODES.cyan, "")
.replace(ANSI_STYLE_CODES.white, "")
.replace(ANSI_STYLE_CODES.bold, "")
.replace(ANSI_STYLE_CODES.underline, "")
.replace(ANSI_STYLE_CODES.reset, "")
.replace(ANSI_STYLE_CODES.color_reset, "")
}
fn check_compile_error(file: &Path, flags: &[&str], expected: &str) {
let compile_out = run_roc([CMD_CHECK, file.to_str().unwrap()].iter().chain(flags), &[]);
let err = compile_out.stdout.trim();
@ -922,7 +907,7 @@ mod cli_run {
r#"
UNRECOGNIZED NAME tests/known_bad/TypeError.roc
I cannot find a `d` value
Nothing is named `d` in this scope.
10 _ <- await (line d)
^

View File

@ -1,3 +1,7 @@
app
*.o
*.dSYM
dynhost
libapp.so
metadata
preprocessedhost

View File

@ -4,7 +4,7 @@ version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/rtfeldman/roc"
edition = "2018"
edition = "2021"
description = "Shared code for cli tests and benchmarks"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -12,6 +12,7 @@ description = "Shared code for cli tests and benchmarks"
[dependencies]
roc_cli = { path = "../cli" }
roc_collections = { path = "../compiler/collections" }
roc_reporting = { path = "../reporting" }
roc_load = { path = "../compiler/load" }
roc_module = { path = "../compiler/module" }
bumpalo = { version = "3.8.0", features = ["collections"] }
@ -20,7 +21,7 @@ serde = { version = "1.0.130", features = ["derive"] }
serde-xml-rs = "0.5.1"
strip-ansi-escapes = "0.1.1"
tempfile = "3.2.0"
const_format = "0.2.22"
const_format = { version = "0.2.23", features = ["const_generics"] }
[target.'cfg(unix)'.dependencies]
rlimit = "0.6.2"

View File

@ -10,6 +10,7 @@ use std::env;
use std::ffi::OsStr;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::process::{Command, ExitStatus, Stdio};
use tempfile::NamedTempFile;
@ -21,7 +22,66 @@ pub struct Out {
pub status: ExitStatus,
}
pub fn run_roc<I, S>(args: I, stdin_vals: &[&str]) -> Out
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let roc_binary_path = path_to_roc_binary();
// If we don't have a /target/release/roc, rebuild it!
if !roc_binary_path.exists() {
// Remove the /target/release/roc part
let root_project_dir = roc_binary_path
.parent()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap();
// cargo build --bin roc
// (with --release iff the test is being built with --release)
let args = if cfg!(debug_assertions) {
vec!["build", "--bin", "roc"]
} else {
vec!["build", "--release", "--bin", "roc"]
};
let output = Command::new("cargo")
.current_dir(root_project_dir)
.args(args)
.output()
.unwrap();
if !output.status.success() {
panic!("cargo build --release --bin roc failed. stdout was:\n\n{:?}\n\nstderr was:\n\n{:?}\n",
output.stdout,
output.stderr
);
}
}
run_with_stdin(&roc_binary_path, args, stdin_vals)
}
pub fn run_bindgen<I, S>(args: I) -> Out
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
run_with_stdin(&path_to_bindgen_binary(), args, &[])
}
pub fn path_to_roc_binary() -> PathBuf {
path_to_binary("roc")
}
pub fn path_to_bindgen_binary() -> PathBuf {
path_to_binary("roc-bindgen")
}
pub fn path_to_binary(binary_name: &str) -> PathBuf {
// Adapted from https://github.com/volta-cli/volta/blob/cefdf7436a15af3ce3a38b8fe53bb0cfdb37d3dd/tests/acceptance/support/sandbox.rs#L680
// by the Volta Contributors - license information can be found in
// the LEGAL_DETAILS file in the root directory of this distribution.
@ -40,13 +100,33 @@ pub fn path_to_roc_binary() -> PathBuf {
})
.unwrap_or_else(|| panic!("CARGO_BIN_PATH wasn't set, and couldn't be inferred from context. Can't run CLI tests."));
path.push("roc");
path.push(binary_name);
path
}
pub fn run_roc<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(args: I, stdin_vals: &[&str]) -> Out {
let mut cmd = Command::new(path_to_roc_binary());
pub fn strip_colors(str: &str) -> String {
use roc_reporting::report::ANSI_STYLE_CODES;
str.replace(ANSI_STYLE_CODES.red, "")
.replace(ANSI_STYLE_CODES.green, "")
.replace(ANSI_STYLE_CODES.yellow, "")
.replace(ANSI_STYLE_CODES.blue, "")
.replace(ANSI_STYLE_CODES.magenta, "")
.replace(ANSI_STYLE_CODES.cyan, "")
.replace(ANSI_STYLE_CODES.white, "")
.replace(ANSI_STYLE_CODES.bold, "")
.replace(ANSI_STYLE_CODES.underline, "")
.replace(ANSI_STYLE_CODES.reset, "")
.replace(ANSI_STYLE_CODES.color_reset, "")
}
pub fn run_with_stdin<I, S>(path: &Path, args: I, stdin_vals: &[&str]) -> Out
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let mut cmd = Command::new(path);
for arg in args {
cmd.arg(arg);
@ -57,7 +137,12 @@ pub fn run_roc<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(args: I, stdin_vals:
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.expect("failed to execute compiled `roc` binary in CLI test");
.unwrap_or_else(|err| {
panic!(
"failed to execute compiled binary {} in CLI test: {err}",
path.to_string_lossy()
)
});
{
let stdin = child.stdin.as_mut().expect("Failed to open stdin");
@ -71,7 +156,7 @@ pub fn run_roc<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(args: I, stdin_vals:
let output = child
.wait_with_output()
.expect("failed to get output for compiled `roc` binary in CLI test");
.expect("failed to get output for compiled binary in CLI test");
Out {
stdout: String::from_utf8(output.stdout).unwrap(),

View File

@ -3,7 +3,7 @@ name = "roc_code_markup"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
edition = "2021"
description = "Our own markup language for Roc code. Used by the editor and the docs."
[dependencies]

View File

@ -1,6 +1,6 @@
[package]
authors = ["The Roc Contributors"]
edition = "2018"
edition = "2021"
license = "UPL-1.0"
name = "roc_alias_analysis"
version = "0.1.0"

View File

@ -28,7 +28,11 @@ pub fn func_name_bytes(proc: &Proc) -> [u8; SIZE] {
#[inline(always)]
fn debug() -> bool {
use roc_debug_flags::{dbg_do, ROC_DEBUG_ALIAS_ANALYSIS};
use roc_debug_flags::dbg_do;
#[cfg(debug_assertions)]
use roc_debug_flags::ROC_DEBUG_ALIAS_ANALYSIS;
dbg_do!(ROC_DEBUG_ALIAS_ANALYSIS, {
return true;
});

View File

@ -4,5 +4,5 @@ version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/rtfeldman/roc"
edition = "2018"
edition = "2021"
description = "A CLI for Roc"

View File

@ -3,7 +3,7 @@ name = "roc_build"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
edition = "2021"
[dependencies]
roc_collections = { path = "../collections" }

View File

@ -9,6 +9,7 @@ use std::io;
use std::path::{Path, PathBuf};
use std::process::{self, Child, Command, Output};
use target_lexicon::{Architecture, OperatingSystem, Triple};
use wasi_libc_sys::{WASI_COMPILER_RT_PATH, WASI_LIBC_PATH};
fn zig_executable() -> String {
match std::env::var("ROC_ZIG") {
@ -25,6 +26,16 @@ pub enum LinkType {
None = 2,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum LinkingStrategy {
/// Compile app and host object files, then use a linker like lld or wasm-ld
Legacy,
/// Compile app and host object files, then use the Roc surgical linker
Surgical,
/// Initialise the backend from a host object file, then add the app to it. No linker needed.
Additive,
}
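
For illustration, choosing between these strategies can mirror the CLI logic earlier in this diff; the function name and boolean parameters below are assumptions, not code from this commit:

```rust
// Sketch: pick a LinkingStrategy the same way `roc_cli`'s build path does above.
#[allow(dead_code)]
fn choose_linking_strategy(
    wasm_dev_backend: bool,   // Development opt level targeting wasm32
    surgical_supported: bool, // roc_linker::supported(..) for this target
    forced_legacy: bool,      // --linker legacy was passed
) -> LinkingStrategy {
    if wasm_dev_backend {
        LinkingStrategy::Additive
    } else if !surgical_supported || forced_legacy {
        LinkingStrategy::Legacy
    } else {
        LinkingStrategy::Surgical
    }
}
```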
/// input_paths can include the host as well as the app. e.g. &["host.o", "roc_app.o"]
pub fn link(
target: &Triple,
@ -70,8 +81,6 @@ fn find_zig_str_path() -> PathBuf {
}
fn find_wasi_libc_path() -> PathBuf {
use wasi_libc_sys::WASI_LIBC_PATH;
// Environment variable defined in wasi-libc-sys/build.rs
let wasi_libc_pathbuf = PathBuf::from(WASI_LIBC_PATH);
if std::path::Path::exists(&wasi_libc_pathbuf) {
@ -250,6 +259,14 @@ pub fn build_zig_host_wasm32(
if shared_lib_path.is_some() {
unimplemented!("Linking a shared library to wasm not yet implemented");
}
let zig_target = if matches!(opt_level, OptLevel::Development) {
"wasm32-wasi"
} else {
// For LLVM backend wasm we are emitting a .bc file anyway so this target is OK
"i386-linux-musl"
};
// NOTE currently just to get compiler warnings if the host code is invalid.
// the produced artifact is not used
//
@ -259,30 +276,32 @@ pub fn build_zig_host_wasm32(
//
// https://github.com/ziglang/zig/issues/9414
let mut command = Command::new(&zig_executable());
let args = &[
"build-obj",
zig_host_src,
emit_bin,
"--pkg-begin",
"str",
zig_str_path,
"--pkg-end",
// include the zig runtime
// "-fcompiler-rt",
// include libc
"--library",
"c",
"-target",
zig_target,
// "-femit-llvm-ir=/home/folkertdev/roc/roc/examples/benchmarks/platform/host.ll",
"-fPIC",
"--strip",
];
command
.env_clear()
.env("PATH", env_path)
.env("HOME", env_home)
.args(&[
"build-obj",
zig_host_src,
emit_bin,
"--pkg-begin",
"str",
zig_str_path,
"--pkg-end",
// include the zig runtime
// "-fcompiler-rt",
// include libc
"--library",
"c",
"-target",
"i386-linux-musl",
// "wasm32-wasi",
// "-femit-llvm-ir=/home/folkertdev/roc/roc/examples/benchmarks/platform/host.ll",
"-fPIC",
"--strip",
]);
.args(args);
if matches!(opt_level, OptLevel::Optimize) {
command.args(&["-O", "ReleaseSafe"]);
} else if matches!(opt_level, OptLevel::Size) {
@ -371,7 +390,7 @@ pub fn rebuild_host(
host_input_path: &Path,
shared_lib_path: Option<&Path>,
target_valgrind: bool,
) {
) -> PathBuf {
let c_host_src = host_input_path.with_file_name("host.c");
let c_host_dest = host_input_path.with_file_name("c_host.o");
let zig_host_src = host_input_path.with_file_name("host.zig");
@ -381,12 +400,19 @@ pub fn rebuild_host(
let swift_host_src = host_input_path.with_file_name("host.swift");
let swift_host_header_src = host_input_path.with_file_name("host.h");
let host_dest_native = host_input_path.with_file_name(if shared_lib_path.is_some() {
"dynhost"
let host_dest = if matches!(target.architecture, Architecture::Wasm32) {
if matches!(opt_level, OptLevel::Development) {
host_input_path.with_file_name("host.o")
} else {
host_input_path.with_file_name("host.bc")
}
} else {
"host.o"
});
let host_dest_wasm = host_input_path.with_file_name("host.bc");
host_input_path.with_file_name(if shared_lib_path.is_some() {
"dynhost"
} else {
"host.o"
})
};
let env_path = env::var("PATH").unwrap_or_else(|_| "".to_string());
let env_home = env::var("HOME").unwrap_or_else(|_| "".to_string());
@ -404,7 +430,11 @@ pub fn rebuild_host(
let output = match target.architecture {
Architecture::Wasm32 => {
let emit_bin = format!("-femit-llvm-ir={}", host_dest_wasm.to_str().unwrap());
let emit_bin = if matches!(opt_level, OptLevel::Development) {
format!("-femit-bin={}", host_dest.to_str().unwrap())
} else {
format!("-femit-llvm-ir={}", host_dest.to_str().unwrap())
};
build_zig_host_wasm32(
&env_path,
&env_home,
@ -416,7 +446,7 @@ pub fn rebuild_host(
)
}
Architecture::X86_64 => {
let emit_bin = format!("-femit-bin={}", host_dest_native.to_str().unwrap());
let emit_bin = format!("-femit-bin={}", host_dest.to_str().unwrap());
build_zig_host_native(
&env_path,
&env_home,
@ -430,7 +460,7 @@ pub fn rebuild_host(
)
}
Architecture::X86_32(_) => {
let emit_bin = format!("-femit-bin={}", host_dest_native.to_str().unwrap());
let emit_bin = format!("-femit-bin={}", host_dest.to_str().unwrap());
build_zig_host_native(
&env_path,
&env_home,
@ -445,7 +475,7 @@ pub fn rebuild_host(
}
Architecture::Aarch64(_) => {
let emit_bin = format!("-femit-bin={}", host_dest_native.to_str().unwrap());
let emit_bin = format!("-femit-bin={}", host_dest.to_str().unwrap());
build_zig_host_native(
&env_path,
&env_home,
@ -493,7 +523,7 @@ pub fn rebuild_host(
if shared_lib_path.is_some() {
// For surgical linking, just copy the dynamically linked rust app.
std::fs::copy(cargo_out_dir.join("host"), host_dest_native).unwrap();
std::fs::copy(cargo_out_dir.join("host"), &host_dest).unwrap();
} else {
// Cargo hosts depend on a c wrapper for the api. Compile host.c as well.
@ -517,7 +547,7 @@ pub fn rebuild_host(
c_host_dest.to_str().unwrap(),
"-lhost",
"-o",
host_dest_native.to_str().unwrap(),
host_dest.to_str().unwrap(),
])
.output()
.unwrap();
@ -555,7 +585,7 @@ pub fn rebuild_host(
let output = build_c_host_native(
&env_path,
&env_home,
host_dest_native.to_str().unwrap(),
host_dest.to_str().unwrap(),
&[
c_host_src.to_str().unwrap(),
rust_host_dest.to_str().unwrap(),
@ -583,7 +613,7 @@ pub fn rebuild_host(
c_host_dest.to_str().unwrap(),
rust_host_dest.to_str().unwrap(),
"-o",
host_dest_native.to_str().unwrap(),
host_dest.to_str().unwrap(),
])
.output()
.unwrap();
@ -608,7 +638,7 @@ pub fn rebuild_host(
let output = build_c_host_native(
&env_path,
&env_home,
host_dest_native.to_str().unwrap(),
host_dest.to_str().unwrap(),
&[c_host_src.to_str().unwrap()],
opt_level,
shared_lib_path,
@ -619,7 +649,7 @@ pub fn rebuild_host(
let output = build_swift_host_native(
&env_path,
&env_home,
host_dest_native.to_str().unwrap(),
host_dest.to_str().unwrap(),
&[swift_host_src.to_str().unwrap()],
opt_level,
shared_lib_path,
@ -629,6 +659,8 @@ pub fn rebuild_host(
);
validate_output("host.swift", "swiftc", output);
}
host_dest
}
fn nix_path_opt() -> Option<String> {
@ -1115,6 +1147,43 @@ pub fn module_to_dylib(
unsafe { Library::new(path) }
}
pub fn preprocess_host_wasm32(host_input_path: &Path, preprocessed_host_path: &Path) {
let host_input = host_input_path.to_str().unwrap();
let output_file = preprocessed_host_path.to_str().unwrap();
/*
Notes:
zig build-obj just gives you back the first input file, doesn't combine them!
zig build-lib works but doesn't emit relocations, even with --emit-relocs (bug?)
(gen_wasm needs relocs to adjust stack size by changing the __heap_base constant)
zig wasm-ld is a wrapper around wasm-ld and gives us maximum flexibility
(but seems to be an unofficial API)
*/
let mut command = Command::new(&zig_executable());
let args = &[
"wasm-ld",
bitcode::BUILTINS_WASM32_OBJ_PATH,
host_input,
WASI_LIBC_PATH,
WASI_COMPILER_RT_PATH, // builtins need __multi3, __udivti3, __fixdfti
"-o",
output_file,
"--export-all",
"--no-entry",
"--import-undefined",
// "--relocatable", // enable this when gen_wasm can handle Custom sections in any order
];
command.args(args);
// println!("\npreprocess_host_wasm32");
// println!("zig {}\n", args.join(" "));
let output = command.output().unwrap();
validate_output(output_file, "zig", output)
}
fn validate_output(file_name: &str, cmd_name: &str, output: Output) {
if !output.status.success() {
match std::str::from_utf8(&output.stderr) {

View File

@ -154,6 +154,7 @@ fn report_problems_help(
}
}
#[allow(clippy::too_many_arguments)]
pub fn gen_from_mono_module(
arena: &bumpalo::Bump,
loaded: MonomorphizedModule,
@ -162,6 +163,7 @@ pub fn gen_from_mono_module(
app_o_file: &Path,
opt_level: OptLevel,
emit_debug_info: bool,
preprocessed_host_path: &Path,
) -> CodeGenTiming {
match opt_level {
OptLevel::Normal | OptLevel::Size | OptLevel::Optimize => gen_from_mono_module_llvm(
@ -173,7 +175,9 @@ pub fn gen_from_mono_module(
opt_level,
emit_debug_info,
),
OptLevel::Development => gen_from_mono_module_dev(arena, loaded, target, app_o_file),
OptLevel::Development => {
gen_from_mono_module_dev(arena, loaded, target, app_o_file, preprocessed_host_path)
}
}
}
@ -400,11 +404,14 @@ pub fn gen_from_mono_module_dev(
loaded: MonomorphizedModule,
target: &target_lexicon::Triple,
app_o_file: &Path,
preprocessed_host_path: &Path,
) -> CodeGenTiming {
use target_lexicon::Architecture;
match target.architecture {
Architecture::Wasm32 => gen_from_mono_module_dev_wasm32(arena, loaded, app_o_file),
Architecture::Wasm32 => {
gen_from_mono_module_dev_wasm32(arena, loaded, app_o_file, preprocessed_host_path)
}
Architecture::X86_64 | Architecture::Aarch64(_) => {
gen_from_mono_module_dev_assembly(arena, loaded, target, app_o_file)
}
@ -418,6 +425,7 @@ pub fn gen_from_mono_module_dev(
loaded: MonomorphizedModule,
target: &target_lexicon::Triple,
app_o_file: &Path,
_host_input_path: &Path,
) -> CodeGenTiming {
use target_lexicon::Architecture;
@ -434,7 +442,15 @@ fn gen_from_mono_module_dev_wasm32(
arena: &bumpalo::Bump,
loaded: MonomorphizedModule,
app_o_file: &Path,
preprocessed_host_path: &Path,
) -> CodeGenTiming {
if true {
// WIP: gen_wasm is not yet able to link roc__mainForHost_1_exposed
// It works fine in tests and in the web REPL, but not with a real host!
// This code path is not part of test_gen or repl_wasm
todo!("WebAssembly development backend is not ready to use yet!")
}
let code_gen_start = SystemTime::now();
let MonomorphizedModule {
module_id,
@ -456,23 +472,17 @@ fn gen_from_mono_module_dev_wasm32(
exposed_to_host,
};
let platform_and_builtins_object_file_bytes: &[u8] = if true {
todo!("The WebAssembly dev backend is a work in progress. Coming soon!")
} else {
&[] // This `if` gets rid of "unreachable code" warnings. When we're ready to use it, we'll notice!
};
let preloaded_host_bytes = std::fs::read(preprocessed_host_path)
.expect("Failed to read host object file! Try setting --precompiled-host=false");
let bytes = roc_gen_wasm::build_module(
&env,
&mut interns,
platform_and_builtins_object_file_bytes,
procedures,
);
let final_binary_bytes =
roc_gen_wasm::build_module(&env, &mut interns, &preloaded_host_bytes, procedures);
let code_gen = code_gen_start.elapsed().unwrap();
let emit_o_file_start = SystemTime::now();
std::fs::write(&app_o_file, &bytes).expect("failed to write object to file");
// The app_o_file is actually the final binary
std::fs::write(&app_o_file, &final_binary_bytes).expect("failed to write object to file");
let emit_o_file = emit_o_file_start.elapsed().unwrap();

View File

@ -3,7 +3,7 @@ name = "roc_builtins"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
edition = "2021"
[dependencies]
roc_collections = { path = "../collections" }

View File

@ -48,15 +48,15 @@ Since `List.repeat` is implemented entirely as low level functions, its `body` i
## Connecting the definition to the implementation
### module/src/low_level.rs
This `LowLevel` thing connects the builtin defined in this module to its implementation. Its referenced in `can/src/builtins.rs` and it is used in `gen/src/llvm/build.rs`.
This `LowLevel` thing connects the builtin defined in this module to its implementation. It's referenced in `can/src/builtins.rs` and it is used in `gen/src/llvm/build.rs`.
## Bottom level LLVM values and functions
### gen/src/llvm/build.rs
This is where bottom-level functions that need to be written as LLVM are created. If the function leads to a tag thats a good sign it should not be written here in `build.rs`. If its simple fundamental stuff like `INT_ADD` then it certainly should be written here.
This is where bottom-level functions that need to be written as LLVM are created. If the function leads to a tag, that's a good sign it should not be written here in `build.rs`. If it's simple fundamental stuff like `INT_ADD` then it certainly should be written here.
## Letting the compiler know these functions exist
### builtins/src/std.rs
Its one thing to actually write these functions, its _another_ thing to let the Roc compiler know they exist as part of the standard library. You have to tell the compiler "Hey, this function exists, and it has this type signature". That happens in `std.rs`.
It's one thing to actually write these functions, it's _another_ thing to let the Roc compiler know they exist as part of the standard library. You have to tell the compiler "Hey, this function exists, and it has this type signature". That happens in `std.rs`.
## Specifying how we pass args to the function
### builtins/mono/src/borrow.rs

View File

@ -1,6 +1,6 @@
interface Bool
exposes [ Bool, and, or, not, isEq, isNotEq ]
imports [ ]
imports []
Bool : [ True, False ]
@ -56,7 +56,6 @@ and : Bool, Bool -> Bool
## In Roc, this is not the case. See the performance notes for [Bool.and] for details.
or : Bool, Bool -> Bool
# xor : Bool, Bool -> Bool # currently unimplemented
## Returns `False` when given `True`, and vice versa.
not : Bool -> Bool

View File

@ -1,6 +1,6 @@
interface Box
exposes [ box, unbox ]
imports [ ]
imports []
box : a -> Box a
unbox : Box a -> a

View File

@ -17,7 +17,7 @@ interface Dict
]
imports
[
Bool.{ Bool }
Bool.{ Bool },
]
## A [dictionary](https://en.wikipedia.org/wiki/Associative_array) that lets you associate keys with values.
@ -68,8 +68,6 @@ interface Dict
## When comparing two dictionaries for equality, they are `==` only if both their contents and their
## orderings match. This preserves the property that if `dict1 == dict2`, you should be able to rely on
## `fn dict1 == fn dict2` also being `True`, even if `fn` relies on the dictionary's ordering.
## An empty dictionary.
empty : Dict k v
single : k, v -> Dict k v

View File

@ -0,0 +1,65 @@
interface Encode
exposes
[
Encoder,
Encoding,
toEncoder,
EncoderFormatting,
u8,
u16,
u32,
u64,
u128,
i8,
i16,
i32,
i64,
i128,
f32,
f64,
dec,
bool,
string,
list,
custom,
appendWith,
append,
toBytes,
]
imports
[]
Encoder fmt := List U8, fmt -> List U8 | fmt has EncoderFormatting
Encoding has
toEncoder : val -> Encoder fmt | val has Encoding, fmt has EncoderFormatting
EncoderFormatting has
u8 : U8 -> Encoder fmt | fmt has EncoderFormatting
u16 : U16 -> Encoder fmt | fmt has EncoderFormatting
u32 : U32 -> Encoder fmt | fmt has EncoderFormatting
u64 : U64 -> Encoder fmt | fmt has EncoderFormatting
u128 : U128 -> Encoder fmt | fmt has EncoderFormatting
i8 : I8 -> Encoder fmt | fmt has EncoderFormatting
i16 : I16 -> Encoder fmt | fmt has EncoderFormatting
i32 : I32 -> Encoder fmt | fmt has EncoderFormatting
i64 : I64 -> Encoder fmt | fmt has EncoderFormatting
i128 : I128 -> Encoder fmt | fmt has EncoderFormatting
f32 : F32 -> Encoder fmt | fmt has EncoderFormatting
f64 : F64 -> Encoder fmt | fmt has EncoderFormatting
dec : Dec -> Encoder fmt | fmt has EncoderFormatting
bool : Bool -> Encoder fmt | fmt has EncoderFormatting
string : Str -> Encoder fmt | fmt has EncoderFormatting
list : List elem, (elem -> Encoder fmt) -> Encoder fmt | fmt has EncoderFormatting
custom : (List U8, fmt -> List U8) -> Encoder fmt | fmt has EncoderFormatting
custom = \encoder -> @Encoder encoder
appendWith : List U8, Encoder fmt, fmt -> List U8 | fmt has EncoderFormatting
appendWith = \lst, @Encoder doEncoding, fmt -> doEncoding lst fmt
append : List U8, val, fmt -> List U8 | val has Encoding, fmt has EncoderFormatting
append = \lst, val, fmt -> appendWith lst (toEncoder val) fmt
toBytes : val, fmt -> List U8 | val has Encoding, fmt has EncoderFormatting
toBytes = \val, fmt -> appendWith [] (toEncoder val) fmt

View File

@ -0,0 +1,83 @@
interface Json
exposes
[
Json,
format,
]
imports
[
Encode.{
custom,
appendWith,
u8,
u16,
u32,
u64,
u128,
i8,
i16,
i32,
i64,
i128,
f32,
f64,
dec,
bool,
string,
list,
},
]
Json := {}
format = @Json {}
numToBytes = \n ->
n |> Num.toStr |> Str.toUtf8
# impl EncoderFormatting for Json
u8 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
u16 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
u32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
u64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
u128 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i8 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i16 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
i128 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
f32 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
f64 = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
dec = \n -> custom \bytes, @Json {} -> List.concat bytes (numToBytes n)
bool = \b -> custom \bytes, @Json {} ->
if
b
then
List.concat bytes (Str.toUtf8 "true")
else
List.concat bytes (Str.toUtf8 "false")
string = \s -> custom \bytes, @Json {} ->
List.append bytes (Num.toU8 '"')
|> List.concat (Str.toUtf8 s)
|> List.append (Num.toU8 '"')
list = \lst, encodeElem ->
custom \bytes, @Json {} ->
head = List.append bytes (Num.toU8 '[')
withList = List.walk lst head (\bytes1, elem -> appendWith bytes1 (encodeElem elem) (@Json {}))
List.append withList (Num.toU8 ']')

View File

@ -53,11 +53,10 @@ interface List
]
imports
[
Bool.{ Bool }
Bool.{ Bool },
]
## Types
## A sequential list of values.
##
## >>> [ 1, 2, 3 ] # a list of numbers
@ -193,7 +192,6 @@ interface List
## * Even when copying is faster, other list operations may still be slightly slower with persistent data structures. For example, even if it were a persistent data structure, [List.map], [List.walk], and [List.keepIf] would all need to traverse every element in the list and build up the result from scratch. These operations are all
## * Roc's compiler optimizes many list operations into in-place mutations behind the scenes, depending on how the list is being used. For example, [List.map], [List.keepIf], and [List.set] can all be optimized to perform in-place mutations.
## * If possible, it is usually best for performance to use large lists in a way where the optimizer can turn them into in-place mutations. If this is not possible, a persistent data structure might be faster - but this is a rare enough scenario that it would not be good for the average Roc program's performance if this were the way [List] worked by default. Instead, you can look outside Roc's standard modules for an implementation of a persistent data structure - likely built using [List] under the hood!
## Check if the list is empty.
##
## >>> List.isEmpty [ 1, 2, 3 ]
@ -321,7 +319,7 @@ walk : List elem, state, (state, elem -> state) -> state
## Note that in other languages, `walkBackwards` is sometimes called `reduceRight`,
## `fold`, `foldRight`, or `foldr`.
walkBackwards : List elem, state, (state, elem -> state) -> state
walkBackwards : List elem, state, (state, elem -> state) -> state
## Same as [List.walk], except you can stop walking early.
##
@ -406,7 +404,7 @@ keepOks : List before, (before -> Result after *) -> List after
## >>> fn = \str -> if Str.isEmpty str then Err StrWasEmpty else Ok (Str.len str)
## >>>
## >>> List.keepErrs [ "", "a", "bc", "", "d", "ef", "" ]
keepErrs: List before, (before -> Result * after) -> List after
keepErrs : List before, (before -> Result * after) -> List after
## Convert each element in the list to something new, by calling a conversion
## function on each of them. Then return a new list of the converted values.
@ -445,7 +443,7 @@ mapWithIndex : List a, (a, Nat -> b) -> List b
##
## >>> List.range 2 8
range : Int a, Int a -> List (Int a)
sortWith : List a, (a, a -> [ LT, EQ, GT ] ) -> List a
sortWith : List a, (a, a -> [ LT, EQ, GT ]) -> List a
## Sorts a list in ascending order (lowest to highest), using a function which
## specifies a way to represent each element as a number.
@ -541,41 +539,35 @@ drop : List elem, Nat -> List elem
## To replace the element at a given index, instead of dropping it, see [List.set].
dropAt : List elem, Nat -> List elem
min : List (Num a) -> Result (Num a) [ ListWasEmpty ]*
min : List (Num a) -> Result (Num a) [ ListWasEmpty ]*
min = \list ->
when List.first list is
Ok initial ->
Ok (minHelp list initial)
Err ListWasEmpty ->
Err ListWasEmpty
minHelp : List (Num a), Num a -> Num a
minHelp : List (Num a), Num a -> Num a
minHelp = \list, initial ->
List.walk list initial \bestSoFar, current ->
if current < bestSoFar then
current
else
bestSoFar
max : List (Num a) -> Result (Num a) [ ListWasEmpty ]*
max : List (Num a) -> Result (Num a) [ ListWasEmpty ]*
max = \list ->
when List.first list is
Ok initial ->
Ok (maxHelp list initial)
Err ListWasEmpty ->
Err ListWasEmpty
maxHelp : List (Num a), Num a -> Num a
maxHelp : List (Num a), Num a -> Num a
maxHelp = \list, initial ->
List.walk list initial \bestSoFar, current ->
if current > bestSoFar then
current
else
bestSoFar
@ -616,4 +608,4 @@ intersperse : List elem, elem -> List elem
## than the given index, # and the `others` list will be all the others. (This
## means if you give an index of 0, the `before` list will be empty and the
## `others` list will have the same elements as the original list.)
split : List elem, Nat -> { before: List elem, others: List elem }
split : List elem, Nat -> { before : List elem, others : List elem }

View File

@ -4,46 +4,36 @@ interface Num
Num,
Int,
Frac,
Integer,
FloatingPoint,
I128,
I64,
I32,
I16,
I8,
U128,
U64,
U32,
U16,
U8,
Signed128,
Signed64,
Signed32,
Signed16,
Signed8,
Unsigned128,
Unsigned64,
Unsigned32,
Unsigned16,
Unsigned8,
Nat,
Dec,
F32,
F64,
Natural,
Decimal,
Binary32,
Binary64,
abs,
neg,
add,
@ -155,7 +145,7 @@ interface Num
]
imports
[
Bool.{ Bool }
Bool.{ Bool },
]
## Represents a number that could be either an [Int] or a [Frac].
@ -343,7 +333,6 @@ Num range := range
##
## As such, it's very important to design your code not to exceed these bounds!
## If you need to do math outside these bounds, consider using a larger numeric size.
Int range : Num (Integer range)
## A fixed-size number with a fractional component.
@ -501,7 +490,6 @@ F32 : Num (FloatingPoint Binary32)
Dec : Num (FloatingPoint Decimal)
# ------- Functions
## Convert a number to a [Str].
##
## This is the same as calling `Num.format {}` - so for more details on
@ -875,7 +863,6 @@ subChecked : Num a, Num a -> Result (Num a) [ Overflow ]*
mulWrap : Int range, Int range -> Int range
# mulSaturated : Num a, Num a -> Num a
## Multiply two numbers and check for overflow.
##
## This is the same as [Num.mul] except if the operation overflows, instead of
@ -1086,7 +1073,6 @@ minF64 = -1.7976931348623157e308
maxF64 : F64
maxF64 = 1.7976931348623157e308
## Converts an [Int] to an [I8]. If the given number can't be precisely represented in an [I8],
## the returned number may be different from the given number.
toI8 : Int * -> I8
@ -1142,9 +1128,7 @@ toNatChecked : Int * -> Result Nat [ OutOfBounds ]*
toF32Checked : Num * -> Result F32 [ OutOfBounds ]*
toF64Checked : Num * -> Result F64 [ OutOfBounds ]*
# Special Floating-Point operations
## When given a [F64] or [F32] value, returns `False` if that value is
## [*NaN*](Num.isNaN), ∞ or -∞, and `True` otherwise.
##
@ -1152,8 +1136,7 @@ toF64Checked : Num * -> Result F64 [ OutOfBounds ]*
##
## This is the opposite of [isInfinite], except when given [*NaN*](Num.isNaN). Both
## [isFinite] and [isInfinite] return `False` for [*NaN*](Num.isNaN).
#isFinite : Frac * -> Bool
# isFinite : Frac * -> Bool
## When given a [F64] or [F32] value, returns `True` if that value is either
## ∞ or -∞, and `False` otherwise.
##
@ -1161,8 +1144,7 @@ toF64Checked : Num * -> Result F64 [ OutOfBounds ]*
##
## This is the opposite of [isFinite], except when given [*NaN*](Num.isNaN). Both
## [isFinite] and [isInfinite] return `False` for [*NaN*](Num.isNaN).
#isInfinite : Frac * -> Bool
# isInfinite : Frac * -> Bool
## When given a [F64] or [F32] value, returns `True` if that value is
## *NaN* ([not a number](https://en.wikipedia.org/wiki/NaN)), and `False` otherwise.
##
@ -1185,21 +1167,17 @@ toF64Checked : Num * -> Result F64 [ OutOfBounds ]*
## Note that you should never put a *NaN* into a [Set], or use it as the key in
## a [Dict]. The result is entries that can never be removed from those
## collections! See the documentation for [Set.add] and [Dict.insert] for details.
#isNaN : Frac * -> Bool
# isNaN : Frac * -> Bool
## Returns the higher of two numbers.
##
## If either argument is [*NaN*](Num.isNaN), returns `False` no matter what. (*NaN*
## is [defined to be unordered](https://en.wikipedia.org/wiki/NaN#Comparison_with_NaN).)
#max : Num a, Num a -> Num a
# max : Num a, Num a -> Num a
## Returns the lower of two numbers.
##
## If either argument is [*NaN*](Num.isNaN), returns `False` no matter what. (*NaN*
## is [defined to be unordered](https://en.wikipedia.org/wiki/NaN#Comparison_with_NaN).)
#min : Num a, Num a -> Num a
# min : Num a, Num a -> Num a
# Branchless implementation that works for all numeric types:
#
# let is_lt = arg1 < arg2;
@ -1209,57 +1187,46 @@ toF64Checked : Num * -> Result F64 [ OutOfBounds ]*
# 1, 1 -> (0 - 1) + 1 == 0 # Eq
# 5, 1 -> (0 - 0) + 1 == 1 # Gt
# 1, 5 -> (1 - 0) + 1 == 2 # Lt
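Spelled out as a small Rust sketch (for illustration only; the function name is made up and Roc's actual lowering may differ), the branchless comparison in the comment above is:

```rust
// 0 = Eq, 1 = Gt, 2 = Lt, matching the table in the comment above.
fn compare_branchless(arg1: i64, arg2: i64) -> i8 {
    let is_lt = (arg1 < arg2) as i8;
    let is_eq = (arg1 == arg2) as i8;
    (is_lt - is_eq) + 1
}

// compare_branchless(1, 1) == 0  (Eq)
// compare_branchless(5, 1) == 1  (Gt)
// compare_branchless(1, 5) == 2  (Lt)
```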
## Returns `Lt` if the first number is less than the second, `Gt` if
## the first is greater than the second, and `Eq` if they're equal.
##
## Although this can be passed to `List.sort`, you'll get better performance
## by using `List.sortAsc` or `List.sortDesc` instead.
#compare : Num a, Num a -> [ Lt, Eq, Gt ]
# compare : Num a, Num a -> [ Lt, Eq, Gt ]
## [Endianness](https://en.wikipedia.org/wiki/Endianness)
# Endi : [ Big, Little, Native ]
## The `Endi` argument does not matter for [U8] and [I8], since they have
## only one byte.
# toBytes : Num *, Endi -> List U8
## when Num.parseBytes bytes Big is
## Ok { val: f64, rest } -> ...
## Err (ExpectedNum (Frac Binary64)) -> ...
# parseBytes : List U8, Endi -> Result { val : Num a, rest : List U8 } [ ExpectedNum a ]*
## when Num.fromBytes bytes Big is
## Ok f64 -> ...
## Err (ExpectedNum (Frac Binary64)) -> ...
# fromBytes : List U8, Endi -> Result (Num a) [ ExpectedNum a ]*
# Bit shifts
## [Logical bit shift](https://en.wikipedia.org/wiki/Bitwise_operation#Logical_shift) left.
##
## `a << b` is shorthand for `Num.shl a b`.
#shl : Int a, Int a -> Int a
# shl : Int a, Int a -> Int a
## [Arithmetic bit shift](https://en.wikipedia.org/wiki/Bitwise_operation#Arithmetic_shift) left.
##
## This is called `shlWrap` because any bits shifted
## off the beginning of the number will be wrapped around to
## the end. (In contrast, [shl] replaces discarded bits with zeroes.)
#shlWrap : Int a, Int a -> Int a
# shlWrap : Int a, Int a -> Int a
## [Logical bit shift](https://en.wikipedia.org/wiki/Bitwise_operation#Logical_shift) right.
##
## `a >> b` is shorthand for `Num.shr a b`.
#shr : Int a, Int a -> Int a
# shr : Int a, Int a -> Int a
## [Arithmetic bit shift](https://en.wikipedia.org/wiki/Bitwise_operation#Arithmetic_shift) right.
##
## This is called `shrWrap` because any bits shifted
## off the end of the number will be wrapped around to
## the beginning. (In contrast, [shr] replaces discarded bits with zeroes.)
#shrWrap : Int a, Int a -> Int a
# shrWrap : Int a, Int a -> Int a
# ## Convert a number into a [Str], formatted with the given options.
# ##
# ## Default options:

View File

@ -12,8 +12,10 @@ Result ok err : [ Ok ok, Err err ]
isOk : Result ok err -> Bool
isOk = \result ->
when result is
Ok _ -> True
Err _ -> False
Ok _ ->
True
Err _ ->
False
## Return True if the result indicates a failure, else return False
##
@ -21,8 +23,10 @@ isOk = \result ->
isErr : Result ok err -> Bool
isErr = \result ->
when result is
Ok _ -> False
Err _ -> True
Ok _ ->
False
Err _ ->
True
## If the result is `Ok`, return the value it holds. Otherwise, return
## the given default value.
@ -33,8 +37,10 @@ isErr = \result ->
withDefault : Result ok err, ok -> ok
withDefault = \result, default ->
when result is
Ok value -> value
Err _ -> default
Ok value ->
value
Err _ ->
default
## If the result is `Ok`, transform the value it holds by running a conversion
## function on it. Then return a new `Ok` holding the transformed value.
@ -50,8 +56,10 @@ withDefault = \result, default ->
map : Result a err, (a -> b) -> Result b err
map = \result, transform ->
when result is
Ok v -> Ok (transform v)
Err e -> Err e
Ok v ->
Ok (transform v)
Err e ->
Err e
## If the result is `Err`, transform the value it holds by running a conversion
## function on it. Then return a new `Err` holding the transformed value.
@ -64,8 +72,10 @@ map = \result, transform ->
mapErr : Result ok a, (a -> b) -> Result ok b
mapErr = \result, transform ->
when result is
Ok v -> Ok v
Err e -> Err (transform e)
Ok v ->
Ok v
Err e ->
Err (transform e)
## If the result is `Ok`, transform the entire result by running a conversion
## function on the value the `Ok` holds. Then return that new result.
@ -78,5 +88,7 @@ mapErr = \result, transform ->
after : Result a err, (a -> Result b err) -> Result b err
after = \result, transform ->
when result is
Ok v -> transform v
Err e -> Err e
Ok v ->
transform v
Err e ->
Err e

View File

@ -18,7 +18,6 @@ interface Str
trim,
trimLeft,
trimRight,
toDec,
toF64,
toF32,
@ -41,7 +40,6 @@ interface Str
## Dealing with text is a deep topic, so by design, Roc's `Str` module sticks
## to the basics.
##
## ### Unicode
##
## Unicode can represent text values which span multiple languages, symbols, and emoji.
@ -111,8 +109,6 @@ interface Str
## and you can use it as many times as you like inside a string. The name
## between the parentheses must refer to a `Str` value that is currently in
## scope, and it must be a name - it can't be an arbitrary expression like a function call.
Utf8ByteProblem :
[
InvalidStartByte,
@ -191,7 +187,6 @@ toUtf8 : Str -> List U8
# fromUtf8 : List U8 -> Result Str [ BadUtf8 Utf8Problem ]*
# fromUtf8Range : List U8 -> Result Str [ BadUtf8 Utf8Problem Nat, OutOfBounds ]*
fromUtf8 : List U8 -> Result Str [ BadUtf8 Utf8ByteProblem Nat ]*
fromUtf8Range : List U8, { start : Nat, count : Nat } -> Result Str [ BadUtf8 Utf8ByteProblem Nat, OutOfBounds ]*

View File

@ -99,6 +99,7 @@ impl IntWidth {
matches!(self, I8 | I16 | I32 | I64 | I128)
}
pub const fn stack_size(&self) -> u32 {
use IntWidth::*;

View File

@ -11,6 +11,8 @@ pub fn module_source(module_id: ModuleId) -> &'static str {
ModuleId::SET => SET,
ModuleId::BOX => BOX,
ModuleId::BOOL => BOOL,
ModuleId::ENCODE => ENCODE,
ModuleId::JSON => JSON,
_ => panic!(
"ModuleId {:?} is not part of the standard library",
module_id
@ -26,3 +28,5 @@ const DICT: &str = include_str!("../roc/Dict.roc");
const SET: &str = include_str!("../roc/Set.roc");
const BOX: &str = include_str!("../roc/Box.roc");
const BOOL: &str = include_str!("../roc/Bool.roc");
const ENCODE: &str = include_str!("../roc/Encode.roc");
const JSON: &str = include_str!("../roc/Json.roc");

View File

@ -3,7 +3,7 @@ name = "roc_can"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
edition = "2021"
[dependencies]
roc_collections = { path = "../collections" }

View File

@ -1,4 +1,5 @@
use roc_collections::all::MutMap;
use roc_collections::{all::MutMap, VecMap, VecSet};
use roc_error_macros::internal_error;
use roc_module::symbol::Symbol;
use roc_region::all::Region;
use roc_types::{subs::Variable, types::Type};
@ -12,18 +13,42 @@ pub struct MemberVariables {
pub flex_vars: Vec<Variable>,
}
#[derive(Debug, Clone)]
pub enum MemberTypeInfo {
/// The member and its signature are defined locally, in the module the store is created for.
/// We need to instantiate and introduce this during solving.
Local {
signature_var: Variable,
signature: Type,
variables: MemberVariables,
},
/// The member was defined in another module, so we'll import its variable when it's time to
/// solve. At that point we'll resolve `var` here.
Imported { signature_var: Option<Variable> },
}
/// Stores information about an ability member definition, including the parent ability, the
/// defining type, and what type variables need to be instantiated with instances of the ability.
// TODO: SoA and put me in an arena
#[derive(Debug, Clone)]
pub struct AbilityMemberData {
pub parent_ability: Symbol,
pub signature_var: Variable,
pub signature: Type,
pub variables: MemberVariables,
pub region: Region,
pub typ: MemberTypeInfo,
}
impl AbilityMemberData {
pub fn signature_var(&self) -> Option<Variable> {
match self.typ {
MemberTypeInfo::Local { signature_var, .. } => Some(signature_var),
MemberTypeInfo::Imported { signature_var } => signature_var,
}
}
}
/// (member, specialization type) -> specialization
pub type SolvedSpecializations = VecMap<(Symbol, Symbol), MemberSpecialization>;
/// A particular specialization of an ability member.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct MemberSpecialization {
@ -32,7 +57,7 @@ pub struct MemberSpecialization {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SpecializationId(u64);
pub struct SpecializationId(u32);
#[allow(clippy::derivable_impls)] // let's be explicit about this
impl Default for SpecializationId {
@ -66,9 +91,9 @@ pub struct AbilitiesStore {
/// Maps a tuple (member, type) specifying that `type` declares an implementation of an ability
/// member `member`, to the exact symbol that implements the ability.
declared_specializations: MutMap<(Symbol, Symbol), MemberSpecialization>,
declared_specializations: SolvedSpecializations,
next_specialization_id: u64,
next_specialization_id: u32,
/// Resolved specializations for a symbol. These might be ephemeral (known due to type solving),
/// or resolved on-the-fly during mono.
@ -77,24 +102,16 @@ pub struct AbilitiesStore {
impl AbilitiesStore {
/// Records the definition of an ability, including its members.
pub fn register_ability(
&mut self,
ability: Symbol,
members: Vec<(Symbol, Region, Variable, Type, MemberVariables)>,
) {
pub fn register_ability<I>(&mut self, ability: Symbol, members: I)
where
I: IntoIterator<Item = (Symbol, AbilityMemberData)>,
I::IntoIter: ExactSizeIterator,
{
let members = members.into_iter();
let mut members_vec = Vec::with_capacity(members.len());
for (member, region, signature_var, signature, variables) in members.into_iter() {
for (member, member_data) in members {
members_vec.push(member);
let old_member = self.ability_members.insert(
member,
AbilityMemberData {
parent_ability: ability,
signature_var,
signature,
region,
variables,
},
);
let old_member = self.ability_members.insert(member, member_data);
debug_assert!(old_member.is_none(), "Replacing existing member definition");
}
let old_ability = self.members_of_ability.insert(ability, members_vec);
@ -171,10 +188,19 @@ impl AbilitiesStore {
self.ability_members.get(&member)
}
/// Returns an iterator over pairs (ability member, type) specifying that
/// "ability member" has a specialization with type "type".
pub fn get_known_specializations(&self) -> impl Iterator<Item = (Symbol, Symbol)> + '_ {
self.declared_specializations.keys().copied()
/// Iterator over all abilities and their members that this store knows about.
pub fn iter_abilities(&self) -> impl Iterator<Item = (Symbol, &[Symbol])> {
self.members_of_ability
.iter()
.map(|(k, v)| (*k, v.as_slice()))
}
/// Returns an iterator over pairs ((ability member, type), specialization) specifying that
/// "ability member" has a "specialization" for type "type".
pub fn iter_specializations(
&self,
) -> impl Iterator<Item = ((Symbol, Symbol), MemberSpecialization)> + '_ {
self.declared_specializations.iter().map(|(k, v)| (*k, *v))
}
/// Retrieves the specialization of `member` for `typ`, if it exists.
@ -182,14 +208,12 @@ impl AbilitiesStore {
self.declared_specializations.get(&(member, typ)).copied()
}
/// Returns pairs of (type, ability member) specifying that "ability member" has a
/// specialization with type "type".
pub fn members_of_ability(&self, ability: Symbol) -> Option<&[Symbol]> {
self.members_of_ability.get(&ability).map(|v| v.as_ref())
}
pub fn fresh_specialization_id(&mut self) -> SpecializationId {
debug_assert!(self.next_specialization_id != std::u64::MAX);
debug_assert!(self.next_specialization_id != std::u32::MAX);
let id = SpecializationId(self.next_specialization_id);
self.next_specialization_id += 1;
@ -197,7 +221,8 @@ impl AbilitiesStore {
}
pub fn insert_resolved(&mut self, id: SpecializationId, specialization: Symbol) {
debug_assert!(self.is_specialization_name(specialization));
// May not be a thing in mono
// debug_assert!(self.is_specialization_name(specialization));
let old_specialization = self.resolved_specializations.insert(id, specialization);
@ -220,4 +245,119 @@ impl AbilitiesStore {
pub fn get_resolved(&self, id: SpecializationId) -> Option<Symbol> {
self.resolved_specializations.get(&id).copied()
}
/// Creates a store from [`self`] that closes over the abilities/members given by the
/// imported `symbols`, and their specializations (if any).
pub fn closure_from_imported(&self, symbols: &VecSet<Symbol>) -> Self {
let Self {
members_of_ability,
ability_members,
declared_specializations,
// Covered by `declared_specializations`
specialization_to_root: _,
// Taking closure for a new module, so specialization IDs can be fresh
next_specialization_id: _,
resolved_specializations: _,
} = self;
let mut new = Self::default();
// 1. Figure out the abilities we need to introduce.
let mut abilities_to_introduce = VecSet::with_capacity(2);
symbols.iter().for_each(|symbol| {
if let Some(member_data) = ability_members.get(symbol) {
            // If the symbol is a member of an ability, we need to capture the entire ability.
abilities_to_introduce.insert(member_data.parent_ability);
}
if members_of_ability.contains_key(symbol) {
abilities_to_introduce.insert(*symbol);
}
});
// 2. Add each ability, and any specializations of its members we know about.
for ability in abilities_to_introduce.into_iter() {
let members = members_of_ability.get(&ability).unwrap();
let mut imported_member_data = Vec::with_capacity(members.len());
for member in members {
let mut member_data = ability_members.get(member).unwrap().clone();
// All external members need to be marked as imported. We'll figure out their real
// type variables when it comes time to solve the module we're currently importing
// into.
member_data.typ = MemberTypeInfo::Imported {
signature_var: None,
};
imported_member_data.push((*member, member_data));
}
new.register_ability(ability, imported_member_data);
// Add any specializations of the ability's members we know about.
declared_specializations
.iter()
.filter(|((member, _), _)| members.contains(member))
.for_each(|(&(member, typ), &specialization)| {
new.register_specializing_symbol(specialization.symbol, member);
new.register_specialization_for_type(member, typ, specialization);
});
}
new
}
pub fn union(&mut self, other: Self) {
let Self {
members_of_ability: other_members_of_ability,
ability_members: mut other_ability_members,
specialization_to_root,
declared_specializations,
next_specialization_id,
resolved_specializations,
} = other;
for (ability, members) in other_members_of_ability.into_iter() {
if let Some(my_members) = self.members_of_ability(ability) {
debug_assert!(
my_members == members,
"Two abilities have different definitions, definitely a bug"
);
}
let member_data = members
.into_iter()
.map(|member| (member, other_ability_members.remove(&member).unwrap()));
self.register_ability(ability, member_data);
}
for (specialization, member) in specialization_to_root.into_iter() {
let old_root = self.specialization_to_root.insert(specialization, member);
debug_assert!(old_root.is_none() || old_root.unwrap() == member);
}
for ((member, typ), specialization) in declared_specializations.into_iter() {
let old_specialization = self
.declared_specializations
.insert((member, typ), specialization);
debug_assert!(
old_specialization.is_none() || old_specialization.unwrap() == specialization
);
}
debug_assert!(next_specialization_id == 0);
debug_assert!(self.next_specialization_id == 0);
debug_assert!(resolved_specializations.is_empty());
debug_assert!(self.resolved_specializations.is_empty());
}
pub fn resolved_imported_member_var(&mut self, member: Symbol, var: Variable) {
let member_data = self.ability_members.get_mut(&member).unwrap();
match &mut member_data.typ {
MemberTypeInfo::Imported { signature_var } => {
let old = signature_var.replace(var);
debug_assert!(old.is_none(), "Replacing existing variable!");
}
_ => internal_error!("{:?} is not imported!", member),
}
}
}
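
A stand-alone sketch of the first step `closure_from_imported` performs above: collecting the abilities that a set of imported symbols closes over (importing a member pulls in its parent ability, importing an ability name pulls in the ability itself). `MiniStore`, its fields, and the symbol strings are simplified stand-ins for illustration, not the compiler's types:

```rust
use std::collections::{HashMap, HashSet};

// Simplified stand-in for the relevant parts of AbilitiesStore.
struct MiniStore {
    members_of_ability: HashMap<&'static str, Vec<&'static str>>,
    parent_of_member: HashMap<&'static str, &'static str>,
}

impl MiniStore {
    fn abilities_to_introduce(&self, imported: &HashSet<&'static str>) -> HashSet<&'static str> {
        let mut out = HashSet::new();
        for symbol in imported {
            // An imported member drags its whole ability along.
            if let Some(parent) = self.parent_of_member.get(symbol) {
                out.insert(*parent);
            }
            // An imported ability name is introduced directly.
            if self.members_of_ability.contains_key(symbol) {
                out.insert(*symbol);
            }
        }
        out
    }
}

fn main() {
    let store = MiniStore {
        members_of_ability: HashMap::from([("Hash", vec!["hash"])]),
        parent_of_member: HashMap::from([("hash", "Hash")]),
    };
    let imported = HashSet::from(["hash"]);
    assert_eq!(store.abilities_to_introduce(&imported), HashSet::from(["Hash"]));
}
```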

View File

@ -267,7 +267,7 @@ pub fn canonicalize_annotation(
annotation: &TypeAnnotation,
region: Region,
var_store: &mut VarStore,
abilities_in_scope: &[Symbol],
pending_abilities_in_scope: &[Symbol],
) -> Annotation {
let mut introduced_variables = IntroducedVariables::default();
let mut references = VecSet::default();
@ -284,7 +284,7 @@ pub fn canonicalize_annotation(
var_store,
&mut introduced_variables,
clause,
abilities_in_scope,
pending_abilities_in_scope,
&mut references,
);
if let Err(err_type) = opt_err {
@ -320,7 +320,7 @@ pub fn canonicalize_annotation(
}
}
fn make_apply_symbol(
pub(crate) fn make_apply_symbol(
env: &mut Env,
region: Region,
scope: &mut Scope,
@ -330,13 +330,13 @@ fn make_apply_symbol(
if module_name.is_empty() {
// Since module_name was empty, this is an unqualified type.
// Look it up in scope!
let ident: Ident = (*ident).into();
match scope.lookup(&ident, region) {
match scope.lookup_str(ident, region) {
Ok(symbol) => Ok(symbol),
Err(problem) => {
env.problem(roc_problem::can::Problem::RuntimeError(problem));
let ident: Ident = (*ident).into();
Err(Type::Erroneous(Problem::UnrecognizedIdent(ident)))
}
}
@ -908,7 +908,7 @@ fn canonicalize_has_clause(
var_store: &mut VarStore,
introduced_variables: &mut IntroducedVariables,
clause: &Loc<roc_parse::ast::HasClause<'_>>,
abilities_in_scope: &[Symbol],
pending_abilities_in_scope: &[Symbol],
references: &mut VecSet<Symbol>,
) -> Result<(), Type> {
let Loc {
@ -927,7 +927,12 @@ fn canonicalize_has_clause(
let ability = match ability.value {
TypeAnnotation::Apply(module_name, ident, _type_arguments) => {
let symbol = make_apply_symbol(env, ability.region, scope, module_name, ident)?;
if !abilities_in_scope.contains(&symbol) {
// Ability defined locally, whose members we are constructing right now...
if !pending_abilities_in_scope.contains(&symbol)
// or an ability that was imported from elsewhere
&& !scope.abilities_store.is_ability(symbol)
{
let region = ability.region;
env.problem(roc_problem::can::Problem::HasClauseIsNotAbility { region });
return Err(Type::Erroneous(Problem::HasClauseIsNotAbility(region)));

View File

@ -5436,7 +5436,7 @@ where
num_var,
precision_var,
ii.to_string().into_boxed_str(),
IntValue::I128(ii),
IntValue::I128(ii.to_ne_bytes()),
bound,
)
}
@ -5458,7 +5458,7 @@ fn num<I: Into<i128>>(num_var: Variable, i: I, bound: NumericBound) -> Expr {
Num(
num_var,
i.to_string().into_boxed_str(),
IntValue::I128(i),
IntValue::I128(i.to_ne_bytes()),
bound,
)
}
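
The `IntValue::I128(ii.to_ne_bytes())` change above stores 128-bit literals as native-endian byte arrays. A tiny round-trip sketch of what that representation looks like in plain Rust (the alignment motivation in the comment is an assumption, not stated in the diff):

```rust
// Round-tripping an i128 through a native-endian byte array, the representation
// IntValue now uses. Storing [u8; 16] instead of i128 presumably avoids i128's
// 16-byte alignment requirement when the value is embedded in other structures.
fn main() {
    let value: i128 = 42;
    let bytes: [u8; 16] = value.to_ne_bytes();
    assert_eq!(i128::from_ne_bytes(bytes), value);

    // Negating a stored literal, as the pattern canonicalizer does for
    // negative number patterns:
    let negated = i128::from_ne_bytes(bytes).wrapping_neg().to_ne_bytes();
    assert_eq!(i128::from_ne_bytes(negated), -42);
}
```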

View File

@ -1,10 +1,11 @@
use crate::abilities::SpecializationId;
use crate::exhaustive::{ExhaustiveContext, SketchedRows};
use crate::expected::{Expected, PExpected};
use roc_collections::soa::{EitherIndex, Index, Slice};
use roc_module::ident::TagName;
use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Loc, Region};
use roc_types::subs::{ExhaustiveMark, Variable};
use roc_types::subs::{ExhaustiveMark, IllegalCycleMark, Variable};
use roc_types::types::{Category, PatternCategory, Type};
#[derive(Debug)]
@ -23,6 +24,7 @@ pub struct Constraints {
pub sketched_rows: Vec<SketchedRows>,
pub eq: Vec<Eq>,
pub pattern_eq: Vec<PatternEq>,
pub cycles: Vec<Cycle>,
}
impl Default for Constraints {
@ -47,6 +49,7 @@ impl Constraints {
let sketched_rows = Vec::new();
let eq = Vec::new();
let pattern_eq = Vec::new();
let cycles = Vec::new();
types.extend([
Type::EmptyRec,
@ -100,6 +103,7 @@ impl Constraints {
sketched_rows,
eq,
pattern_eq,
cycles,
}
}
@ -580,7 +584,9 @@ impl Constraints {
| Constraint::IsOpenType(_)
| Constraint::IncludesTag(_)
| Constraint::PatternPresence(_, _, _, _)
| Constraint::Exhaustive { .. } => false,
| Constraint::Exhaustive { .. }
| Constraint::Resolve(..)
| Constraint::CheckCycle(..) => false,
}
}
@ -643,6 +649,32 @@ impl Constraints {
Constraint::Exhaustive(equality, sketched_rows, context, exhaustive)
}
pub fn check_cycle<I, I1>(
&mut self,
loc_symbols: I,
expr_regions: I1,
cycle_mark: IllegalCycleMark,
) -> Constraint
where
I: IntoIterator<Item = (Symbol, Region)>,
I1: IntoIterator<Item = Region>,
{
let def_names = Slice::extend_new(&mut self.loc_symbols, loc_symbols);
// we add a dummy symbol to these regions, so we can store the data in the loc_symbols vec
let it = expr_regions.into_iter().map(|r| (Symbol::ATTR_ATTR, r));
let expr_regions = Slice::extend_new(&mut self.loc_symbols, it);
let expr_regions = Slice::new(expr_regions.start() as _, expr_regions.len() as _);
let cycle = Cycle {
def_names,
expr_regions,
};
let cycle_index = Index::push_new(&mut self.cycles, cycle);
Constraint::CheckCycle(cycle_index, cycle_mark)
}
}
roc_error_macros::assert_sizeof_default!(Constraint, 3 * 8);
@ -664,6 +696,31 @@ pub struct PatternEq(
pub Region,
);
/// When we come across a lookup of an ability member, we'd like to try to specialize that
/// lookup during solving (knowing the specialization statically avoids re-solving during mono,
/// and always gives us a way to show what specialization was intended in the editor).
///
/// However, attempting to resolve the specialization right at the lookup site is futile
/// (we may not have solved enough of the surrounding context to know the specialization).
/// So, we only collect what resolutions we'd like to make, and attempt to resolve them once
/// we pass through a let-binding (a def, or a normal `=` binding). At those positions, the
/// expression is generalized, so if there is a static specialization, we'd know it at that
/// point.
///
/// Note that this is entirely opportunistic; if a lookup of an ability member uses it
/// polymorphically, we won't find its specialization(s) until monomorphization.
#[derive(Clone, Copy, Debug)]
pub struct OpportunisticResolve {
/// The specialized type of this lookup, to try to resolve.
pub specialization_variable: Variable,
pub specialization_expectation: Index<Expected<Type>>,
/// The ability member to try to resolve.
pub member: Symbol,
/// If we resolve a specialization, what specialization ID to store it on.
pub specialization_id: SpecializationId,
}
#[derive(Clone, Copy)]
pub enum Constraint {
Eq(Eq),
@ -705,6 +762,9 @@ pub enum Constraint {
ExhaustiveContext,
ExhaustiveMark,
),
/// Attempt to resolve a specialization.
Resolve(OpportunisticResolve),
CheckCycle(Index<Cycle>, IllegalCycleMark),
}
#[derive(Debug, Clone, Copy, Default)]
@ -730,6 +790,12 @@ pub struct IncludesTag {
pub region: Region,
}
#[derive(Debug, Clone, Copy)]
pub struct Cycle {
pub def_names: Slice<(Symbol, Region)>,
pub expr_regions: Slice<Region>,
}
/// Custom impl to limit vertical space used by the debug output
impl std::fmt::Debug for Constraint {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@ -766,6 +832,12 @@ impl std::fmt::Debug for Constraint {
arg0, arg1, arg2, arg3
)
}
Self::Resolve(arg0) => {
write!(f, "Resolve({:?})", arg0)
}
Self::CheckCycle(arg0, arg1) => {
write!(f, "CheckCycle({:?}, {:?})", arg0, arg1)
}
}
}
}
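
`check_cycle` above stores its def names and expression regions as slices into the shared `loc_symbols` vector. A minimal sketch of that flat-storage idea, with `MiniSlice` standing in for `roc_collections::soa::Slice` (it is not the real type, just an illustration of the pattern):

```rust
// Push items into a shared Vec and keep only a (start, len) pair, so the
// constraint itself stays small and Copy.
#[derive(Clone, Copy, Debug)]
struct MiniSlice {
    start: u32,
    len: u16,
}

impl MiniSlice {
    fn extend_new<T>(vec: &mut Vec<T>, items: impl IntoIterator<Item = T>) -> Self {
        let start = vec.len() as u32;
        vec.extend(items);
        let len = (vec.len() as u32 - start) as u16;
        MiniSlice { start, len }
    }

    fn resolve<'a, T>(&self, vec: &'a [T]) -> &'a [T] {
        &vec[self.start as usize..][..self.len as usize]
    }
}

fn main() {
    let mut loc_symbols: Vec<&str> = Vec::new();
    let def_names = MiniSlice::extend_new(&mut loc_symbols, ["fx", "fy"]);
    let expr_regions = MiniSlice::extend_new(&mut loc_symbols, ["r1", "r2"]);
    assert_eq!(def_names.resolve(&loc_symbols), &["fx", "fy"][..]);
    assert_eq!(expr_regions.resolve(&loc_symbols), &["r1", "r2"][..]);
}
```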

View File

@ -1,6 +1,9 @@
use crate::abilities::AbilityMemberData;
use crate::abilities::MemberTypeInfo;
use crate::abilities::MemberVariables;
use crate::annotation::canonicalize_annotation;
use crate::annotation::find_type_def_symbols;
use crate::annotation::make_apply_symbol;
use crate::annotation::IntroducedVariables;
use crate::annotation::OwnedNamedOrAble;
use crate::env::Env;
@ -28,7 +31,9 @@ use roc_parse::pattern::PatternType;
use roc_problem::can::ShadowKind;
use roc_problem::can::{CycleEntry, Problem, RuntimeError};
use roc_region::all::{Loc, Region};
use roc_types::subs::IllegalCycleMark;
use roc_types::subs::{VarStore, Variable};
use roc_types::types::AliasCommon;
use roc_types::types::AliasKind;
use roc_types::types::AliasVar;
use roc_types::types::LambdaSet;
@ -56,7 +61,6 @@ pub struct Annotation {
pub(crate) struct CanDefs {
defs: Vec<Option<Def>>,
def_ordering: DefOrdering,
pub(crate) abilities_in_scope: Vec<Symbol>,
aliases: VecMap<Symbol, Alias>,
}
@ -98,12 +102,19 @@ impl PendingValueDef<'_> {
#[derive(Debug, Clone)]
enum PendingTypeDef<'a> {
/// A structural or opaque type alias, e.g. `Ints : List Int` or `Age := U32` respectively.
/// A structural type alias, e.g. `Ints : List Int`
Alias {
name: Loc<Symbol>,
vars: Vec<Loc<Lowercase>>,
ann: &'a Loc<ast::TypeAnnotation<'a>>,
kind: AliasKind,
},
/// An opaque type alias, e.g. `Age := U32`.
Opaque {
name: Loc<Symbol>,
vars: Vec<Loc<Lowercase>>,
ann: &'a Loc<ast::TypeAnnotation<'a>>,
derived: Option<&'a Loc<ast::Derived<'a>>>,
},
Ability {
@ -143,6 +154,17 @@ impl PendingTypeDef<'_> {
Some((name.value, region))
}
PendingTypeDef::Opaque {
name,
vars: _,
ann,
derived,
} => {
let end = derived.map(|d| d.region).unwrap_or(ann.region);
let region = Region::span_across(&name.region, &end);
Some((name.value, region))
}
PendingTypeDef::Ability { name, .. } => Some((name.value, name.region)),
PendingTypeDef::InvalidAlias { symbol, region, .. } => Some((*symbol, *region)),
PendingTypeDef::ShadowedAlias { .. } => None,
@ -158,8 +180,10 @@ impl PendingTypeDef<'_> {
#[allow(clippy::large_enum_variant)]
pub enum Declaration {
Declare(Def),
DeclareRec(Vec<Def>),
DeclareRec(Vec<Def>, IllegalCycleMark),
Builtin(Def),
/// If we know a cycle is illegal during canonicalization.
/// Otherwise we will try to detect this during solving; see [`IllegalCycleMark`].
InvalidCycle(Vec<CycleEntry>),
}
@ -168,7 +192,7 @@ impl Declaration {
use Declaration::*;
match self {
Declare(_) => 1,
DeclareRec(defs) => defs.len(),
DeclareRec(defs, _) => defs.len(),
InvalidCycle { .. } => 0,
Builtin(_) => 0,
}
@ -202,6 +226,202 @@ fn sort_type_defs_before_introduction(
.collect()
}
#[inline(always)]
#[allow(clippy::too_many_arguments)]
fn canonicalize_alias<'a>(
env: &mut Env<'a>,
output: &mut Output,
var_store: &mut VarStore,
scope: &mut Scope,
pending_abilities_in_scope: &[Symbol],
name: Loc<Symbol>,
ann: &'a Loc<ast::TypeAnnotation<'a>>,
vars: &[Loc<Lowercase>],
kind: AliasKind,
) -> Result<Alias, ()> {
let symbol = name.value;
let can_ann = canonicalize_annotation(
env,
scope,
&ann.value,
ann.region,
var_store,
pending_abilities_in_scope,
);
// Record all the annotation's references in output.references.lookups
for symbol in can_ann.references {
output.references.insert_type_lookup(symbol);
}
let mut can_vars: Vec<Loc<AliasVar>> = Vec::with_capacity(vars.len());
let mut is_phantom = false;
let IntroducedVariables {
named,
able,
wildcards,
inferred,
..
} = can_ann.introduced_variables;
let mut named: Vec<_> = (named.into_iter().map(OwnedNamedOrAble::Named))
.chain(able.into_iter().map(OwnedNamedOrAble::Able))
.collect();
for loc_lowercase in vars.iter() {
let opt_index = named
.iter()
.position(|nv| nv.ref_name() == &loc_lowercase.value);
match opt_index {
Some(index) => {
// This is a valid lowercase rigid var for the type def.
let named_variable = named.swap_remove(index);
let var = named_variable.variable();
let opt_bound_ability = named_variable.opt_ability();
let name = named_variable.name();
can_vars.push(Loc {
value: AliasVar {
name,
var,
opt_bound_ability,
},
region: loc_lowercase.region,
});
}
None => {
is_phantom = true;
env.problems.push(Problem::PhantomTypeArgument {
typ: symbol,
variable_region: loc_lowercase.region,
variable_name: loc_lowercase.value.clone(),
});
}
}
}
if is_phantom {
// Bail out
return Err(());
}
let num_unbound = named.len() + wildcards.len() + inferred.len();
if num_unbound > 0 {
let one_occurrence = named
.iter()
.map(|nv| Loc::at(nv.first_seen(), nv.variable()))
.chain(wildcards)
.chain(inferred)
.next()
.unwrap()
.region;
env.problems.push(Problem::UnboundTypeVariable {
typ: symbol,
num_unbound,
one_occurrence,
kind,
});
// Bail out
return Err(());
}
Ok(create_alias(
symbol,
name.region,
can_vars.clone(),
can_ann.typ,
kind,
))
}
#[inline(always)]
#[allow(clippy::too_many_arguments)]
fn canonicalize_opaque<'a>(
env: &mut Env<'a>,
output: &mut Output,
var_store: &mut VarStore,
scope: &mut Scope,
pending_abilities_in_scope: &[Symbol],
name: Loc<Symbol>,
ann: &'a Loc<ast::TypeAnnotation<'a>>,
vars: &[Loc<Lowercase>],
derives: Option<&'a Loc<ast::Derived<'a>>>,
) -> Result<Alias, ()> {
let alias = canonicalize_alias(
env,
output,
var_store,
scope,
pending_abilities_in_scope,
name,
ann,
vars,
AliasKind::Opaque,
)?;
if let Some(derives) = derives {
let derives = derives.value.collection();
let mut can_derives = vec![];
for derived in derives.items {
let region = derived.region;
match derived.value.extract_spaces().item {
ast::TypeAnnotation::Apply(module_name, ident, []) => {
match make_apply_symbol(env, region, scope, module_name, ident) {
Ok(ability) if ability.is_builtin_ability() => {
can_derives.push(Loc::at(region, ability));
}
Ok(_) => {
// Register the problem but keep going, we may still be able to compile the
// program even if a derive is missing.
env.problem(Problem::IllegalDerive(region));
}
Err(_) => {
                        // This is a bad apply; an error will have been reported for it
// already.
}
}
}
_ => {
// Register the problem but keep going, we may still be able to compile the
// program even if a derive is missing.
env.problem(Problem::IllegalDerive(region));
}
}
}
if !can_derives.is_empty() {
// Fresh instance of this opaque to be checked for derivability during solving.
let fresh_inst = Type::DelayedAlias(AliasCommon {
symbol: name.value,
type_arguments: alias
.type_variables
.iter()
.map(|_| Type::Variable(var_store.fresh()))
.collect(),
lambda_set_variables: alias
.lambda_set_variables
.iter()
.map(|_| LambdaSet(Type::Variable(var_store.fresh())))
.collect(),
});
let old = output
.pending_derives
.insert(name.value, (fresh_inst, can_derives));
debug_assert!(old.is_none());
}
}
Ok(alias)
}
#[inline(always)]
pub(crate) fn canonicalize_defs<'a>(
env: &mut Env<'a>,
@ -246,17 +466,22 @@ pub(crate) fn canonicalize_defs<'a>(
}
enum TypeDef<'a> {
AliasLike(
Alias(
Loc<Symbol>,
Vec<Loc<Lowercase>>,
&'a Loc<ast::TypeAnnotation<'a>>,
AliasKind,
),
Opaque(
Loc<Symbol>,
Vec<Loc<Lowercase>>,
&'a Loc<ast::TypeAnnotation<'a>>,
Option<&'a Loc<ast::Derived<'a>>>,
),
Ability(Loc<Symbol>, &'a [AbilityMember<'a>]),
}
let mut type_defs = MutMap::default();
let mut abilities_in_scope = Vec::new();
let mut pending_abilities_in_scope = Vec::new();
let mut referenced_type_symbols = VecMap::default();
@ -269,17 +494,27 @@ pub(crate) fn canonicalize_defs<'a>(
}
match pending_def {
PendingTypeDef::Alias {
name,
vars,
ann,
kind,
} => {
PendingTypeDef::Alias { name, vars, ann } => {
let referenced_symbols = find_type_def_symbols(scope, &ann.value);
referenced_type_symbols.insert(name.value, referenced_symbols);
type_defs.insert(name.value, TypeDef::AliasLike(name, vars, ann, kind));
type_defs.insert(name.value, TypeDef::Alias(name, vars, ann));
}
PendingTypeDef::Opaque {
name,
vars,
ann,
derived,
} => {
let referenced_symbols = find_type_def_symbols(scope, &ann.value);
referenced_type_symbols.insert(name.value, referenced_symbols);
// Don't need to insert references for derived types, because these can only contain
// builtin abilities, and hence do not affect the type def sorting. We'll insert
// references of usages when canonicalizing the derives.
type_defs.insert(name.value, TypeDef::Opaque(name, vars, ann, derived));
}
PendingTypeDef::Ability { name, members } => {
let mut referenced_symbols = Vec::with_capacity(2);
@ -293,7 +528,7 @@ pub(crate) fn canonicalize_defs<'a>(
referenced_type_symbols.insert(name.value, referenced_symbols);
type_defs.insert(name.value, TypeDef::Ability(name, members));
abilities_in_scope.push(name.value);
pending_abilities_in_scope.push(name.value);
}
PendingTypeDef::InvalidAlias { .. }
| PendingTypeDef::InvalidAbility { .. }
@ -309,105 +544,40 @@ pub(crate) fn canonicalize_defs<'a>(
for type_name in sorted {
match type_defs.remove(&type_name).unwrap() {
TypeDef::AliasLike(name, vars, ann, kind) => {
let symbol = name.value;
let can_ann = canonicalize_annotation(
TypeDef::Alias(name, vars, ann) => {
let alias = canonicalize_alias(
env,
scope,
&ann.value,
ann.region,
&mut output,
var_store,
&abilities_in_scope,
scope,
&pending_abilities_in_scope,
name,
ann,
&vars,
AliasKind::Structural,
);
// Record all the annotation's references in output.references.lookups
for symbol in can_ann.references {
output.references.insert_type_lookup(symbol);
if let Ok(alias) = alias {
aliases.insert(name.value, alias);
}
}
let mut can_vars: Vec<Loc<AliasVar>> = Vec::with_capacity(vars.len());
let mut is_phantom = false;
let IntroducedVariables {
named,
able,
wildcards,
inferred,
..
} = can_ann.introduced_variables;
let mut named: Vec<_> = (named.into_iter().map(OwnedNamedOrAble::Named))
.chain(able.into_iter().map(OwnedNamedOrAble::Able))
.collect();
for loc_lowercase in vars.iter() {
let opt_index = named
.iter()
.position(|nv| nv.ref_name() == &loc_lowercase.value);
match opt_index {
Some(index) => {
// This is a valid lowercase rigid var for the type def.
let named_variable = named.swap_remove(index);
let var = named_variable.variable();
let opt_bound_ability = named_variable.opt_ability();
let name = named_variable.name();
can_vars.push(Loc {
value: AliasVar {
name,
var,
opt_bound_ability,
},
region: loc_lowercase.region,
});
}
None => {
is_phantom = true;
env.problems.push(Problem::PhantomTypeArgument {
typ: symbol,
variable_region: loc_lowercase.region,
variable_name: loc_lowercase.value.clone(),
});
}
}
}
if is_phantom {
// Bail out
continue;
}
let num_unbound = named.len() + wildcards.len() + inferred.len();
if num_unbound > 0 {
let one_occurrence = named
.iter()
.map(|nv| Loc::at(nv.first_seen(), nv.variable()))
.chain(wildcards)
.chain(inferred)
.next()
.unwrap()
.region;
env.problems.push(Problem::UnboundTypeVariable {
typ: symbol,
num_unbound,
one_occurrence,
kind,
});
// Bail out
continue;
}
let alias = create_alias(
symbol,
name.region,
can_vars.clone(),
can_ann.typ.clone(),
kind,
TypeDef::Opaque(name, vars, ann, derived) => {
let alias_and_derives = canonicalize_opaque(
env,
&mut output,
var_store,
scope,
&pending_abilities_in_scope,
name,
ann,
&vars,
derived,
);
aliases.insert(symbol, alias);
if let Ok(alias) = alias_and_derives {
aliases.insert(name.value, alias);
}
}
TypeDef::Ability(name, members) => {
@ -439,7 +609,7 @@ pub(crate) fn canonicalize_defs<'a>(
var_store,
scope,
abilities,
&abilities_in_scope,
&pending_abilities_in_scope,
pattern_type,
);
@ -507,7 +677,6 @@ pub(crate) fn canonicalize_defs<'a>(
var_store,
pattern_type,
&mut aliases,
&abilities_in_scope,
);
output = temp_output.output;
@ -521,7 +690,6 @@ pub(crate) fn canonicalize_defs<'a>(
CanDefs {
defs,
def_ordering,
abilities_in_scope,
// The result needs a thread-safe `SendMap`
aliases,
},
@ -538,7 +706,7 @@ fn resolve_abilities<'a>(
var_store: &mut VarStore,
scope: &mut Scope,
abilities: MutMap<Symbol, (Loc<Symbol>, &[AbilityMember])>,
abilities_in_scope: &[Symbol],
pending_abilities_in_scope: &[Symbol],
pattern_type: PatternType,
) {
for (loc_ability_name, members) in abilities.into_values() {
@ -551,7 +719,7 @@ fn resolve_abilities<'a>(
&member.typ.value,
member.typ.region,
var_store,
abilities_in_scope,
pending_abilities_in_scope,
);
// Record all the annotation's references in output.references.lookups
@ -643,10 +811,15 @@ fn resolve_abilities<'a>(
can_members.push((
member_sym,
name_region,
var_store.fresh(),
member_annot.typ,
variables,
AbilityMemberData {
parent_ability: loc_ability_name.value,
region: name_region,
typ: MemberTypeInfo::Local {
variables,
signature: member_annot.typ,
signature_var: var_store.fresh(),
},
},
));
}
@ -748,6 +921,7 @@ impl DefOrdering {
#[inline(always)]
pub(crate) fn sort_can_defs(
env: &mut Env<'_>,
var_store: &mut VarStore,
defs: CanDefs,
mut output: Output,
) -> (Vec<Declaration>, Output) {
@ -755,11 +929,8 @@ pub(crate) fn sort_can_defs(
mut defs,
def_ordering,
aliases,
abilities_in_scope,
} = defs;
output.abilities_in_scope = abilities_in_scope;
for (symbol, alias) in aliases.into_iter() {
output.aliases.insert(symbol, alias);
}
@ -783,7 +954,7 @@ pub(crate) fn sort_can_defs(
// recursive relations between any 2 definitions.
let sccs = def_ordering.references.strongly_connected_components_all();
let mut declarations = Vec::new();
let mut declarations = Vec::with_capacity(defs.len());
for group in sccs.groups() {
if group.count_ones() == 1 {
@ -791,6 +962,10 @@ pub(crate) fn sort_can_defs(
let index = group.iter_ones().next().unwrap();
let def = take_def!(index);
let is_specialization = matches!(
def.loc_pattern.value,
Pattern::AbilityMemberSpecialization { .. }
);
let declaration = if def_ordering.direct_references.get_row_col(index, index) {
// a definition like `x = x + 1`, which is invalid in roc
@ -803,8 +978,10 @@ pub(crate) fn sort_can_defs(
Declaration::InvalidCycle(entries)
} else if def_ordering.references.get_row_col(index, index) {
debug_assert!(!is_specialization, "Self-recursive specializations can only be determined during solving - but it was determined for {:?} now, that's a bug!", def);
// this function calls itself, and must be typechecked as a recursive def
Declaration::DeclareRec(vec![mark_def_recursive(def)])
Declaration::DeclareRec(vec![mark_def_recursive(def)], IllegalCycleMark::empty())
} else {
Declaration::Declare(def)
};
@ -821,11 +998,17 @@ pub(crate) fn sort_can_defs(
//
// boom = \{} -> boom {}
//
// In general we cannot spot faulty recursion (halting problem) so this is our best attempt
// In general we cannot spot faulty recursion (halting problem), so this is our
// purely-syntactic heuristic. We'll have a second attempt once we know the types in
// the cycle.
let direct_sccs = def_ordering
.direct_references
.strongly_connected_components_subset(group);
debug_assert!(
!group.iter_ones().any(|index| matches!((&defs[index]).as_ref().unwrap().loc_pattern.value, Pattern::AbilityMemberSpecialization{..})),
"A specialization is involved in a recursive cycle - this should not be knowable until solving");
let declaration = if direct_sccs.groups().count() == 1 {
// all defs are part of the same direct cycle, that is invalid!
let mut entries = Vec::with_capacity(group.count_ones());
@ -847,7 +1030,7 @@ pub(crate) fn sort_can_defs(
.map(|index| mark_def_recursive(take_def!(index)))
.collect();
Declaration::DeclareRec(rec_defs)
Declaration::DeclareRec(rec_defs, IllegalCycleMark::new(var_store))
};
declarations.push(declaration);
@ -984,10 +1167,12 @@ fn canonicalize_pending_value_def<'a>(
var_store: &mut VarStore,
pattern_type: PatternType,
aliases: &mut VecMap<Symbol, Alias>,
abilities_in_scope: &[Symbol],
) -> DefOutput {
use PendingValueDef::*;
// All abilities should be resolved by the time we're canonicalizing value defs.
let pending_abilities_in_scope = &[];
let output = match pending_def {
AnnotationOnly(_, loc_can_pattern, loc_ann) => {
// Make types for the body expr, even if we won't end up having a body.
@ -1002,7 +1187,7 @@ fn canonicalize_pending_value_def<'a>(
&loc_ann.value,
loc_ann.region,
var_store,
abilities_in_scope,
pending_abilities_in_scope,
);
// Record all the annotation's references in output.references.lookups
@ -1099,7 +1284,7 @@ fn canonicalize_pending_value_def<'a>(
&loc_ann.value,
loc_ann.region,
var_store,
abilities_in_scope,
pending_abilities_in_scope,
);
// Record all the annotation's references in output.references.lookups
@ -1278,7 +1463,7 @@ pub fn can_defs_with_return<'a>(
}
}
let (declarations, output) = sort_can_defs(env, unsorted, output);
let (declarations, output) = sort_can_defs(env, var_store, unsorted, output);
let mut loc_expr: Loc<Expr> = ret_expr;
@ -1295,7 +1480,9 @@ pub fn can_defs_with_return<'a>(
fn decl_to_let(decl: Declaration, loc_ret: Loc<Expr>) -> Expr {
match decl {
Declaration::Declare(def) => Expr::LetNonRec(Box::new(def), Box::new(loc_ret)),
Declaration::DeclareRec(defs) => Expr::LetRec(defs, Box::new(loc_ret)),
Declaration::DeclareRec(defs, cycle_mark) => {
Expr::LetRec(defs, Box::new(loc_ret), cycle_mark)
}
Declaration::InvalidCycle(entries) => {
Expr::RuntimeError(RuntimeError::CircularDef(entries))
}
@ -1306,6 +1493,86 @@ fn decl_to_let(decl: Declaration, loc_ret: Loc<Expr>) -> Expr {
}
}
fn to_pending_alias_or_opaque<'a>(
env: &mut Env<'a>,
scope: &mut Scope,
name: &'a Loc<&'a str>,
vars: &'a [Loc<ast::Pattern<'a>>],
ann: &'a Loc<ast::TypeAnnotation<'a>>,
opt_derived: Option<&'a Loc<ast::Derived<'a>>>,
kind: AliasKind,
) -> PendingTypeDef<'a> {
let shadow_kind = match kind {
AliasKind::Structural => ShadowKind::Alias,
AliasKind::Opaque => ShadowKind::Opaque,
};
let region = Region::span_across(&name.region, &ann.region);
match scope.introduce_without_shadow_symbol(&Ident::from(name.value), region) {
Ok(symbol) => {
let mut can_rigids: Vec<Loc<Lowercase>> = Vec::with_capacity(vars.len());
for loc_var in vars.iter() {
match loc_var.value {
ast::Pattern::Identifier(name)
if name.chars().next().unwrap().is_lowercase() =>
{
let lowercase = Lowercase::from(name);
can_rigids.push(Loc {
value: lowercase,
region: loc_var.region,
});
}
_ => {
// any other pattern in this position is a syntax error.
let problem = Problem::InvalidAliasRigid {
alias_name: symbol,
region: loc_var.region,
};
env.problems.push(problem);
return PendingTypeDef::InvalidAlias {
kind,
symbol,
region,
};
}
}
}
let name = Loc {
region: name.region,
value: symbol,
};
match kind {
AliasKind::Structural => PendingTypeDef::Alias {
name,
vars: can_rigids,
ann,
},
AliasKind::Opaque => PendingTypeDef::Opaque {
name,
vars: can_rigids,
ann,
derived: opt_derived,
},
}
}
Err((original_region, loc_shadowed_symbol)) => {
env.problem(Problem::Shadowing {
original_region,
shadow: loc_shadowed_symbol,
kind: shadow_kind,
});
PendingTypeDef::ShadowedAlias
}
}
}
fn to_pending_type_def<'a>(
env: &mut Env<'a>,
def: &'a ast::TypeDef<'a>,
@ -1318,75 +1585,20 @@ fn to_pending_type_def<'a>(
Alias {
header: TypeHeader { name, vars },
ann,
}
| Opaque {
} => to_pending_alias_or_opaque(env, scope, name, vars, ann, None, AliasKind::Structural),
Opaque {
header: TypeHeader { name, vars },
typ: ann,
} => {
let (kind, shadow_kind) = if matches!(def, Alias { .. }) {
(AliasKind::Structural, ShadowKind::Alias)
} else {
(AliasKind::Opaque, ShadowKind::Opaque)
};
let region = Region::span_across(&name.region, &ann.region);
match scope.introduce_without_shadow_symbol(&Ident::from(name.value), region) {
Ok(symbol) => {
let mut can_rigids: Vec<Loc<Lowercase>> = Vec::with_capacity(vars.len());
for loc_var in vars.iter() {
match loc_var.value {
ast::Pattern::Identifier(name)
if name.chars().next().unwrap().is_lowercase() =>
{
let lowercase = Lowercase::from(name);
can_rigids.push(Loc {
value: lowercase,
region: loc_var.region,
});
}
_ => {
// any other pattern in this position is a syntax error.
let problem = Problem::InvalidAliasRigid {
alias_name: symbol,
region: loc_var.region,
};
env.problems.push(problem);
return PendingTypeDef::InvalidAlias {
kind,
symbol,
region,
};
}
}
}
let name = Loc {
region: name.region,
value: symbol,
};
PendingTypeDef::Alias {
name,
vars: can_rigids,
ann,
kind,
}
}
Err((original_region, loc_shadowed_symbol)) => {
env.problem(Problem::Shadowing {
original_region,
shadow: loc_shadowed_symbol,
kind: shadow_kind,
});
PendingTypeDef::ShadowedAlias
}
}
}
derived,
} => to_pending_alias_or_opaque(
env,
scope,
name,
vars,
ann,
derived.as_ref(),
AliasKind::Opaque,
),
Ability {
header, members, ..

View File

@ -8,7 +8,7 @@ use roc_module::called_via::CalledVia;
use roc_module::ident::TagName;
use roc_module::symbol::Symbol;
use roc_region::all::{Loc, Region};
use roc_types::subs::{ExhaustiveMark, RedundantMark, VarStore, Variable};
use roc_types::subs::{ExhaustiveMark, IllegalCycleMark, RedundantMark, VarStore, Variable};
use roc_types::types::{AliasKind, LambdaSet, OptAbleType, OptAbleVar, Type, TypeExtension};
#[derive(Debug, Default, Clone, Copy)]
@ -72,13 +72,19 @@ pub(crate) fn build_effect_builtins(
// Effect.forever : Effect a -> Effect b
if generated_functions.forever {
let def = helper!(build_effect_forever);
declarations.push(Declaration::DeclareRec(vec![def]));
declarations.push(Declaration::DeclareRec(
vec![def],
IllegalCycleMark::empty(),
));
}
// Effect.loop : a, (a -> Effect [ Step a, Done b ]) -> Effect b
if generated_functions.loop_ {
let def = helper!(build_effect_loop);
declarations.push(Declaration::DeclareRec(vec![def]));
declarations.push(Declaration::DeclareRec(
vec![def],
IllegalCycleMark::empty(),
));
}
// Useful when working on functions in this module. By default symbols that we named do now

View File

@ -19,11 +19,14 @@ use roc_parse::ast::{self, EscapedChar, StrLiteral};
use roc_parse::pattern::PatternType::*;
use roc_problem::can::{PrecedenceProblem, Problem, RuntimeError};
use roc_region::all::{Loc, Region};
use roc_types::subs::{ExhaustiveMark, RedundantMark, VarStore, Variable};
use roc_types::subs::{ExhaustiveMark, IllegalCycleMark, RedundantMark, VarStore, Variable};
use roc_types::types::{Alias, Category, LambdaSet, OptAbleVar, Type};
use std::fmt::{Debug, Display};
use std::{char, u32};
/// Derives that an opaque type has claimed, to be checked and recorded after solving.
pub type PendingDerives = VecMap<Symbol, (Type, Vec<Loc<Symbol>>)>;
#[derive(Clone, Default, Debug)]
pub struct Output {
pub references: References,
@ -31,7 +34,7 @@ pub struct Output {
pub introduced_variables: IntroducedVariables,
pub aliases: VecMap<Symbol, Alias>,
pub non_closures: VecSet<Symbol>,
pub abilities_in_scope: Vec<Symbol>,
pub pending_derives: PendingDerives,
}
impl Output {
@ -46,20 +49,29 @@ impl Output {
.union_owned(other.introduced_variables);
self.aliases.extend(other.aliases);
self.non_closures.extend(other.non_closures);
{
let expected_derives_size = self.pending_derives.len() + other.pending_derives.len();
self.pending_derives.extend(other.pending_derives);
debug_assert!(
expected_derives_size == self.pending_derives.len(),
"Derives overwritten from nested scope - something is very wrong"
);
}
}
}
#[derive(Clone, Debug, PartialEq)]
pub enum IntValue {
I128(i128),
U128(u128),
I128([u8; 16]),
U128([u8; 16]),
}
impl Display for IntValue {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
IntValue::I128(n) => Display::fmt(&n, f),
IntValue::U128(n) => Display::fmt(&n, f),
IntValue::I128(n) => Display::fmt(&i128::from_ne_bytes(*n), f),
IntValue::U128(n) => Display::fmt(&u128::from_ne_bytes(*n), f),
}
}
}
@ -87,8 +99,9 @@ pub enum Expr {
AbilityMember(
/// Actual member name
Symbol,
/// Specialization to use
/// Specialization to use, and its variable
SpecializationId,
Variable,
),
// Branching
@ -114,7 +127,7 @@ pub enum Expr {
},
// Let
LetRec(Vec<Def>, Box<Loc<Expr>>),
LetRec(Vec<Def>, Box<Loc<Expr>>, IllegalCycleMark),
LetNonRec(Box<Def>, Box<Loc<Expr>>),
/// This is *only* for calling functions, not for tag application.
@ -203,10 +216,13 @@ pub enum Expr {
lambda_set_variables: Vec<LambdaSet>,
},
// Test
/// Test
Expect(Box<Loc<Expr>>, Box<Loc<Expr>>),
// Compiles, but will crash if reached
/// Rendered as empty box in editor
TypedHole(Variable),
/// Compiles, but will crash if reached
RuntimeError(RuntimeError),
}
@ -220,10 +236,10 @@ impl Expr {
Self::SingleQuote(..) => Category::Character,
Self::List { .. } => Category::List,
&Self::Var(sym) => Category::Lookup(sym),
&Self::AbilityMember(sym, _) => Category::Lookup(sym),
&Self::AbilityMember(sym, _, _) => Category::Lookup(sym),
Self::When { .. } => Category::When,
Self::If { .. } => Category::If,
Self::LetRec(_, expr) => expr.value.category(),
Self::LetRec(_, expr, _) => expr.value.category(),
Self::LetNonRec(_, expr) => expr.value.category(),
&Self::Call(_, _, called_via) => Category::CallResult(None, called_via),
&Self::RunLowLevel { op, .. } => Category::LowLevelOpResult(op),
@ -246,7 +262,9 @@ impl Expr {
},
&Self::OpaqueRef { name, .. } => Category::OpaqueWrap(name),
Self::Expect(..) => Category::Expect,
Self::RuntimeError(..) => Category::Unknown,
// these nodes place no constraints on the expression's type
Self::TypedHole(_) | Self::RuntimeError(..) => Category::Unknown,
}
}
}
@ -421,12 +439,7 @@ pub fn canonicalize_expr<'a>(
let (expr, output) = match expr {
&ast::Expr::Num(str) => {
let answer = num_expr_from_result(
var_store,
finish_parsing_num(str).map(|result| (str, result)),
region,
env,
);
let answer = num_expr_from_result(var_store, finish_parsing_num(str), region, env);
(answer, Output::default())
}
@ -698,7 +711,7 @@ pub fn canonicalize_expr<'a>(
}
}
ast::Expr::Var { module_name, ident } => {
canonicalize_var_lookup(env, scope, module_name, ident, region)
canonicalize_var_lookup(env, var_store, scope, module_name, ident, region)
}
ast::Expr::Underscore(name) => {
// we parse underscores, but they are not valid expression syntax
@ -1312,6 +1325,7 @@ fn canonicalize_field<'a>(
fn canonicalize_var_lookup(
env: &mut Env<'_>,
var_store: &mut VarStore,
scope: &mut Scope,
module_name: &str,
ident: &str,
@ -1323,12 +1337,16 @@ fn canonicalize_var_lookup(
let can_expr = if module_name.is_empty() {
// Since module_name was empty, this is an unqualified var.
// Look it up in scope!
match scope.lookup(&(*ident).into(), region) {
match scope.lookup_str(ident, region) {
Ok(symbol) => {
output.references.insert_value_lookup(symbol);
if scope.abilities_store.is_ability_member_name(symbol) {
AbilityMember(symbol, scope.abilities_store.fresh_specialization_id())
AbilityMember(
symbol,
scope.abilities_store.fresh_specialization_id(),
var_store.fresh(),
)
} else {
Var(symbol)
}
@ -1347,7 +1365,11 @@ fn canonicalize_var_lookup(
output.references.insert_value_lookup(symbol);
if scope.abilities_store.is_ability_member_name(symbol) {
AbilityMember(symbol, scope.abilities_store.fresh_specialization_id())
AbilityMember(
symbol,
scope.abilities_store.fresh_specialization_id(),
var_store.fresh(),
)
} else {
Var(symbol)
}
@ -1386,6 +1408,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
| other @ Var(_)
| other @ AbilityMember(..)
| other @ RunLowLevel { .. }
| other @ TypedHole { .. }
| other @ ForeignCall { .. } => other,
List {
@ -1506,7 +1529,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
Expect(Box::new(loc_condition), Box::new(loc_expr))
}
LetRec(defs, loc_expr) => {
LetRec(defs, loc_expr, mark) => {
let mut new_defs = Vec::with_capacity(defs.len());
for def in defs {
@ -1527,7 +1550,7 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
value: inline_calls(var_store, scope, loc_expr.value),
};
LetRec(new_defs, Box::new(loc_expr))
LetRec(new_defs, Box::new(loc_expr), mark)
}
LetNonRec(def, loc_expr) => {

View File

@ -3,7 +3,7 @@ use crate::annotation::canonicalize_annotation;
use crate::def::{canonicalize_defs, sort_can_defs, Declaration, Def};
use crate::effect_module::HostedGeneratedFunctions;
use crate::env::Env;
use crate::expr::{ClosureData, Expr, Output};
use crate::expr::{ClosureData, Expr, Output, PendingDerives};
use crate::operator::desugar_def;
use crate::pattern::Pattern;
use crate::scope::Scope;
@ -51,6 +51,7 @@ pub struct ModuleOutput {
pub referenced_values: VecSet<Symbol>,
pub referenced_types: VecSet<Symbol>,
pub symbols_from_requires: Vec<(Loc<Symbol>, Loc<Type>)>,
pub pending_derives: PendingDerives,
pub scope: Scope,
}
@ -166,13 +167,14 @@ pub fn canonicalize_module_defs<'a>(
exposed_ident_ids: IdentIds,
dep_idents: &'a IdentIdsByModule,
aliases: MutMap<Symbol, Alias>,
imported_abilities_state: AbilitiesStore,
exposed_imports: MutMap<Ident, (Symbol, Region)>,
exposed_symbols: &VecSet<Symbol>,
symbols_from_requires: &[(Loc<Symbol>, Loc<TypeAnnotation<'a>>)],
var_store: &mut VarStore,
) -> ModuleOutput {
let mut can_exposed_imports = MutMap::default();
let mut scope = Scope::new(home, exposed_ident_ids);
let mut scope = Scope::new(home, exposed_ident_ids, imported_abilities_state);
let mut env = Env::new(home, dep_idents, module_ids);
let num_deps = dep_idents.len();
@ -254,12 +256,12 @@ pub fn canonicalize_module_defs<'a>(
{
// These are not aliases but Apply's and we make sure they are always in scope
} else {
// This is a type alias
// This is a type alias or ability
// the symbol should already be added to the scope when this module is canonicalized
debug_assert!(
scope.contains_alias(symbol),
"The {:?} is not a type alias known in {:?}",
scope.contains_alias(symbol) || scope.abilities_store.is_ability(symbol),
"The {:?} is not a type alias or ability known in {:?}",
symbol,
home
);
@ -289,6 +291,8 @@ pub fn canonicalize_module_defs<'a>(
PatternType::TopLevelDef,
);
let pending_derives = output.pending_derives;
// See if any of the new idents we defined went unused.
// If any were unused and also not exposed, report it.
for (symbol, region) in symbols_introduced {
@ -351,18 +355,26 @@ pub fn canonicalize_module_defs<'a>(
..Default::default()
};
let (mut declarations, mut output) = sort_can_defs(&mut env, defs, new_output);
let (mut declarations, mut output) = sort_can_defs(&mut env, var_store, defs, new_output);
debug_assert!(
output.pending_derives.is_empty(),
"I thought pending derives are only found during def introduction"
);
let symbols_from_requires = symbols_from_requires
.iter()
.map(|(symbol, loc_ann)| {
// We've already canonicalized the module, so there are no pending abilities.
let pending_abilities_in_scope = &[];
let ann = canonicalize_annotation(
&mut env,
&mut scope,
&loc_ann.value,
loc_ann.region,
var_store,
&output.abilities_in_scope,
pending_abilities_in_scope,
);
ann.add_to(
@ -463,7 +475,7 @@ pub fn canonicalize_module_defs<'a>(
}
}
}
DeclareRec(defs) => {
DeclareRec(defs, _) => {
for def in defs {
for (symbol, _) in def.pattern_vars.iter() {
if exposed_but_not_defined.contains(symbol) {
@ -517,8 +529,16 @@ pub fn canonicalize_module_defs<'a>(
aliases.insert(symbol, alias);
}
for member in scope.abilities_store.root_ability_members().keys() {
exposed_but_not_defined.remove(member);
for (ability, members) in scope
.abilities_store
.iter_abilities()
.filter(|(ab, _)| ab.module_id() == home)
{
exposed_but_not_defined.remove(&ability);
members.iter().for_each(|member| {
debug_assert!(member.module_id() == home);
exposed_but_not_defined.remove(member);
});
}
// By this point, all exposed symbols should have been removed from
@ -559,7 +579,9 @@ pub fn canonicalize_module_defs<'a>(
for declaration in declarations.iter_mut() {
match declaration {
Declare(def) => fix_values_captured_in_closure_def(def, &mut VecSet::default()),
DeclareRec(defs) => fix_values_captured_in_closure_defs(defs, &mut VecSet::default()),
DeclareRec(defs, _) => {
fix_values_captured_in_closure_defs(defs, &mut VecSet::default())
}
InvalidCycle(_) | Builtin(_) => {}
}
}
@ -574,6 +596,7 @@ pub fn canonicalize_module_defs<'a>(
exposed_imports: can_exposed_imports,
problems: env.problems,
symbols_from_requires,
pending_derives,
lookups,
}
}
@ -594,7 +617,9 @@ fn fix_values_captured_in_closure_defs(
) {
// recursive defs cannot capture each other
for def in defs.iter() {
no_capture_symbols.extend(crate::pattern::symbols_from_pattern(&def.loc_pattern.value));
no_capture_symbols.extend(
crate::traverse::symbols_introduced_from_pattern(&def.loc_pattern).map(|ls| ls.value),
);
}
// TODO mutually recursive functions should both capture the union of both their capture sets
@ -665,7 +690,7 @@ fn fix_values_captured_in_closure_expr(
fix_values_captured_in_closure_def(def, no_capture_symbols);
fix_values_captured_in_closure_expr(&mut loc_expr.value, no_capture_symbols);
}
LetRec(defs, loc_expr) => {
LetRec(defs, loc_expr, _) => {
// LetRec(Vec<Def>, Box<Located<Expr>>, Variable, Aliases),
fix_values_captured_in_closure_defs(defs, no_capture_symbols);
fix_values_captured_in_closure_expr(&mut loc_expr.value, no_capture_symbols);
@ -706,6 +731,7 @@ fn fix_values_captured_in_closure_expr(
| Var(_)
| AbilityMember(..)
| EmptyRecord
| TypedHole { .. }
| RuntimeError(_)
| ZeroArgumentTag { .. }
| Accessor { .. } => {}

View File

@ -107,10 +107,14 @@ pub enum ParsedNumResult {
}
#[inline(always)]
pub fn finish_parsing_num(raw: &str) -> Result<ParsedNumResult, (&str, IntErrorKind)> {
pub fn finish_parsing_num(raw: &str) -> Result<(&str, ParsedNumResult), (&str, IntErrorKind)> {
// Ignore underscores.
let radix = 10;
from_str_radix(raw.replace('_', "").as_str(), radix).map_err(|e| (raw, e))
let (_, raw_without_suffix) = parse_literal_suffix(raw);
match from_str_radix(raw.replace('_', "").as_str(), radix) {
Ok(result) => Ok((raw_without_suffix, result)),
Err(e) => Err((raw, e)),
}
}
#[inline(always)]
@ -223,7 +227,7 @@ fn from_str_radix(src: &str, radix: u32) -> Result<ParsedNumResult, IntErrorKind
use std::num::IntErrorKind as StdIEK;
let result = match i128::from_str_radix(src, radix) {
Ok(result) => IntValue::I128(result),
Ok(result) => IntValue::I128(result.to_ne_bytes()),
Err(pie) => match pie.kind() {
StdIEK::Empty => return Err(IntErrorKind::Empty),
StdIEK::InvalidDigit => return Err(IntErrorKind::InvalidDigit),
@ -231,7 +235,7 @@ fn from_str_radix(src: &str, radix: u32) -> Result<ParsedNumResult, IntErrorKind
StdIEK::PosOverflow => {
// try a u128
match u128::from_str_radix(src, radix) {
Ok(result) => IntValue::U128(result),
Ok(result) => IntValue::U128(result.to_ne_bytes()),
Err(pie) => match pie.kind() {
StdIEK::InvalidDigit => return Err(IntErrorKind::InvalidDigit),
StdIEK::PosOverflow => return Err(IntErrorKind::Overflow),
@ -248,7 +252,11 @@ fn from_str_radix(src: &str, radix: u32) -> Result<ParsedNumResult, IntErrorKind
};
let (lower_bound, is_negative) = match result {
IntValue::I128(num) => (lower_bound_of_int(num), num < 0),
IntValue::I128(bytes) => {
let num = i128::from_ne_bytes(bytes);
(lower_bound_of_int(num), num < 0)
}
IntValue::U128(_) => (IntWidth::U128, false),
};
@ -273,8 +281,8 @@ fn from_str_radix(src: &str, radix: u32) -> Result<ParsedNumResult, IntErrorKind
// TODO: this is somewhat incorrect, revisit
Ok(ParsedNumResult::Float(
match result {
IntValue::I128(n) => n as f64,
IntValue::U128(n) => n as f64,
IntValue::I128(n) => i128::from_ne_bytes(n) as f64,
            IntValue::U128(n) => u128::from_ne_bytes(n) as f64,
},
FloatBound::Exact(fw),
))

View File

@ -175,61 +175,6 @@ pub enum DestructType {
Guard(Variable, Loc<Pattern>),
}
pub fn symbols_from_pattern(pattern: &Pattern) -> Vec<Symbol> {
let mut symbols = Vec::new();
symbols_from_pattern_help(pattern, &mut symbols);
symbols
}
pub fn symbols_from_pattern_help(pattern: &Pattern, symbols: &mut Vec<Symbol>) {
use Pattern::*;
match pattern {
Identifier(symbol) | Shadowed(_, _, symbol) => {
symbols.push(*symbol);
}
AbilityMemberSpecialization { ident, specializes } => {
symbols.push(*ident);
symbols.push(*specializes);
}
AppliedTag { arguments, .. } => {
for (_, nested) in arguments {
symbols_from_pattern_help(&nested.value, symbols);
}
}
UnwrappedOpaque {
opaque, argument, ..
} => {
symbols.push(*opaque);
let (_, nested) = &**argument;
symbols_from_pattern_help(&nested.value, symbols);
}
RecordDestructure { destructs, .. } => {
for destruct in destructs {
// when a record field has a pattern guard, only symbols in the guard are introduced
if let DestructType::Guard(_, subpattern) = &destruct.value.typ {
symbols_from_pattern_help(&subpattern.value, symbols);
} else {
symbols.push(destruct.value.symbol);
}
}
}
NumLiteral(..)
| IntLiteral(..)
| FloatLiteral(..)
| StrLiteral(_)
| SingleQuote(_)
| Underscore
| MalformedPattern(_, _)
| UnsupportedPattern(_)
| OpaqueNotInScope(..) => {}
}
}
pub fn canonicalize_def_header_pattern<'a>(
env: &mut Env<'a>,
var_store: &mut VarStore,
@ -246,15 +191,20 @@ pub fn canonicalize_def_header_pattern<'a>(
Identifier(name) => {
match scope.introduce_or_shadow_ability_member((*name).into(), region) {
Ok((symbol, shadowing_ability_member)) => {
output.references.insert_bound(symbol);
let can_pattern = match shadowing_ability_member {
// A fresh identifier.
None => Pattern::Identifier(symbol),
None => {
output.references.insert_bound(symbol);
Pattern::Identifier(symbol)
}
// Likely a specialization of an ability.
Some(ability_member_name) => Pattern::AbilityMemberSpecialization {
ident: symbol,
specializes: ability_member_name,
},
Some(ability_member_name) => {
output.references.insert_value_lookup(ability_member_name);
Pattern::AbilityMemberSpecialization {
ident: symbol,
specializes: ability_member_name,
}
}
};
Loc::at(region, can_pattern)
}
@ -415,20 +365,20 @@ pub fn canonicalize_pattern<'a>(
let problem = MalformedPatternProblem::MalformedInt;
malformed_pattern(env, problem, region)
}
Ok(ParsedNumResult::UnknownNum(int, bound)) => {
Pattern::NumLiteral(var_store.fresh(), (str).into(), int, bound)
Ok((parsed, ParsedNumResult::UnknownNum(int, bound))) => {
Pattern::NumLiteral(var_store.fresh(), (parsed).into(), int, bound)
}
Ok(ParsedNumResult::Int(int, bound)) => Pattern::IntLiteral(
Ok((parsed, ParsedNumResult::Int(int, bound))) => Pattern::IntLiteral(
var_store.fresh(),
var_store.fresh(),
(str).into(),
(parsed).into(),
int,
bound,
),
Ok(ParsedNumResult::Float(float, bound)) => Pattern::FloatLiteral(
Ok((parsed, ParsedNumResult::Float(float, bound))) => Pattern::FloatLiteral(
var_store.fresh(),
var_store.fresh(),
(str).into(),
(parsed).into(),
float,
bound,
),
@ -452,11 +402,15 @@ pub fn canonicalize_pattern<'a>(
malformed_pattern(env, problem, region)
}
Ok((int, bound)) => {
use std::ops::Neg;
let sign_str = if is_negative { "-" } else { "" };
let int_str = format!("{}{}", sign_str, int).into_boxed_str();
let i = match int {
// Safety: this is fine because I128::MAX = |I128::MIN| - 1
IntValue::I128(n) if is_negative => IntValue::I128(-n),
IntValue::I128(n) if is_negative => {
IntValue::I128(i128::from_ne_bytes(n).neg().to_ne_bytes())
}
IntValue::I128(n) => IntValue::I128(n),
IntValue::U128(_) => unreachable!(),
};

View File

@ -32,7 +32,11 @@ pub struct Scope {
}
impl Scope {
pub fn new(home: ModuleId, initial_ident_ids: IdentIds) -> Scope {
pub fn new(
home: ModuleId,
initial_ident_ids: IdentIds,
starting_abilities_store: AbilitiesStore,
) -> Scope {
let imports = Symbol::default_in_scope()
.into_iter()
.map(|(a, (b, c))| (a, b, c))
@ -43,22 +47,25 @@ impl Scope {
exposed_ident_count: initial_ident_ids.len(),
locals: ScopedIdentIds::from_ident_ids(home, initial_ident_ids),
aliases: VecMap::default(),
// TODO(abilities): default abilities in scope
abilities_store: AbilitiesStore::default(),
abilities_store: starting_abilities_store,
imports,
}
}
pub fn lookup(&self, ident: &Ident, region: Region) -> Result<Symbol, RuntimeError> {
self.lookup_str(ident.as_str(), region)
}
pub fn lookup_str(&self, ident: &str, region: Region) -> Result<Symbol, RuntimeError> {
use ContainsIdent::*;
match self.scope_contains_ident(ident.as_str()) {
match self.scope_contains_ident(ident) {
InScope(symbol, _) => Ok(symbol),
NotInScope(_) | NotPresent => {
let error = RuntimeError::LookupNotInScope(
Loc {
region,
value: ident.clone(),
value: Ident::from(ident),
},
self.idents_in_scope().map(|v| v.as_ref().into()).collect(),
);
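
The `lookup_str` change above takes a `&str` and only builds an owned identifier on the error path. A small, self-contained sketch of that allocate-only-on-failure pattern, using made-up `MiniScope`/`LookupError` types rather than the compiler's:

```rust
use std::collections::HashMap;

struct MiniScope {
    idents: HashMap<String, u32>,
}

#[derive(Debug)]
struct LookupError {
    ident: String,
}

impl MiniScope {
    fn lookup_str(&self, ident: &str) -> Result<u32, LookupError> {
        match self.idents.get(ident) {
            Some(symbol) => Ok(*symbol),
            None => Err(LookupError {
                // The owned copy is only made when the lookup fails.
                ident: ident.to_string(),
            }),
        }
    }
}

fn main() {
    let scope = MiniScope {
        idents: HashMap::from([("product".to_string(), 1)]),
    };
    assert_eq!(scope.lookup_str("product").unwrap(), 1);
    assert!(scope.lookup_str("missing").is_err());
}
```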
@ -560,7 +567,11 @@ mod test {
#[test]
fn scope_contains_introduced() {
let _register_module_debug_names = ModuleIds::default();
let mut scope = Scope::new(ModuleId::ATTR, IdentIds::default());
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
);
let region = Region::zero();
let ident = Ident::from("mezolit");
@ -575,7 +586,11 @@ mod test {
#[test]
fn second_introduce_shadows() {
let _register_module_debug_names = ModuleIds::default();
let mut scope = Scope::new(ModuleId::ATTR, IdentIds::default());
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
);
let region1 = Region::from_pos(Position { offset: 10 });
let region2 = Region::from_pos(Position { offset: 20 });
@ -600,7 +615,11 @@ mod test {
#[test]
fn inner_scope_does_not_influence_outer() {
let _register_module_debug_names = ModuleIds::default();
let mut scope = Scope::new(ModuleId::ATTR, IdentIds::default());
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
);
let region = Region::zero();
let ident = Ident::from("uránia");
@ -617,7 +636,11 @@ mod test {
#[test]
fn default_idents_in_scope() {
let _register_module_debug_names = ModuleIds::default();
let scope = Scope::new(ModuleId::ATTR, IdentIds::default());
let scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
);
let idents: Vec<_> = scope.idents_in_scope().collect();
@ -640,7 +663,11 @@ mod test {
#[test]
fn idents_with_inner_scope() {
let _register_module_debug_names = ModuleIds::default();
let mut scope = Scope::new(ModuleId::ATTR, IdentIds::default());
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
);
let idents: Vec<_> = scope.idents_in_scope().collect();
@ -707,7 +734,11 @@ mod test {
#[test]
fn import_is_in_scope() {
let _register_module_debug_names = ModuleIds::default();
let mut scope = Scope::new(ModuleId::ATTR, IdentIds::default());
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
);
let ident = Ident::from("product");
let symbol = Symbol::LIST_PRODUCT;
@ -725,7 +756,11 @@ mod test {
#[test]
fn shadow_of_import() {
let _register_module_debug_names = ModuleIds::default();
let mut scope = Scope::new(ModuleId::ATTR, IdentIds::default());
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
);
let ident = Ident::from("product");
let symbol = Symbol::LIST_PRODUCT;

View File

@ -1,13 +1,14 @@
//! Traversals over the can ast.
use roc_module::ident::Lowercase;
use roc_module::{ident::Lowercase, symbol::Symbol};
use roc_region::all::{Loc, Region};
use roc_types::subs::Variable;
use crate::{
abilities::SpecializationId,
def::{Annotation, Declaration, Def},
expr::{AccessorData, ClosureData, Expr, Field, WhenBranch},
pattern::Pattern,
pattern::{DestructType, Pattern, RecordDestruct},
};
macro_rules! visit_list {
@ -27,7 +28,7 @@ pub fn walk_decl<V: Visitor>(visitor: &mut V, decl: &Declaration) {
Declaration::Declare(def) => {
visitor.visit_def(def);
}
Declaration::DeclareRec(defs) => {
Declaration::DeclareRec(defs, _cycle_mark) => {
visit_list!(visitor, visit_def, defs)
}
Declaration::Builtin(def) => visitor.visit_def(def),
@ -91,7 +92,7 @@ pub fn walk_expr<V: Visitor>(visitor: &mut V, expr: &Expr, var: Variable) {
branch_var,
final_else,
} => walk_if(visitor, *cond_var, branches, *branch_var, final_else),
Expr::LetRec(defs, body) => {
Expr::LetRec(defs, body, _cycle_mark) => {
defs.iter().for_each(|def| visitor.visit_def(def));
visitor.visit_expr(&body.value, body.region, var);
}
@ -167,6 +168,7 @@ pub fn walk_expr<V: Visitor>(visitor: &mut V, expr: &Expr, var: Variable) {
visitor.visit_expr(&e1.value, e1.region, Variable::NULL);
visitor.visit_expr(&e2.value, e2.region, Variable::NULL);
}
Expr::TypedHole(_) => { /* terminal */ }
Expr::RuntimeError(..) => { /* terminal */ }
}
}
@ -271,7 +273,7 @@ pub fn walk_record_fields<'a, V: Visitor>(
)
}
pub trait Visitor: Sized + PatternVisitor {
pub trait Visitor: Sized {
fn visit_decls(&mut self, decls: &[Declaration]) {
walk_decls(self, decls);
}
@ -291,16 +293,52 @@ pub trait Visitor: Sized + PatternVisitor {
fn visit_expr(&mut self, expr: &Expr, _region: Region, var: Variable) {
walk_expr(self, expr, var);
}
}
pub fn walk_pattern<V: PatternVisitor>(_visitor: &mut V, _pattern: &Pattern) {
// ignore for now
}
pub trait PatternVisitor: Sized {
fn visit_pattern(&mut self, pattern: &Pattern, _region: Region, _opt_var: Option<Variable>) {
walk_pattern(self, pattern);
}
fn visit_record_destruct(&mut self, destruct: &RecordDestruct, _region: Region) {
walk_record_destruct(self, destruct);
}
}
pub fn walk_pattern<V: Visitor>(visitor: &mut V, pattern: &Pattern) {
use Pattern::*;
match pattern {
Identifier(..) => { /* terminal */ }
AppliedTag { arguments, .. } => arguments
.iter()
.for_each(|(v, lp)| visitor.visit_pattern(&lp.value, lp.region, Some(*v))),
UnwrappedOpaque { argument, .. } => {
let (v, lp) = &**argument;
visitor.visit_pattern(&lp.value, lp.region, Some(*v));
}
RecordDestructure { destructs, .. } => destructs
.iter()
.for_each(|d| visitor.visit_record_destruct(&d.value, d.region)),
NumLiteral(..) => { /* terminal */ }
IntLiteral(..) => { /* terminal */ }
FloatLiteral(..) => { /* terminal */ }
StrLiteral(..) => { /* terminal */ }
SingleQuote(..) => { /* terminal */ }
Underscore => { /* terminal */ }
AbilityMemberSpecialization { .. } => { /* terminal */ }
Shadowed(..) => { /* terminal */ }
OpaqueNotInScope(..) => { /* terminal */ }
UnsupportedPattern(..) => { /* terminal */ }
MalformedPattern(..) => { /* terminal */ }
}
}
pub fn walk_record_destruct<V: Visitor>(visitor: &mut V, destruct: &RecordDestruct) {
use DestructType::*;
match &destruct.typ {
Required => { /* terminal */ }
Optional(var, expr) => visitor.visit_expr(&expr.value, expr.region, *var),
Guard(var, pat) => visitor.visit_pattern(&pat.value, pat.region, Some(*var)),
}
}
struct TypeAtVisitor {
@ -308,18 +346,6 @@ struct TypeAtVisitor {
typ: Option<Variable>,
}
impl PatternVisitor for TypeAtVisitor {
fn visit_pattern(&mut self, pat: &Pattern, region: Region, opt_var: Option<Variable>) {
if region == self.region {
debug_assert!(self.typ.is_none());
self.typ = opt_var;
return;
}
if region.contains(&self.region) {
walk_pattern(self, pat)
}
}
}
impl Visitor for TypeAtVisitor {
fn visit_expr(&mut self, expr: &Expr, region: Region, var: Variable) {
if region == self.region {
@ -331,6 +357,17 @@ impl Visitor for TypeAtVisitor {
walk_expr(self, expr, var);
}
}
fn visit_pattern(&mut self, pat: &Pattern, region: Region, opt_var: Option<Variable>) {
if region == self.region {
debug_assert!(self.typ.is_none());
self.typ = opt_var;
return;
}
if region.contains(&self.region) {
walk_pattern(self, pat)
}
}
}
/// Attempts to find the type of an expression at `region`, if it exists.
@ -339,3 +376,71 @@ pub fn find_type_at(region: Region, decls: &[Declaration]) -> Option<Variable> {
visitor.visit_decls(decls);
visitor.typ
}
pub fn find_ability_member_at(
region: Region,
decls: &[Declaration],
) -> Option<(Symbol, SpecializationId)> {
let mut visitor = Finder {
region,
found: None,
};
visitor.visit_decls(decls);
return visitor.found;
struct Finder {
region: Region,
found: Option<(Symbol, SpecializationId)>,
}
impl Visitor for Finder {
fn visit_expr(&mut self, expr: &Expr, region: Region, var: Variable) {
if region == self.region {
if let &Expr::AbilityMember(symbol, specialization_id, _) = expr {
debug_assert!(self.found.is_none());
self.found = Some((symbol, specialization_id));
return;
}
}
if region.contains(&self.region) {
walk_expr(self, expr, var);
}
}
}
}
pub fn symbols_introduced_from_pattern(
pattern: &Loc<Pattern>,
) -> impl Iterator<Item = Loc<Symbol>> {
let mut visitor = Collector {
symbols: Vec::new(),
};
visitor.visit_pattern(&pattern.value, pattern.region, None);
return visitor.symbols.into_iter();
struct Collector {
symbols: Vec<Loc<Symbol>>,
}
impl Visitor for Collector {
fn visit_pattern(&mut self, pattern: &Pattern, region: Region, _opt_var: Option<Variable>) {
use Pattern::*;
match pattern {
Identifier(symbol)
| Shadowed(_, _, symbol)
| AbilityMemberSpecialization { ident: symbol, .. } => {
self.symbols.push(Loc::at(region, *symbol));
}
_ => walk_pattern(self, pattern),
}
}
fn visit_record_destruct(&mut self, destruct: &RecordDestruct, region: Region) {
// when a record field has a pattern guard, only symbols in the guard are introduced
if let DestructType::Guard(_, subpattern) = &destruct.typ {
self.visit_pattern(&subpattern.value, subpattern.region, None);
} else {
self.symbols.push(Loc::at(region, destruct.symbol));
}
}
}
}
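Editor's note: the reworked traversal above folds the old `PatternVisitor` into `Visitor` and adds `walk_pattern`/`walk_record_destruct`, so one trait now covers expressions and patterns. As a hedged illustration (not part of the diff), a custom visitor only needs to override the hooks it cares about and call the matching `walk_*` helper to keep descending; the struct name below is invented, and the sketch assumes the imports already at the top of this module.

```
/// Collects the regions of all integer literal expressions under a traversal.
struct IntLiteralRegions {
    regions: Vec<Region>,
}

impl Visitor for IntLiteralRegions {
    fn visit_expr(&mut self, expr: &Expr, region: Region, var: Variable) {
        if matches!(expr, Expr::Int(..)) {
            self.regions.push(region);
        }
        // keep descending into sub-expressions via the default traversal
        walk_expr(self, expr, var);
    }
}
```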

View File

@ -55,7 +55,7 @@ pub fn can_expr_with(arena: &Bump, home: ModuleId, expr_str: &str) -> CanExprOut
// rules multiple times unnecessarily.
let loc_expr = operator::desugar_expr(arena, &loc_expr);
let mut scope = Scope::new(home, IdentIds::default());
let mut scope = Scope::new(home, IdentIds::default(), Default::default());
scope.add_alias(
Symbol::NUM_INT,
Region::zero(),

View File

@ -68,7 +68,7 @@ mod test_can {
match actual_out.loc_expr.value {
Expr::Int(_, _, _, actual, _) => {
assert_eq!(IntValue::I128(expected), actual);
assert_eq!(IntValue::I128(expected.to_ne_bytes()), actual);
}
actual => {
panic!("Expected an Num.Int *, but got: {:?}", actual);
@ -82,7 +82,7 @@ mod test_can {
match actual_out.loc_expr.value {
Expr::Num(_, _, actual, _) => {
assert_eq!(IntValue::I128(expected), actual);
assert_eq!(IntValue::I128(expected.to_ne_bytes()), actual);
}
actual => {
panic!("Expected a Num, but got: {:?}", actual);
@ -658,22 +658,24 @@ mod test_can {
// TAIL CALLS
fn get_closure(expr: &Expr, i: usize) -> roc_can::expr::Recursive {
match expr {
LetRec(assignments, body) => match &assignments.get(i).map(|def| &def.loc_expr.value) {
Some(Closure(ClosureData {
recursive: recursion,
..
})) => *recursion,
Some(other) => {
panic!("assignment at {} is not a closure, but a {:?}", i, other)
}
None => {
if i > 0 {
get_closure(&body.value, i - 1)
} else {
panic!("Looking for assignment at {} but the list is too short", i)
LetRec(assignments, body, _) => {
match &assignments.get(i).map(|def| &def.loc_expr.value) {
Some(Closure(ClosureData {
recursive: recursion,
..
})) => *recursion,
Some(other) => {
panic!("assignment at {} is not a closure, but a {:?}", i, other)
}
None => {
if i > 0 {
get_closure(&body.value, i - 1)
} else {
panic!("Looking for assignment at {} but the list is too short", i)
}
}
}
},
}
LetNonRec(def, body) => {
if i > 0 {
// recurse in the body (not the def!)

View File

@ -3,7 +3,7 @@ name = "roc_collections"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
edition = "2021"
[dependencies]
im = "15.0.0"

View File

@ -10,7 +10,7 @@ mod vec_map;
mod vec_set;
pub use all::{default_hasher, BumpMap, ImEntry, ImMap, ImSet, MutMap, MutSet, SendMap};
pub use reference_matrix::{ReferenceMatrix, Sccs};
pub use reference_matrix::{ReferenceMatrix, Sccs, TopologicalSort};
pub use small_string_interner::SmallStringInterner;
pub use vec_map::VecMap;
pub use vec_set::VecSet;

View File

@ -154,7 +154,7 @@ impl ReferenceMatrix {
}
}
#[allow(dead_code)]
#[derive(Debug)]
pub enum TopologicalSort {
/// There were no cycles, all nodes have been partitioned into groups
Groups { groups: Vec<Vec<u32>> },

View File

@ -105,6 +105,10 @@ impl<T> Slice<T> {
self.length as _
}
pub const fn start(&self) -> usize {
self.start as _
}
pub const fn is_empty(&self) -> bool {
self.length == 0
}
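Editor's note: the new `start()` accessor pairs with the existing `len()` so callers can turn a slice handle back into an index range over its backing storage. The snippet below is an illustrative stand-in type with the same surface, not `roc_collections`' `Slice`, and is not part of the diff.

```
// Stand-in with the same start/len surface as the accessors above.
struct IdxSlice {
    start: u32,
    length: u16,
}

impl IdxSlice {
    const fn start(&self) -> usize {
        self.start as _
    }
    const fn len(&self) -> usize {
        self.length as _
    }
}

fn main() {
    let backing = vec![10, 20, 30, 40, 50];
    let slice = IdxSlice { start: 1, length: 3 };
    let items = &backing[slice.start()..slice.start() + slice.len()];
    assert_eq!(items, &[20, 30, 40]);
}
```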

View File

@ -1,3 +1,5 @@
use std::iter::FromIterator;
#[derive(Debug, Clone)]
pub struct VecMap<K, V> {
keys: Vec<K>,
@ -123,7 +125,7 @@ impl<K: PartialEq, V> VecMap<K, V> {
}
}
impl<K: Ord, V> Extend<(K, V)> for VecMap<K, V> {
impl<K: PartialEq, V> Extend<(K, V)> for VecMap<K, V> {
#[inline(always)]
fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
let it = iter.into_iter();
@ -184,3 +186,15 @@ impl<K, V> ExactSizeIterator for IntoIter<K, V> {
self.len
}
}
impl<K: PartialEq, V> FromIterator<(K, V)> for VecMap<K, V> {
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
let iter = iter.into_iter();
let size_hint = iter.size_hint();
let mut map = VecMap::with_capacity(size_hint.1.unwrap_or(size_hint.0));
for (k, v) in iter {
map.insert(k, v);
}
map
}
}
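Editor's note: with the `Extend` bound relaxed to `PartialEq` and the new `FromIterator` impl, key/value pairs can be collected straight into a `VecMap`. A small hedged sketch (not part of the diff), assuming a dependency on `roc_collections` and the `VecMap` re-export from its `lib.rs`:

```
use roc_collections::VecMap;

fn main() {
    // the new FromIterator impl builds the map by inserting each pair
    let mut map: VecMap<&str, u32> = vec![("a", 1), ("b", 2)].into_iter().collect();
    map.insert("c", 3);
}
```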

View File

@ -1,3 +1,5 @@
use std::iter::FromIterator;
#[derive(Clone, Debug, PartialEq)]
pub struct VecSet<T> {
elements: Vec<T>,
@ -55,13 +57,15 @@ impl<T: PartialEq> VecSet<T> {
self.elements.contains(value)
}
pub fn remove(&mut self, value: &T) {
/// Removes the value with a swap_remove if it is present,
/// and returns whether it was present in the set.
pub fn remove(&mut self, value: &T) -> bool {
match self.elements.iter().position(|x| x == value) {
None => {
// just do nothing
}
None => false,
Some(index) => {
self.elements.swap_remove(index);
true
}
}
}
@ -99,6 +103,14 @@ impl<A: Ord> Extend<A> for VecSet<A> {
}
}
impl<A: Ord> FromIterator<A> for VecSet<A> {
fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
let mut set = VecSet::default();
set.extend(iter);
set
}
}
impl<T> IntoIterator for VecSet<T> {
type Item = T;
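Editor's note: a hedged sketch (not part of the diff) exercising the new `FromIterator` impl and the changed `remove` contract, which now reports whether the value was present; it assumes a dependency on `roc_collections` and the `VecSet` re-export shown earlier.

```
use roc_collections::VecSet;

fn main() {
    let mut set: VecSet<u32> = vec![1, 2, 3].into_iter().collect();

    // remove now returns true only when the value was actually in the set
    assert!(set.remove(&2));
    assert!(!set.remove(&2));
    assert!(set.contains(&1));
}
```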

View File

@ -3,7 +3,7 @@ name = "roc_constrain"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
edition = "2021"
[dependencies]
roc_collections = { path = "../collections" }

View File

@ -3,7 +3,7 @@ use crate::builtins::{
};
use crate::pattern::{constrain_pattern, PatternState};
use roc_can::annotation::IntroducedVariables;
use roc_can::constraint::{Constraint, Constraints};
use roc_can::constraint::{Constraint, Constraints, OpportunisticResolve};
use roc_can::def::{Declaration, Def};
use roc_can::exhaustive::{sketch_pattern_to_rows, sketch_when_branches, ExhaustiveContext};
use roc_can::expected::Expected::{self, *};
@ -11,11 +11,12 @@ use roc_can::expected::PExpected;
use roc_can::expr::Expr::{self, *};
use roc_can::expr::{AccessorData, AnnotatedMark, ClosureData, Field, WhenBranch};
use roc_can::pattern::Pattern;
use roc_can::traverse::symbols_introduced_from_pattern;
use roc_collections::all::{HumanIndex, MutMap, SendMap};
use roc_module::ident::{Lowercase, TagName};
use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Loc, Region};
use roc_types::subs::Variable;
use roc_types::subs::{IllegalCycleMark, Variable};
use roc_types::types::Type::{self, *};
use roc_types::types::{
AliasKind, AnnotationSource, Category, OptAbleType, PReason, Reason, RecordField, TypeExtension,
@ -40,16 +41,16 @@ impl Info {
}
pub struct Env {
/// Whenever we encounter a user-defined type variable (a "rigid" var for short),
/// for example `a` in the annotation `identity : a -> a`, we add it to this
/// map so that expressions within that annotation can share these vars.
pub rigids: MutMap<Lowercase, Variable>,
pub resolutions_to_make: Vec<OpportunisticResolve>,
pub home: ModuleId,
}
fn constrain_untyped_args(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
arguments: &[(Variable, AnnotatedMark, Loc<Pattern>)],
closure_type: Type,
return_type: Type,
@ -88,7 +89,7 @@ fn constrain_untyped_args(
pub fn constrain_expr(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
region: Region,
expr: &Expr,
expected: Expected<Type>,
@ -258,7 +259,7 @@ pub fn constrain_expr(
let (fn_var, loc_fn, closure_var, ret_var) = &**boxed;
// The expression that evaluates to the function being called, e.g. `foo` in
// (foo) bar baz
let opt_symbol = if let Var(symbol) | AbilityMember(symbol, _) = loc_fn.value {
let opt_symbol = if let Var(symbol) | AbilityMember(symbol, _, _) = loc_fn.value {
Some(symbol)
} else {
None
@ -336,10 +337,31 @@ pub fn constrain_expr(
// make lookup constraint to lookup this symbol's type in the environment
constraints.lookup(*symbol, expected, region)
}
AbilityMember(symbol, _specialization) => {
&AbilityMember(symbol, specialization_id, specialization_var) => {
// make lookup constraint to lookup this symbol's type in the environment
constraints.lookup(*symbol, expected, region)
// TODO: consider trying to solve `_specialization` here.
let store_expected = constraints.equal_types_var(
specialization_var,
expected,
Category::Storage(file!(), line!()),
region,
);
let lookup_constr = constraints.lookup(
symbol,
Expected::NoExpectation(Type::Variable(specialization_var)),
region,
);
// Make sure we attempt to resolve the specialization.
env.resolutions_to_make.push(OpportunisticResolve {
specialization_variable: specialization_var,
specialization_expectation: constraints.push_expected_type(
Expected::NoExpectation(Type::Variable(specialization_var)),
),
member: symbol,
specialization_id,
});
constraints.and_constraint([store_expected, lookup_constr])
}
Closure(ClosureData {
function_type: fn_var,
@ -799,16 +821,8 @@ pub fn constrain_expr(
region,
);
let constraint = constrain_expr(
constraints,
&Env {
home: env.home,
rigids: MutMap::default(),
},
region,
&loc_expr.value,
record_expected,
);
let constraint =
constrain_expr(constraints, env, region, &loc_expr.value, record_expected);
let eq = constraints.equal_types_var(field_var, expected, category, region);
constraints.exists_many(
@ -884,7 +898,7 @@ pub fn constrain_expr(
cons,
)
}
LetRec(defs, loc_ret) => {
LetRec(defs, loc_ret, cycle_mark) => {
let body_con = constrain_expr(
constraints,
env,
@ -893,7 +907,7 @@ pub fn constrain_expr(
expected.clone(),
);
constrain_recursive_defs(constraints, env, defs, body_con)
constrain_recursive_defs(constraints, env, defs, body_con, *cycle_mark)
}
LetNonRec(def, loc_ret) => {
let mut stack = Vec::with_capacity(1);
@ -1130,6 +1144,15 @@ pub fn constrain_expr(
arg_cons.push(eq);
constraints.exists_many(vars, arg_cons)
}
TypedHole(var) => {
// store the expected type for this position
constraints.equal_types_var(
*var,
expected,
Category::Storage(std::file!(), std::line!()),
region,
)
}
RuntimeError(_) => {
// Runtime Errors have no constraints because they're going to crash.
Constraint::True
@ -1142,7 +1165,7 @@ pub fn constrain_expr(
#[inline(always)]
fn constrain_when_branch_help(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
region: Region,
when_branch: &WhenBranch,
pattern_expected: impl Fn(HumanIndex, Region) -> PExpected<Type>,
@ -1222,7 +1245,7 @@ fn constrain_when_branch_help(
fn constrain_field(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
field_var: Variable,
loc_expr: &Loc<Expr>,
) -> (Type, Constraint) {
@ -1260,6 +1283,7 @@ pub fn constrain_decls(
let mut env = Env {
home,
rigids: MutMap::default(),
resolutions_to_make: vec![],
};
for decl in decls.iter().rev() {
@ -1269,16 +1293,22 @@ pub fn constrain_decls(
match decl {
Declaration::Declare(def) | Declaration::Builtin(def) => {
constraint = constrain_def(constraints, &env, def, constraint);
constraint = constrain_def(constraints, &mut env, def, constraint);
}
Declaration::DeclareRec(defs) => {
constraint = constrain_recursive_defs(constraints, &env, defs, constraint);
Declaration::DeclareRec(defs, cycle_mark) => {
constraint =
constrain_recursive_defs(constraints, &mut env, defs, constraint, *cycle_mark);
}
Declaration::InvalidCycle(_) => {
// invalid cycles give a canonicalization error. we skip them here.
continue;
}
}
debug_assert!(
env.resolutions_to_make.is_empty(),
"Resolutions not attached after def!"
);
}
// this assert makes sure the "root" of the constraint wasn't dropped
@ -1289,7 +1319,7 @@ pub fn constrain_decls(
pub fn constrain_def_pattern(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
loc_pattern: &Loc<Pattern>,
expr_type: Type,
) -> PatternState {
@ -1317,7 +1347,7 @@ pub fn constrain_def_pattern(
/// Generate constraints for a definition with a type signature
fn constrain_typed_def(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
def: &Def,
body_con: Constraint,
annotation: &roc_can::def::Annotation,
@ -1346,8 +1376,9 @@ fn constrain_typed_def(
&mut def_pattern_state.headers,
);
let env = &Env {
let env = &mut Env {
home: env.home,
resolutions_to_make: vec![],
rigids: ftv,
};
@ -1445,6 +1476,7 @@ fn constrain_typed_def(
&loc_body_expr.value,
body_type,
);
let ret_constraint = attach_resolution_constraints(constraints, env, ret_constraint);
vars.push(*fn_var);
let defs_constraint = constraints.and_constraint(argument_pattern_state.constraints);
@ -1507,6 +1539,7 @@ fn constrain_typed_def(
&def.loc_expr.value,
annotation_expected,
);
let ret_constraint = attach_resolution_constraints(constraints, env, ret_constraint);
let cons = [
ret_constraint,
@ -1529,7 +1562,7 @@ fn constrain_typed_def(
fn constrain_typed_function_arguments(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
def: &Def,
def_pattern_state: &mut PatternState,
argument_pattern_state: &mut PatternState,
@ -1655,9 +1688,20 @@ fn constrain_typed_function_arguments(
}
}
#[inline(always)]
fn attach_resolution_constraints(
constraints: &mut Constraints,
env: &mut Env,
constraint: Constraint,
) -> Constraint {
let resolution_constrs =
constraints.and_constraint(env.resolutions_to_make.drain(..).map(Constraint::Resolve));
constraints.and_constraint([constraint, resolution_constrs])
}
fn constrain_def(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
def: &Def,
body_con: Constraint,
) -> Constraint {
@ -1680,6 +1724,7 @@ fn constrain_def(
&def.loc_expr.value,
NoExpectation(expr_type),
);
let expr_con = attach_resolution_constraints(constraints, env, expr_con);
constrain_def_make_constraint(
constraints,
@ -1693,30 +1738,27 @@ fn constrain_def(
}
}
/// Create a let-constraint for a non-recursive def.
/// Recursive defs should always use `constrain_recursive_defs`.
pub fn constrain_def_make_constraint(
constraints: &mut Constraints,
new_rigid_variables: impl Iterator<Item = Variable>,
new_infer_variables: impl Iterator<Item = Variable>,
expr_con: Constraint,
body_con: Constraint,
annotation_rigid_variables: impl Iterator<Item = Variable>,
annotation_infer_variables: impl Iterator<Item = Variable>,
def_expr_con: Constraint,
after_def_con: Constraint,
def_pattern_state: PatternState,
) -> Constraint {
let and_constraint = constraints.and_constraint(def_pattern_state.constraints);
let all_flex_variables = (def_pattern_state.vars.into_iter()).chain(annotation_infer_variables);
let def_con = constraints.let_constraint(
[],
new_infer_variables,
[], // empty, because our functions have no arguments!
and_constraint,
expr_con,
);
let pattern_constraints = constraints.and_constraint(def_pattern_state.constraints);
let def_pattern_and_body_con = constraints.and_constraint([pattern_constraints, def_expr_con]);
constraints.let_constraint(
new_rigid_variables,
def_pattern_state.vars,
annotation_rigid_variables,
all_flex_variables,
def_pattern_state.headers,
def_con,
body_con,
def_pattern_and_body_con,
after_def_con,
)
}
@ -1846,9 +1888,10 @@ fn instantiate_rigids(
fn constrain_recursive_defs(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
defs: &[Def],
body_con: Constraint,
cycle_mark: IllegalCycleMark,
) -> Constraint {
rec_defs_help(
constraints,
@ -1857,16 +1900,18 @@ fn constrain_recursive_defs(
body_con,
Info::with_capacity(defs.len()),
Info::with_capacity(defs.len()),
cycle_mark,
)
}
pub fn rec_defs_help(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
defs: &[Def],
body_con: Constraint,
mut rigid_info: Info,
mut flex_info: Info,
cycle_mark: IllegalCycleMark,
) -> Constraint {
for def in defs {
let expr_var = def.expr_var;
@ -1886,6 +1931,7 @@ pub fn rec_defs_help(
&def.loc_expr.value,
NoExpectation(expr_type),
);
let expr_con = attach_resolution_constraints(constraints, env, expr_con);
let def_con = expr_con;
@ -1998,6 +2044,7 @@ pub fn rec_defs_help(
&loc_body_expr.value,
body_type,
);
let expr_con = attach_resolution_constraints(constraints, env, expr_con);
vars.push(*fn_var);
@ -2059,6 +2106,8 @@ pub fn rec_defs_help(
&def.loc_expr.value,
expected,
);
let ret_constraint =
attach_resolution_constraints(constraints, env, ret_constraint);
let cons = [
ret_constraint,
@ -2083,30 +2132,60 @@ pub fn rec_defs_help(
}
}
let flex_constraints = constraints.and_constraint(flex_info.constraints);
let inner_inner = constraints.let_constraint(
// Strategy for recursive defs:
// 1. Let-generalize the type annotations we know; these are the source of truth we'll solve
// everything else with. If there are circular type errors here, they will be caught during
// the let-generalization.
// 2. Introduce all symbols of the untyped defs, but don't generalize them yet. Now, solve
// the untyped defs' bodies. This way, when checking something like
// f = \x -> f [ x ]
// we introduce `f: b -> c`, then constrain the call `f [ x ]`,
// forcing `b -> c ~ List b -> c` and correctly picking up a recursion error.
// Had we generalized `b -> c`, the call `f [ x ]` would have been generalized, and this
// error would not be found.
// 3. Now properly let-generalize the untyped body defs, since we now know their types and
// that they don't have circular type errors.
// 4. Solve the bodies of the typed body defs, and check that they agree with the types given in
// their type annotations.
// 5. Solve the rest of the program that happens after this recursive def block.
// 2. Solve untyped defs without generalization of their symbols.
let untyped_body_constraints = constraints.and_constraint(flex_info.constraints);
let untyped_def_symbols_constr = constraints.let_constraint(
[],
[],
flex_info.def_types.clone(),
Constraint::True,
flex_constraints,
untyped_body_constraints,
);
let rigid_constraints = {
let mut temp = rigid_info.constraints;
temp.push(body_con);
// an extra constraint that propagates information to the solver to check for invalid recursion
// and generate a good error message there.
let (loc_symbols, expr_regions): (Vec<_>, Vec<_>) = defs
.iter()
.flat_map(|def| {
symbols_introduced_from_pattern(&def.loc_pattern)
.map(move |loc_symbol| ((loc_symbol.value, loc_symbol.region), def.loc_expr.region))
})
.unzip();
constraints.and_constraint(temp)
};
let cycle_constraint = constraints.check_cycle(loc_symbols, expr_regions, cycle_mark);
let typed_body_constraints = constraints.and_constraint(rigid_info.constraints);
let typed_body_and_final_constr =
constraints.and_constraint([typed_body_constraints, cycle_constraint, body_con]);
// 3. Properly generalize untyped defs after solving them.
let inner = constraints.let_constraint(
[],
flex_info.vars,
flex_info.def_types,
inner_inner,
rigid_constraints,
untyped_def_symbols_constr,
// 4 + 5. Solve the typed body defs, and the rest of the program.
typed_body_and_final_constr,
);
// 1. Let-generalize annotations we know.
constraints.let_constraint(
rigid_info.vars,
[],
@ -2119,7 +2198,7 @@ pub fn rec_defs_help(
#[inline(always)]
fn constrain_field_update(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
var: Variable,
region: Region,
field: Lowercase,

View File

@ -1,6 +1,6 @@
use crate::expr::{constrain_def_make_constraint, constrain_def_pattern, Env};
use roc_builtins::std::StdLib;
use roc_can::abilities::AbilitiesStore;
use roc_can::abilities::{AbilitiesStore, MemberTypeInfo, SolvedSpecializations};
use roc_can::constraint::{Constraint, Constraints};
use roc_can::def::Declaration;
use roc_can::expected::Expected;
@ -53,6 +53,10 @@ impl ExposedByModule {
output
}
pub fn iter_all(&self) -> impl Iterator<Item = (&ModuleId, &ExposedModuleTypes)> {
self.exposed.iter()
}
}
#[derive(Clone, Debug, Default)]
@ -70,7 +74,7 @@ impl ExposedForModule {
for symbol in it {
let module = exposed_by_module.exposed.get(&symbol.module_id());
if let Some(ExposedModuleTypes::Valid { .. }) = module {
if let Some(ExposedModuleTypes { .. }) = module {
imported_values.push(*symbol);
} else {
continue;
@ -86,12 +90,10 @@ impl ExposedForModule {
/// The types of all exposed values/functions of a module
#[derive(Clone, Debug)]
pub enum ExposedModuleTypes {
Invalid,
Valid {
stored_vars_by_symbol: Vec<(Symbol, Variable)>,
storage_subs: roc_types::subs::StorageSubs,
},
pub struct ExposedModuleTypes {
pub stored_vars_by_symbol: Vec<(Symbol, Variable)>,
pub storage_subs: roc_types::subs::StorageSubs,
pub solved_specializations: SolvedSpecializations,
}
pub fn constrain_module(
@ -129,11 +131,17 @@ fn constrain_symbols_from_requires(
// namespace. If this is the case, we want to introduce the symbols as if they had
// the types they are annotated with.
let rigids = Default::default();
let env = Env { home, rigids };
let mut env = Env {
home,
rigids,
resolutions_to_make: vec![],
};
let pattern = Loc::at_zero(roc_can::pattern::Pattern::Identifier(loc_symbol.value));
let def_pattern_state =
constrain_def_pattern(constraints, &env, &pattern, loc_type.value);
constrain_def_pattern(constraints, &mut env, &pattern, loc_type.value);
debug_assert!(env.resolutions_to_make.is_empty());
constrain_def_make_constraint(
constraints,
@ -173,40 +181,53 @@ pub fn frontload_ability_constraints(
mut constraint: Constraint,
) -> Constraint {
for (member_name, member_data) in abilities_store.root_ability_members().iter() {
let rigids = Default::default();
let env = Env { home, rigids };
let pattern = Loc::at_zero(roc_can::pattern::Pattern::Identifier(*member_name));
if let MemberTypeInfo::Local {
signature_var,
variables: vars,
signature,
} = &member_data.typ
{
let signature_var = *signature_var;
let rigids = Default::default();
let mut env = Env {
home,
rigids,
resolutions_to_make: vec![],
};
let pattern = Loc::at_zero(roc_can::pattern::Pattern::Identifier(*member_name));
let mut def_pattern_state = constrain_def_pattern(
constraints,
&env,
&pattern,
Type::Variable(member_data.signature_var),
);
let mut def_pattern_state = constrain_def_pattern(
constraints,
&mut env,
&pattern,
Type::Variable(signature_var),
);
def_pattern_state.vars.push(member_data.signature_var);
debug_assert!(env.resolutions_to_make.is_empty());
let vars = &member_data.variables;
let rigid_variables = vars.rigid_vars.iter().chain(vars.able_vars.iter()).copied();
let infer_variables = vars.flex_vars.iter().copied();
def_pattern_state.vars.push(signature_var);
def_pattern_state
.constraints
.push(constraints.equal_types_var(
member_data.signature_var,
Expected::NoExpectation(member_data.signature.clone()),
Category::Storage(file!(), line!()),
Region::zero(),
));
let rigid_variables = vars.rigid_vars.iter().chain(vars.able_vars.iter()).copied();
let infer_variables = vars.flex_vars.iter().copied();
constraint = constrain_def_make_constraint(
constraints,
rigid_variables,
infer_variables,
Constraint::True,
constraint,
def_pattern_state,
);
def_pattern_state
.constraints
.push(constraints.equal_types_var(
signature_var,
Expected::NoExpectation(signature.clone()),
Category::Storage(file!(), line!()),
Region::zero(),
));
constraint = constrain_def_make_constraint(
constraints,
rigid_variables,
infer_variables,
Constraint::True,
constraint,
def_pattern_state,
);
}
}
constraint
}

View File

@ -166,7 +166,7 @@ fn headers_from_annotation_help(
/// based on its knowledge of their lengths.
pub fn constrain_pattern(
constraints: &mut Constraints,
env: &Env,
env: &mut Env,
pattern: &Pattern,
region: Region,
expected: PExpected<Type>,

View File

@ -3,7 +3,7 @@ name = "roc_exhaustive"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
edition = "2021"
[dependencies]
roc_collections = { path = "../collections" }

View File

@ -7,7 +7,6 @@ use roc_module::{
symbol::Symbol,
};
use roc_region::all::Region;
use roc_std::RocDec;
use self::Pattern::*;
@ -74,13 +73,13 @@ pub enum Pattern {
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Literal {
Int(i128),
U128(u128),
Int([u8; 16]),
U128([u8; 16]),
Bit(bool),
Byte(u8),
/// Stores the float bits
Float(u64),
Decimal(RocDec),
Decimal([u8; 16]),
Str(Box<str>),
}
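Editor's note: the literal payloads above switch from `i128`/`u128`/`RocDec` to raw 16-byte arrays, while `Float` keeps storing the bits. A hedged sketch of building them from native values with the std conversions, assuming `Literal` is exported from the `roc_exhaustive` crate root; not part of the diff.

```
use roc_exhaustive::Literal;

fn main() {
    let literals = vec![
        // 128-bit integers are now stored as native-endian bytes
        Literal::Int((-5i128).to_ne_bytes()),
        Literal::U128(5u128.to_ne_bytes()),
        // floats store the raw bits, as before
        Literal::Float(2.5f64.to_bits()),
        Literal::Str("hello".into()),
    ];
    assert_eq!(literals.len(), 4);
}
```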

View File

@ -3,7 +3,7 @@ name = "roc_fmt"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
edition = "2021"
[dependencies]
roc_collections = { path = "../collections" }

View File

@ -4,7 +4,8 @@ use crate::{
Buf,
};
use roc_parse::ast::{
AssignedField, Collection, Expr, ExtractSpaces, HasClause, Tag, TypeAnnotation, TypeHeader,
AssignedField, Collection, Derived, Expr, ExtractSpaces, HasClause, Tag, TypeAnnotation,
TypeHeader,
};
use roc_parse::ident::UppercaseIdent;
use roc_region::all::Loc;
@ -43,6 +44,16 @@ pub enum Newlines {
Yes,
}
impl Newlines {
pub fn from_bool(yes: bool) -> Self {
if yes {
Self::Yes
} else {
Self::No
}
}
}
pub trait Formattable {
fn is_multiline(&self) -> bool;
@ -202,9 +213,17 @@ impl<'a> Formattable for TypeAnnotation<'a> {
buf.push('(')
}
let mut it = arguments.iter().peekable();
let mut it = arguments.iter().enumerate().peekable();
let should_add_newlines = newlines == Newlines::Yes;
while let Some((index, argument)) = it.next() {
let is_first = index == 0;
let is_multiline = &argument.value.is_multiline();
if !is_first && !is_multiline && should_add_newlines {
buf.newline();
}
while let Some(argument) = it.next() {
(&argument.value).format_with_options(
buf,
Parens::InFunctionType,
@ -214,11 +233,20 @@ impl<'a> Formattable for TypeAnnotation<'a> {
if it.peek().is_some() {
buf.push_str(",");
buf.spaces(1);
if !should_add_newlines {
buf.spaces(1);
}
}
}
buf.push_str(" ->");
if should_add_newlines {
buf.newline();
buf.indent(indent);
} else {
buf.spaces(1);
}
buf.push_str("->");
buf.spaces(1);
(&result.value).format_with_options(
@ -299,28 +327,30 @@ impl<'a> Formattable for TypeAnnotation<'a> {
Where(annot, has_clauses) => {
annot.format_with_options(buf, parens, newlines, indent);
buf.push_str(" ");
buf.spaces(1);
for (i, has) in has_clauses.iter().enumerate() {
buf.push_str(if i == 0 { "| " } else { ", " });
buf.push(if i == 0 { '|' } else { ',' });
buf.spaces(1);
has.format_with_options(buf, parens, newlines, indent);
}
}
SpaceBefore(ann, spaces) => {
let is_function = matches!(ann, TypeAnnotation::Function(..));
let next_newlines = if is_function && newlines == Newlines::Yes {
Newlines::Yes
} else {
Newlines::No
};
buf.newline();
buf.indent(indent);
fmt_comments_only(buf, spaces.iter(), NewlineAt::Bottom, indent);
ann.format_with_options(buf, parens, Newlines::No, indent)
ann.format_with_options(buf, parens, next_newlines, indent)
}
SpaceAfter(ann, spaces) => {
ann.format_with_options(buf, parens, newlines, indent);
fmt_comments_only(buf, spaces.iter(), NewlineAt::Bottom, indent);
// seems like this SpaceAfter is not constructible
// so this branch hasn't been tested. Please add a test if
// this branch is actually reached, and then remove this debug_assert.
debug_assert!(false);
}
Malformed(raw) => buf.push_str(raw),
@ -523,8 +553,49 @@ impl<'a> Formattable for HasClause<'a> {
indent: u16,
) {
buf.push_str(self.var.value.extract_spaces().item);
buf.push_str(" has ");
buf.spaces(1);
buf.push_str("has");
buf.spaces(1);
self.ability
.format_with_options(buf, parens, newlines, indent);
}
}
impl<'a> Formattable for Derived<'a> {
fn is_multiline(&self) -> bool {
match self {
Derived::SpaceAfter(..) | Derived::SpaceBefore(..) => true,
Derived::Has(derived) => derived.is_multiline(),
}
}
fn format_with_options<'buf>(
&self,
buf: &mut Buf<'buf>,
parens: Parens,
newlines: Newlines,
indent: u16,
) {
match self {
Derived::Has(derived) => {
if newlines == Newlines::Yes {
buf.newline();
buf.indent(indent);
}
buf.push_str("has");
buf.spaces(1);
fmt_collection(buf, indent, '[', ']', *derived, newlines);
}
Derived::SpaceBefore(derived, spaces) => {
buf.newline();
buf.indent(indent);
fmt_comments_only(buf, spaces.iter(), NewlineAt::Bottom, indent);
derived.format_with_options(buf, parens, Newlines::No, indent)
}
Derived::SpaceAfter(derived, spaces) => {
derived.format_with_options(buf, parens, newlines, indent);
fmt_comments_only(buf, spaces.iter(), NewlineAt::Bottom, indent);
}
}
}
}

View File

@ -2,7 +2,9 @@ use crate::annotation::{Formattable, Newlines, Parens};
use crate::pattern::fmt_pattern;
use crate::spaces::{fmt_spaces, INDENT};
use crate::Buf;
use roc_parse::ast::{AbilityMember, Def, Expr, ExtractSpaces, Pattern, TypeHeader};
use roc_parse::ast::{
AbilityMember, Def, Expr, ExtractSpaces, Pattern, TypeAnnotation, TypeHeader,
};
use roc_region::all::Loc;
/// A Located formattable value is also formattable
@ -51,10 +53,6 @@ impl<'a> Formattable for Def<'a> {
Alias {
header: TypeHeader { name, vars },
ann,
}
| Opaque {
header: TypeHeader { name, vars },
typ: ann,
} => {
buf.indent(indent);
buf.push_str(name.value);
@ -64,15 +62,64 @@ impl<'a> Formattable for Def<'a> {
fmt_pattern(buf, &var.value, indent, Parens::NotNeeded);
}
buf.push_str(match def {
Alias { .. } => " :",
Opaque { .. } => " :=",
_ => unreachable!(),
});
buf.push_str(" :");
buf.spaces(1);
ann.format(buf, indent + INDENT)
}
Opaque {
header: TypeHeader { name, vars },
typ: ann,
derived,
} => {
buf.indent(indent);
buf.push_str(name.value);
for var in *vars {
buf.spaces(1);
fmt_pattern(buf, &var.value, indent, Parens::NotNeeded);
}
buf.push_str(" :=");
buf.spaces(1);
let ann_is_where_clause =
matches!(ann.extract_spaces().item, TypeAnnotation::Where(..));
let ann_has_spaces_before =
matches!(&ann.value, TypeAnnotation::SpaceBefore(..));
// Always put the has-derived clause on a newline if it is itself multiline, or if
// the annotation has a where-has clause.
let derived_multiline = if let Some(derived) = derived {
!derived.value.is_empty() && (derived.is_multiline() || ann_is_where_clause)
} else {
false
};
let make_multiline = ann.is_multiline() || derived_multiline;
// If the annotation has spaces before, a newline will already be printed.
if make_multiline && !ann_has_spaces_before {
buf.newline();
buf.indent(indent + INDENT);
}
ann.format(buf, indent + INDENT);
if let Some(derived) = derived {
if !make_multiline {
buf.spaces(1);
}
derived.format_with_options(
buf,
Parens::NotNeeded,
Newlines::from_bool(make_multiline),
indent + INDENT,
);
}
}
Ability {
header: TypeHeader { name, vars },
loc_has: _,
@ -103,14 +150,50 @@ impl<'a> Formattable for Def<'a> {
Value(def) => match def {
Annotation(loc_pattern, loc_annotation) => {
loc_pattern.format(buf, indent);
if loc_annotation.is_multiline() {
buf.push_str(" :");
loc_annotation.format_with_options(
buf,
Parens::NotNeeded,
Newlines::Yes,
indent + INDENT,
);
let should_outdent = match loc_annotation.value {
TypeAnnotation::SpaceBefore(sub_def, spaces) => match sub_def {
TypeAnnotation::Record { .. } | TypeAnnotation::TagUnion { .. } => {
let is_only_newlines = spaces.iter().all(|s| s.is_newline());
is_only_newlines && sub_def.is_multiline()
}
_ => false,
},
TypeAnnotation::Record { .. } | TypeAnnotation::TagUnion { .. } => true,
_ => false,
};
if should_outdent {
buf.spaces(1);
match loc_annotation.value {
TypeAnnotation::SpaceBefore(sub_def, _) => {
sub_def.format_with_options(
buf,
Parens::NotNeeded,
Newlines::No,
indent,
);
}
_ => {
loc_annotation.format_with_options(
buf,
Parens::NotNeeded,
Newlines::No,
indent,
);
}
}
} else {
loc_annotation.format_with_options(
buf,
Parens::NotNeeded,
Newlines::Yes,
indent + INDENT,
);
}
} else {
buf.spaces(1);
buf.push_str(":");
@ -134,15 +217,41 @@ impl<'a> Formattable for Def<'a> {
body_pattern,
body_expr,
} => {
let is_type_multiline = ann_type.is_multiline();
let is_type_function = matches!(
ann_type.value,
TypeAnnotation::Function(..)
| TypeAnnotation::SpaceBefore(TypeAnnotation::Function(..), ..)
| TypeAnnotation::SpaceAfter(TypeAnnotation::Function(..), ..)
);
let next_indent = if is_type_multiline {
indent + INDENT
} else {
indent
};
ann_pattern.format(buf, indent);
buf.push_str(" :");
buf.spaces(1);
ann_type.format(buf, indent);
if is_type_multiline && is_type_function {
ann_type.format_with_options(
buf,
Parens::NotNeeded,
Newlines::Yes,
next_indent,
);
} else {
buf.spaces(1);
ann_type.format(buf, indent);
}
if let Some(comment_str) = comment {
buf.push_str(" #");
buf.spaces(1);
buf.push_str(comment_str.trim());
}
buf.newline();
fmt_body(buf, &body_pattern.value, &body_expr.value, indent);
}
@ -230,7 +339,9 @@ impl<'a> Formattable for AbilityMember<'a> {
fn format<'buf>(&self, buf: &mut Buf<'buf>, indent: u16) {
buf.push_str(self.name.value.extract_spaces().item);
buf.push_str(" : ");
buf.spaces(1);
buf.push(':');
buf.spaces(1);
self.typ.value.format(buf, indent + INDENT);
}
}

View File

@ -611,6 +611,24 @@ fn empty_line_before_expr<'a>(expr: &'a Expr<'a>) -> bool {
}
}
fn is_when_patterns_multiline(when_branch: &WhenBranch) -> bool {
let patterns = when_branch.patterns;
let (first_pattern, rest) = patterns.split_first().unwrap();
let is_multiline_patterns = if let Some((last_pattern, inner_patterns)) = rest.split_last() {
!first_pattern.value.extract_spaces().after.is_empty()
|| !last_pattern.value.extract_spaces().before.is_empty()
|| inner_patterns.iter().any(|p| {
let spaces = p.value.extract_spaces();
!spaces.before.is_empty() || !spaces.after.is_empty()
})
} else {
false
};
is_multiline_patterns
}
fn fmt_when<'a, 'buf>(
buf: &mut Buf<'buf>,
loc_condition: &'a Loc<Expr<'a>>,
@ -668,34 +686,23 @@ fn fmt_when<'a, 'buf>(
let mut it = branches.iter().peekable();
while let Some(branch) = it.next() {
let patterns = &branch.patterns;
let expr = &branch.value;
let (first_pattern, rest) = patterns.split_first().unwrap();
let is_multiline = if let Some((last_pattern, inner_patterns)) = rest.split_last() {
!first_pattern.value.extract_spaces().after.is_empty()
|| !last_pattern.value.extract_spaces().before.is_empty()
|| inner_patterns.iter().any(|p| {
let spaces = p.value.extract_spaces();
!spaces.before.is_empty() || !spaces.after.is_empty()
})
} else {
false
};
let patterns = &branch.patterns;
let is_multiline_expr = expr.is_multiline();
let is_multiline_patterns = is_when_patterns_multiline(branch);
fmt_pattern(
buf,
&first_pattern.value,
indent + INDENT,
Parens::NotNeeded,
);
for when_pattern in rest {
if is_multiline {
buf.newline();
buf.indent(indent + INDENT);
for (index, pattern) in patterns.iter().enumerate() {
if index != 0 {
if is_multiline_patterns {
buf.newline();
buf.indent(indent + INDENT);
}
buf.push_str(" |");
buf.spaces(1);
}
buf.push_str(" |");
buf.spaces(1);
fmt_pattern(buf, &when_pattern.value, indent + INDENT, Parens::NotNeeded);
fmt_pattern(buf, &pattern.value, indent + INDENT, Parens::NotNeeded);
}
if let Some(guard_expr) = &branch.guard {
@ -705,7 +712,12 @@ fn fmt_when<'a, 'buf>(
}
buf.push_str(" ->");
buf.newline();
if is_multiline_expr {
buf.newline();
} else {
buf.spaces(1);
}
match expr.value {
Expr::SpaceBefore(nested, spaces) => {
@ -729,7 +741,6 @@ fn fmt_when<'a, 'buf>(
if it.peek().is_some() {
buf.newline();
buf.newline();
}
}
}
@ -855,7 +866,9 @@ fn fmt_if<'a, 'buf>(
}
}
_ => {
loc_condition.format(buf, return_indent);
buf.newline();
loc_then.format(buf, return_indent);
buf.newline();
}
}
} else {

Some files were not shown because too many files have changed in this diff.