Merge branch 'trunk' of github.com:rtfeldman/roc into wasm-dce-imports

Commit 8493c8cfca by Brian Carroll, 2022-06-13 08:41:41 +01:00
89 changed files with 5296 additions and 2315 deletions


@ -81,3 +81,5 @@ Oskar Hahn <mail@oshahn.de>
Nuno Ferreira <nunogcferreira@gmail.com>
Mfon Eti-mfon <mfonetimfon@gmail.com>
Drake Bennion <drake.bennion@gmail.com>
Hashi364 <49736221+Kiyoshi364@users.noreply.github.com>
Jared Forsyth <jared@jaredforsyth.com>

Cargo.lock (generated)

@ -3850,6 +3850,17 @@ dependencies = [
name = "roc_ident"
version = "0.1.0"
[[package]]
name = "roc_late_solve"
version = "0.1.0"
dependencies = [
"bumpalo",
"roc_can",
"roc_solve",
"roc_types",
"roc_unify",
]
[[package]]
name = "roc_linker"
version = "0.1.0"
@ -3939,14 +3950,13 @@ dependencies = [
"roc_debug_flags",
"roc_error_macros",
"roc_exhaustive",
"roc_late_solve",
"roc_module",
"roc_problem",
"roc_region",
"roc_solve",
"roc_std",
"roc_target",
"roc_types",
"roc_unify",
"static_assertions 1.1.0",
"ven_pretty",
]
@ -4111,6 +4121,7 @@ dependencies = [
name = "roc_std"
version = "0.1.0"
dependencies = [
"arrayvec 0.7.2",
"static_assertions 0.1.1",
]


@ -13,6 +13,7 @@ members = [
"compiler/constrain",
"compiler/unify",
"compiler/solve",
"compiler/late_solve",
"compiler/fmt",
"compiler/mono",
"compiler/alias_analysis",


@ -322,8 +322,8 @@ addAndStringify = \counts ->
Num.toStr (counts.birds + counts.iguanas)
```
This works because `addWithStringify` only uses `counts.birds` and `counts.iguanas`.
If we were to use `counts.note` inside `addWithStringify`, then we would get an error
This works because `addAndStringify` only uses `counts.birds` and `counts.iguanas`.
If we were to use `counts.note` inside `addAndStringify`, then we would get an error
because `total` is calling `addAndStringify` passing a record that doesn't have a `note` field.
Record fields can have any combination of types we want. `totalWithNote` uses a record that


@ -1938,7 +1938,7 @@ pub mod test_constrain {
use roc_parse::parser::{SourceError, SyntaxError};
use roc_region::all::Region;
use roc_types::{
pretty_print::name_and_print_var,
pretty_print::{name_and_print_var, DebugPrint},
solved_types::Solved,
subs::{Subs, VarStore, Variable},
};
@ -2059,7 +2059,8 @@ pub mod test_constrain {
all_ident_ids: dep_idents,
};
let actual_str = name_and_print_var(var, subs, mod_id, &interns);
let actual_str =
name_and_print_var(var, subs, mod_id, &interns, DebugPrint::NOTHING);
assert_eq!(actual_str, expected_str);
}


@ -1,6 +1,5 @@
use bumpalo::collections::Vec as BumpVec;
use bumpalo::Bump;
use roc_module::ident::{Ident, IdentStr};
use roc_parse::{ast::CommentOrNewline, parser::SyntaxError};
use roc_region::all::Region;
@ -41,8 +40,7 @@ pub fn toplevel_defs_to_defs2<'a>(
match loc_pattern.value {
Identifier(id_str) => {
let identifier_id =
env.ident_ids.get_or_insert(&Ident(IdentStr::from(id_str)));
let identifier_id = env.ident_ids.get_or_insert(id_str);
// TODO support with annotation
Def2::ValueDef {
@ -164,7 +162,7 @@ pub fn def_to_def2<'a>(
match loc_pattern.value {
Identifier(id_str) => {
let identifier_id = env.ident_ids.get_or_insert(&Ident(IdentStr::from(id_str)));
let identifier_id = env.ident_ids.get_or_insert(id_str);
// TODO support with annotation
Def2::ValueDef {


@ -109,13 +109,11 @@ impl<'a> Env<'a> {
let module_name: ModuleName = module_name.into();
match self.module_ids.get_id(&module_name) {
Some(&module_id) => {
let ident: Ident = ident.into();
Some(module_id) => {
// You can do qualified lookups on your own module, e.g.
// if I'm in the Foo module, I can do a `Foo.bar` lookup.
if module_id == self.home {
match self.ident_ids.get_id(&ident) {
match self.ident_ids.get_id(ident) {
Some(ident_id) => {
let symbol = Symbol::new(module_id, ident_id);
@ -125,7 +123,7 @@ impl<'a> Env<'a> {
}
None => Err(RuntimeError::LookupNotInScope(
Loc {
value: ident,
value: Ident::from(ident),
region,
},
self.ident_ids
@ -136,7 +134,7 @@ impl<'a> Env<'a> {
}
} else {
match self.dep_idents.get(&module_id) {
Some(exposed_ids) => match exposed_ids.get_id(&ident) {
Some(exposed_ids) => match exposed_ids.get_id(ident) {
Some(ident_id) => {
let symbol = Symbol::new(module_id, ident_id);
@ -154,7 +152,7 @@ impl<'a> Env<'a> {
.collect();
Err(RuntimeError::ValueNotExposed {
module_name,
ident,
ident: Ident::from(ident),
region,
exposed_values,
})


@ -244,7 +244,7 @@ impl Scope {
// If this IdentId was already added previously
// when the value was exposed in the module header,
// use that existing IdentId. Otherwise, create a fresh one.
let ident_id = match exposed_ident_ids.get_id(&ident) {
let ident_id = match exposed_ident_ids.get_id(ident.as_str()) {
Some(ident_id) => ident_id,
None => all_ident_ids.add_str(ident.as_str()),
};
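A minimal sketch of the borrowed-key lookup these hunks switch to; `IdentIds` and `get_id` below are simplified stand-ins for roc's types, not the real API:

```rust
use std::collections::HashMap;

// Stand-in for IdentIds: owned String keys, borrowed &str lookups.
struct IdentIds {
    by_name: HashMap<String, u32>,
}

impl IdentIds {
    // Taking &str means callers no longer build an owned Ident just to
    // look something up; HashMap<String, _> supports this via Borrow<str>.
    fn get_id(&self, name: &str) -> Option<u32> {
        self.by_name.get(name).copied()
    }
}

fn main() {
    let mut by_name = HashMap::new();
    by_name.insert("bar".to_string(), 7);
    let ids = IdentIds { by_name };

    // No allocation on the happy path:
    assert_eq!(ids.get_id("bar"), Some(7));
    // Only the error path needs an owned Ident (hence Ident::from(ident)
    // appearing only inside the Err branches above).
    assert_eq!(ids.get_id("baz"), None);
}
```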


@ -231,6 +231,7 @@ fn solve<'a>(
Success {
vars,
must_implement_ability: _,
lambda_sets_to_specialize: _, // TODO ignored
} => {
// TODO(abilities) record deferred ability checks
introduce(subs, rank, pools, &vars);
@ -328,6 +329,7 @@ fn solve<'a>(
Success {
vars,
must_implement_ability: _,
lambda_sets_to_specialize: _, // TODO ignored
} => {
// TODO(abilities) record deferred ability checks
introduce(subs, rank, pools, &vars);
@ -403,6 +405,7 @@ fn solve<'a>(
Success {
vars,
must_implement_ability: _,
lambda_sets_to_specialize: _, // TODO ignored
} => {
// TODO(abilities) record deferred ability checks
introduce(subs, rank, pools, &vars);
@ -715,6 +718,7 @@ fn solve<'a>(
Success {
vars,
must_implement_ability: _,
lambda_sets_to_specialize: _, // TODO ignored
} => {
// TODO(abilities) record deferred ability checks
introduce(subs, rank, pools, &vars);
@ -1411,6 +1415,8 @@ fn adjust_rank_content(
LambdaSet(subs::LambdaSet {
solved,
recursion_var,
// TODO: handle unspecialized
unspecialized: _,
}) => {
let mut rank = group_rank;
@ -1611,6 +1617,8 @@ fn instantiate_rigids_help(
LambdaSet(subs::LambdaSet {
solved,
recursion_var,
// TODO: handle unspecialized
unspecialized: _,
}) => {
if let Some(rec_var) = recursion_var.into_variable() {
instantiate_rigids_help(subs, max_rank, pools, rec_var);
@ -1892,6 +1900,7 @@ fn deep_copy_var_help(
LambdaSet(subs::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
let mut new_variable_slices = Vec::with_capacity(solved.len());
@ -1924,6 +1933,8 @@ fn deep_copy_var_help(
let new_content = LambdaSet(subs::LambdaSet {
solved: new_solved,
recursion_var: new_rec_var,
// TODO: actually copy
unspecialized,
});
subs.set(copy, make_descriptor(new_content));
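A small sketch of why these arms name `lambda_sets_to_specialize: _` instead of reaching for `..` (toy types, not roc's):

```rust
// Toy types showing the pattern used in the Success arms above:
// destructuring every field makes the compiler flag each site that
// must be revisited when a new field is added.
struct Success {
    vars: Vec<u32>,
    must_implement_ability: bool,
    lambda_sets_to_specialize: Vec<u32>, // the newly added field
}

fn handle(s: &Success) -> usize {
    let Success {
        vars,
        must_implement_ability: _,
        // Deleting this line is a compile error, unlike with `..`,
        // so every consumer is forced to decide how to handle it.
        lambda_sets_to_specialize: _,
    } = s;
    vars.len()
}

fn main() {
    let s = Success {
        vars: vec![1, 2],
        must_implement_ability: false,
        lambda_sets_to_specialize: vec![],
    };
    assert_eq!(handle(&s), 2);
}
```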


@ -1,487 +0,0 @@
use crate::structs::Structs;
use crate::types::{RocTagUnion, TypeId, Types};
use crate::{
enums::Enums,
types::{RocNum, RocType},
};
use bumpalo::Bump;
use roc_builtins::bitcode::{FloatWidth::*, IntWidth::*};
use roc_collections::VecMap;
use roc_module::ident::TagName;
use roc_module::symbol::{Interns, Symbol};
use roc_mono::layout::{
cmp_fields, ext_var_is_empty_tag_union, Builtin, Layout, LayoutCache, TagOrClosure,
};
use roc_types::subs::{Label, UnionLabels};
use roc_types::{
subs::{Content, FlatType, LambdaSet, Subs, Variable},
types::RecordField,
};
use std::fmt::Display;
pub struct Env<'a> {
pub arena: &'a Bump,
pub subs: &'a Subs,
pub layout_cache: &'a mut LayoutCache<'a>,
pub interns: &'a Interns,
pub struct_names: Structs,
pub enum_names: Enums,
pub pending_recursive_types: VecMap<TypeId, Variable>,
pub known_recursive_types: VecMap<Variable, TypeId>,
}
impl<'a> Env<'a> {
pub fn vars_to_types<I>(&mut self, variables: I) -> Types
where
I: IntoIterator<Item = Variable>,
{
let mut types = Types::default();
for var in variables {
self.add_type(var, &mut types);
}
self.resolve_pending_recursive_types(&mut types);
types
}
fn add_type(&mut self, var: Variable, types: &mut Types) -> TypeId {
let layout = self
.layout_cache
.from_var(self.arena, var, self.subs)
.expect("Something weird ended up in the content");
add_type_help(self, layout, var, None, types)
}
fn resolve_pending_recursive_types(&mut self, types: &mut Types) {
// TODO if VecMap gets a drain() method, use that instead of doing take() and into_iter
let pending = core::mem::take(&mut self.pending_recursive_types);
for (type_id, var) in pending.into_iter() {
let actual_type_id = self.known_recursive_types.get(&var).unwrap_or_else(|| {
unreachable!(
"There was no known recursive TypeId for the pending recursive variable {:?}",
var
);
});
debug_assert!(
matches!(types.get(type_id), RocType::RecursivePointer(TypeId::PENDING)),
"The TypeId {:?} was registered as a pending recursive pointer, but was not stored in Types as one.",
type_id
);
types.replace(type_id, RocType::RecursivePointer(*actual_type_id));
}
}
}
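A toy sketch of the two-phase scheme `resolve_pending_recursive_types` implements; the ids and maps here are illustrative, not roc's representation:

```rust
use std::collections::HashMap;

// Recursion pointers are first registered with a PENDING id, then
// patched in a second pass once the recursive type's real id is known.
const PENDING: usize = usize::MAX;

#[derive(Debug, PartialEq)]
enum Ty {
    List(usize),             // element TypeId
    RecursivePointer(usize), // target TypeId, PENDING until resolved
}

fn main() {
    let mut types: Vec<Ty> = Vec::new();

    // While translating, we hit a recursion var before its target exists:
    types.push(Ty::RecursivePointer(PENDING)); // TypeId 0
    let mut pending: HashMap<usize, &str> = HashMap::new();
    pending.insert(0, "var_a"); // pending TypeId -> type variable

    // Later, the recursive type itself gets an id:
    types.push(Ty::List(0)); // TypeId 1
    let mut known: HashMap<&str, usize> = HashMap::new();
    known.insert("var_a", 1); // type variable -> known TypeId

    // Resolution pass: every pending pointer must have a known target.
    for (type_id, var) in pending {
        let actual = known[var];
        types[type_id] = Ty::RecursivePointer(actual);
    }
    assert_eq!(types[0], Ty::RecursivePointer(1));
}
```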
fn add_type_help<'a>(
env: &mut Env<'a>,
layout: Layout<'a>,
var: Variable,
opt_name: Option<Symbol>,
types: &mut Types,
) -> TypeId {
let subs = env.subs;
match subs.get_content_without_compacting(var) {
Content::FlexVar(_)
| Content::RigidVar(_)
| Content::FlexAbleVar(_, _)
| Content::RigidAbleVar(_, _) => {
todo!("TODO give a nice error message for a non-concrete type being passed to the host")
}
Content::Structure(FlatType::Record(fields, ext)) => {
let it = fields
.unsorted_iterator(subs, *ext)
.expect("something weird in content")
.flat_map(|(label, field)| {
match field {
RecordField::Required(field_var) | RecordField::Demanded(field_var) => {
Some((label.to_string(), field_var))
}
RecordField::Optional(_) => {
// drop optional fields
None
}
}
});
let name = match opt_name {
Some(sym) => sym.as_str(env.interns).to_string(),
None => env.struct_names.get_name(var),
};
add_struct(env, name, it, types, |name, fields| RocType::Struct {
name,
fields,
})
}
Content::LambdaSet(LambdaSet {
solved,
recursion_var: _,
}) => add_union(env, opt_name, solved, var, types),
Content::Structure(FlatType::TagUnion(tags, ext_var)) => {
debug_assert!(ext_var_is_empty_tag_union(subs, *ext_var));
add_union(env, opt_name, tags, var, types)
}
Content::Structure(FlatType::RecursiveTagUnion(_rec_var, tag_vars, ext_var)) => {
debug_assert!(ext_var_is_empty_tag_union(subs, *ext_var));
add_union(env, opt_name, tag_vars, var, types)
}
Content::Structure(FlatType::Apply(symbol, _)) => match layout {
Layout::Builtin(builtin) => add_builtin_type(env, builtin, var, opt_name, types),
_ => {
if symbol.is_builtin() {
todo!(
"Handle Apply for builtin symbol {:?} and layout {:?}",
symbol,
layout
)
} else {
todo!(
"Handle non-builtin Apply for symbol {:?} and layout {:?}",
symbol,
layout
)
}
}
},
Content::Structure(FlatType::Func(_, _, _)) => {
todo!()
}
Content::Structure(FlatType::FunctionOrTagUnion(_, _, _)) => {
todo!()
}
Content::Structure(FlatType::Erroneous(_)) => todo!(),
Content::Structure(FlatType::EmptyRecord) => todo!(),
Content::Structure(FlatType::EmptyTagUnion) => {
// This can happen when unwrapping a tag union; don't do anything.
todo!()
}
Content::Alias(name, _, real_var, _) => {
if name.is_builtin() {
match layout {
Layout::Builtin(builtin) => {
add_builtin_type(env, builtin, var, opt_name, types)
}
_ => {
unreachable!()
}
}
} else {
// If this was a non-builtin type alias, we can use that alias name
// in the generated bindings.
add_type_help(env, layout, *real_var, Some(*name), types)
}
}
Content::RangedNumber(_, _) => todo!(),
Content::Error => todo!(),
Content::RecursionVar { structure, .. } => {
let type_id = types.add(RocType::RecursivePointer(TypeId::PENDING));
env.pending_recursive_types.insert(type_id, *structure);
type_id
}
}
}
fn add_builtin_type<'a>(
env: &mut Env<'a>,
builtin: Builtin<'a>,
var: Variable,
opt_name: Option<Symbol>,
types: &mut Types,
) -> TypeId {
match builtin {
Builtin::Int(width) => match width {
U8 => types.add(RocType::Num(RocNum::U8)),
U16 => types.add(RocType::Num(RocNum::U16)),
U32 => types.add(RocType::Num(RocNum::U32)),
U64 => types.add(RocType::Num(RocNum::U64)),
U128 => types.add(RocType::Num(RocNum::U128)),
I8 => types.add(RocType::Num(RocNum::I8)),
I16 => types.add(RocType::Num(RocNum::I16)),
I32 => types.add(RocType::Num(RocNum::I32)),
I64 => types.add(RocType::Num(RocNum::I64)),
I128 => types.add(RocType::Num(RocNum::I128)),
},
Builtin::Float(width) => match width {
F32 => types.add(RocType::Num(RocNum::F32)),
F64 => types.add(RocType::Num(RocNum::F64)),
F128 => types.add(RocType::Num(RocNum::F128)),
},
Builtin::Decimal => types.add(RocType::Num(RocNum::Dec)),
Builtin::Bool => types.add(RocType::Bool),
Builtin::Str => types.add(RocType::RocStr),
Builtin::Dict(key_layout, val_layout) => {
// TODO FIXME this `var` is wrong - should have a different `var` for key and for val
let key_id = add_type_help(env, *key_layout, var, opt_name, types);
let val_id = add_type_help(env, *val_layout, var, opt_name, types);
let dict_id = types.add(RocType::RocDict(key_id, val_id));
types.depends(dict_id, key_id);
types.depends(dict_id, val_id);
dict_id
}
Builtin::Set(elem_layout) => {
let elem_id = add_type_help(env, *elem_layout, var, opt_name, types);
let set_id = types.add(RocType::RocSet(elem_id));
types.depends(set_id, elem_id);
set_id
}
Builtin::List(elem_layout) => {
let elem_id = add_type_help(env, *elem_layout, var, opt_name, types);
let list_id = types.add(RocType::RocList(elem_id));
types.depends(list_id, elem_id);
list_id
}
}
}
fn add_struct<I, L, F>(
env: &mut Env<'_>,
name: String,
fields: I,
types: &mut Types,
to_type: F,
) -> TypeId
where
I: IntoIterator<Item = (L, Variable)>,
L: Display + Ord,
F: FnOnce(String, Vec<(L, TypeId)>) -> RocType,
{
let subs = env.subs;
let fields_iter = &mut fields.into_iter();
let mut sortables =
bumpalo::collections::Vec::with_capacity_in(fields_iter.size_hint().0, env.arena);
for (label, field_var) in fields_iter {
sortables.push((
label,
field_var,
env.layout_cache
.from_var(env.arena, field_var, subs)
.unwrap(),
));
}
sortables.sort_by(|(label1, _, layout1), (label2, _, layout2)| {
cmp_fields(
label1,
layout1,
label2,
layout2,
env.layout_cache.target_info,
)
});
let fields = sortables
.into_iter()
.map(|(label, field_var, field_layout)| {
let type_id = add_type_help(env, field_layout, field_var, None, types);
(label, type_id)
})
.collect::<Vec<(L, TypeId)>>();
types.add(to_type(name, fields))
}
fn add_union<L>(
env: &mut Env<'_>,
opt_name: Option<Symbol>,
union_tags: &UnionLabels<L>,
var: Variable,
types: &mut Types,
) -> TypeId
where
L: Label + Into<TagOrClosure>,
{
let subs = env.subs;
let mut tags: Vec<(String, Vec<Variable>)> = union_tags
.iter_from_subs(subs)
.map(|(label, payload_vars)| {
let name_str = match label.clone().into() {
TagOrClosure::Tag(TagName(uppercase)) => uppercase.as_str().to_string(),
TagOrClosure::Closure(_) => unreachable!(),
};
(name_str, payload_vars.to_vec())
})
.collect();
let layout = env.layout_cache.from_var(env.arena, var, subs).unwrap();
let name = match opt_name {
Some(sym) => sym.as_str(env.interns).to_string(),
None => env.enum_names.get_name(var),
};
// Sort tags alphabetically by tag name
tags.sort_by(|(name1, _), (name2, _)| name1.cmp(name2));
let is_recursive = is_recursive_tag_union(&layout);
let mut tags: Vec<_> = tags
.into_iter()
.map(|(tag_name, payload_vars)| {
match struct_fields_needed(env, payload_vars.iter().copied()) {
0 => {
// no payload
(tag_name, None)
}
1 if !is_recursive => {
// this isn't recursive and there's 1 payload item, so it doesn't
// need its own struct - e.g. for `[Foo Str, Bar Str]` both of them
// can have payloads of plain old Str, no struct wrapper needed.
let payload_var = payload_vars.get(0).unwrap();
let layout = env
.layout_cache
.from_var(env.arena, *payload_var, env.subs)
.expect("Something weird ended up in the content");
let payload_id = add_type_help(env, layout, *payload_var, None, types);
(tag_name, Some(payload_id))
}
_ => {
// create a RocType for the payload and save it
let struct_name = format!("{}_{}", name, tag_name); // e.g. "MyUnion_MyVariant"
let fields = payload_vars.iter().copied().enumerate();
let struct_id = add_struct(env, struct_name, fields, types, |name, fields| {
RocType::TagUnionPayload { name, fields }
});
(tag_name, Some(struct_id))
}
}
})
.collect();
let typ = match layout {
Layout::Union(union_layout) => {
use roc_mono::layout::UnionLayout::*;
match union_layout {
// A non-recursive tag union
// e.g. `Result ok err : [Ok ok, Err err]`
NonRecursive(_) => RocType::TagUnion(RocTagUnion::NonRecursive { name, tags }),
// A recursive tag union (general case)
// e.g. `Expr : [Sym Str, Add Expr Expr]`
Recursive(_) => RocType::TagUnion(RocTagUnion::Recursive { name, tags }),
// A recursive tag union with just one constructor
// Optimization: No need to store a tag ID (the payload is "unwrapped")
// e.g. `RoseTree a : [Tree a (List (RoseTree a))]`
NonNullableUnwrapped(_) => {
todo!()
}
// A recursive tag union that has an empty variant
// Optimization: Represent the empty variant as null pointer => no memory usage & fast comparison
// It has more than one other variant, so they need tag IDs (payloads are "wrapped")
// e.g. `FingerTree a : [Empty, Single a, More (Some a) (FingerTree (Tuple a)) (Some a)]`
// see also: https://youtu.be/ip92VMpf_-A?t=164
NullableWrapped { .. } => {
todo!()
}
// A recursive tag union with only two variants, where one is empty.
// Optimizations: Use null for the empty variant AND don't store a tag ID for the other variant.
// e.g. `ConsList a : [Nil, Cons a (ConsList a)]`
NullableUnwrapped {
nullable_id: null_represents_first_tag,
other_fields: _, // TODO use this!
} => {
// NullableUnwrapped tag unions should always have exactly 2 tags.
debug_assert_eq!(tags.len(), 2);
let null_tag;
let non_null;
if null_represents_first_tag {
// If nullable_id is true, then the null tag is second, which means
// pop() will return it because it's at the end of the vec.
null_tag = tags.pop().unwrap().0;
non_null = tags.pop().unwrap();
} else {
// The null tag is first, which means the tag with the payload is second.
non_null = tags.pop().unwrap();
null_tag = tags.pop().unwrap().0;
}
let (non_null_tag, non_null_payload) = non_null;
RocType::TagUnion(RocTagUnion::NullableUnwrapped {
name,
null_tag,
non_null_tag,
non_null_payload: non_null_payload.unwrap(),
null_represents_first_tag,
})
}
}
}
Layout::Builtin(Builtin::Int(_)) => RocType::TagUnion(RocTagUnion::Enumeration {
name,
tags: tags.into_iter().map(|(tag_name, _)| tag_name).collect(),
}),
Layout::Builtin(_)
| Layout::Struct { .. }
| Layout::Boxed(_)
| Layout::LambdaSet(_)
| Layout::RecursivePointer => {
// These must be single-tag unions. Bindgen ordinary nonrecursive
// tag unions for them, and let Rust do the unwrapping.
//
// This should be a very rare use case, and it's not worth overcomplicating
// the rest of bindgen to make it do something different.
RocType::TagUnion(RocTagUnion::NonRecursive { name, tags })
}
};
let type_id = types.add(typ);
if is_recursive {
env.known_recursive_types.insert(var, type_id);
}
type_id
}
fn is_recursive_tag_union(layout: &Layout) -> bool {
use roc_mono::layout::UnionLayout::*;
match layout {
Layout::Union(tag_union) => match tag_union {
NonRecursive(_) => false,
Recursive(_)
| NonNullableUnwrapped(_)
| NullableWrapped { .. }
| NullableUnwrapped { .. } => true,
},
_ => false,
}
}
fn struct_fields_needed<I: IntoIterator<Item = Variable>>(env: &mut Env<'_>, vars: I) -> usize {
let subs = env.subs;
let arena = env.arena;
vars.into_iter().fold(0, |count, var| {
let layout = env.layout_cache.from_var(arena, var, subs).unwrap();
if layout.is_dropped_because_empty() {
count
} else {
count + 1
}
})
}
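A sketch of the NullableUnwrapped layout described in the code above, assuming a cons list with an i64 payload; refcounting and deallocation are omitted:

```rust
// For `ConsList a : [Nil, Cons a (ConsList a)]`: Nil is the null pointer
// and Cons needs no tag ID at all, so the whole union is one pointer wide.
struct Cons {
    value: i64,
    rest: ConsList,
}

struct ConsList {
    pointer: *mut Cons, // null <=> Nil
}

impl ConsList {
    const NIL: ConsList = ConsList { pointer: std::ptr::null_mut() };

    fn cons(value: i64, rest: ConsList) -> ConsList {
        let boxed = Box::new(Cons { value, rest });
        ConsList { pointer: Box::into_raw(boxed) }
    }

    fn is_nil(&self) -> bool {
        self.pointer.is_null()
    }

    fn len(&self) -> usize {
        // Walking the list: the discriminant check is just a null check.
        let mut node = self.pointer;
        let mut count = 0;
        while !node.is_null() {
            count += 1;
            node = unsafe { (*node).rest.pointer };
        }
        count
    }
}

fn main() {
    let list = ConsList::cons(1, ConsList::cons(2, ConsList::NIL));
    assert!(!list.is_nil());
    assert_eq!(list.len(), 2);
    // (A real binding would also refcount and free the nodes on Drop.)
}
```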


@ -1,17 +1,17 @@
use crate::types::{RocNum, RocTagUnion, RocType, TypeId, Types};
use indexmap::IndexMap;
use roc_mono::layout::UnionLayout;
use roc_target::Architecture;
use roc_target::{Architecture, TargetInfo};
use std::convert::TryInto;
use std::fmt::Display;
pub static TEMPLATE: &[u8] = include_bytes!("../templates/template.rs");
pub static HEADER: &[u8] = include_bytes!("../templates/header.rs");
const INDENT: &str = " ";
const VARIANT_DOC_COMMENT: &str =
const DISCRIMINANT_DOC_COMMENT: &str =
"/// Returns which variant this tag union holds. Note that this never includes a payload!";
type Impls = IndexMap<Impl, IndexMap<String, Vec<Architecture>>>;
type Impls = IndexMap<Impl, IndexMap<String, Vec<TargetInfo>>>;
type Impl = Option<String>;
/// Recursive tag unions need a custom Clone which bumps refcount.
@ -43,20 +43,20 @@ const RECURSIVE_TAG_UNION_STORAGE: &str = r#"#[inline(always)]
/// Add the given declaration body, along with the architecture, to the Impls.
/// This can optionally be within an `impl`, or if no `impl` is specified,
/// then it's added at the top level.
fn add_decl(impls: &mut Impls, opt_impl: Impl, architecture: Architecture, body: String) {
fn add_decl(impls: &mut Impls, opt_impl: Impl, target_info: TargetInfo, body: String) {
let decls = impls.entry(opt_impl).or_default();
let architectures = decls.entry(body).or_default();
let targets = decls.entry(body).or_default();
architectures.push(architecture);
targets.push(target_info);
}
pub fn emit(types_by_architecture: &[(Architecture, Types)]) -> String {
pub fn emit(types_and_targets: &[(Types, TargetInfo)]) -> String {
let mut buf = String::new();
let mut impls: Impls = IndexMap::default();
for (architecture, types) in types_by_architecture.iter() {
for (types, target_info) in types_and_targets {
for id in types.sorted_ids() {
add_type(*architecture, id, types, &mut impls);
add_type(*target_info, id, types, &mut impls);
}
}
@ -73,7 +73,7 @@ pub fn emit(types_by_architecture: &[(Architecture, Types)]) -> String {
has_impl = false;
}
for (decl, architectures) in decls {
for (decl, targets) in decls {
// If we're inside an `impl` block, indent the cfg annotation
let indent = if has_impl { INDENT } else { "" };
@ -81,20 +81,23 @@ pub fn emit(types_by_architecture: &[(Architecture, Types)]) -> String {
buf.push('\n');
buf.push_str(indent);
match architectures.len() {
match targets.len() {
1 => {
let arch = arch_to_str(architectures.get(0).unwrap());
let arch = arch_to_str(targets.get(0).unwrap().architecture);
buf.push_str(&format!("#[cfg(target_arch = \"{arch}\")]"));
}
_ => {
// We should never have a decl recorded with 0 architectures!
debug_assert_ne!(architectures.len(), 0);
// We should never have a decl recorded with 0 targets!
debug_assert_ne!(targets.len(), 0);
let alternatives = architectures
let alternatives = targets
.iter()
.map(|arch| {
format!("{indent}{INDENT}target_arch = \"{}\"", arch_to_str(arch))
.map(|target_info| {
format!(
"{indent}{INDENT}target_arch = \"{}\"",
arch_to_str(target_info.architecture)
)
})
.collect::<Vec<_>>()
.join(",\n");
@ -120,13 +123,13 @@ pub fn emit(types_by_architecture: &[(Architecture, Types)]) -> String {
buf
}
fn add_type(architecture: Architecture, id: TypeId, types: &Types, impls: &mut Impls) {
match types.get(id) {
fn add_type(target_info: TargetInfo, id: TypeId, types: &Types, impls: &mut Impls) {
match types.get_type(id) {
RocType::Struct { name, fields } => {
add_struct(name, architecture, fields, id, types, impls, false)
add_struct(name, target_info, fields, id, types, impls, false)
}
RocType::TagUnionPayload { name, fields } => {
add_struct(name, architecture, fields, id, types, impls, true)
add_struct(name, target_info, fields, id, types, impls, true)
}
RocType::TagUnion(tag_union) => {
match tag_union {
@ -134,47 +137,75 @@ fn add_type(architecture: Architecture, id: TypeId, types: &Types, impls: &mut I
if tags.len() == 1 {
// An enumeration with one tag is a zero-sized unit type, so
// represent it as a zero-sized struct (e.g. "struct Foo()").
let derive = derive_str(types.get(id), types, true);
let derive = derive_str(types.get_type(id), types, true);
let struct_name = type_name(id, types);
let body = format!("{derive}\nstruct {struct_name}();");
add_decl(impls, None, architecture, body);
add_decl(impls, None, target_info, body);
} else {
add_enumeration(
name,
architecture,
types.get(id),
target_info,
types.get_type(id),
tags.iter(),
types,
impls,
)
}
}
RocTagUnion::NonRecursive { tags, name } => {
RocTagUnion::NonRecursive {
tags,
name,
discriminant_type,
} => {
// Empty tag unions can never come up at runtime,
// and so don't need declared types.
if !tags.is_empty() {
// The discriminant is placed immediately after the last byte of
// the longest variant. That means if we take the total size
// and subtract the size of the discriminant, we have its offset.
//
// Importantly, we should use the size *without* alignment rounding;
// otherwise, that might not be where the discriminant actually is!
let discriminant_offset =
types.size_ignoring_alignment(id) - discriminant_type.size();
add_tag_union(
Recursiveness::NonRecursive,
name,
architecture,
target_info,
id,
tags,
discriminant_offset,
types,
impls,
);
}
}
RocTagUnion::Recursive { tags, name } => {
RocTagUnion::Recursive {
tags,
name,
discriminant_type,
} => {
// Empty tag unions can never come up at runtime,
// and so don't need declared types.
if !tags.is_empty() {
// The discriminant is placed immediately after the last byte of
// the longest variant. That means if we take the total size
// and subtract the size of the discriminant, we have its offset.
//
// Importantly, we should use the size *without* alignment rounding;
// otherwise, that might not be where the discriminant actually is!
let discriminant_offset =
types.size_ignoring_alignment(id) - discriminant_type.size();
add_tag_union(
Recursiveness::Recursive,
name,
architecture,
target_info,
id,
tags,
discriminant_offset,
types,
impls,
);
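A worked example of the discriminant-offset arithmetic in the comments above, assuming a hypothetical `[Ok I64, Err U8]` union with a one-byte discriminant:

```rust
fn main() {
    let largest_payload = 8; // the Ok variant's I64 payload
    let discriminant_size = 1; // two tags fit in a u8
    // Size *without* alignment rounding: payload bytes + discriminant.
    let size_ignoring_alignment = largest_payload + discriminant_size; // 9
    let discriminant_offset = size_ignoring_alignment - discriminant_size;
    assert_eq!(discriminant_offset, 8);
    // Rounding 9 up to the 8-byte alignment first would give 16 - 1 = 15,
    // which is NOT where the discriminant lives - hence the comment above.
    let size_rounded = (size_ignoring_alignment + 7) / 8 * 8;
    assert_eq!(size_rounded, 16);
}
```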
@ -191,7 +222,7 @@ fn add_type(architecture: Architecture, id: TypeId, types: &Types, impls: &mut I
null_represents_first_tag,
} => add_nullable_unwrapped(
name,
architecture,
target_info,
id,
null_tag,
non_null_tag,
@ -222,7 +253,7 @@ fn add_type(architecture: Architecture, id: TypeId, types: &Types, impls: &mut I
fn add_discriminant(
name: &str,
architecture: Architecture,
target_info: TargetInfo,
tag_names: Vec<String>,
types: &Types,
impls: &mut Impls,
@ -234,7 +265,7 @@ fn add_discriminant(
// Bar,
// Foo,
// }
let discriminant_name = format!("variant_{name}");
let discriminant_name = format!("discriminant_{name}");
let discriminant_type = RocType::TagUnion(RocTagUnion::Enumeration {
name: discriminant_name.clone(),
tags: tag_names.clone(),
@ -242,7 +273,7 @@ fn add_discriminant(
add_enumeration(
&discriminant_name,
architecture,
target_info,
&discriminant_type,
tag_names.into_iter(),
types,
@ -258,12 +289,14 @@ enum Recursiveness {
NonRecursive,
}
#[allow(clippy::too_many_arguments)]
fn add_tag_union(
recursiveness: Recursiveness,
name: &str,
architecture: Architecture,
target_info: TargetInfo,
type_id: TypeId,
tags: &[(String, Option<TypeId>)],
discriminant_offset: u32,
types: &Types,
impls: &mut Impls,
) {
@ -272,11 +305,9 @@ fn add_tag_union(
debug_assert_ne!(tags.len(), 0);
let tag_names = tags.iter().map(|(name, _)| name).cloned().collect();
let discriminant_name = add_discriminant(name, architecture, tag_names, types, impls);
let typ = types.get(type_id);
let target_info = architecture.into();
let discriminant_offset = RocTagUnion::discriminant_offset(tags, types, target_info);
let size = typ.size(types, target_info);
let discriminant_name = add_discriminant(name, target_info, tag_names, types, impls);
let typ = types.get_type(type_id);
let size_rounded_to_alignment = types.size_rounded_to_alignment(type_id);
let (actual_self, actual_self_mut, actual_other, union_name) = match recursiveness {
Recursiveness::Recursive => (
"(&*self.union_pointer())",
@ -297,7 +328,7 @@ fn add_tag_union(
add_decl(
impls,
None,
architecture,
target_info,
format!(
r#"
pub struct {name} {{
@ -320,9 +351,9 @@ pub struct {name} {{
let mut buf = format!("#[repr(C)]\n{pub_str}union {decl_union_name} {{\n");
for (tag_name, opt_payload_id) in tags {
// If there's no payload, we don't need a variant for it.
// If there's no payload, we don't need a discriminant for it.
if let Some(payload_id) = opt_payload_id {
let payload_type = types.get(*payload_id);
let payload_type = types.get_type(*payload_id);
buf.push_str(&format!("{INDENT}{tag_name}: "));
@ -341,17 +372,24 @@ pub struct {name} {{
}
}
// When there's no alignment padding after the largest variant,
// the compiler will make extra room for the discriminant.
// We need that to be reflected in the overall size of the enum,
// so add an extra variant with the appropriate size.
//
// (Do this even if it theoretically shouldn't be necessary, since
// there's no runtime cost and it more explicitly syncs the
// union's size with what we think it should be.)
buf.push_str(&format!("{INDENT}_sizer: [u8; {size}],\n}}"));
if tags.len() > 1 {
// When there's a discriminant (so, multiple tags) and there is
// no alignment padding after the largest variant,
// the compiler will make extra room for the discriminant.
// We need that to be reflected in the overall size of the enum,
// so add an extra variant with the appropriate size.
//
// (Do this even if it theoretically shouldn't be necessary, since
// there's no runtime cost and it more explicitly syncs the
// union's size with what we think it should be.)
buf.push_str(&format!(
"{INDENT}_sizer: [u8; {size_rounded_to_alignment}],\n"
));
}
add_decl(impls, None, architecture, buf);
buf.push('}');
add_decl(impls, None, target_info, buf);
}
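A sketch of what the emitted `_sizer` variant accomplishes; the union below is hand-written to mirror the generated shape (names are illustrative):

```rust
// A Rust `union` is only as big as its largest field, so an extra
// [u8; N] field pins the union's size to the Roc-side size
// (payload plus discriminant, rounded up to alignment).
#[allow(non_snake_case, non_camel_case_types, dead_code)]
#[repr(C)]
union union_MyTagUnion {
    Foo: core::mem::ManuallyDrop<i64>, // 8 bytes
    Bar: u8,                           // 1 byte
    _sizer: [u8; 16],                  // 8 payload + 1 tag, rounded to 16
}

fn main() {
    assert_eq!(core::mem::size_of::<union_MyTagUnion>(), 16);
}
```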
// The impl for the tag union
@ -364,20 +402,20 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
RECURSIVE_TAG_UNION_STORAGE.to_string(),
);
if tags.len() <= max_pointer_tagged_variants(architecture) {
bitmask = format!("{:#b}", tagged_pointer_bitmask(architecture));
if tags.len() <= max_pointer_tagged_variants(target_info.architecture) {
bitmask = format!("{:#b}", tagged_pointer_bitmask(target_info.architecture));
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"{VARIANT_DOC_COMMENT}
pub fn variant(&self) -> {discriminant_name} {{
r#"{DISCRIMINANT_DOC_COMMENT}
pub fn discriminant(&self) -> {discriminant_name} {{
// The discriminant is stored in the unused bytes at the end of the recursive pointer
unsafe {{ core::mem::transmute::<u8, {discriminant_name}>((self.pointer as u8) & {bitmask}) }}
}}"#
@ -387,17 +425,15 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Internal helper
fn tag_discriminant(pointer: *mut {union_name}, discriminant: {discriminant_name}) -> *mut {union_name} {{
// The discriminant is stored in the unused bytes at the end of the union pointer
unsafe {{
let untagged = (pointer as usize) & (!{bitmask} as usize);
let tagged = untagged | (discriminant as usize);
let untagged = (pointer as usize) & (!{bitmask} as usize);
let tagged = untagged | (discriminant as usize);
tagged as *mut {union_name}
}}
tagged as *mut {union_name}
}}"#
),
);
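A self-contained sketch of the pointer-tagging scheme these helpers emit, assuming 8-byte alignment (so the three low bits are free, which is why `max_pointer_tagged_variants` depends on the architecture):

```rust
fn main() {
    const BITMASK: usize = 0b111; // low bits freed up by 8-byte alignment

    let boxed: Box<u64> = Box::new(42);
    let pointer = Box::into_raw(boxed);
    let discriminant: usize = 0b101; // some tag id in 0..8

    // Tag: clear the low bits, then OR the discriminant in.
    let tagged = ((pointer as usize) & !BITMASK) | discriminant;

    // Untag: mask the discriminant back out to recover the real pointer.
    assert_eq!(tagged & BITMASK, discriminant);
    let untagged = (tagged & !BITMASK) as *mut u64;
    assert_eq!(unsafe { *untagged }, 42);

    // Clean up the allocation.
    drop(unsafe { Box::from_raw(untagged) });
}
```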
@ -405,7 +441,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Internal helper
fn union_pointer(&self) -> *mut {union_name} {{
@ -416,9 +452,9 @@ pub struct {name} {{
);
} else {
todo!(
"Support {} tags in a recursive tag union on architecture {:?}. (This is too many tags for pointer tagging to work, so we need to bindgen something different.)",
"Support {} tags in a recursive tag union on target_info {:?}. (This is too many tags for pointer tagging to work, so we need to bindgen something different.)",
tags.len(),
architecture
target_info
);
}
}
@ -442,10 +478,10 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"{VARIANT_DOC_COMMENT}
pub fn variant(&self) -> {discriminant_name} {{
r#"{DISCRIMINANT_DOC_COMMENT}
pub fn discriminant(&self) -> {discriminant_name} {{
unsafe {{
let bytes = core::mem::transmute::<&Self, &[u8; core::mem::size_of::<Self>()]>(self);
@ -458,7 +494,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Internal helper
fn set_discriminant(&mut self, discriminant: {discriminant_name}) {{
@ -480,13 +516,13 @@ pub struct {name} {{
// pub fn Foo(payload: roc_std::RocStr) -> Self {
// Self {
// tag: tag_MyTagUnion::Foo,
// variant: variant_MyTagUnion {
// discriminant: discriminant_MyTagUnion {
// Foo: core::mem::ManuallyDrop::new(payload),
// },
// }
// }
if let Some(payload_id) = opt_payload_id {
let payload_type = types.get(*payload_id);
let payload_type = types.get_type(*payload_id);
let self_for_into;
let payload_args;
let args_to_payload;
@ -501,14 +537,14 @@ pub struct {name} {{
Recursiveness::Recursive => {
if payload_type.has_pointer(types) {
owned_get_payload = format!(
r#"unsafe {{
r#"{{
let ptr = (self.pointer as usize & !{bitmask}) as *mut {union_name};
core::mem::ManuallyDrop::take(&mut (*ptr).{tag_name})
}}"#
);
borrowed_get_payload = format!(
r#"unsafe {{
r#"{{
let ptr = (self.pointer as usize & !{bitmask}) as *mut {union_name};
&(*ptr).{tag_name}
@ -518,14 +554,14 @@ pub struct {name} {{
self_for_into = "mut self";
} else {
owned_get_payload = format!(
r#"unsafe {{
r#"{{
let ptr = (self.pointer as usize & !{bitmask}) as *mut {union_name};
core::ptr::read(ptr).{tag_name}
}}"#
);
borrowed_get_payload = format!(
r#"unsafe {{
r#"{{
let ptr = (self.pointer as usize & !{bitmask}) as *mut {union_name};
(&ptr).{tag_name}
@ -537,15 +573,14 @@ pub struct {name} {{
}
Recursiveness::NonRecursive => {
if payload_type.has_pointer(types) {
owned_get_payload = format!(
"unsafe {{ core::mem::ManuallyDrop::take(&mut self.{tag_name}) }}"
);
borrowed_get_payload = format!("unsafe {{ &self.{tag_name} }}");
owned_get_payload =
format!("core::mem::ManuallyDrop::take(&mut self.{tag_name})");
borrowed_get_payload = format!("&self.{tag_name}");
// we need `mut self` for the argument because of ManuallyDrop
self_for_into = "mut self";
} else {
owned_get_payload = format!("unsafe {{ self.{tag_name} }}");
borrowed_get_payload = format!("unsafe {{ &self.{tag_name} }}");
owned_get_payload = format!("self.{tag_name}");
borrowed_get_payload = format!("&self.{tag_name}");
// we don't need `mut self` unless we need ManuallyDrop
self_for_into = "self";
};
@ -634,7 +669,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Construct a tag named {tag_name}, with the appropriate payload
pub fn {tag_name}({payload_args}) -> Self {{{body}
@ -646,13 +681,13 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Unsafely assume the given {name} has a .variant() of {tag_name} and convert it to {tag_name}'s payload.
/// (Always examine .variant() first to make sure this is the correct variant!)
/// Panics in debug builds if the .variant() doesn't return {tag_name}.
r#"/// Unsafely assume the given {name} has a .discriminant() of {tag_name} and convert it to {tag_name}'s payload.
/// (Always examine .discriminant() first to make sure this is the correct variant!)
/// Panics in debug builds if the .discriminant() doesn't return {tag_name}.
pub unsafe fn into_{tag_name}({self_for_into}) -> {owned_ret_type} {{
debug_assert_eq!(self.variant(), {discriminant_name}::{tag_name});
debug_assert_eq!(self.discriminant(), {discriminant_name}::{tag_name});
let payload = {owned_get_payload};
@ -664,13 +699,13 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Unsafely assume the given {name} has a .variant() of {tag_name} and return its payload.
/// (Always examine .variant() first to make sure this is the correct variant!)
/// Panics in debug builds if the .variant() doesn't return {tag_name}.
r#"/// Unsafely assume the given {name} has a .discriminant() of {tag_name} and return its payload.
/// (Always examine .discriminant() first to make sure this is the correct variant!)
/// Panics in debug builds if the .discriminant() doesn't return {tag_name}.
pub unsafe fn as_{tag_name}(&self) -> {borrowed_ret_type} {{
debug_assert_eq!(self.variant(), {discriminant_name}::{tag_name});
debug_assert_eq!(self.discriminant(), {discriminant_name}::{tag_name});
let payload = {borrowed_get_payload};
@ -682,7 +717,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// A tag named {tag_name}, which has no payload.
pub const {tag_name}: Self = unsafe {{
@ -698,7 +733,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Other `into_` methods return a payload, but since the {tag_name} tag
/// has no payload, this does nothing and is only here for completeness.
@ -711,7 +746,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Other `as` methods return a payload, but since the {tag_name} tag
/// has no payload, this does nothing and is only here for completeness.
@ -727,16 +762,16 @@ pub struct {name} {{
// The Drop impl for the tag union
{
let opt_impl = Some(format!("impl Drop for {name}"));
let mut buf = String::new();
let mut drop_payload = String::new();
write_impl_tags(
2,
3,
tags.iter(),
&discriminant_name,
&mut buf,
&mut drop_payload,
|tag_name, opt_payload_id| {
match opt_payload_id {
Some(payload_id) if types.get(payload_id).has_pointer(types) => {
Some(payload_id) if types.get_type(payload_id).has_pointer(types) => {
format!("unsafe {{ core::mem::ManuallyDrop::drop(&mut {actual_self_mut}.{tag_name}) }},",)
}
_ => {
@ -748,12 +783,45 @@ pub struct {name} {{
},
);
add_decl(
impls,
opt_impl,
architecture,
format!("fn drop(&mut self) {{\n{buf}{INDENT}}}"),
);
// Drop works differently for recursive vs non-recursive tag unions.
let drop_fn = match recursiveness {
Recursiveness::Recursive => {
format!(
r#"fn drop(&mut self) {{
// We only need to do any work if there's actually a heap-allocated payload.
if let Some(storage) = self.storage() {{
let mut new_storage = storage.get();
// Decrement the refcount
let needs_dealloc = !new_storage.is_readonly() && new_storage.decrease();
if needs_dealloc {{
// Drop the payload first.
{drop_payload}
// Dealloc the pointer
let alignment = core::mem::align_of::<Self>().max(core::mem::align_of::<roc_std::Storage>());
unsafe {{ crate::roc_dealloc(storage.as_ptr().cast(), alignment as u32); }}
}} else {{
// Write the storage back.
storage.set(new_storage);
}}
}}
}}"#
)
}
Recursiveness::NonRecursive => {
format!(
r#"fn drop(&mut self) {{
// Drop the payloads
{drop_payload}
}}"#
)
}
};
add_decl(impls, opt_impl, target_info, drop_fn);
}
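A toy model of the generated Drop logic above; `Storage` here is a stand-in enum, not roc_std's actual type:

```rust
use std::cell::Cell;

// Decrement a refcount; only the drop that takes it to zero frees the
// payload, and readonly (static) storage is never freed.
#[derive(Clone, Copy)]
enum Storage {
    Readonly,
    Refcounted(u32),
}

fn drop_one(storage: &Cell<Storage>) -> bool {
    match storage.get() {
        Storage::Readonly => false,     // never dealloc readonly data
        Storage::Refcounted(1) => true, // last reference: needs_dealloc
        Storage::Refcounted(n) => {
            storage.set(Storage::Refcounted(n - 1)); // write storage back
            false
        }
    }
}

fn main() {
    let storage = Cell::new(Storage::Refcounted(2));
    assert!(!drop_one(&storage)); // first drop: refcount 2 -> 1
    assert!(drop_one(&storage));  // second drop: dealloc happens here
    assert!(!drop_one(&Cell::new(Storage::Readonly)));
}
```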
// The PartialEq impl for the tag union
@ -765,7 +833,7 @@ pub struct {name} {{
};
let opt_impl = Some(format!("{opt_impl_prefix}impl PartialEq for {name}"));
let mut buf = r#"fn eq(&self, other: &Self) -> bool {
if self.variant() != other.variant() {
if self.discriminant() != other.discriminant() {
return false;
}
@ -796,14 +864,14 @@ pub struct {name} {{
buf.push_str(INDENT);
buf.push('}');
add_decl(impls, opt_impl, architecture, buf);
add_decl(impls, opt_impl, target_info, buf);
}
// The PartialOrd impl for the tag union
{
let opt_impl = Some(format!("impl PartialOrd for {name}"));
let mut buf = r#"fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
match self.variant().partial_cmp(&other.variant()) {
match self.discriminant().partial_cmp(&other.discriminant()) {
Some(core::cmp::Ordering::Equal) => {}
not_eq => return not_eq,
}
@ -835,14 +903,14 @@ pub struct {name} {{
buf.push_str(INDENT);
buf.push('}');
add_decl(impls, opt_impl, architecture, buf);
add_decl(impls, opt_impl, target_info, buf);
}
// The Ord impl for the tag union
{
let opt_impl = Some(format!("impl Ord for {name}"));
let mut buf = r#"fn cmp(&self, other: &Self) -> core::cmp::Ordering {
match self.variant().cmp(&other.variant()) {
match self.discriminant().cmp(&other.discriminant()) {
core::cmp::Ordering::Equal => {}
not_eq => return not_eq,
}
@ -874,7 +942,7 @@ pub struct {name} {{
buf.push_str(INDENT);
buf.push('}');
add_decl(impls, opt_impl, architecture, buf);
add_decl(impls, opt_impl, target_info, buf);
}
// The Clone impl for the tag union
@ -935,7 +1003,7 @@ pub struct {name} {{
r#"
};
answer.set_discriminant(self.variant());
answer.set_discriminant(self.discriminant());
answer
}"#,
@ -945,7 +1013,7 @@ pub struct {name} {{
}
};
add_decl(impls, opt_impl, architecture, body);
add_decl(impls, opt_impl, target_info, body);
}
// The Hash impl for the tag union
@ -977,7 +1045,7 @@ pub struct {name} {{
buf.push_str(INDENT);
buf.push('}');
add_decl(impls, opt_impl, architecture, buf);
add_decl(impls, opt_impl, target_info, buf);
}
// The Debug impl for the tag union
@ -1001,7 +1069,7 @@ pub struct {name} {{
// If it's a ManuallyDrop, we need a `*` prefix to dereference it
// (because otherwise we're using ManuallyDrop's Debug instance
// rather than the Debug instance of the value it wraps).
let payload_type = types.get(payload_id);
let payload_type = types.get_type(payload_id);
let deref_str = if payload_type.has_pointer(types) {
"&*"
} else {
@ -1017,20 +1085,10 @@ pub struct {name} {{
| RocType::RocSet(_)
| RocType::RocBox(_)
| RocType::TagUnion(_)
| RocType::Struct { .. }
| RocType::RecursivePointer { .. } => {
format!(".field({deref_str}{actual_self}.{tag_name})")
}
RocType::Struct { fields, .. } => {
let mut buf = Vec::new();
for (label, _) in fields {
buf.push(format!(
".field(&({deref_str}{actual_self}.{tag_name}).{label})"
));
}
buf.join("\n")
}
RocType::TagUnionPayload { fields, .. } => {
let mut buf = Vec::new();
@ -1061,7 +1119,7 @@ pub struct {name} {{
buf.push_str(INDENT);
buf.push('}');
add_decl(impls, opt_impl, architecture, buf);
add_decl(impls, opt_impl, target_info, buf);
}
}
@ -1078,7 +1136,7 @@ fn write_impl_tags<
) {
write_indents(indentations, buf);
buf.push_str("match self.variant() {\n");
buf.push_str("match self.discriminant() {\n");
for (tag_name, opt_payload_id) in tags {
let branch_str = to_branch_str(tag_name, *opt_payload_id);
@ -1097,7 +1155,7 @@ fn write_impl_tags<
fn add_enumeration<I: ExactSizeIterator<Item = S>, S: AsRef<str> + Display>(
name: &str,
architecture: Architecture,
target_info: TargetInfo,
typ: &RocType,
tags: I,
types: &Types,
@ -1114,7 +1172,7 @@ fn add_enumeration<I: ExactSizeIterator<Item = S>, S: AsRef<str> + Display>(
// e.g. "#[repr(u8)]\npub enum Foo {\n"
let mut buf = format!("{derive}\n#[repr(u{repr_bytes})]\npub enum {name} {{\n");
// Debug impls should never vary by architecture.
// Debug impls should never vary by target_info.
let mut debug_buf = format!(
r#"impl core::fmt::Debug for {name} {{
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {{
@ -1136,21 +1194,26 @@ fn add_enumeration<I: ExactSizeIterator<Item = S>, S: AsRef<str> + Display>(
"}}\n\n{debug_buf}{INDENT}{INDENT}}}\n{INDENT}}}\n}}"
));
add_decl(impls, None, architecture, buf);
add_decl(impls, None, target_info, buf);
}
fn add_struct<S: Display>(
name: &str,
architecture: Architecture,
target_info: TargetInfo,
fields: &[(S, TypeId)],
struct_id: TypeId,
types: &Types,
impls: &mut Impls,
is_tag_union_payload: bool,
) {
let derive = derive_str(types.get(struct_id), types, true);
let derive = derive_str(types.get_type(struct_id), types, true);
let pub_str = if is_tag_union_payload { "" } else { "pub " };
let mut buf = format!("{derive}\n#[repr(C)]\n{pub_str}struct {name} {{\n");
let repr = if fields.len() == 1 {
"transparent"
} else {
"C"
};
let mut buf = format!("{derive}\n#[repr({repr})]\n{pub_str}struct {name} {{\n");
for (label, type_id) in fields {
let type_str = type_name(*type_id, types);
@ -1168,11 +1231,11 @@ fn add_struct<S: Display>(
buf.push('}');
add_decl(impls, None, architecture, buf);
add_decl(impls, None, target_info, buf);
}
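A sketch of the `#[repr(transparent)]` choice introduced above for single-field structs: it guarantees the wrapper has exactly its field's layout and ABI, so a one-payload tag struct can cross the host boundary as if it were the payload itself, while multi-field structs still need `#[repr(C)]`:

```rust
#[repr(transparent)]
struct Wrapper {
    inner: u64,
}

fn main() {
    assert_eq!(
        core::mem::size_of::<Wrapper>(),
        core::mem::size_of::<u64>()
    );
    assert_eq!(
        core::mem::align_of::<Wrapper>(),
        core::mem::align_of::<u64>()
    );
    let w = Wrapper { inner: 7 };
    assert_eq!(w.inner, 7);
}
```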
fn type_name(id: TypeId, types: &Types) -> String {
match types.get(id) {
match types.get_type(id) {
RocType::RocStr => "roc_std::RocStr".to_string(),
RocType::Bool => "bool".to_string(),
RocType::Num(RocNum::U8) => "u8".to_string(),
@ -1239,7 +1302,7 @@ fn derive_str(typ: &RocType, types: &Types, include_debug: bool) -> String {
#[allow(clippy::too_many_arguments)]
fn add_nullable_unwrapped(
name: &str,
architecture: Architecture,
target_info: TargetInfo,
id: TypeId,
null_tag: &str,
non_null_tag: &str,
@ -1252,8 +1315,8 @@ fn add_nullable_unwrapped(
tag_names.sort();
let discriminant_name = add_discriminant(name, architecture, tag_names, types, impls);
let payload_type = types.get(non_null_payload);
let discriminant_name = add_discriminant(name, target_info, tag_names, types, impls);
let payload_type = types.get_type(non_null_payload);
let payload_type_name = type_name(non_null_payload, types);
let has_pointer = payload_type.has_pointer(types);
@ -1261,20 +1324,20 @@ fn add_nullable_unwrapped(
{
// This struct needs its own Clone impl because it has
// a refcount to bump
let derive_extras = if types.get(id).has_float(types) {
let derive_extras = if types.get_type(id).has_float(types) {
""
} else {
", Eq, Ord, Hash"
};
let body = format!(
r#"#[repr(C)]
r#"#[repr(transparent)]
#[derive(PartialEq, PartialOrd{derive_extras})]
pub struct {name} {{
pointer: *mut core::mem::ManuallyDrop<{payload_type_name}>,
}}"#
);
add_decl(impls, None, architecture, body);
add_decl(impls, None, target_info, body);
}
// The impl for the tag union
@ -1284,17 +1347,17 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
RECURSIVE_TAG_UNION_STORAGE.to_string(),
);
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"{VARIANT_DOC_COMMENT}
pub fn variant(&self) -> {discriminant_name} {{
r#"{DISCRIMINANT_DOC_COMMENT}
pub fn discriminant(&self) -> {discriminant_name} {{
if self.pointer.is_null() {{
{discriminant_name}::{null_tag}
}} else {{
@ -1369,7 +1432,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Construct a tag named {non_null_tag}, with the appropriate payload
pub fn {non_null_tag}({payload_args}) -> Self {{
@ -1408,13 +1471,13 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Unsafely assume the given {name} has a .variant() of {non_null_tag} and convert it to {non_null_tag}'s payload.
/// (Always examine .variant() first to make sure this is the correct variant!)
/// Panics in debug builds if the .variant() doesn't return {non_null_tag}.
r#"/// Unsafely assume the given {name} has a .discriminant() of {non_null_tag} and convert it to {non_null_tag}'s payload.
/// (Always examine .discriminant() first to make sure this is the correct variant!)
/// Panics in debug builds if the .discriminant() doesn't return {non_null_tag}.
pub unsafe fn into_{non_null_tag}(self) -> {owned_ret_type} {{
debug_assert_eq!(self.variant(), {discriminant_name}::{non_null_tag});
debug_assert_eq!(self.discriminant(), {discriminant_name}::{non_null_tag});
let payload = {assign_payload};
@ -1429,13 +1492,13 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Unsafely assume the given {name} has a .variant() of {non_null_tag} and return its payload.
/// (Always examine .variant() first to make sure this is the correct variant!)
/// Panics in debug builds if the .variant() doesn't return {non_null_tag}.
r#"/// Unsafely assume the given {name} has a .discriminant() of {non_null_tag} and return its payload.
/// (Always examine .discriminant() first to make sure this is the correct variant!)
/// Panics in debug builds if the .discriminant() doesn't return {non_null_tag}.
pub unsafe fn as_{non_null_tag}(&self) -> {borrowed_ret_type} {{
debug_assert_eq!(self.variant(), {discriminant_name}::{non_null_tag});
debug_assert_eq!(self.discriminant(), {discriminant_name}::{non_null_tag});
let payload = &*self.pointer;
@ -1453,7 +1516,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// A tag named {null_tag}, which has no payload.
pub const {null_tag}: Self = Self {{
@ -1465,7 +1528,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl.clone(),
architecture,
target_info,
format!(
r#"/// Other `into_` methods return a payload, but since the {null_tag} tag
/// has no payload, this does nothing and is only here for completeness.
@ -1478,7 +1541,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl,
architecture,
target_info,
format!(
r#"/// Other `as` methods return a payload, but since the {null_tag} tag
/// has no payload, this does nothing and is only here for completeness.
@ -1497,7 +1560,7 @@ pub struct {name} {{
add_decl(
impls,
opt_impl,
architecture,
target_info,
RECURSIVE_TAG_UNION_CLONE.to_string(),
);
}
@ -1509,48 +1572,33 @@ pub struct {name} {{
add_decl(
impls,
opt_impl,
architecture,
format!(
r#"fn drop(&mut self) {{
target_info,
r#"fn drop(&mut self) {{
// We only need to do any work if there's actually a heap-allocated payload.
if let Some(storage) = self.storage() {{
// Decrement the refcount and return early if no dealloc is needed
{{
let mut new_storage = storage.get();
let mut new_storage = storage.get();
if new_storage.is_readonly() {{
return;
// Decrement the refcount
let needs_dealloc = !new_storage.is_readonly() && new_storage.decrease();
if needs_dealloc {{
// Drop the payload first.
unsafe {{
core::mem::ManuallyDrop::drop(&mut core::ptr::read(self.pointer));
}}
let needs_dealloc = new_storage.decrease();
if !needs_dealloc {{
// Write the storage back.
storage.set(new_storage);
return;
}}
}}
if !self.pointer.is_null() {{
// If there is a payload, drop it first.
let payload = unsafe {{ core::mem::ManuallyDrop::take(&mut *self.pointer) }};
core::mem::drop::<{payload_type_name}>(payload);
}}
// Dealloc the pointer
unsafe {{
// Dealloc the pointer
let alignment = core::mem::align_of::<Self>().max(core::mem::align_of::<roc_std::Storage>());
let alloc_ptr = self.pointer.cast::<u8>().sub(alignment);
crate::roc_dealloc(
alloc_ptr as *mut core::ffi::c_void,
alignment as u32,
);
unsafe {{
crate::roc_dealloc(storage.as_ptr().cast(), alignment as u32);
}}
}} else {{
// Write the storage back.
storage.set(new_storage);
}}
}}
}}"#
),
}}"#.to_string(),
);
}
@ -1610,11 +1658,11 @@ pub struct {name} {{
}}"#
);
add_decl(impls, opt_impl, architecture, body);
add_decl(impls, opt_impl, target_info, body);
}
}
fn arch_to_str(architecture: &Architecture) -> &'static str {
fn arch_to_str(architecture: Architecture) -> &'static str {
match architecture {
Architecture::X86_64 => "x86_64",
Architecture::X86_32 => "x86",


@ -1,4 +1,3 @@
pub mod bindgen;
pub mod bindgen_c;
pub mod bindgen_rs;
pub mod bindgen_zig;


@ -1,14 +1,12 @@
use crate::bindgen::Env;
use crate::types::Types;
use crate::types::{Env, Types};
use bumpalo::Bump;
use roc_can::{
def::{Declaration, Def},
pattern::Pattern,
};
use roc_load::{LoadedModule, Threading};
use roc_mono::layout::LayoutCache;
use roc_reporting::report::RenderTarget;
use roc_target::Architecture;
use roc_target::{Architecture, TargetInfo};
use std::io;
use std::path::{Path, PathBuf};
use strum::IntoEnumIterator;
@ -18,7 +16,7 @@ pub fn load_types(
full_file_path: PathBuf,
dir: &Path,
threading: Threading,
) -> Result<Vec<(Architecture, Types)>, io::Error> {
) -> Result<Vec<(Types, TargetInfo)>, io::Error> {
let target_info = (&Triple::host()).into();
let arena = &Bump::new();
@ -56,61 +54,51 @@ pub fn load_types(
);
}
let mut answer = Vec::with_capacity(Architecture::iter().size_hint().0);
for architecture in Architecture::iter() {
let defs_iter = decls.iter().flat_map(|decl| match decl {
Declaration::Declare(def) => {
vec![def.clone()]
let defs_iter = decls.iter().flat_map(|decl| match decl {
Declaration::Declare(def) => {
vec![def.clone()]
}
Declaration::DeclareRec(defs, cycle_mark) => {
if cycle_mark.is_illegal(subs) {
Vec::new()
} else {
defs.clone()
}
Declaration::DeclareRec(defs, cycle_mark) => {
if cycle_mark.is_illegal(subs) {
Vec::new()
} else {
defs.clone()
}
}
Declaration::Builtin(..) => {
unreachable!("Builtin decl in userspace module?")
}
Declaration::InvalidCycle(..) => Vec::new(),
});
let vars_iter = defs_iter.filter_map(
|Def {
loc_pattern,
pattern_vars,
..
}| {
if let Pattern::Identifier(sym) = loc_pattern.value {
let var = pattern_vars
.get(&sym)
.expect("Indetifier known but it has no var?");
Some(*var)
} else {
// figure out if we need to export non-identifier defs - when
// would that happen?
None
}
Declaration::Builtin(..) => {
unreachable!("Builtin decl in userspace module?")
}
Declaration::InvalidCycle(..) => Vec::new(),
});
},
);
let vars_iter = defs_iter.filter_map(
|Def {
loc_pattern,
pattern_vars,
..
}| {
if let Pattern::Identifier(sym) = loc_pattern.value {
let var = pattern_vars
.get(&sym)
.expect("Indetifier known but it has no var?");
let types_and_targets = Architecture::iter()
.map(|arch| {
let target_info = arch.into();
let mut env = Env::new(arena, subs, &interns, target_info);
Some(*var)
} else {
// figure out if we need to export non-identifier defs - when
// would that happen?
None
}
},
);
(env.vars_to_types(vars_iter.clone()), target_info)
})
.collect();
let mut layout_cache = LayoutCache::new(architecture.into());
let mut env = Env {
arena,
layout_cache: &mut layout_cache,
interns: &interns,
subs,
struct_names: Default::default(),
enum_names: Default::default(),
pending_recursive_types: Default::default(),
known_recursive_types: Default::default(),
};
let types = env.vars_to_types(vars_iter);
answer.push((architecture, types));
}
Ok(answer)
Ok(types_and_targets)
}
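A sketch of the map/collect restructuring above; the `Architecture` enum and `Types` payload below are stand-ins for roc's types:

```rust
// Instead of a `for` loop pushing into a Vec, build the per-target
// results with map/collect, yielding (Types, target) pairs directly.
#[allow(non_camel_case_types)]
#[derive(Debug, Clone, Copy, PartialEq)]
enum Architecture { X86_64, Aarch64, Wasm32 }

#[derive(Debug, PartialEq)]
struct Types(Architecture); // toy payload

fn main() {
    let archs = [Architecture::X86_64, Architecture::Aarch64, Architecture::Wasm32];

    let types_and_targets: Vec<(Types, Architecture)> = archs
        .iter()
        .map(|&arch| (Types(arch), arch)) // one Env/LayoutCache per target
        .collect();

    assert_eq!(types_and_targets.len(), 3);
    assert_eq!(
        types_and_targets[0],
        (Types(Architecture::X86_64), Architecture::X86_64)
    );
}
```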


@ -57,13 +57,13 @@ pub fn main() {
};
match load_types(input_path.clone(), &cwd, Threading::AllAvailable) {
Ok(types_by_architecture) => {
Ok(types_and_targets) => {
let mut buf;
match output_type {
OutputType::Rust => {
buf = std::str::from_utf8(bindgen_rs::HEADER).unwrap().to_string();
let body = bindgen_rs::emit(&types_by_architecture);
let body = bindgen_rs::emit(&types_and_targets);
buf.push_str(&body);
}

File diff suppressed because it is too large.


@ -1,6 +1,7 @@
// ⚠️ GENERATED CODE ⚠️ - this entire file was generated by the `roc-bindgen` CLI
#![allow(dead_code)]
#![allow(unused_mut)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]


@ -0,0 +1,26 @@
platform "test-platform"
requires {} { main : _ }
exposes []
packages {}
imports []
provides [mainForHost]
Tool : [
SystemTool { name : Str, num : U32 },
FromJob { job : Job, num : U32 }
]
Command : [Command { tool : Tool }]
Job : [
Job { command : Command, inputFiles : List Str },
Foo Str,
# TODO make a recursive tag union test that doesn't try to do mutual recursion,
# just so I can get a PR up.
# WithTool Tool # Mutual recursion; Tool also references Job
]
Rbt : { default: Job }
mainForHost : Rbt
mainForHost = main


@ -0,0 +1,14 @@
app "app"
packages { pf: "." }
imports []
provides [main] to pf
main = {
default: Job {
command: Command {
tool: SystemTool { name: "test", num: 42 }
},
inputFiles : ["foo"]
}
}


@ -0,0 +1,99 @@
mod bindings;
use bindings::Rbt;
use indoc::indoc;
extern "C" {
#[link_name = "roc__mainForHost_1_exposed_generic"]
fn roc_main(_: *mut Rbt);
}
#[no_mangle]
pub extern "C" fn rust_main() -> i32 {
use std::cmp::Ordering;
use std::collections::hash_set::HashSet;
let tag_union = unsafe {
let mut ret: core::mem::MaybeUninit<Rbt> = core::mem::MaybeUninit::uninit();
roc_main(ret.as_mut_ptr());
ret.assume_init()
};
// Verify that it has all the expected traits.
assert!(tag_union == tag_union); // PartialEq
assert!(tag_union.clone() == tag_union.clone()); // Clone
assert!(tag_union.partial_cmp(&tag_union) == Some(Ordering::Equal)); // PartialOrd
assert!(tag_union.cmp(&tag_union) == Ordering::Equal); // Ord
print!(
indoc!(
r#"
rbt was: {:?}
"#
),
tag_union,
); // Debug
let mut set = HashSet::new();
set.insert(tag_union.clone()); // Eq, Hash
set.insert(tag_union);
assert_eq!(set.len(), 1);
// Exit code
0
}
// Externs required by roc_std and by the Roc app
use core::ffi::c_void;
use std::ffi::CStr;
use std::os::raw::c_char;
#[no_mangle]
pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return libc::malloc(size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return libc::realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return libc::free(c_ptr);
}
#[no_mangle]
pub unsafe extern "C" fn roc_panic(c_ptr: *mut c_void, tag_id: u32) {
match tag_id {
0 => {
let slice = CStr::from_ptr(c_ptr as *const c_char);
let string = slice.to_str().unwrap();
eprintln!("Roc hit a panic: {}", string);
std::process::exit(1);
}
_ => todo!(),
}
}
#[no_mangle]
pub unsafe extern "C" fn roc_memcpy(dst: *mut c_void, src: *mut c_void, n: usize) -> *mut c_void {
libc::memcpy(dst, src, n)
}
#[no_mangle]
pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut c_void {
libc::memset(dst, c, n)
}
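A minimal sketch of the out-pointer pattern `rust_main` uses with `roc_main` above; `fake_roc_main` is a hypothetical stand-in for the extern:

```rust
use core::mem::MaybeUninit;

// Stand-in for the extern roc_main: writes its result through the
// out-pointer it is handed.
unsafe fn fake_roc_main(out: *mut u64) {
    out.write(42);
}

fn main() {
    let value = unsafe {
        let mut ret: MaybeUninit<u64> = MaybeUninit::uninit();
        fake_roc_main(ret.as_mut_ptr());
        // assume_init is only sound because fake_roc_main just wrote it.
        ret.assume_init()
    };
    assert_eq!(value, 42);
}
```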


@ -11,7 +11,7 @@ mod test_gen_rs {
use crate::helpers::generate_bindings;
#[test]
fn record_aliased() {
fn basic_record_aliased() {
let module = indoc!(
r#"
MyRcd : { a : U64, b : I128 }
@ -28,11 +28,11 @@ mod test_gen_rs {
indoc!(
r#"
#[cfg(any(
target_arch = "x86_64",
target_arch = "x86",
target_arch = "aarch64",
target_arch = "arm",
target_arch = "wasm32"
target_arch = "aarch64",
target_arch = "wasm32",
target_arch = "x86",
target_arch = "x86_64"
))]
#[derive(Clone, Copy, Debug, Default, Eq, Ord, Hash, PartialEq, PartialOrd)]
#[repr(C)]
@ -65,23 +65,24 @@ mod test_gen_rs {
indoc!(
r#"
#[cfg(any(
target_arch = "x86_64",
target_arch = "aarch64"
target_arch = "arm",
target_arch = "wasm32",
target_arch = "x86"
))]
#[derive(Clone, Debug, Default, PartialEq, PartialOrd)]
#[repr(C)]
pub struct Outer {
pub x: Inner,
pub y: roc_std::RocStr,
pub z: roc_std::RocList<u8>,
pub x: Inner,
}
#[cfg(any(
target_arch = "x86_64",
target_arch = "x86",
target_arch = "aarch64",
target_arch = "arm",
target_arch = "wasm32"
target_arch = "aarch64",
target_arch = "wasm32",
target_arch = "x86",
target_arch = "x86_64"
))]
#[derive(Clone, Copy, Debug, Default, PartialEq, PartialOrd)]
#[repr(C)]
@ -91,16 +92,15 @@ mod test_gen_rs {
}
#[cfg(any(
target_arch = "x86",
target_arch = "arm",
target_arch = "wasm32"
target_arch = "aarch64",
target_arch = "x86_64"
))]
#[derive(Clone, Debug, Default, PartialEq, PartialOrd)]
#[repr(C)]
pub struct Outer {
pub x: Inner,
pub y: roc_std::RocStr,
pub z: roc_std::RocList<u8>,
pub x: Inner,
}
"#
)
@ -118,11 +118,11 @@ mod test_gen_rs {
indoc!(
r#"
#[cfg(any(
target_arch = "x86_64",
target_arch = "x86",
target_arch = "aarch64",
target_arch = "arm",
target_arch = "wasm32"
target_arch = "aarch64",
target_arch = "wasm32",
target_arch = "x86",
target_arch = "x86_64"
))]
#[derive(Clone, Copy, Debug, Default, Eq, Ord, Hash, PartialEq, PartialOrd)]
#[repr(C)]
@ -146,23 +146,24 @@ mod test_gen_rs {
indoc!(
r#"
#[cfg(any(
target_arch = "x86_64",
target_arch = "aarch64"
target_arch = "arm",
target_arch = "wasm32",
target_arch = "x86"
))]
#[derive(Clone, Debug, Default, PartialEq, PartialOrd)]
#[repr(C)]
pub struct R1 {
pub x: R2,
pub y: roc_std::RocStr,
pub z: roc_std::RocList<u8>,
pub x: R2,
}
#[cfg(any(
target_arch = "x86_64",
target_arch = "x86",
target_arch = "aarch64",
target_arch = "arm",
target_arch = "wasm32"
target_arch = "aarch64",
target_arch = "wasm32",
target_arch = "x86",
target_arch = "x86_64"
))]
#[derive(Clone, Copy, Debug, Default, PartialEq, PartialOrd)]
#[repr(C)]
@ -172,16 +173,15 @@ mod test_gen_rs {
}
#[cfg(any(
target_arch = "x86",
target_arch = "arm",
target_arch = "wasm32"
target_arch = "aarch64",
target_arch = "x86_64"
))]
#[derive(Clone, Debug, Default, PartialEq, PartialOrd)]
#[repr(C)]
pub struct R1 {
pub x: R2,
pub y: roc_std::RocStr,
pub z: roc_std::RocList<u8>,
pub x: R2,
}
"#
)
@ -206,11 +206,11 @@ mod test_gen_rs {
indoc!(
r#"
#[cfg(any(
target_arch = "x86_64",
target_arch = "x86",
target_arch = "aarch64",
target_arch = "arm",
target_arch = "wasm32"
target_arch = "aarch64",
target_arch = "wasm32",
target_arch = "x86",
target_arch = "x86_64"
))]
#[derive(Clone, Copy, Eq, Ord, Hash, PartialEq, PartialOrd)]
#[repr(u8)]
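The reordered `target_arch` lists in these expectations reflect that the generated bindings group architectures by shared memory layout and emit each cfg list in sorted order. A minimal sketch of the idea (the field types here are illustrative, not the generated code):

#[cfg(any(target_arch = "aarch64", target_arch = "x86_64"))]
#[repr(C)]
pub struct Example {
    pub len: u64, // 64-bit targets share one layout
}

#[cfg(any(target_arch = "arm", target_arch = "wasm32", target_arch = "x86"))]
#[repr(C)]
pub struct Example {
    pub len: u32, // 32-bit targets share another
}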

View File

@ -121,11 +121,14 @@ mod bindgen_cli_run {
`Cons "small str" Nil` is: StrConsList::Cons("small str", StrConsList::Nil)
`Nil` is: StrConsList::Nil
"#),
recursive_union:"recursive-union" => indoc!(r#"
basic_recursive_union:"basic-recursive-union" => indoc!(r#"
tag_union was: Expr::Concat(Expr::String("Hello, "), Expr::String("World!"))
`Concat (String "Hello, ") (String "World!")` is: Expr::Concat(Expr::String("Hello, "), Expr::String("World!"))
`String "this is a test"` is: Expr::String("this is a test")
"#),
advanced_recursive_union:"advanced-recursive-union" => indoc!(r#"
rbt was: Rbt { default: Job::Job(R1 { command: Command::Command(R2 { tool: Tool::SystemTool(R4 { name: "test", num: 42 }) }), inputFiles: ["foo"] }) }
"#),
}
fn check_for_tests(all_fixtures: &mut roc_collections::VecSet<String>) {

View File

@ -28,9 +28,13 @@ mod cli_run {
const VALGRIND_FLAG: &str = concatcp!("--", roc_cli::FLAG_VALGRIND);
const LINKER_FLAG: &str = concatcp!("--", roc_cli::FLAG_LINKER);
const CHECK_FLAG: &str = concatcp!("--", roc_cli::FLAG_CHECK);
const PRECOMPILED_HOST: &str = concatcp!("--", roc_cli::FLAG_PRECOMPILED, "=true");
#[allow(dead_code)]
const TARGET_FLAG: &str = concatcp!("--", roc_cli::FLAG_TARGET);
use std::sync::Once;
static BENCHMARKS_BUILD_PLATFORM: Once = Once::new();
#[derive(Debug, EnumIter)]
enum CliMode {
RocBuild,
@ -545,22 +549,44 @@ mod cli_run {
_ => {}
}
// Check with and without optimizations
check_output_with_stdin(
&file_name,
benchmark.stdin,
benchmark.executable_filename,
&[],
benchmark.input_file.and_then(|file| Some(examples_dir("benchmarks").join(file))),
benchmark.expected_ending,
benchmark.use_valgrind,
);
let mut ran_without_optimizations = false;
BENCHMARKS_BUILD_PLATFORM.call_once( || {
// Check with and without optimizations
check_output_with_stdin(
&file_name,
benchmark.stdin,
benchmark.executable_filename,
&[],
benchmark.input_file.map(|file| examples_dir("benchmarks").join(file)),
benchmark.expected_ending,
benchmark.use_valgrind,
);
ran_without_optimizations = true;
});
// now we can pass the `PRECOMPILED_HOST` flag, because the `call_once` will
// have compiled the host
if !ran_without_optimizations {
// Check with and without optimizations
check_output_with_stdin(
&file_name,
benchmark.stdin,
benchmark.executable_filename,
&[PRECOMPILED_HOST],
benchmark.input_file.map(|file| examples_dir("benchmarks").join(file)),
benchmark.expected_ending,
benchmark.use_valgrind,
);
}
check_output_with_stdin(
&file_name,
benchmark.stdin,
benchmark.executable_filename,
&[OPTIMIZE_FLAG],
&[PRECOMPILED_HOST, OPTIMIZE_FLAG],
benchmark.input_file.and_then(|file| Some(examples_dir("benchmarks").join(file))),
benchmark.expected_ending,
benchmark.use_valgrind,
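The `Once` guard above ensures the benchmark platform host is rebuilt at most once per test binary; every later invocation can then pass `--precompiled=true` and skip the rebuild. A minimal sketch of the pattern, with hypothetical names:

use std::sync::Once;

static BUILD_HOST: Once = Once::new();

fn run_benchmark() {
    let mut built_here = false;
    BUILD_HOST.call_once(|| {
        // The first caller (across all test threads) compiles the host.
        // build_platform_host(); // hypothetical
        built_here = true;
    });
    if !built_here {
        // The host already exists, so later runs may reuse it,
        // e.g. by passing a "--precompiled=true" flag.
    }
}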

View File

@ -439,6 +439,7 @@ fn stmt_spec<'a>(
builder.add_choice(block, &cases)
}
Expect { remainder, .. } => stmt_spec(builder, env, block, layout, remainder),
Ret(symbol) => Ok(env.symbols[symbol]),
Refcounting(modify_rc, continuation) => match modify_rc {
ModifyRc::Inc(symbol, _) => {

View File

@ -313,6 +313,7 @@ pub fn build_zig_host_wasm32(
pub fn build_c_host_native(
env_path: &str,
env_home: &str,
env_cpath: &str,
dest: &str,
sources: &[&str],
opt_level: OptLevel,
@ -322,6 +323,7 @@ pub fn build_c_host_native(
command
.env_clear()
.env("PATH", &env_path)
.env("CPATH", &env_cpath)
.env("HOME", &env_home)
.args(sources)
.args(&["-o", dest]);
@ -417,6 +419,7 @@ pub fn rebuild_host(
let env_path = env::var("PATH").unwrap_or_else(|_| "".to_string());
let env_home = env::var("HOME").unwrap_or_else(|_| "".to_string());
let env_cpath = env::var("CPATH").unwrap_or_else(|_| "".to_string());
if zig_host_src.exists() {
// Compile host.zig
@ -531,6 +534,7 @@ pub fn rebuild_host(
let output = build_c_host_native(
&env_path,
&env_home,
&env_cpath,
c_host_dest.to_str().unwrap(),
&[c_host_src.to_str().unwrap()],
opt_level,
@ -586,6 +590,7 @@ pub fn rebuild_host(
let output = build_c_host_native(
&env_path,
&env_home,
&env_cpath,
host_dest.to_str().unwrap(),
&[
c_host_src.to_str().unwrap(),
@ -599,6 +604,7 @@ pub fn rebuild_host(
let output = build_c_host_native(
&env_path,
&env_home,
&env_cpath,
c_host_dest.to_str().unwrap(),
&[c_host_src.to_str().unwrap()],
opt_level,
@ -639,6 +645,7 @@ pub fn rebuild_host(
let output = build_c_host_native(
&env_path,
&env_home,
&env_cpath,
host_dest.to_str().unwrap(),
&[c_host_src.to_str().unwrap()],
opt_level,

View File

@ -1,6 +1,6 @@
use roc_collections::{all::MutMap, VecMap, VecSet};
use roc_error_macros::internal_error;
use roc_module::symbol::Symbol;
use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::Region;
use roc_types::{subs::Variable, types::Type};
@ -13,8 +13,14 @@ pub struct MemberVariables {
pub flex_vars: Vec<Variable>,
}
/// The member and its signature are defined locally, in the module the store is created for.
/// We need to instantiate and introduce this during solving.
#[derive(Debug, Clone)]
pub enum MemberTypeInfo {
pub struct ResolvedMemberType(Variable);
/// Member type information that needs to be resolved from imports.
#[derive(Debug, Clone)]
pub enum PendingMemberType {
/// The member and its signature are defined locally, in the module the store is created for.
/// We need to instantiate and introduce this during solving.
Local {
@ -24,36 +30,74 @@ pub enum MemberTypeInfo {
},
/// The member was defined in another module, so we'll import its variable when it's time to
/// solve. At that point we'll resolve `var` here.
Imported { signature_var: Option<Variable> },
Imported,
}
pub trait ResolvePhase: std::fmt::Debug + Clone + Copy {
type MemberType: std::fmt::Debug + Clone;
}
#[derive(Default, Debug, Clone, Copy)]
pub struct Pending;
impl ResolvePhase for Pending {
type MemberType = PendingMemberType;
}
#[derive(Default, Debug, Clone, Copy)]
pub struct Resolved;
impl ResolvePhase for Resolved {
type MemberType = ResolvedMemberType;
}
/// Stores information about an ability member definition, including the parent ability, the
/// defining type, and what type variables need to be instantiated with instances of the ability.
// TODO: SoA and put me in an arena
#[derive(Debug, Clone)]
pub struct AbilityMemberData {
pub struct AbilityMemberData<Phase: ResolvePhase> {
pub parent_ability: Symbol,
pub region: Region,
pub typ: MemberTypeInfo,
pub typ: Phase::MemberType,
}
impl AbilityMemberData {
pub fn signature_var(&self) -> Option<Variable> {
match self.typ {
MemberTypeInfo::Local { signature_var, .. } => Some(signature_var),
MemberTypeInfo::Imported { signature_var } => signature_var,
}
impl AbilityMemberData<Resolved> {
pub fn signature_var(&self) -> Variable {
self.typ.0
}
}
/// (member, specialization type) -> specialization
pub type SolvedSpecializations = VecMap<(Symbol, Symbol), MemberSpecialization>;
pub type SpecializationsMap<Phase> = VecMap<(Symbol, Symbol), MemberSpecialization<Phase>>;
pub type PendingSpecializations = SpecializationsMap<Pending>;
pub type ResolvedSpecializations = SpecializationsMap<Resolved>;
/// A particular specialization of an ability member.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct MemberSpecialization {
#[derive(Debug, Clone)]
pub struct MemberSpecialization<Phase: ResolvePhase> {
_phase: std::marker::PhantomData<Phase>,
pub symbol: Symbol,
pub region: Region,
/// Solved lambda sets for an ability member specialization. For example, if we have
///
/// Default has default : {} -[[] + a:default:1]-> a | a has Default
///
/// A := {}
/// default = \{} -[[closA]]-> @A {}
///
/// and this [MemberSpecialization] is for `A`, then there is a mapping of
/// `1` to the variable representing `[[closA]]`.
pub specialization_lambda_sets: VecMap<u8, Variable>,
}
impl MemberSpecialization<Resolved> {
pub fn new(symbol: Symbol, specialization_lambda_sets: VecMap<u8, Variable>) -> Self {
Self {
_phase: Default::default(),
symbol,
specialization_lambda_sets,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -66,18 +110,20 @@ impl Default for SpecializationId {
}
}
pub enum SpecializationLambdaSetError {}
/// Stores information about what abilities exist in a scope, what it means to implement an
/// ability, and what types implement them.
// TODO(abilities): this should probably go on the Scope, I don't put it there for now because we
// are only dealing with intra-module abilities for now.
// TODO(abilities): many of these should be `VecMap`s. Do some benchmarking.
#[derive(Default, Debug, Clone)]
pub struct AbilitiesStore {
pub struct IAbilitiesStore<Phase: ResolvePhase> {
/// Maps an ability to the members defining it.
members_of_ability: MutMap<Symbol, Vec<Symbol>>,
/// Information about all members composing abilities.
ability_members: MutMap<Symbol, AbilityMemberData>,
ability_members: MutMap<Symbol, AbilityMemberData<Phase>>,
/// Map of symbols that specialize an ability member to the root ability symbol name.
/// For example, for the program
@ -91,7 +137,7 @@ pub struct AbilitiesStore {
/// Maps a tuple (member, type) specifying that `type` declares an implementation of an ability
/// member `member`, to the exact symbol that implements the ability.
declared_specializations: SolvedSpecializations,
declared_specializations: SpecializationsMap<Phase>,
next_specialization_id: u32,
@ -100,11 +146,14 @@ pub struct AbilitiesStore {
resolved_specializations: MutMap<SpecializationId, Symbol>,
}
impl AbilitiesStore {
pub type AbilitiesStore = IAbilitiesStore<Resolved>;
pub type PendingAbilitiesStore = IAbilitiesStore<Pending>;
impl<Phase: ResolvePhase> IAbilitiesStore<Phase> {
/// Records the definition of an ability, including its members.
pub fn register_ability<I>(&mut self, ability: Symbol, members: I)
where
I: IntoIterator<Item = (Symbol, AbilityMemberData)>,
I: IntoIterator<Item = (Symbol, AbilityMemberData<Phase>)>,
I::IntoIter: ExactSizeIterator,
{
let members = members.into_iter();
@ -121,25 +170,6 @@ impl AbilitiesStore {
);
}
pub fn is_ability(&self, ability: Symbol) -> bool {
self.members_of_ability.contains_key(&ability)
}
/// Records a specialization of `ability_member` with specialized type `implementing_type`.
/// Entries via this function are considered a source of truth. It must be ensured that a
/// specialization is validated before being registered here.
pub fn register_specialization_for_type(
&mut self,
ability_member: Symbol,
implementing_type: Symbol,
specialization: MemberSpecialization,
) {
let old_spec = self
.declared_specializations
.insert((ability_member, implementing_type), specialization);
debug_assert!(old_spec.is_none(), "Replacing existing specialization");
}
/// Checks if `name` is a root ability member symbol name.
/// Note that this will return `false` for specializations of an ability member, which have
/// different symbols from the root.
@ -147,11 +177,27 @@ impl AbilitiesStore {
self.ability_members.contains_key(&name)
}
pub fn is_ability(&self, ability: Symbol) -> bool {
self.members_of_ability.contains_key(&ability)
}
/// Iterator over all abilities and their members that this store knows about.
pub fn iter_abilities(&self) -> impl Iterator<Item = (Symbol, &[Symbol])> {
self.members_of_ability
.iter()
.map(|(k, v)| (*k, v.as_slice()))
}
/// Returns information about all known ability members and their root symbols.
pub fn root_ability_members(&self) -> &MutMap<Symbol, AbilityMemberData> {
pub fn root_ability_members(&self) -> &MutMap<Symbol, AbilityMemberData<Phase>> {
&self.ability_members
}
/// Returns whether a symbol is declared to specialize an ability member.
pub fn is_specialization_name(&self, symbol: Symbol) -> bool {
self.specialization_to_root.contains_key(&symbol)
}
/// Records that the symbol `specializing_symbol` claims to specialize `ability_member`; for
/// example the symbol of `hash : Id -> U64` specializing `hash : a -> U64 | a has Hash`.
pub fn register_specializing_symbol(
@ -163,51 +209,6 @@ impl AbilitiesStore {
.insert(specializing_symbol, ability_member);
}
/// Returns whether a symbol is declared to specialize an ability member.
pub fn is_specialization_name(&self, symbol: Symbol) -> bool {
self.specialization_to_root.contains_key(&symbol)
}
/// Finds the symbol name and ability member definition for a symbol specializing the ability
/// member, if it specializes any.
/// For example, suppose `hash : Id -> U64` has symbol #hash1 and specializes
/// `hash : a -> U64 | a has Hash` with symbol #hash. Calling this with #hash1 would retrieve
/// the ability member data for #hash.
pub fn root_name_and_def(
&self,
specializing_symbol: Symbol,
) -> Option<(Symbol, &AbilityMemberData)> {
let root_symbol = self.specialization_to_root.get(&specializing_symbol)?;
debug_assert!(self.ability_members.contains_key(root_symbol));
let root_data = self.ability_members.get(root_symbol).unwrap();
Some((*root_symbol, root_data))
}
/// Finds the ability member definition for a member name.
pub fn member_def(&self, member: Symbol) -> Option<&AbilityMemberData> {
self.ability_members.get(&member)
}
/// Iterator over all abilities and their members that this store knows about.
pub fn iter_abilities(&self) -> impl Iterator<Item = (Symbol, &[Symbol])> {
self.members_of_ability
.iter()
.map(|(k, v)| (*k, v.as_slice()))
}
/// Returns an iterator over pairs ((ability member, type), specialization) specifying that
/// "ability member" has a "specialization" for type "type".
pub fn iter_specializations(
&self,
) -> impl Iterator<Item = ((Symbol, Symbol), MemberSpecialization)> + '_ {
self.declared_specializations.iter().map(|(k, v)| (*k, *v))
}
/// Retrieves the specialization of `member` for `typ`, if it exists.
pub fn get_specialization(&self, member: Symbol, typ: Symbol) -> Option<MemberSpecialization> {
self.declared_specializations.get(&(member, typ)).copied()
}
pub fn members_of_ability(&self, ability: Symbol) -> Option<&[Symbol]> {
self.members_of_ability.get(&ability).map(|v| v.as_ref())
}
@ -219,6 +220,60 @@ impl AbilitiesStore {
self.next_specialization_id += 1;
id
}
}
impl IAbilitiesStore<Resolved> {
/// Finds the symbol name and ability member definition for a symbol specializing the ability
/// member, if it specializes any.
/// For example, suppose `hash : Id -> U64` has symbol #hash1 and specializes
/// `hash : a -> U64 | a has Hash` with symbol #hash. Calling this with #hash1 would retrieve
/// the ability member data for #hash.
pub fn root_name_and_def(
&self,
specializing_symbol: Symbol,
) -> Option<(Symbol, &AbilityMemberData<Resolved>)> {
let root_symbol = self.specialization_to_root.get(&specializing_symbol)?;
debug_assert!(self.ability_members.contains_key(root_symbol));
let root_data = self.ability_members.get(root_symbol).unwrap();
Some((*root_symbol, root_data))
}
/// Finds the ability member definition for a member name.
pub fn member_def(&self, member: Symbol) -> Option<&AbilityMemberData<Resolved>> {
self.ability_members.get(&member)
}
/// Returns an iterator over pairs ((ability member, type), specialization) specifying that
/// "ability member" has a "specialization" for type "type".
pub fn iter_specializations(
&self,
) -> impl Iterator<Item = ((Symbol, Symbol), &MemberSpecialization<Resolved>)> + '_ {
self.declared_specializations.iter().map(|(k, v)| (*k, v))
}
/// Retrieves the specialization of `member` for `typ`, if it exists.
pub fn get_specialization(
&self,
member: Symbol,
typ: Symbol,
) -> Option<&MemberSpecialization<Resolved>> {
self.declared_specializations.get(&(member, typ))
}
/// Records a specialization of `ability_member` with specialized type `implementing_type`.
/// Entries via this function are considered a source of truth. It must be ensured that a
/// specialization is validated before being registered here.
pub fn register_specialization_for_type(
&mut self,
ability_member: Symbol,
implementing_type: Symbol,
specialization: MemberSpecialization<Resolved>,
) {
let old_spec = self
.declared_specializations
.insert((ability_member, implementing_type), specialization);
debug_assert!(old_spec.is_none(), "Replacing existing specialization");
}
pub fn insert_resolved(&mut self, id: SpecializationId, specialization: Symbol) {
// May not be a thing in mono
@ -245,6 +300,31 @@ impl AbilitiesStore {
pub fn get_resolved(&self, id: SpecializationId) -> Option<Symbol> {
self.resolved_specializations.get(&id).copied()
}
}
impl IAbilitiesStore<Pending> {
pub fn import_specialization(
&mut self,
ability_member: Symbol,
implementing_type: Symbol,
specialization: &MemberSpecialization<impl ResolvePhase>,
) {
let MemberSpecialization {
_phase,
symbol,
specialization_lambda_sets,
} = specialization;
let old_spec = self.declared_specializations.insert(
(ability_member, implementing_type),
MemberSpecialization {
_phase: Default::default(),
symbol: *symbol,
specialization_lambda_sets: specialization_lambda_sets.clone(),
},
);
debug_assert!(old_spec.is_none(), "Replacing existing specialization");
}
/// Creates a store from [`self`] that closes over the abilities/members given by the
/// imported `symbols`, and their specializations (if any).
@ -262,7 +342,7 @@ impl AbilitiesStore {
resolved_specializations: _,
} = self;
let mut new = Self::default();
let mut new = PendingAbilitiesStore::default();
// 1. Figure out the abilities we need to introduce.
let mut abilities_to_introduce = VecSet::with_capacity(2);
@ -281,15 +361,21 @@ impl AbilitiesStore {
let members = members_of_ability.get(&ability).unwrap();
let mut imported_member_data = Vec::with_capacity(members.len());
for member in members {
let mut member_data = ability_members.get(member).unwrap().clone();
let AbilityMemberData {
parent_ability,
region,
typ: _,
} = ability_members.get(member).unwrap().clone();
// All external members need to be marked as imported. We'll figure out their real
// type variables when it comes time to solve the module we're currently importing
// into.
member_data.typ = MemberTypeInfo::Imported {
signature_var: None,
let imported_data = AbilityMemberData {
parent_ability,
region,
typ: PendingMemberType::Imported,
};
imported_member_data.push((*member, member_data));
imported_member_data.push((*member, imported_data));
}
new.register_ability(ability, imported_member_data);
@ -298,9 +384,9 @@ impl AbilitiesStore {
declared_specializations
.iter()
.filter(|((member, _), _)| members.contains(member))
.for_each(|(&(member, typ), &specialization)| {
.for_each(|(&(member, typ), specialization)| {
new.register_specializing_symbol(specialization.symbol, member);
new.register_specialization_for_type(member, typ, specialization);
new.import_specialization(member, typ, specialization);
});
}
@ -338,9 +424,10 @@ impl AbilitiesStore {
for ((member, typ), specialization) in declared_specializations.into_iter() {
let old_specialization = self
.declared_specializations
.insert((member, typ), specialization);
.insert((member, typ), specialization.clone());
debug_assert!(
old_specialization.is_none() || old_specialization.unwrap() == specialization
old_specialization.is_none()
|| old_specialization.unwrap().symbol == specialization.symbol
);
}
@ -350,14 +437,102 @@ impl AbilitiesStore {
debug_assert!(self.resolved_specializations.is_empty());
}
pub fn resolved_imported_member_var(&mut self, member: Symbol, var: Variable) {
let member_data = self.ability_members.get_mut(&member).unwrap();
match &mut member_data.typ {
MemberTypeInfo::Imported { signature_var } => {
let old = signature_var.replace(var);
debug_assert!(old.is_none(), "Replacing existing variable!");
}
_ => internal_error!("{:?} is not imported!", member),
pub fn resolve_for_module<Ctx, VarOfSymbol, ImportVar>(
self,
my_module: ModuleId,
my_module_ctx: &mut Ctx,
mut variable_of_symbol: VarOfSymbol,
mut import_lambda_set_var_from_module: ImportVar,
) -> AbilitiesStore
where
VarOfSymbol: FnMut(&mut Ctx, Symbol) -> Variable,
ImportVar: FnMut(&mut Ctx, ModuleId, Variable) -> Variable,
{
let Self {
members_of_ability,
ability_members,
specialization_to_root,
declared_specializations,
next_specialization_id,
resolved_specializations,
} = self;
let ability_members = ability_members
.into_iter()
.map(|(member_symbol, member_data)| {
let AbilityMemberData {
parent_ability,
region,
typ,
} = member_data;
let typ = match typ {
PendingMemberType::Local {
signature_var,
signature: _,
variables: _,
} => ResolvedMemberType(signature_var),
PendingMemberType::Imported => {
ResolvedMemberType(variable_of_symbol(my_module_ctx, member_symbol))
}
};
let member_data = AbilityMemberData {
parent_ability,
region,
typ,
};
(member_symbol, member_data)
})
.collect();
let declared_specializations = declared_specializations
.into_iter()
.map(
|(
key,
MemberSpecialization {
_phase,
symbol,
specialization_lambda_sets,
},
)| {
let symbol_module = symbol.module_id();
// NOTE: this totally assumes we're dealing with subs that belong to an
// individual module; things would be badly broken otherwise
let member_specialization = if symbol_module == my_module {
internal_error!("Ability store may only be pending before module solving, \
so there shouldn't be any known module specializations at this point, but we found one for {:?}", symbol);
// MemberSpecialization::new(symbol, specialization_lambda_sets)
} else {
let specialization_lambda_sets = specialization_lambda_sets
.into_iter()
.map(|(region, variable)| {
(
region,
import_lambda_set_var_from_module(
my_module_ctx,
symbol_module,
variable,
),
)
})
.collect();
MemberSpecialization::new(symbol, specialization_lambda_sets)
};
(key, member_specialization)
},
)
.collect();
AbilitiesStore {
members_of_ability,
ability_members,
specialization_to_root,
declared_specializations,
next_specialization_id,
resolved_specializations,
}
}
}
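The `Pending`/`Resolved` split above is a type-state pattern: the same store shape is parameterized by a phase marker, so unresolved imports can only be observed before solving, and `resolve_for_module` is the only way to cross the boundary. A standalone sketch of the technique (names and payloads here are illustrative):

use std::marker::PhantomData;

trait Phase {
    type Payload;
}

struct Pending;
struct Resolved;

impl Phase for Pending {
    type Payload = Option<u32>; // variable not imported yet
}
impl Phase for Resolved {
    type Payload = u32; // variable is known
}

struct Store<P: Phase> {
    members: Vec<P::Payload>,
    _phase: PhantomData<P>,
}

impl Store<Pending> {
    // Consuming `self` guarantees no pending data survives resolution.
    fn resolve(self, mut import: impl FnMut() -> u32) -> Store<Resolved> {
        Store {
            members: self
                .members
                .into_iter()
                .map(|m| m.unwrap_or_else(&mut import))
                .collect(),
            _phase: PhantomData,
        }
    }
}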

View File

@ -1,6 +1,6 @@
use crate::abilities::AbilityMemberData;
use crate::abilities::MemberTypeInfo;
use crate::abilities::MemberVariables;
use crate::abilities::PendingMemberType;
use crate::annotation::canonicalize_annotation;
use crate::annotation::find_type_def_symbols;
use crate::annotation::make_apply_symbol;
@ -49,6 +49,24 @@ pub struct Def {
pub annotation: Option<Annotation>,
}
impl Def {
pub fn region(&self) -> Region {
let head_region = match &self.annotation {
Some(ann) => {
if ann.region.start() < self.loc_pattern.region.start() {
ann.region
} else {
// Happens with annotation-only bodies like foo : T, since `T` is after the
// pattern.
self.loc_pattern.region
}
}
None => self.loc_pattern.region,
};
Region::span_across(&head_region, &self.loc_expr.region)
}
}
#[derive(Clone, Debug)]
pub struct Annotation {
pub signature: Type,
@ -197,6 +215,21 @@ impl Declaration {
Builtin(_) => 0,
}
}
pub fn region(&self) -> Region {
match self {
Declaration::Declare(def) => def.region(),
Declaration::DeclareRec(defs, _) => Region::span_across(
&defs.first().unwrap().region(),
&defs.last().unwrap().region(),
),
Declaration::Builtin(def) => def.region(),
Declaration::InvalidCycle(cycles) => Region::span_across(
&cycles.first().unwrap().expr_region,
&cycles.last().unwrap().expr_region,
),
}
}
}
/// Returns a topologically sorted sequence of alias/opaque names
@ -928,7 +961,7 @@ fn resolve_abilities<'a>(
AbilityMemberData {
parent_ability: loc_ability_name.value,
region: name_region,
typ: MemberTypeInfo::Local {
typ: PendingMemberType::Local {
variables,
signature,
signature_var: var_store.fresh(),
@ -1581,23 +1614,26 @@ pub fn can_defs_with_return<'a>(
let mut loc_expr: Loc<Expr> = ret_expr;
for declaration in declarations.into_iter().rev() {
loc_expr = Loc {
region: Region::zero(),
value: decl_to_let(declaration, loc_expr),
};
loc_expr = decl_to_let(declaration, loc_expr);
}
(loc_expr.value, output)
}
fn decl_to_let(decl: Declaration, loc_ret: Loc<Expr>) -> Expr {
fn decl_to_let(decl: Declaration, loc_ret: Loc<Expr>) -> Loc<Expr> {
match decl {
Declaration::Declare(def) => Expr::LetNonRec(Box::new(def), Box::new(loc_ret)),
Declaration::Declare(def) => {
let region = Region::span_across(&def.loc_pattern.region, &loc_ret.region);
let expr = Expr::LetNonRec(Box::new(def), Box::new(loc_ret));
Loc::at(region, expr)
}
Declaration::DeclareRec(defs, cycle_mark) => {
Expr::LetRec(defs, Box::new(loc_ret), cycle_mark)
let region = Region::span_across(&defs[0].loc_pattern.region, &loc_ret.region);
let expr = Expr::LetRec(defs, Box::new(loc_ret), cycle_mark);
Loc::at(region, expr)
}
Declaration::InvalidCycle(entries) => {
Expr::RuntimeError(RuntimeError::CircularDef(entries))
Loc::at_zero(Expr::RuntimeError(RuntimeError::CircularDef(entries)))
}
Declaration::Builtin(_) => {
// Builtins should only be added to top-level decls, not to let-exprs!

View File

@ -70,14 +70,13 @@ impl<'a> Env<'a> {
let is_type_name = ident.starts_with(|c: char| c.is_uppercase());
let module_name = ModuleName::from(module_name_str);
let ident = Ident::from(ident);
match self.module_ids.get_id(&module_name) {
Some(&module_id) => {
Some(module_id) => {
// You can do qualified lookups on your own module, e.g.
// if I'm in the Foo module, I can do a `Foo.bar` lookup.
if module_id == self.home {
match scope.locals.ident_ids.get_id(&ident) {
match scope.locals.ident_ids.get_id(ident) {
Some(ident_id) => {
let symbol = Symbol::new(module_id, ident_id);
@ -92,7 +91,7 @@ impl<'a> Env<'a> {
None => {
let error = RuntimeError::LookupNotInScope(
Loc {
value: ident,
value: Ident::from(ident),
region,
},
scope
@ -107,7 +106,7 @@ impl<'a> Env<'a> {
}
} else {
match self.dep_idents.get(&module_id) {
Some(exposed_ids) => match exposed_ids.get_id(&ident) {
Some(exposed_ids) => match exposed_ids.get_id(ident) {
Some(ident_id) => {
let symbol = Symbol::new(module_id, ident_id);
@ -129,7 +128,7 @@ impl<'a> Env<'a> {
.collect();
Err(RuntimeError::ValueNotExposed {
module_name,
ident,
ident: Ident::from(ident),
region,
exposed_values,
})

View File

@ -216,8 +216,12 @@ pub enum Expr {
lambda_set_variables: Vec<LambdaSet>,
},
/// Test
Expect(Box<Loc<Expr>>, Box<Loc<Expr>>),
// Test
Expect {
loc_condition: Box<Loc<Expr>>,
loc_continuation: Box<Loc<Expr>>,
lookups_in_cond: Vec<(Symbol, Variable)>,
},
/// Rendered as empty box in editor
TypedHole(Variable),
@ -261,7 +265,7 @@ impl Expr {
args_count: 0,
},
&Self::OpaqueRef { name, .. } => Category::OpaqueWrap(name),
Self::Expect(..) => Category::Expect,
Self::Expect { .. } => Category::Expect,
// these nodes place no constraints on the expression's type
Self::TypedHole(_) | Self::RuntimeError(..) => Category::Unknown,
@ -842,6 +846,10 @@ pub fn canonicalize_expr<'a>(
let (loc_condition, output1) =
canonicalize_expr(env, var_store, scope, condition.region, &condition.value);
// Get all the lookups that were referenced in the condition,
// so we can print their values later.
let lookups_in_cond = get_lookup_symbols(&loc_condition.value, var_store);
let (loc_continuation, output2) = canonicalize_expr(
env,
var_store,
@ -854,7 +862,11 @@ pub fn canonicalize_expr<'a>(
output.union(output2);
(
Expect(Box::new(loc_condition), Box::new(loc_continuation)),
Expect {
loc_condition: Box::new(loc_condition),
loc_continuation: Box::new(loc_continuation),
lookups_in_cond,
},
output,
)
}
@ -1509,18 +1521,26 @@ pub fn inline_calls(var_store: &mut VarStore, scope: &mut Scope, expr: Expr) ->
}
}
Expect(loc_condition, loc_expr) => {
Expect {
loc_condition,
loc_continuation,
lookups_in_cond,
} => {
let loc_condition = Loc {
region: loc_condition.region,
value: inline_calls(var_store, scope, loc_condition.value),
};
let loc_expr = Loc {
region: loc_expr.region,
value: inline_calls(var_store, scope, loc_expr.value),
let loc_continuation = Loc {
region: loc_continuation.region,
value: inline_calls(var_store, scope, loc_continuation.value),
};
Expect(Box::new(loc_condition), Box::new(loc_expr))
Expect {
loc_condition: Box::new(loc_condition),
loc_continuation: Box::new(loc_continuation),
lookups_in_cond,
}
}
LetRec(defs, loc_expr, mark) => {
@ -1916,3 +1936,108 @@ pub fn unescape_char(escaped: &EscapedChar) -> char {
Newline => '\n',
}
}
fn get_lookup_symbols(expr: &Expr, var_store: &mut VarStore) -> Vec<(Symbol, Variable)> {
let mut stack: Vec<&Expr> = vec![expr];
let mut symbols = Vec::new();
while let Some(expr) = stack.pop() {
match expr {
Expr::Var(symbol) | Expr::Update { symbol, .. } | Expr::AbilityMember(symbol, _, _) => {
// Don't introduce duplicates, or make unused variables
if !symbols.iter().any(|(sym, _)| sym == symbol) {
symbols.push((*symbol, var_store.fresh()));
}
}
Expr::List { loc_elems, .. } => {
stack.extend(loc_elems.iter().map(|loc_elem| &loc_elem.value));
}
Expr::When {
loc_cond, branches, ..
} => {
stack.push(&loc_cond.value);
stack.reserve(branches.len());
for branch in branches {
stack.push(&branch.value.value);
if let Some(guard) = &branch.guard {
stack.push(&guard.value);
}
}
}
Expr::If {
branches,
final_else,
..
} => {
stack.reserve(1 + branches.len() * 2);
for (loc_cond, loc_body) in branches {
stack.push(&loc_cond.value);
stack.push(&loc_body.value);
}
stack.push(&final_else.value);
}
Expr::LetRec(_, _, _) => todo!(),
Expr::LetNonRec { .. } => todo!(),
Expr::Call(boxed_expr, args, _called_via) => {
stack.reserve(1 + args.len());
match &boxed_expr.1.value {
Expr::Var(_) => {
// do nothing
}
function_expr => {
// add the expr being called
stack.push(function_expr);
}
}
for (_var, loc_arg) in args {
stack.push(&loc_arg.value);
}
}
Expr::Tag { arguments, .. } => {
stack.extend(arguments.iter().map(|(_var, loc_expr)| &loc_expr.value));
}
Expr::RunLowLevel { args, .. } | Expr::ForeignCall { args, .. } => {
stack.extend(args.iter().map(|(_var, arg)| arg));
}
Expr::OpaqueRef { argument, .. } => {
stack.push(&argument.1.value);
}
Expr::Access { loc_expr, .. }
| Expr::Closure(ClosureData {
loc_body: loc_expr, ..
}) => {
stack.push(&loc_expr.value);
}
Expr::Record { fields, .. } => {
stack.extend(fields.iter().map(|(_, field)| &field.loc_expr.value));
}
Expr::Expect {
loc_continuation, ..
} => {
stack.push(&(*loc_continuation).value);
// Intentionally ignore the lookups in the nested `expect` condition itself,
// because they couldn't possibly influence the outcome of this `expect`!
}
Expr::Num(_, _, _, _)
| Expr::Float(_, _, _, _, _)
| Expr::Int(_, _, _, _, _)
| Expr::Str(_)
| Expr::ZeroArgumentTag { .. }
| Expr::Accessor(_)
| Expr::SingleQuote(_)
| Expr::EmptyRecord
| Expr::TypedHole(_)
| Expr::RuntimeError(_) => {}
}
}
symbols
}
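`get_lookup_symbols` walks the expression with an explicit worklist rather than recursion, which keeps deeply nested expressions from overflowing the call stack and makes it easy to skip subtrees, as done for nested `expect` conditions. The shape of the traversal, reduced to its essentials (illustrative `Expr`):

enum Expr {
    Var(u32),
    Call(Vec<Expr>),
}

fn collect_vars(root: &Expr) -> Vec<u32> {
    let mut stack = vec![root];
    let mut vars = Vec::new();
    while let Some(expr) = stack.pop() {
        match expr {
            Expr::Var(id) => {
                // De-duplicate, mirroring the real traversal.
                if !vars.contains(id) {
                    vars.push(*id);
                }
            }
            Expr::Call(args) => stack.extend(args.iter()),
        }
    }
    vars
}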

View File

@ -1,4 +1,4 @@
use crate::abilities::AbilitiesStore;
use crate::abilities::PendingAbilitiesStore;
use crate::annotation::canonicalize_annotation;
use crate::def::{canonicalize_toplevel_defs, sort_can_defs, Declaration, Def};
use crate::effect_module::HostedGeneratedFunctions;
@ -30,7 +30,7 @@ pub struct Module {
/// all aliases. `bool` indicates whether it is exposed
pub aliases: MutMap<Symbol, (bool, Alias)>,
pub rigid_variables: RigidVariables,
pub abilities_store: AbilitiesStore,
pub abilities_store: PendingAbilitiesStore,
}
#[derive(Debug, Default)]
@ -167,7 +167,7 @@ pub fn canonicalize_module_defs<'a>(
exposed_ident_ids: IdentIds,
dep_idents: &'a IdentIdsByModule,
aliases: MutMap<Symbol, Alias>,
imported_abilities_state: AbilitiesStore,
imported_abilities_state: PendingAbilitiesStore,
exposed_imports: MutMap<Ident, (Symbol, Region)>,
exposed_symbols: &VecSet<Symbol>,
symbols_from_requires: &[(Loc<Symbol>, Loc<TypeAnnotation<'a>>)],
@ -688,9 +688,13 @@ fn fix_values_captured_in_closure_expr(
fix_values_captured_in_closure_expr(&mut loc_expr.value, no_capture_symbols);
}
Expect(condition, loc_expr) => {
fix_values_captured_in_closure_expr(&mut condition.value, no_capture_symbols);
fix_values_captured_in_closure_expr(&mut loc_expr.value, no_capture_symbols);
Expect {
loc_condition,
loc_continuation,
lookups_in_cond: _,
} => {
fix_values_captured_in_closure_expr(&mut loc_condition.value, no_capture_symbols);
fix_values_captured_in_closure_expr(&mut loc_continuation.value, no_capture_symbols);
}
Closure(ClosureData {

View File

@ -5,7 +5,7 @@ use roc_problem::can::RuntimeError;
use roc_region::all::{Loc, Region};
use roc_types::types::{Alias, AliasKind, AliasVar, Type};
use crate::abilities::AbilitiesStore;
use crate::abilities::PendingAbilitiesStore;
use bitvec::vec::BitVec;
@ -15,7 +15,7 @@ pub struct Scope {
pub aliases: VecMap<Symbol, Alias>,
/// The abilities currently in scope, and their implementors.
pub abilities_store: AbilitiesStore,
pub abilities_store: PendingAbilitiesStore,
/// The current module being processed. This will be used to turn
/// unqualified idents into Symbols.
@ -35,7 +35,7 @@ impl Scope {
pub fn new(
home: ModuleId,
initial_ident_ids: IdentIds,
starting_abilities_store: AbilitiesStore,
starting_abilities_store: PendingAbilitiesStore,
) -> Scope {
let imports = Symbol::default_in_scope()
.into_iter()
@ -570,7 +570,7 @@ mod test {
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
PendingAbilitiesStore::default(),
);
let region = Region::zero();
@ -589,7 +589,7 @@ mod test {
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
PendingAbilitiesStore::default(),
);
let region1 = Region::from_pos(Position { offset: 10 });
@ -618,7 +618,7 @@ mod test {
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
PendingAbilitiesStore::default(),
);
let region = Region::zero();
@ -639,7 +639,7 @@ mod test {
let scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
PendingAbilitiesStore::default(),
);
let idents: Vec<_> = scope.idents_in_scope().collect();
@ -647,15 +647,15 @@ mod test {
assert_eq!(
&idents,
&[
Ident::from("Box"),
Ident::from("Set"),
Ident::from("Dict"),
Ident::from("Str"),
Ident::from("Ok"),
Ident::from("False"),
Ident::from("List"),
Ident::from("True"),
Ident::from("Str"),
Ident::from("List"),
Ident::from("Ok"),
Ident::from("Err"),
Ident::from("Dict"),
Ident::from("Set"),
Ident::from("Box"),
]
);
}
@ -666,7 +666,7 @@ mod test {
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
PendingAbilitiesStore::default(),
);
let idents: Vec<_> = scope.idents_in_scope().collect();
@ -674,15 +674,15 @@ mod test {
assert_eq!(
&idents,
&[
Ident::from("Box"),
Ident::from("Set"),
Ident::from("Dict"),
Ident::from("Str"),
Ident::from("Ok"),
Ident::from("False"),
Ident::from("List"),
Ident::from("True"),
Ident::from("Str"),
Ident::from("List"),
Ident::from("Ok"),
Ident::from("Err"),
Ident::from("Dict"),
Ident::from("Set"),
Ident::from("Box"),
]
);
@ -737,7 +737,7 @@ mod test {
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
PendingAbilitiesStore::default(),
);
let ident = Ident::from("product");
@ -759,7 +759,7 @@ mod test {
let mut scope = Scope::new(
ModuleId::ATTR,
IdentIds::default(),
AbilitiesStore::default(),
PendingAbilitiesStore::default(),
);
let ident = Ident::from("product");

View File

@ -5,9 +5,9 @@ use roc_region::all::{Loc, Region};
use roc_types::subs::Variable;
use crate::{
abilities::SpecializationId,
abilities::AbilitiesStore,
def::{Annotation, Declaration, Def},
expr::{AccessorData, ClosureData, Expr, Field, WhenBranch},
expr::{self, AccessorData, ClosureData, Expr, Field},
pattern::{DestructType, Pattern, RecordDestruct},
};
@ -163,10 +163,18 @@ pub fn walk_expr<V: Visitor>(visitor: &mut V, expr: &Expr, var: Variable) {
let (var, le) = &**argument;
visitor.visit_expr(&le.value, le.region, *var);
}
Expr::Expect(e1, e2) => {
// TODO: what type does an expect have?
visitor.visit_expr(&e1.value, e1.region, Variable::NULL);
visitor.visit_expr(&e2.value, e2.region, Variable::NULL);
Expr::Expect {
loc_condition,
loc_continuation,
lookups_in_cond: _,
} => {
// TODO: what type does an expect have? The condition is a Bool.
visitor.visit_expr(&loc_condition.value, loc_condition.region, Variable::NULL);
visitor.visit_expr(
&loc_continuation.value,
loc_continuation.region,
Variable::NULL,
);
}
Expr::TypedHole(_) => { /* terminal */ }
Expr::RuntimeError(..) => { /* terminal */ }
@ -195,7 +203,7 @@ pub fn walk_when<V: Visitor>(
cond_var: Variable,
expr_var: Variable,
loc_cond: &Loc<Expr>,
branches: &[WhenBranch],
branches: &[expr::WhenBranch],
) {
visitor.visit_expr(&loc_cond.value, loc_cond.region, cond_var);
@ -205,8 +213,12 @@ pub fn walk_when<V: Visitor>(
}
#[inline(always)]
pub fn walk_when_branch<V: Visitor>(visitor: &mut V, branch: &WhenBranch, expr_var: Variable) {
let WhenBranch {
pub fn walk_when_branch<V: Visitor>(
visitor: &mut V,
branch: &expr::WhenBranch,
expr_var: Variable,
) {
let expr::WhenBranch {
patterns,
value,
guard,
@ -274,32 +286,48 @@ pub fn walk_record_fields<'a, V: Visitor>(
}
pub trait Visitor: Sized {
/// Most default implementations will call [Visitor::should_visit] to decide whether they
/// should descend into a node. Return `false` to skip visiting.
fn should_visit(&mut self, _region: Region) -> bool {
true
}
fn visit_decls(&mut self, decls: &[Declaration]) {
walk_decls(self, decls);
}
fn visit_decl(&mut self, decl: &Declaration) {
walk_decl(self, decl);
if self.should_visit(decl.region()) {
walk_decl(self, decl);
}
}
fn visit_def(&mut self, def: &Def) {
walk_def(self, def);
if self.should_visit(def.region()) {
walk_def(self, def);
}
}
fn visit_annotation(&mut self, _pat: &Annotation) {
// ignore by default
}
fn visit_expr(&mut self, expr: &Expr, _region: Region, var: Variable) {
walk_expr(self, expr, var);
fn visit_expr(&mut self, expr: &Expr, region: Region, var: Variable) {
if self.should_visit(region) {
walk_expr(self, expr, var);
}
}
fn visit_pattern(&mut self, pattern: &Pattern, _region: Region, _opt_var: Option<Variable>) {
walk_pattern(self, pattern);
fn visit_pattern(&mut self, pattern: &Pattern, region: Region, _opt_var: Option<Variable>) {
if self.should_visit(region) {
walk_pattern(self, pattern);
}
}
fn visit_record_destruct(&mut self, destruct: &RecordDestruct, _region: Region) {
walk_record_destruct(self, destruct);
fn visit_record_destruct(&mut self, destruct: &RecordDestruct, region: Region) {
if self.should_visit(region) {
walk_record_destruct(self, destruct);
}
}
}
@ -347,15 +375,18 @@ struct TypeAtVisitor {
}
impl Visitor for TypeAtVisitor {
fn should_visit(&mut self, region: Region) -> bool {
region.contains(&self.region)
}
fn visit_expr(&mut self, expr: &Expr, region: Region, var: Variable) {
if region == self.region {
debug_assert!(self.typ.is_none());
self.typ = Some(var);
return;
}
if region.contains(&self.region) {
walk_expr(self, expr, var);
}
walk_expr(self, expr, var);
}
fn visit_pattern(&mut self, pat: &Pattern, region: Region, opt_var: Option<Variable>) {
@ -364,9 +395,8 @@ impl Visitor for TypeAtVisitor {
self.typ = opt_var;
return;
}
if region.contains(&self.region) {
walk_pattern(self, pat)
}
walk_pattern(self, pat)
}
}
@ -377,36 +407,90 @@ pub fn find_type_at(region: Region, decls: &[Declaration]) -> Option<Variable> {
visitor.typ
}
pub fn find_ability_member_at(
/// Given an ability `Foo has foo : ...`, returns `(T, foo1)` if the symbol at the given region is
/// a symbol `foo1` that specializes `foo` for `T`. If the symbol is `foo` itself but the
/// specialization is unknown, `(Foo, foo)` is returned. Otherwise, [None] is returned.
pub fn find_ability_member_and_owning_type_at(
region: Region,
decls: &[Declaration],
) -> Option<(Symbol, SpecializationId)> {
abilities_store: &AbilitiesStore,
) -> Option<(Symbol, Symbol)> {
let mut visitor = Finder {
region,
found: None,
abilities_store,
};
visitor.visit_decls(decls);
return visitor.found;
struct Finder {
struct Finder<'a> {
region: Region,
found: Option<(Symbol, SpecializationId)>,
abilities_store: &'a AbilitiesStore,
found: Option<(Symbol, Symbol)>,
}
impl Visitor for Finder {
impl Visitor for Finder<'_> {
fn should_visit(&mut self, region: Region) -> bool {
region.contains(&self.region)
}
fn visit_pattern(&mut self, pattern: &Pattern, region: Region, _opt_var: Option<Variable>) {
if region == self.region {
if let Pattern::AbilityMemberSpecialization {
ident: spec_symbol,
specializes: _,
} = pattern
{
debug_assert!(self.found.is_none());
let spec_type =
find_specialization_type_of_symbol(*spec_symbol, self.abilities_store)
.unwrap();
self.found = Some((spec_type, *spec_symbol))
}
}
walk_pattern(self, pattern);
}
fn visit_expr(&mut self, expr: &Expr, region: Region, var: Variable) {
if region == self.region {
if let &Expr::AbilityMember(symbol, specialization_id, _) = expr {
if let &Expr::AbilityMember(member_symbol, specialization_id, _var) = expr {
debug_assert!(self.found.is_none());
self.found = Some((symbol, specialization_id));
self.found = match self.abilities_store.get_resolved(specialization_id) {
Some(spec_symbol) => {
let spec_type = find_specialization_type_of_symbol(
spec_symbol,
self.abilities_store,
)
.unwrap();
Some((spec_type, spec_symbol))
}
None => {
let parent_ability = self
.abilities_store
.member_def(member_symbol)
.unwrap()
.parent_ability;
Some((parent_ability, member_symbol))
}
};
return;
}
}
if region.contains(&self.region) {
walk_expr(self, expr, var);
}
walk_expr(self, expr, var);
}
}
fn find_specialization_type_of_symbol(
symbol: Symbol,
abilities_store: &AbilitiesStore,
) -> Option<Symbol> {
abilities_store
.iter_specializations()
.find(|(_, ms)| ms.symbol == symbol)
.map(|(spec, _)| spec.1)
}
}
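The `should_visit` hook turns every default visit method into a region-pruned descent: a subtree is entered only if its region contains the target, so lookups like `find_type_at` cost roughly the tree depth rather than the tree size. A standalone sketch of the pruning idea (illustrative types):

#[derive(Clone, Copy, PartialEq)]
struct Span {
    start: u32,
    end: u32,
}

impl Span {
    fn contains(&self, other: &Span) -> bool {
        self.start <= other.start && other.end <= self.end
    }
}

struct Node {
    span: Span,
    children: Vec<Node>,
}

fn find<'a>(node: &'a Node, target: Span) -> Option<&'a Node> {
    if node.span == target {
        return Some(node);
    }
    node.children
        .iter()
        .filter(|c| c.span.contains(&target)) // prune disjoint subtrees
        .find_map(|c| find(c, target))
}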
pub fn symbols_introduced_from_pattern(

View File

@ -20,6 +20,13 @@ impl<K, V> VecMap<K, V> {
debug_assert_eq!(self.keys.len(), self.values.len());
self.keys.len()
}
pub fn swap_remove(&mut self, index: usize) -> (K, V) {
let k = self.keys.swap_remove(index);
let v = self.values.swap_remove(index);
(k, v)
}
}
impl<K: PartialEq, V> VecMap<K, V> {
@ -35,13 +42,6 @@ impl<K: PartialEq, V> VecMap<K, V> {
self.keys.is_empty()
}
pub fn swap_remove(&mut self, index: usize) -> (K, V) {
let k = self.keys.swap_remove(index);
let v = self.values.swap_remove(index);
(k, v)
}
pub fn insert(&mut self, key: K, mut value: V) -> Option<V> {
match self.keys.iter().position(|x| x == &key) {
Some(index) => {
@ -123,6 +123,17 @@ impl<K: PartialEq, V> VecMap<K, V> {
pub unsafe fn zip(keys: Vec<K>, values: Vec<V>) -> Self {
Self { keys, values }
}
pub fn drain_filter<F>(&mut self, predicate: F) -> DrainFilter<K, V, F>
where
F: Fn(&K, &V) -> bool,
{
DrainFilter {
vec_map: self,
predicate,
cur_idx: 0,
}
}
}
impl<K: PartialEq, V> Extend<(K, V)> for VecMap<K, V> {
@ -198,3 +209,63 @@ impl<K: PartialEq, V> FromIterator<(K, V)> for VecMap<K, V> {
map
}
}
pub struct DrainFilter<'a, K, V, F>
where
F: Fn(&K, &V) -> bool,
{
vec_map: &'a mut VecMap<K, V>,
predicate: F,
cur_idx: usize,
}
impl<K, V, F> Iterator for DrainFilter<'_, K, V, F>
where
F: Fn(&K, &V) -> bool,
{
type Item = (K, V);
fn next(&mut self) -> Option<Self::Item> {
while self.cur_idx < self.vec_map.len() {
let key = &self.vec_map.keys[self.cur_idx];
let value = &self.vec_map.values[self.cur_idx];
let drain = (self.predicate)(key, value);
if drain {
let kv = self.vec_map.swap_remove(self.cur_idx);
return Some(kv);
} else {
self.cur_idx += 1;
}
}
None
}
}
#[cfg(test)]
mod test_drain_filter {
use crate::VecMap;
#[test]
fn test_nothing() {
let mut map = VecMap::default();
map.extend(vec![(1, 2), (2, 4)]);
let mut iter = map.drain_filter(|k, _| *k == 0);
assert!(iter.next().is_none());
assert_eq!(map.len(), 2);
}
#[test]
fn test_drain() {
let mut map = VecMap::default();
map.extend(vec![(1, 2), (2, 4), (3, 6), (4, 8), (5, 10)]);
let mut drained: Vec<_> = map.drain_filter(|k, _| k % 2 == 0).collect();
drained.sort_unstable();
assert_eq!(drained, vec![(2, 4), (4, 8)]);
assert_eq!(map.len(), 3);
let mut rest: Vec<_> = map.into_iter().collect();
rest.sort_unstable();
assert_eq!(rest, vec![(1, 2), (3, 6), (5, 10)]);
}
}
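One subtlety in `DrainFilter::next` above: when an entry is drained, `swap_remove` moves the last element into the current slot, so `cur_idx` is intentionally not advanced; the swapped-in element still has to be tested. A worked trace, draining even keys:

// Keys [1, 2, 3, 4]:
//   idx 0: key 1 kept             -> keys [1, 2, 3, 4], idx -> 1
//   idx 1: key 2 drained (4 swaps in) -> keys [1, 4, 3], idx stays 1
//   idx 1: key 4 drained (3 swaps in) -> keys [1, 3],    idx stays 1
//   idx 1: key 3 kept             -> keys [1, 3],        idx -> 2 (done)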

View File

@ -440,7 +440,11 @@ pub fn constrain_expr(
constraints.exists_many(vars, cons)
}
Expect(loc_cond, continuation) => {
Expect {
loc_condition,
loc_continuation,
lookups_in_cond,
} => {
let expect_bool = |region| {
let bool_type = Type::Variable(Variable::BOOL);
Expected::ForReason(Reason::ExpectCondition, bool_type, region)
@ -449,20 +453,38 @@ pub fn constrain_expr(
let cond_con = constrain_expr(
constraints,
env,
loc_cond.region,
&loc_cond.value,
expect_bool(loc_cond.region),
loc_condition.region,
&loc_condition.value,
expect_bool(loc_condition.region),
);
let continuation_con = constrain_expr(
constraints,
env,
continuation.region,
&continuation.value,
loc_continuation.region,
&loc_continuation.value,
expected,
);
constraints.exists_many([], [cond_con, continuation_con])
// + 2 for cond_con and continuation_con
let mut all_constraints = Vec::with_capacity(lookups_in_cond.len() + 2);
all_constraints.push(cond_con);
all_constraints.push(continuation_con);
let mut vars = Vec::with_capacity(lookups_in_cond.len());
for (symbol, var) in lookups_in_cond.iter() {
vars.push(*var);
all_constraints.push(constraints.lookup(
*symbol,
NoExpectation(Type::Variable(*var)),
Region::zero(),
));
}
constraints.exists_many(vars, all_constraints)
}
If {

View File

@ -1,6 +1,6 @@
use crate::expr::{constrain_def_make_constraint, constrain_def_pattern, Env};
use roc_builtins::std::StdLib;
use roc_can::abilities::{AbilitiesStore, MemberTypeInfo, SolvedSpecializations};
use roc_can::abilities::{PendingAbilitiesStore, PendingMemberType, ResolvedSpecializations};
use roc_can::constraint::{Constraint, Constraints};
use roc_can::def::Declaration;
use roc_can::expected::Expected;
@ -10,7 +10,7 @@ use roc_error_macros::internal_error;
use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::{Loc, Region};
use roc_types::solved_types::{FreeVars, SolvedType};
use roc_types::subs::{VarStore, Variable};
use roc_types::subs::{ExposedTypesStorageSubs, VarStore, Variable};
use roc_types::types::{AnnotationSource, Category, Type};
/// The types of all exposed values/functions of a collection of modules
@ -88,18 +88,18 @@ impl ExposedForModule {
}
}
/// The types of all exposed values/functions of a module
/// The types of all exposed values/functions of a module. This includes ability member
/// specializations.
#[derive(Clone, Debug)]
pub struct ExposedModuleTypes {
pub stored_vars_by_symbol: Vec<(Symbol, Variable)>,
pub storage_subs: roc_types::subs::StorageSubs,
pub solved_specializations: SolvedSpecializations,
pub exposed_types_storage_subs: ExposedTypesStorageSubs,
pub resolved_specializations: ResolvedSpecializations,
}
pub fn constrain_module(
constraints: &mut Constraints,
symbols_from_requires: Vec<(Loc<Symbol>, Loc<Type>)>,
abilities_store: &AbilitiesStore,
abilities_store: &PendingAbilitiesStore,
declarations: &[Declaration],
home: ModuleId,
) -> Constraint {
@ -176,12 +176,12 @@ fn constrain_symbols_from_requires(
pub fn frontload_ability_constraints(
constraints: &mut Constraints,
abilities_store: &AbilitiesStore,
abilities_store: &PendingAbilitiesStore,
home: ModuleId,
mut constraint: Constraint,
) -> Constraint {
for (member_name, member_data) in abilities_store.root_ability_members().iter() {
if let MemberTypeInfo::Local {
if let PendingMemberType::Local {
signature_var,
variables: vars,
signature,

View File

@ -981,6 +981,9 @@ trait Backend<'a> {
self.set_last_seen(*sym, stmt);
}
}
Stmt::Expect { .. } => todo!("expect is not implemented in the dev backend"),
Stmt::RuntimeError(_) => {}
}
}

View File

@ -2737,6 +2737,81 @@ pub fn build_exp_stmt<'a, 'ctx, 'env>(
}
}
Expect {
condition: cond,
region: _,
lookups: _,
layouts: _,
remainder,
} => {
// Lower the expect: branch on the condition, reporting a failure before continuing.
let bd = env.builder;
let context = env.context;
let (cond, _cond_layout) = load_symbol_and_layout(scope, cond);
let condition = bd.build_int_compare(
IntPredicate::EQ,
cond.into_int_value(),
context.bool_type().const_int(1, false),
"is_true",
);
let then_block = context.append_basic_block(parent, "then_block");
let throw_block = context.append_basic_block(parent, "throw_block");
bd.build_conditional_branch(condition, then_block, throw_block);
{
bd.position_at_end(throw_block);
match env.target_info.ptr_width() {
roc_target::PtrWidth::Bytes8 => {
let func = env
.module
.get_function(bitcode::UTILS_EXPECT_FAILED)
.unwrap();
// TODO get the actual line info instead of
// hardcoding as zero!
let callable = CallableValue::try_from(func).unwrap();
let start_line = context.i32_type().const_int(0, false);
let end_line = context.i32_type().const_int(0, false);
let start_col = context.i16_type().const_int(0, false);
let end_col = context.i16_type().const_int(0, false);
bd.build_call(
callable,
&[
start_line.into(),
end_line.into(),
start_col.into(),
end_col.into(),
],
"call_expect_failed",
);
bd.build_unconditional_branch(then_block);
}
roc_target::PtrWidth::Bytes4 => {
// temporary WASM implementation
throw_exception(env, "An expectation failed!");
}
}
}
bd.position_at_end(then_block);
build_exp_stmt(
env,
layout_ids,
func_spec_solutions,
scope,
parent,
remainder,
)
}
RuntimeError(error_msg) => {
throw_exception(env, error_msg);
@ -6127,67 +6202,6 @@ fn run_low_level<'a, 'ctx, 'env>(
set
}
ExpectTrue => {
debug_assert_eq!(args.len(), 1);
let context = env.context;
let bd = env.builder;
let (cond, _cond_layout) = load_symbol_and_layout(scope, &args[0]);
let condition = bd.build_int_compare(
IntPredicate::EQ,
cond.into_int_value(),
context.bool_type().const_int(1, false),
"is_true",
);
let then_block = context.append_basic_block(parent, "then_block");
let throw_block = context.append_basic_block(parent, "throw_block");
bd.build_conditional_branch(condition, then_block, throw_block);
{
bd.position_at_end(throw_block);
match env.target_info.ptr_width() {
roc_target::PtrWidth::Bytes8 => {
let func = env
.module
.get_function(bitcode::UTILS_EXPECT_FAILED)
.unwrap();
// TODO get the actual line info instead of
// hardcoding as zero!
let callable = CallableValue::try_from(func).unwrap();
let start_line = context.i32_type().const_int(0, false);
let end_line = context.i32_type().const_int(0, false);
let start_col = context.i16_type().const_int(0, false);
let end_col = context.i16_type().const_int(0, false);
bd.build_call(
callable,
&[
start_line.into(),
end_line.into(),
start_col.into(),
end_col.into(),
],
"call_expect_failed",
);
bd.build_unconditional_branch(then_block);
}
roc_target::PtrWidth::Bytes4 => {
// temporary WASM implementation
throw_exception(env, "An expectation failed!");
}
}
}
bd.position_at_end(then_block);
cond
}
ListMap | ListMap2 | ListMap3 | ListMap4 | ListMapWithIndex | ListKeepIf | ListWalk
| ListWalkUntil | ListWalkBackwards | ListKeepOks | ListKeepErrs | ListSortWith
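Conceptually, the `Expect` lowering added above (moved out of the removed `ExpectTrue` low-level) compiles to an inline guard: evaluate the condition, call the shared expect-failed runtime hook when it is false (currently with zeroed source coordinates, per the TODO), and fall through to the remainder. A sketch of the emitted control flow, not of the LLVM builder calls themselves; on 64-bit targets execution continues after the hook, while the temporary wasm path throws instead:

fn expect_stmt(cond: bool, remainder: impl FnOnce()) {
    if !cond {
        // TODO in the real codegen: pass actual source positions, not zeroes.
        expect_failed(0, 0, 0, 0);
    }
    remainder();
}

fn expect_failed(start_line: u32, end_line: u32, start_col: u16, end_col: u16) {
    // Stand-in for bitcode::UTILS_EXPECT_FAILED.
    eprintln!(
        "expect failed at {}:{} - {}:{}",
        start_line, start_col, end_line, end_col
    );
}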

View File

@ -620,6 +620,8 @@ impl<'a> WasmBackend<'a> {
Stmt::Refcounting(modify, following) => self.stmt_refcounting(modify, following),
Stmt::Expect { .. } => todo!("expect is not implemented in the wasm backend"),
Stmt::RuntimeError(msg) => self.stmt_runtime_error(msg),
}
}

View File

@ -802,7 +802,6 @@ impl<'a> LowLevelCall<'a> {
self.load_args(backend);
backend.code_builder.i32_eqz();
}
ExpectTrue => todo!("{:?}", self.lowlevel),
RefCountInc => self.load_args_and_call_zig(backend, bitcode::UTILS_INCREF),
RefCountDec => self.load_args_and_call_zig(backend, bitcode::UTILS_DECREF),

View File

@ -272,7 +272,15 @@ impl std::hash::Hash for IdentStr {
impl Clone for IdentStr {
fn clone(&self) -> Self {
Self::from_str(self.as_str())
if self.is_empty() || self.is_small_str() {
// we can just copy the bytes
Self {
elements: self.elements,
length: self.length,
}
} else {
Self::from_str(self.as_str())
}
}
}
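This `Clone` change is a small-string-optimization fast path: when the string is empty or stored inline, the inline bytes and length can be copied verbatim with no allocation; only heap-backed strings need the `from_str` round trip. In miniature, with an illustrative representation:

enum SmallStr {
    Inline { bytes: [u8; 15], len: u8 }, // contents stored in the value itself
    Heap(String),                        // contents stored behind a pointer
}

impl Clone for SmallStr {
    fn clone(&self) -> Self {
        match self {
            // Inline data can be copied bit-for-bit: no allocation needed.
            SmallStr::Inline { bytes, len } => SmallStr::Inline {
                bytes: *bytes,
                len: *len,
            },
            // Heap data must be re-allocated and copied.
            SmallStr::Heap(s) => SmallStr::Heap(s.clone()),
        }
    }
}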

View File

@ -0,0 +1,13 @@
[package]
name = "roc_late_solve"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"
[dependencies]
roc_types = { path = "../types" }
roc_can = { path = "../can" }
roc_unify = { path = "../unify" }
roc_solve = { path = "../solve" }
bumpalo = { version = "3.8.0", features = ["collections"] }

View File

@ -0,0 +1,52 @@
//! Crate roc_late_solve exposes type unification and solving primitives from the perspective of
//! the compiler backend.
use bumpalo::Bump;
use roc_can::abilities::AbilitiesStore;
use roc_solve::solve::{compact_lambda_sets_of_vars, Phase, Pools};
use roc_types::subs::{Subs, Variable};
use roc_unify::unify::{unify as unify_unify, Mode, Unified};
#[derive(Debug)]
pub struct UnificationFailed;
/// Unifies two variables and performs lambda set compaction.
/// Ranks and other ability demands are disregarded.
pub fn unify(
arena: &Bump,
subs: &mut Subs,
abilities_store: &AbilitiesStore,
left: Variable,
right: Variable,
) -> Result<(), UnificationFailed> {
let unified = unify_unify(subs, left, right, Mode::EQ);
match unified {
Unified::Success {
vars: _,
must_implement_ability: _,
lambda_sets_to_specialize,
} => {
let mut pools = Pools::default();
compact_lambda_sets_of_vars(
subs,
arena,
&mut pools,
abilities_store,
lambda_sets_to_specialize,
Phase::Late,
);
// Pools are only used to keep track of variable ranks for generalization purposes.
// Since we break generalization during monomorphization, `pools` is irrelevant
// here. We only need it for `compact_lambda_sets_of_vars`, which is also used in a
// solving context where pools are relevant.
Ok(())
}
Unified::Failure(..) | Unified::BadType(..) => Err(UnificationFailed),
}
}
pub use roc_solve::solve::instantiate_rigids;
pub use roc_solve::ability::resolve_ability_specialization;
pub use roc_solve::ability::Resolved;
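A sketch of how a backend consumer might call this API, assuming it already holds the module's `Subs` and resolved `AbilitiesStore` (the wrapper name here is illustrative):

use bumpalo::Bump;
use roc_late_solve::{unify, UnificationFailed};

fn unify_for_specialization(
    subs: &mut roc_types::subs::Subs,
    abilities_store: &roc_can::abilities::AbilitiesStore,
    left: roc_types::subs::Variable,
    right: roc_types::subs::Variable,
) -> Result<(), UnificationFailed> {
    let arena = Bump::new();
    // Unify the two types and compact any lambda sets that became concrete.
    unify(&arena, subs, abilities_store, left, right)
}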

View File

@ -158,7 +158,7 @@ fn generate_entry_docs<'a>(
ValueDef::Annotation(loc_pattern, loc_ann) => {
if let Pattern::Identifier(identifier) = loc_pattern.value {
// Check if the definition is exposed
if ident_ids.get_id(&identifier.into()).is_some() {
if ident_ids.get_id(identifier).is_some() {
let name = identifier.to_string();
let doc_def = DocDef {
name,
@ -179,7 +179,7 @@ fn generate_entry_docs<'a>(
} => {
if let Pattern::Identifier(identifier) = ann_pattern.value {
// Check if the definition is exposed
if ident_ids.get_id(&identifier.into()).is_some() {
if ident_ids.get_id(identifier).is_some() {
let doc_def = DocDef {
name: identifier.to_string(),
type_annotation: type_to_docs(false, ann_type.value),

View File

@ -6,7 +6,7 @@ use crossbeam::thread;
use parking_lot::Mutex;
use roc_builtins::roc::module_source;
use roc_builtins::std::borrow_stdlib;
use roc_can::abilities::{AbilitiesStore, SolvedSpecializations};
use roc_can::abilities::{AbilitiesStore, PendingAbilitiesStore, ResolvedSpecializations};
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints};
use roc_can::def::Declaration;
use roc_can::expr::PendingDerives;
@ -128,7 +128,7 @@ struct ModuleCache<'a> {
headers: MutMap<ModuleId, ModuleHeader<'a>>,
parsed: MutMap<ModuleId, ParsedModule<'a>>,
aliases: MutMap<ModuleId, MutMap<Symbol, (bool, Alias)>>,
abilities: MutMap<ModuleId, AbilitiesStore>,
pending_abilities: MutMap<ModuleId, PendingAbilitiesStore>,
constrained: MutMap<ModuleId, ConstrainedModule>,
typechecked: MutMap<ModuleId, TypeCheckedModule<'a>>,
found_specializations: MutMap<ModuleId, FoundSpecializationsModule<'a>>,
@ -175,7 +175,7 @@ impl Default for ModuleCache<'_> {
headers: Default::default(),
parsed: Default::default(),
aliases: Default::default(),
abilities: Default::default(),
pending_abilities: Default::default(),
constrained: Default::default(),
typechecked: Default::default(),
found_specializations: Default::default(),
@ -295,7 +295,7 @@ fn start_phase<'a>(
.clone();
let mut aliases = MutMap::default();
let mut abilities_store = AbilitiesStore::default();
let mut abilities_store = PendingAbilitiesStore::default();
for imported in parsed.imported_modules.keys() {
match state.module_cache.aliases.get(imported) {
@ -315,7 +315,7 @@ fn start_phase<'a>(
}
}
match state.module_cache.abilities.get(imported) {
match state.module_cache.pending_abilities.get(imported) {
None => unreachable!(
r"imported module {:?} did not register its abilities, so {:?} cannot use them",
imported, parsed.module_id,
@ -880,7 +880,7 @@ enum BuildTask<'a> {
dep_idents: IdentIdsByModule,
exposed_symbols: VecSet<Symbol>,
aliases: MutMap<Symbol, Alias>,
abilities_store: AbilitiesStore,
abilities_store: PendingAbilitiesStore,
skip_constraint_gen: bool,
},
Solve {
@ -2046,7 +2046,7 @@ fn update<'a>(
state
.module_cache
.abilities
.pending_abilities
.insert(module_id, constrained_module.module.abilities_store.clone());
state
@ -2144,9 +2144,8 @@ fn update<'a>(
state.exposed_types.insert(
module_id,
ExposedModuleTypes {
stored_vars_by_symbol: solved_module.stored_vars_by_symbol,
storage_subs: solved_module.storage_subs,
solved_specializations: solved_module.solved_specializations,
exposed_types_storage_subs: solved_module.exposed_types,
resolved_specializations: solved_module.solved_specializations,
},
);
@ -2234,6 +2233,8 @@ fn update<'a>(
layout_cache,
..
} => {
debug_assert!(state.goal_phase == Phase::MakeSpecializations);
log!("made specializations for {:?}", module_id);
// in the future, layouts will be in SoA form and we'll want to hold on to this data
@ -2246,10 +2247,22 @@ fn update<'a>(
.dependencies
.notify(module_id, Phase::MakeSpecializations);
if work.is_empty()
&& state.dependencies.solved_all()
&& state.goal_phase == Phase::MakeSpecializations
{
if work.is_empty() && state.dependencies.solved_all() {
if !external_specializations_requested.is_empty()
|| !state
.module_cache
.external_specializations_requested
.is_empty()
{
internal_error!(
"No more work left, but external specializations left over: {:?}, {:?}",
external_specializations_requested,
state.module_cache.external_specializations_requested
)
}
log!("specializations complete from {:?}", module_id);
debug_print_ir!(state, ROC_PRINT_IR_AFTER_SPECIALIZATION);
Proc::insert_reset_reuse_operations(
@ -2283,19 +2296,6 @@ fn update<'a>(
state.constrained_ident_ids.insert(module_id, ident_ids);
for (module_id, requested) in external_specializations_requested {
let existing = match state
.module_cache
.external_specializations_requested
.entry(module_id)
{
Vacant(entry) => entry.insert(vec![]),
Occupied(entry) => entry.into_mut(),
};
existing.push(requested);
}
// use the subs of the root module;
// this is used in the repl to find the type of `main`
let subs = if module_id == state.root_id {
@ -2312,9 +2312,7 @@ fn update<'a>(
})
.map_err(|_| LoadingProblem::MsgChannelDied)?;
// As far as type-checking goes, once we've solved
// the originally requested module, we're all done!
return Ok(state);
Ok(state)
} else {
// record the subs of the root module;
// this is used in the repl to find the type of `main`
@ -2338,9 +2336,9 @@ fn update<'a>(
}
start_tasks(arena, &mut state, work, injector, worker_listeners)?;
}
Ok(state)
Ok(state)
}
}
Msg::FinishedAllTypeChecking { .. } => {
unreachable!();
@ -3152,11 +3150,11 @@ fn send_header<'a>(
let ident_ids = ident_ids_by_module.get_or_insert(module_id);
for ident in exposed_idents {
let ident_id = ident_ids.get_or_insert(&ident);
let ident_id = ident_ids.get_or_insert(ident.as_str());
let symbol = Symbol::new(module_id, ident_id);
// Since this value is exposed, add it to our module's default scope.
debug_assert!(!scope.contains_key(&ident.clone()));
debug_assert!(!scope.contains_key(&ident));
scope.insert(ident, (symbol, region));
}
@ -3178,7 +3176,7 @@ fn send_header<'a>(
// For example, if module A has [B.{ foo }], then
// when we get here for B, `foo` will already have
// an IdentId. We must reuse that!
let ident_id = ident_ids.get_or_insert(&loc_exposed.value.as_str().into());
let ident_id = ident_ids.get_or_insert(loc_exposed.value.as_str());
let symbol = Symbol::new(home, ident_id);
exposed.push(symbol);
@ -3361,7 +3359,7 @@ fn send_header_two<'a>(
let ident_ids = ident_ids_by_module.get_or_insert(module_id);
for ident in exposed_idents {
let ident_id = ident_ids.get_or_insert(&ident);
let ident_id = ident_ids.get_or_insert(ident.as_str());
let symbol = Symbol::new(module_id, ident_id);
// Since this value is exposed, add it to our module's default scope.
@ -3385,7 +3383,7 @@ fn send_header_two<'a>(
for entry in requires {
let entry = entry.value;
let ident: Ident = entry.ident.value.into();
let ident_id = ident_ids.get_or_insert(&ident);
let ident_id = ident_ids.get_or_insert(entry.ident.value);
let symbol = Symbol::new(module_id, ident_id);
// Since this value is exposed, add it to our module's default scope.
@ -3398,11 +3396,11 @@ fn send_header_two<'a>(
for entry in requires_types {
let string: &str = entry.value.into();
let ident: Ident = string.into();
let ident_id = ident_ids.get_or_insert(&ident);
let ident_id = ident_ids.get_or_insert(string);
let symbol = Symbol::new(module_id, ident_id);
// Since this value is exposed, add it to our module's default scope.
debug_assert!(!scope.contains_key(&ident.clone()));
debug_assert!(!scope.contains_key(&ident));
scope.insert(ident, (symbol, entry.region));
}
}
@ -3423,7 +3421,7 @@ fn send_header_two<'a>(
// For example, if module A has [B.{ foo }], then
// when we get here for B, `foo` will already have
// an IdentId. We must reuse that!
let ident_id = ident_ids.get_or_insert(&loc_exposed.value.as_str().into());
let ident_id = ident_ids.get_or_insert(loc_exposed.value.as_str());
let symbol = Symbol::new(home, ident_id);
exposed.push(symbol);
@ -3450,8 +3448,7 @@ fn send_header_two<'a>(
let module_name = ModuleNameEnum::PkgConfig;
let main_for_host = {
let ident_str: Ident = provides[0].value.as_str().into();
let ident_id = ident_ids.get_or_insert(&ident_str);
let ident_id = ident_ids.get_or_insert(provides[0].value.as_str());
Symbol::new(home, ident_id)
};
@ -3502,7 +3499,7 @@ impl<'a> BuildTask<'a> {
// TODO trim down these arguments - possibly by moving Constraint into Module
#[allow(clippy::too_many_arguments)]
fn solve_module(
mut module: Module,
module: Module,
ident_ids: IdentIds,
module_timing: ModuleTiming,
constraints: Constraints,
@ -3517,21 +3514,6 @@ impl<'a> BuildTask<'a> {
) -> Self {
let exposed_by_module = exposed_types.retain_modules(imported_modules.keys());
let abilities_store = &mut module.abilities_store;
for module in imported_modules.keys() {
let exposed = exposed_by_module
.get(module)
.unwrap_or_else(|| internal_error!("No exposed types for {:?}", module));
let ExposedModuleTypes {
solved_specializations,
..
} = exposed;
for ((member, typ), specialization) in solved_specializations.iter() {
abilities_store.register_specialization_for_type(*member, *typ, *specialization);
}
}
let exposed_for_module =
ExposedForModule::new(module.referenced_values.iter(), exposed_by_module);
@ -3561,62 +3543,120 @@ impl<'a> BuildTask<'a> {
}
fn add_imports(
my_module: ModuleId,
subs: &mut Subs,
abilities_store: &mut AbilitiesStore,
mut pending_abilities: PendingAbilitiesStore,
mut exposed_for_module: ExposedForModule,
def_types: &mut Vec<(Symbol, Loc<roc_types::types::Type>)>,
rigid_vars: &mut Vec<Variable>,
) -> Vec<Variable> {
) -> (Vec<Variable>, AbilitiesStore) {
let mut import_variables = Vec::new();
for symbol in exposed_for_module.imported_values {
let module_id = symbol.module_id();
match exposed_for_module.exposed_by_module.get_mut(&module_id) {
Some(ExposedModuleTypes {
stored_vars_by_symbol,
storage_subs,
solved_specializations: _,
}) => {
let variable = match stored_vars_by_symbol.iter().find(|(s, _)| *s == symbol) {
None => {
// Today we define builtins in each module that uses them
// so even though they have a different module name from
// the surrounding module, they are not technically imported
debug_assert!(symbol.is_builtin());
continue;
}
Some((_, x)) => *x,
};
let mut cached_symbol_vars = VecMap::default();
let copied_import = storage_subs.export_variable_to(subs, variable);
macro_rules! import_var_for_symbol {
($subs:expr, $exposed_by_module:expr, $symbol:ident, $break:stmt) => {
let module_id = $symbol.module_id();
match $exposed_by_module.get_mut(&module_id) {
Some(ExposedModuleTypes {
exposed_types_storage_subs: exposed_types,
resolved_specializations: _,
}) => {
let variable = match exposed_types.stored_vars_by_symbol.iter().find(|(s, _)| **s == $symbol) {
None => {
// Today we define builtins in each module that uses them
// so even though they have a different module name from
// the surrounding module, they are not technically imported
debug_assert!($symbol.is_builtin());
$break
}
Some((_, x)) => *x,
};
def_types.push((
symbol,
Loc::at_zero(roc_types::types::Type::Variable(copied_import.variable)),
));
let copied_import = exposed_types.storage_subs.export_variable_to($subs, variable);
// not a typo; rigids are turned into flex during type inference, but when imported we must
// consider them rigid variables
rigid_vars.extend(copied_import.rigid);
rigid_vars.extend(copied_import.flex);
def_types.push((
$symbol,
Loc::at_zero(roc_types::types::Type::Variable(copied_import.variable)),
));
// Rigid vars bound to abilities are also treated like rigids.
rigid_vars.extend(copied_import.rigid_able);
rigid_vars.extend(copied_import.flex_able);
// not a typo; rigids are turned into flex during type inference, but when imported we must
// consider them rigid variables
rigid_vars.extend(copied_import.rigid);
rigid_vars.extend(copied_import.flex);
import_variables.extend(copied_import.registered);
// Rigid vars bound to abilities are also treated like rigids.
rigid_vars.extend(copied_import.rigid_able);
rigid_vars.extend(copied_import.flex_able);
if abilities_store.is_ability_member_name(symbol) {
abilities_store.resolved_imported_member_var(symbol, copied_import.variable);
import_variables.extend(copied_import.registered);
cached_symbol_vars.insert($symbol, copied_import.variable);
}
None => {
internal_error!("Imported module {:?} is not available", module_id)
}
}
None => {
internal_error!("Imported module {:?} is not available", module_id)
}
}
}
import_variables
for symbol in exposed_for_module.imported_values {
import_var_for_symbol!(subs, exposed_for_module.exposed_by_module, symbol, continue);
}
// TODO: see if we can reduce the number of specializations we need to import.
// One idea is to just always assume external modules fulfill their specialization obligations
// and save lambda set resolution for mono.
for (_, module_types) in exposed_for_module.exposed_by_module.iter_all() {
for ((member, typ), specialization) in module_types.resolved_specializations.iter() {
pending_abilities.import_specialization(*member, *typ, specialization)
}
}
struct Ctx<'a> {
subs: &'a mut Subs,
exposed_by_module: &'a mut ExposedByModule,
}
let abilities_store = pending_abilities.resolve_for_module(
my_module,
&mut Ctx {
subs,
exposed_by_module: &mut exposed_for_module.exposed_by_module,
},
|ctx, symbol| match cached_symbol_vars.get(&symbol).copied() {
Some(var) => var,
None => {
import_var_for_symbol!(
ctx.subs,
ctx.exposed_by_module,
symbol,
internal_error!("Import ability member {:?} not available", symbol)
);
*cached_symbol_vars.get(&symbol).unwrap()
}
},
|ctx, module, lset_var| match ctx.exposed_by_module.get_mut(&module) {
Some(ExposedModuleTypes {
exposed_types_storage_subs: exposed_types,
resolved_specializations: _,
}) => {
let var = exposed_types
.stored_specialization_lambda_set_vars
.get(&lset_var)
.expect("Lambda set var from other module not available");
let copied_import = exposed_types
.storage_subs
.export_variable_to(ctx.subs, *var);
copied_import.variable
}
None => internal_error!("Imported module {:?} is not available", module),
},
);
(import_variables, abilities_store)
}
#[allow(clippy::complexity)]
@ -3630,7 +3670,7 @@ fn run_solve_solve(
module: Module,
) -> (
Solved<Subs>,
SolvedSpecializations,
ResolvedSpecializations,
Vec<(Symbol, Variable)>,
Vec<solve::TypeError>,
AbilitiesStore,
@ -3639,7 +3679,7 @@ fn run_solve_solve(
exposed_symbols,
aliases,
rigid_variables,
mut abilities_store,
abilities_store: pending_abilities,
..
} = module;
@ -3648,9 +3688,10 @@ fn run_solve_solve(
let mut subs = Subs::new_from_varstore(var_store);
let import_variables = add_imports(
let (import_variables, abilities_store) = add_imports(
module.module_id,
&mut subs,
&mut abilities_store,
pending_abilities,
exposed_for_module,
&mut def_types,
&mut rigid_vars,
@ -3678,13 +3719,14 @@ fn run_solve_solve(
let module_id = module.module_id;
// Figure out what specializations belong to this module
let solved_specializations: SolvedSpecializations = abilities_store
let solved_specializations: ResolvedSpecializations = abilities_store
.iter_specializations()
.filter(|((member, typ), _)| {
// This module solved this specialization if either the member or the type comes from the
// module.
member.module_id() == module_id || typ.module_id() == module_id
})
.map(|(key, specialization)| (key, specialization.clone()))
.collect();
let is_specialization_symbol =
@ -3775,16 +3817,18 @@ fn run_solve<'a>(
};
let mut solved_subs = solved_subs;
let (storage_subs, stored_vars_by_symbol) =
roc_solve::module::exposed_types_storage_subs(&mut solved_subs, &exposed_vars_by_symbol);
let exposed_types = roc_solve::module::exposed_types_storage_subs(
&mut solved_subs,
&exposed_vars_by_symbol,
&solved_specializations,
);
let solved_module = SolvedModule {
exposed_vars_by_symbol,
problems,
aliases,
stored_vars_by_symbol,
solved_specializations,
storage_subs,
exposed_types,
};
// Record the final timings
@ -3862,7 +3906,7 @@ fn canonicalize_and_constrain<'a>(
dep_idents: IdentIdsByModule,
exposed_symbols: VecSet<Symbol>,
aliases: MutMap<Symbol, Alias>,
imported_abilities_state: AbilitiesStore,
imported_abilities_state: PendingAbilitiesStore,
parsed: ParsedModule<'a>,
skip_constraint_gen: bool,
) -> CanAndCon {
@ -4122,7 +4166,7 @@ fn make_specializations<'a>(
specializations_we_must_make: Vec<ExternalSpecializations>,
mut module_timing: ModuleTiming,
target_info: TargetInfo,
mut abilities_store: AbilitiesStore,
abilities_store: AbilitiesStore,
) -> Msg<'a> {
let make_specializations_start = SystemTime::now();
let mut update_mode_ids = UpdateModeIds::new();
@ -4136,7 +4180,7 @@ fn make_specializations<'a>(
update_mode_ids: &mut update_mode_ids,
// call_specialization_counter=0 is reserved
call_specialization_counter: 1,
abilities_store: &mut abilities_store,
abilities_store: &abilities_store,
};
let mut procs = Procs::new_in(arena);
@ -4206,7 +4250,7 @@ fn build_pending_specializations<'a>(
target_info: TargetInfo,
// TODO remove
exposed_to_host: ExposedToHost,
mut abilities_store: AbilitiesStore,
abilities_store: AbilitiesStore,
) -> Msg<'a> {
let find_specializations_start = SystemTime::now();
@ -4231,7 +4275,7 @@ fn build_pending_specializations<'a>(
update_mode_ids: &mut update_mode_ids,
// call_specialization_counter=0 is reserved
call_specialization_counter: 1,
abilities_store: &mut abilities_store,
abilities_store: &abilities_store,
};
// Add modules' decls to Procs

View File

@ -30,6 +30,7 @@ mod test_load {
use roc_reporting::report::RocDocAllocator;
use roc_target::TargetInfo;
use roc_types::pretty_print::name_and_print_var;
use roc_types::pretty_print::DebugPrint;
use roc_types::subs::Subs;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
@ -237,7 +238,8 @@ mod test_load {
expected_types: &mut HashMap<&str, &str>,
) {
for (symbol, expr_var) in &def.pattern_vars {
let actual_str = name_and_print_var(*expr_var, subs, home, interns);
let actual_str =
name_and_print_var(*expr_var, subs, home, interns, DebugPrint::NOTHING);
let fully_qualified = symbol.fully_qualified(interns, home).to_string();
let expected_type = expected_types
.remove(fully_qualified.as_str())

View File

@ -123,7 +123,6 @@ pub enum LowLevel {
Or,
Not,
Hash,
ExpectTrue,
PtrCast,
RefCountInc,
RefCountDec,

View File

@ -1,10 +1,9 @@
use crate::ident::{Ident, ModuleName};
use crate::module_err::{IdentIdNotFound, ModuleIdNotFound, ModuleResult};
use roc_collections::{default_hasher, MutMap, SendMap, SmallStringInterner, VecMap};
use roc_collections::{SmallStringInterner, VecMap};
use roc_ident::IdentStr;
use roc_region::all::Region;
use snafu::OptionExt;
use std::collections::HashMap;
use std::num::NonZeroU32;
use std::{fmt, u32};
@ -238,12 +237,12 @@ lazy_static! {
/// which displays not only the Module ID, but also the Module Name which
/// corresponds to that ID.
///
static ref DEBUG_MODULE_ID_NAMES: std::sync::Mutex<roc_collections::all::MutMap<u32, Box<str>>> =
static ref DEBUG_MODULE_ID_NAMES: std::sync::Mutex<roc_collections::SmallStringInterner> =
// This stores a u32 key instead of a ModuleId key so that if there's
// a problem with ModuleId's Debug implementation, logging this for diagnostic
// purposes won't recursively trigger ModuleId's Debug instance in the course of printing
// this out.
std::sync::Mutex::new(roc_collections::all::MutMap::default());
std::sync::Mutex::new(roc_collections::SmallStringInterner::with_capacity(10));
}
#[derive(Debug, Default)]
@ -270,7 +269,7 @@ impl Interns {
let ident: Ident = ident.into();
match self.all_ident_ids.get(&module_id) {
Some(ident_ids) => match ident_ids.get_id(&ident) {
Some(ident_ids) => match ident_ids.get_id(ident.as_str()) {
Some(ident_id) => Symbol::new(module_id, ident_id),
None => {
panic!("Interns::symbol could not find ident entry for {:?} for module {:?} in Interns {:?}", ident, module_id, self);
@ -319,12 +318,12 @@ lazy_static! {
/// This is used in Debug builds only, to let us have a Debug instance
/// which displays not only the Module ID, but also the Module Name which
/// corresponds to that ID.
static ref DEBUG_IDENT_IDS_BY_MODULE_ID: std::sync::Mutex<roc_collections::all::MutMap<u32, IdentIds>> =
static ref DEBUG_IDENT_IDS_BY_MODULE_ID: std::sync::Mutex<roc_collections::VecMap<u32, IdentIds>> =
// This stores a u32 key instead of a ModuleId key so that if there's
// a problem with ModuleId's Debug implementation, logging this for diagnostic
// purposes won't recursively trigger ModuleId's Debug instance in the course of printing
// this out.
std::sync::Mutex::new(roc_collections::all::MutMap::default());
std::sync::Mutex::new(roc_collections::VecMap::default());
}
/// A globally unique ID that gets assigned to each module as it is loaded.
@ -386,8 +385,8 @@ impl fmt::Debug for ModuleId {
.expect("Failed to acquire lock for Debug reading from DEBUG_MODULE_ID_NAMES, presumably because a thread panicked.");
if PRETTY_PRINT_DEBUG_SYMBOLS {
match names.get(&(self.to_zero_indexed() as u32)) {
Some(str_ref) => write!(f, "{}", str_ref.clone()),
match names.try_get(self.to_zero_indexed()) {
Some(str_ref) => write!(f, "{}", str_ref),
None => {
panic!(
"Could not find a Debug name for module ID {} in {:?}",
@ -434,60 +433,49 @@ impl<'a, T> PackageQualified<'a, T> {
#[derive(Debug, Clone)]
pub struct PackageModuleIds<'a> {
by_name: MutMap<PQModuleName<'a>, ModuleId>,
by_id: Vec<PQModuleName<'a>>,
}
impl<'a> PackageModuleIds<'a> {
pub fn get_or_insert(&mut self, module_name: &PQModuleName<'a>) -> ModuleId {
match self.by_name.get(module_name) {
Some(id) => *id,
None => {
let by_id = &mut self.by_id;
let module_id = ModuleId::from_zero_indexed(by_id.len());
by_id.push(module_name.clone());
self.by_name.insert(module_name.clone(), module_id);
if cfg!(debug_assertions) {
Self::insert_debug_name(module_id, module_name);
}
module_id
}
if let Some(module_id) = self.get_id(module_name) {
return module_id;
}
// didn't find it, so we'll add it
let module_id = ModuleId::from_zero_indexed(self.by_id.len());
self.by_id.push(module_name.clone());
if cfg!(debug_assertions) {
Self::insert_debug_name(module_id, module_name);
}
module_id
}
pub fn into_module_ids(self) -> ModuleIds {
let by_name: MutMap<ModuleName, ModuleId> = self
.by_name
.into_iter()
.map(|(pqname, module_id)| (pqname.as_inner().clone(), module_id))
.collect();
let by_id: Vec<ModuleName> = self
.by_id
.into_iter()
.map(|pqname| pqname.as_inner().clone())
.collect();
ModuleIds { by_name, by_id }
ModuleIds { by_id }
}
#[cfg(debug_assertions)]
fn insert_debug_name(module_id: ModuleId, module_name: &PQModuleName) {
let mut names = DEBUG_MODULE_ID_NAMES.lock().expect("Failed to acquire lock for Debug interning into DEBUG_MODULE_ID_NAMES, presumably because a thread panicked.");
names
.entry(module_id.to_zero_indexed() as u32)
.or_insert_with(|| match module_name {
PQModuleName::Unqualified(module) => module.as_str().into(),
PQModuleName::Qualified(package, module) => {
let name = format!("{}.{}", package, module.as_str()).into();
name
if names.try_get(module_id.to_zero_indexed()).is_none() {
match module_name {
PQModuleName::Unqualified(module) => {
names.insert(module.as_str());
}
});
PQModuleName::Qualified(package, module) => {
names.insert(&format!("{}.{}", package, module.as_str()));
}
}
}
}
#[cfg(not(debug_assertions))]
@ -495,8 +483,14 @@ impl<'a> PackageModuleIds<'a> {
// By design, this is a no-op in release builds!
}
pub fn get_id(&self, module_name: &PQModuleName<'a>) -> Option<&ModuleId> {
self.by_name.get(module_name)
pub fn get_id(&self, module_name: &PQModuleName<'a>) -> Option<ModuleId> {
for (index, name) in self.by_id.iter().enumerate() {
if name == module_name {
return Some(ModuleId::from_zero_indexed(index));
}
}
None
}
pub fn get_name(&self, id: ModuleId) -> Option<&PQModuleName> {
@ -509,35 +503,26 @@ impl<'a> PackageModuleIds<'a> {
}
/// Stores a mapping between ModuleId and InlinableString.
///
/// Each module name is stored twice, for faster lookups.
/// Since these are interned strings, this shouldn't result in many total allocations in practice.
#[derive(Debug, Clone)]
pub struct ModuleIds {
by_name: MutMap<ModuleName, ModuleId>,
/// Each ModuleId is an index into this Vec
by_id: Vec<ModuleName>,
}
impl ModuleIds {
pub fn get_or_insert(&mut self, module_name: &ModuleName) -> ModuleId {
match self.by_name.get(module_name) {
Some(id) => *id,
None => {
let by_id = &mut self.by_id;
let module_id = ModuleId::from_zero_indexed(by_id.len());
by_id.push(module_name.clone());
self.by_name.insert(module_name.clone(), module_id);
if cfg!(debug_assertions) {
Self::insert_debug_name(module_id, module_name);
}
module_id
}
if let Some(module_id) = self.get_id(module_name) {
return module_id;
}
// didn't find it, so we'll add it
let module_id = ModuleId::from_zero_indexed(self.by_id.len());
self.by_id.push(module_name.clone());
if cfg!(debug_assertions) {
Self::insert_debug_name(module_id, module_name);
}
module_id
}
#[cfg(debug_assertions)]
@ -545,9 +530,9 @@ impl ModuleIds {
let mut names = DEBUG_MODULE_ID_NAMES.lock().expect("Failed to acquire lock for Debug interning into DEBUG_MODULE_ID_NAMES, presumably because a thread panicked.");
// TODO make sure modules are never added more than once!
names
.entry(module_id.to_zero_indexed() as u32)
.or_insert_with(|| module_name.as_str().to_string().into());
if names.try_get(module_id.to_zero_indexed()).is_none() {
names.insert(module_name.as_str());
}
}
#[cfg(not(debug_assertions))]
@ -555,8 +540,15 @@ impl ModuleIds {
// By design, this is a no-op in release builds!
}
pub fn get_id(&self, module_name: &ModuleName) -> Option<&ModuleId> {
self.by_name.get(module_name)
#[inline]
pub fn get_id(&self, module_name: &ModuleName) -> Option<ModuleId> {
for (index, name) in self.by_id.iter().enumerate() {
if name == module_name {
return Some(ModuleId::from_zero_indexed(index));
}
}
None
}
pub fn get_name(&self, id: ModuleId) -> Option<&ModuleName> {
@ -597,10 +589,6 @@ impl IdentIds {
.map(|(index, ident)| (IdentId(index as u32), ident))
}
pub fn add_ident(&mut self, ident_name: &Ident) -> IdentId {
self.add_str(ident_name.as_str())
}
pub fn add_str(&mut self, ident_name: &str) -> IdentId {
IdentId(self.interner.insert(ident_name) as u32)
}
@ -609,10 +597,10 @@ impl IdentIds {
IdentId(self.interner.duplicate(ident_id.0 as usize) as u32)
}
pub fn get_or_insert(&mut self, name: &Ident) -> IdentId {
pub fn get_or_insert(&mut self, name: &str) -> IdentId {
match self.get_id(name) {
Some(id) => id,
None => self.add_str(name.as_str()),
None => self.add_str(name),
}
}
@ -637,9 +625,9 @@ impl IdentIds {
}
#[inline(always)]
pub fn get_id(&self, ident_name: &Ident) -> Option<IdentId> {
pub fn get_id(&self, ident_name: &str) -> Option<IdentId> {
self.interner
.find_index(ident_name.as_str())
.find_index(ident_name)
.map(|i| IdentId(i as u32))
}
@ -867,7 +855,6 @@ macro_rules! define_builtins {
// +1 because the user will be compiling at least 1 non-builtin module!
let capacity = $total + 1;
let mut by_name = HashMap::with_capacity_and_hasher(capacity, default_hasher());
let mut by_id = Vec::with_capacity(capacity);
let mut insert_both = |id: ModuleId, name_str: &'static str| {
@ -877,7 +864,6 @@ macro_rules! define_builtins {
Self::insert_debug_name(id, &name);
}
by_name.insert(name.clone(), id);
by_id.push(name);
};
@ -885,7 +871,7 @@ macro_rules! define_builtins {
insert_both(ModuleId::$module_const, $module_name);
)+
ModuleIds { by_name, by_id }
ModuleIds { by_id }
}
}
@ -894,7 +880,6 @@ macro_rules! define_builtins {
// +1 because the user will be compiling at least 1 non-builtin module!
let capacity = $total + 1;
let mut by_name = HashMap::with_capacity_and_hasher(capacity, default_hasher());
let mut by_id = Vec::with_capacity(capacity);
let mut insert_both = |id: ModuleId, name_str: &'static str| {
@ -905,7 +890,6 @@ macro_rules! define_builtins {
Self::insert_debug_name(id, &name);
}
by_name.insert(name.clone(), id);
by_id.push(name);
};
@ -913,7 +897,7 @@ macro_rules! define_builtins {
insert_both(ModuleId::$module_const, $module_name);
)+
PackageModuleIds { by_name, by_id }
PackageModuleIds { by_id }
}
}
@ -928,8 +912,8 @@ macro_rules! define_builtins {
/// and what symbols they should resolve to.
///
/// This is for type aliases like `Int` and `Str` and such.
pub fn default_in_scope() -> SendMap<Ident, (Symbol, Region)> {
let mut scope = SendMap::default();
pub fn default_in_scope() -> VecMap<Ident, (Symbol, Region)> {
let mut scope = VecMap::default();
$(
$(
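
With the `by_name` hash maps gone, `ModuleIds` and `PackageModuleIds` keep only the `by_id` Vec, and `get_id` becomes a linear scan. A minimal sketch of the pattern, under the assumption that motivated it (module counts are small, so the scan is cheap and the duplicate storage is saved):

```rust
// Generic form of the Vec-backed ID map this diff switches to: an ID is
// just an index into `by_id`, and lookup is a linear scan.
struct IdMap<T: PartialEq + Clone> {
    by_id: Vec<T>,
}

impl<T: PartialEq + Clone> IdMap<T> {
    fn get_id(&self, value: &T) -> Option<usize> {
        self.by_id.iter().position(|existing| existing == value)
    }

    fn get_or_insert(&mut self, value: &T) -> usize {
        match self.get_id(value) {
            Some(id) => id,
            None => {
                // Didn't find it, so we'll add it.
                let id = self.by_id.len();
                self.by_id.push(value.clone());
                id
            }
        }
    }
}
```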

View File

@ -12,8 +12,7 @@ roc_region = { path = "../region" }
roc_module = { path = "../module" }
roc_types = { path = "../types" }
roc_can = { path = "../can" }
roc_unify = { path = "../unify" }
roc_solve = { path = "../solve" }
roc_late_solve = { path = "../late_solve" }
roc_std = { path = "../../roc_std", default-features = false }
roc_problem = { path = "../problem" }
roc_builtins = { path = "../builtins" }

View File

@ -307,6 +307,9 @@ impl<'a> ParamMap<'a> {
Let(_, _, _, cont) => {
stack.push(cont);
}
Expect { remainder, .. } => stack.push(remainder),
Switch {
branches,
default_branch,
@ -835,6 +838,11 @@ impl<'a> BorrowInfState<'a> {
}
self.collect_stmt(param_map, default_branch.1);
}
Expect { remainder, .. } => {
self.collect_stmt(param_map, remainder);
}
Refcounting(_, _) => unreachable!("these have not been introduced yet"),
Ret(_) | RuntimeError(_) => {
@ -967,8 +975,6 @@ pub fn lowlevel_borrow_signature(arena: &Bump, op: LowLevel) -> &[bool] {
SetFromList => arena.alloc_slice_copy(&[owned]),
SetToDict => arena.alloc_slice_copy(&[owned]),
ExpectTrue => arena.alloc_slice_copy(&[irrelevant]),
ListIsUnique => arena.alloc_slice_copy(&[borrowed]),
BoxExpr | UnboxExpr => {
@ -1027,6 +1033,9 @@ fn call_info_stmt<'a>(arena: &'a Bump, stmt: &Stmt<'a>, info: &mut CallInfo<'a>)
stack.extend(branches.iter().map(|b| &b.2));
stack.push(default_branch.1);
}
Expect { remainder, .. } => stack.push(remainder),
Refcounting(_, _) => unreachable!("these have not been introduced yet"),
Ret(_) | Jump(_, _) | RuntimeError(_) => {

View File

@ -4,9 +4,12 @@ use roc_can::{
def::Def,
expr::{AccessorData, ClosureData, Expr, Field, WhenBranch},
};
use roc_types::subs::{
self, AliasVariables, Descriptor, OptVariable, RecordFields, Subs, SubsSlice, UnionLambdas,
UnionTags, Variable, VariableSubsSlice,
use roc_types::{
subs::{
self, AliasVariables, Descriptor, OptVariable, RecordFields, Subs, SubsSlice, UnionLambdas,
UnionTags, Variable, VariableSubsSlice,
},
types::Uls,
};
/// Deep copies the type variables in the type hosted by [`var`] into [`expr`].
@ -360,7 +363,15 @@ pub fn deep_copy_type_vars_into_expr<'a>(
lambda_set_variables: lambda_set_variables.clone(),
},
Expect(e1, e2) => Expect(Box::new(e1.map(go_help)), Box::new(e2.map(go_help))),
Expect {
loc_condition,
loc_continuation,
lookups_in_cond,
} => Expect {
loc_condition: Box::new(loc_condition.map(go_help)),
loc_continuation: Box::new(loc_continuation.map(go_help)),
lookups_in_cond: lookups_in_cond.to_vec(),
},
TypedHole(v) => TypedHole(sub!(*v)),
@ -610,12 +621,17 @@ fn deep_copy_type_vars<'a>(
LambdaSet(subs::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
let new_rec_var = recursion_var.map(|var| descend_var!(var));
for variables_slice_index in solved.variables() {
let variables_slice = subs[variables_slice_index];
descend_slice!(variables_slice);
}
for uls_index in unspecialized {
let Uls(var, _, _) = subs[uls_index];
descend_var!(var);
}
perform_clone!({
let new_variable_slices =
@ -630,9 +646,19 @@ fn deep_copy_type_vars<'a>(
let new_solved =
UnionLambdas::from_slices(solved.labels(), new_variable_slices);
let new_unspecialized = SubsSlice::reserve_uls_slice(subs, unspecialized.len());
for (target_index, uls_index) in
(new_unspecialized.into_iter()).zip(unspecialized.into_iter())
{
let Uls(var, sym, region) = subs[uls_index];
let copy_var = subs.get_copy(var).into_variable().unwrap_or(var);
subs[target_index] = Uls(copy_var, sym, region);
}
LambdaSet(subs::LambdaSet {
solved: new_solved,
recursion_var: new_rec_var,
unspecialized: new_unspecialized,
})
})
}

View File

@ -108,6 +108,15 @@ pub fn occurring_variables(stmt: &Stmt<'_>) -> (MutSet<Symbol>, MutSet<Symbol>)
stack.push(cont);
}
Expect {
condition,
remainder,
..
} => {
result.insert(*condition);
stack.push(remainder);
}
Jump(_, arguments) => {
result.extend(arguments.iter().copied());
}
@ -1196,6 +1205,8 @@ impl<'a> Context<'a> {
(switch, case_live_vars)
}
Expect { remainder, .. } => self.visit_stmt(codegen, remainder),
RuntimeError(_) | Refcounting(_, _) => (stmt, MutSet::default()),
}
}
@ -1299,6 +1310,15 @@ pub fn collect_stmt(
collect_stmt(cont, jp_live_vars, vars)
}
Expect {
condition,
remainder,
..
} => {
vars.insert(*condition);
collect_stmt(remainder, jp_live_vars, vars)
}
Join {
id: j,
parameters,

View File

@ -10,7 +10,7 @@ use roc_builtins::bitcode::{FloatWidth, IntWidth};
use roc_can::abilities::{AbilitiesStore, SpecializationId};
use roc_can::expr::{AnnotatedMark, ClosureData, IntValue};
use roc_collections::all::{default_hasher, BumpMap, BumpMapDefault, MutMap};
use roc_collections::{MutSet, VecMap};
use roc_collections::VecMap;
use roc_debug_flags::dbg_do;
#[cfg(debug_assertions)]
use roc_debug_flags::{
@ -18,19 +18,20 @@ use roc_debug_flags::{
};
use roc_error_macros::todo_abilities;
use roc_exhaustive::{Ctor, CtorName, Guard, RenderAs, TagId};
use roc_late_solve::{
instantiate_rigids, resolve_ability_specialization, Resolved, UnificationFailed,
};
use roc_module::ident::{ForeignSymbol, Lowercase, TagName};
use roc_module::low_level::LowLevel;
use roc_module::symbol::{IdentIds, ModuleId, Symbol};
use roc_problem::can::{RuntimeError, ShadowKind};
use roc_region::all::{Loc, Region};
use roc_solve::ability::{resolve_ability_specialization, Resolved};
use roc_std::RocDec;
use roc_target::TargetInfo;
use roc_types::subs::{
Content, ExhaustiveMark, FlatType, RedundantMark, StorageSubs, Subs, Variable,
VariableSubsSlice,
};
use roc_unify::unify::Mode;
use std::collections::HashMap;
use ven_pretty::{BoxAllocator, DocAllocator, DocBuilder};
@ -1241,7 +1242,7 @@ pub struct Env<'a, 'i> {
pub target_info: TargetInfo,
pub update_mode_ids: &'i mut UpdateModeIds,
pub call_specialization_counter: u32,
pub abilities_store: &'i mut AbilitiesStore,
pub abilities_store: &'i AbilitiesStore,
}
impl<'a, 'i> Env<'a, 'i> {
@ -1268,6 +1269,12 @@ impl<'a, 'i> Env<'a, 'i> {
pub fn is_imported_symbol(&self, symbol: Symbol) -> bool {
symbol.module_id() != self.home
}
/// Unifies two variables and performs lambda set compaction.
/// Use this rather than [roc_unify::unify] directly!
fn unify(&mut self, left: Variable, right: Variable) -> Result<(), UnificationFailed> {
roc_late_solve::unify(self.arena, self.subs, self.abilities_store, left, right)
}
}
#[derive(Clone, Debug, PartialEq, Copy, Eq, Hash)]
@ -1326,6 +1333,14 @@ pub enum Stmt<'a> {
},
Ret(Symbol),
Refcounting(ModifyRc, &'a Stmt<'a>),
Expect {
condition: Symbol,
region: Region,
lookups: &'a [Symbol],
layouts: &'a [Layout<'a>],
/// what happens after the expect
remainder: &'a Stmt<'a>,
},
/// a join point `join f <params> = <continuation> in remainder`
Join {
id: JoinPointId,
@ -1912,6 +1927,10 @@ impl<'a> Stmt<'a> {
.append(alloc.hardline())
.append(cont.to_doc(alloc)),
Expect { condition, .. } => alloc
.text("expect ")
.append(symbol_to_doc(alloc, *condition)),
Ret(symbol) => alloc
.text("ret ")
.append(symbol_to_doc(alloc, *symbol))
@ -2246,14 +2265,7 @@ fn from_can_let<'a>(
needed_specializations.next().unwrap();
// Unify the expr_var with the requested specialization once.
let _res = roc_unify::unify::unify(env.subs, var, def.expr_var, Mode::EQ);
resolve_abilities_in_specialized_body(
env,
procs,
&def.loc_expr.value,
def.expr_var,
);
let _res = env.unify(var, def.expr_var);
with_hole(
env,
@ -2285,15 +2297,7 @@ fn from_can_let<'a>(
"expr marked as having specializations, but it has no type variables!",
);
let _res =
roc_unify::unify::unify(env.subs, var, new_def_expr_var, Mode::EQ);
resolve_abilities_in_specialized_body(
env,
procs,
&def.loc_expr.value,
def.expr_var,
);
let _res = env.unify(var, new_def_expr_var);
stmt = with_hole(
env,
@ -2350,8 +2354,6 @@ fn from_can_let<'a>(
let outer_symbol = env.unique_symbol();
stmt = store_pattern(env, procs, layout_cache, &mono_pattern, outer_symbol, stmt);
resolve_abilities_in_specialized_body(env, procs, &def.loc_expr.value, def.expr_var);
// convert the def body, store in outer_symbol
with_hole(
env,
@ -2772,125 +2774,6 @@ fn generate_runtime_error_function<'a>(
}
}
fn resolve_abilities_in_specialized_body<'a>(
env: &mut Env<'a, '_>,
procs: &Procs<'a>,
specialized_body: &roc_can::expr::Expr,
body_var: Variable,
) -> std::vec::Vec<SpecializationId> {
use roc_can::expr::Expr;
use roc_can::traverse::{walk_expr, Visitor};
use roc_unify::unify::unify;
struct Resolver<'a> {
subs: &'a mut Subs,
procs: &'a Procs<'a>,
abilities_store: &'a mut AbilitiesStore,
seen_defs: MutSet<Symbol>,
specialized: std::vec::Vec<SpecializationId>,
}
impl Visitor for Resolver<'_> {
fn visit_expr(&mut self, expr: &Expr, _region: Region, var: Variable) {
match expr {
Expr::Closure(..) => {
// Don't walk down closure bodies. They will have their types refined when they
// are themselves specialized, so we'll handle ability resolution in them at
// that time too.
}
Expr::LetRec(..) | Expr::LetNonRec(..) => {
// Also don't walk down let-bindings. These may be generalized and we won't
// know their specializations until we collect them while building up the def.
// So, we'll resolve any nested abilities when we know their specialized type
// during def construction.
}
Expr::AbilityMember(member_sym, specialization_id, _specialization_var) => {
let (specialization, specialization_def) = match self
.abilities_store
.get_resolved(*specialization_id)
{
Some(specialization) => (
specialization,
// If we know the specialization at this point, the specialization must
// be static. That means the relevant type state was populated during
// solving, so we don't need additional unification here.
//
// However, we do need to walk the specialization def, because it may
// itself contain unspecialized defs.
self.procs
.partial_procs
.get_symbol(specialization)
.expect("Specialization found, but it's not in procs"),
),
None => {
let specialization = resolve_ability_specialization(
self.subs,
self.abilities_store,
*member_sym,
var,
)
.expect("Ability specialization is unknown - code generation cannot proceed!");
let specialization = match specialization {
Resolved::Specialization(symbol) => symbol,
Resolved::NeedsGenerated => {
todo_abilities!("Generate impls for structural types")
}
};
self.abilities_store
.insert_resolved(*specialization_id, specialization);
debug_assert!(!self.specialized.contains(specialization_id));
self.specialized.push(*specialization_id);
// We must now refine the current type state to account for this specialization,
// since `var` may only have partial specialization information - enough to
// figure out what specialization we need, but not the types of all arguments
// and return types. So, unify with the variable with the specialization's type.
let specialization_def = self
.procs
.partial_procs
.get_symbol(specialization)
.expect("Specialization found, but it's not in procs");
let specialization_var = specialization_def.annotation;
let unified = unify(self.subs, var, specialization_var, Mode::EQ);
unified.expect_success(
"Specialization does not unify - this is a typechecker bug!",
);
(specialization, specialization_def)
}
};
// Now walk the specialization def to pick up any more needed types. Of course,
// we only want to pass through it once to avoid unbounded recursion.
if !self.seen_defs.contains(&specialization) {
self.visit_expr(
&specialization_def.body,
Region::zero(),
specialization_def.body_var,
);
self.seen_defs.insert(specialization);
}
}
_ => walk_expr(self, expr, var),
}
}
}
let mut resolver = Resolver {
subs: env.subs,
procs,
abilities_store: env.abilities_store,
seen_defs: MutSet::default(),
specialized: vec![],
};
resolver.visit_expr(specialized_body, Region::zero(), body_var);
resolver.specialized
}
fn specialize_external<'a>(
env: &mut Env<'a, '_>,
procs: &mut Procs<'a>,
@ -2907,12 +2790,7 @@ fn specialize_external<'a>(
let snapshot = env.subs.snapshot();
let cache_snapshot = layout_cache.snapshot();
let _unified = roc_unify::unify::unify(
env.subs,
partial_proc.annotation,
fn_var,
roc_unify::unify::Mode::EQ,
);
let _unified = env.unify(partial_proc.annotation, fn_var);
// This will not hold for programs with type errors
// let is_valid = matches!(unified, roc_unify::unify::Unified::Success(_));
@ -3026,17 +2904,9 @@ fn specialize_external<'a>(
};
let body = partial_proc.body.clone();
let resolved_ability_specializations =
resolve_abilities_in_specialized_body(env, procs, &body, partial_proc.body_var);
let mut specialized_body = from_can(env, partial_proc.body_var, body, procs, layout_cache);
// reset the resolved ability specializations so as not to interfere with other specializations
// of this proc.
resolved_ability_specializations
.into_iter()
.for_each(|sid| env.abilities_store.remove_resolved(sid));
match specialized {
SpecializedLayout::FunctionPointerBody {
ret_layout,
@ -3484,7 +3354,6 @@ where
F: FnOnce(&mut Env<'a, '_>) -> Variable,
{
// add the specializations that other modules require of us
use roc_solve::solve::instantiate_rigids;
let snapshot = env.subs.snapshot();
let cache_snapshot = layout_cache.snapshot();
@ -3861,7 +3730,7 @@ pub fn with_hole<'a>(
Var(mut symbol) => {
// If this symbol is a raw value, find the real name we gave to its specialized usage.
if let ReuseSymbol::Value(_symbol) =
can_reuse_symbol(env, procs, &roc_can::expr::Expr::Var(symbol))
can_reuse_symbol(env, procs, &roc_can::expr::Expr::Var(symbol), variable)
{
let real_symbol =
procs
@ -3958,7 +3827,7 @@ pub fn with_hole<'a>(
OpaqueRef { argument, .. } => {
let (arg_var, loc_arg_expr) = *argument;
match can_reuse_symbol(env, procs, &loc_arg_expr.value) {
match can_reuse_symbol(env, procs, &loc_arg_expr.value, arg_var) {
// Opaques decay to their argument.
ReuseSymbol::Value(symbol) => {
let real_name = procs.symbol_specializations.get_or_insert(
@ -4013,26 +3882,30 @@ pub fn with_hole<'a>(
// TODO how should function pointers be handled here?
use ReuseSymbol::*;
match fields.remove(&label) {
Some(field) => match can_reuse_symbol(env, procs, &field.loc_expr.value) {
Imported(symbol) | LocalFunction(symbol) | UnspecializedExpr(symbol) => {
field_symbols.push(symbol);
can_fields.push(Field::Function(symbol, variable));
Some(field) => {
match can_reuse_symbol(env, procs, &field.loc_expr.value, field.var) {
Imported(symbol)
| LocalFunction(symbol)
| UnspecializedExpr(symbol) => {
field_symbols.push(symbol);
can_fields.push(Field::Function(symbol, variable));
}
Value(symbol) => {
let reusable = procs.symbol_specializations.get_or_insert(
env,
layout_cache,
symbol,
field.var,
);
field_symbols.push(reusable);
can_fields.push(Field::ValueSymbol);
}
NotASymbol => {
field_symbols.push(env.unique_symbol());
can_fields.push(Field::Field(field));
}
}
Value(symbol) => {
let reusable = procs.symbol_specializations.get_or_insert(
env,
layout_cache,
symbol,
field.var,
);
field_symbols.push(reusable);
can_fields.push(Field::ValueSymbol);
}
NotASymbol => {
field_symbols.push(env.unique_symbol());
can_fields.push(Field::Field(field));
}
},
}
None => {
// this field was optional, but not given
continue;
@ -4091,7 +3964,7 @@ pub fn with_hole<'a>(
EmptyRecord => let_empty_struct(assigned, hole),
Expect(_, _) => unreachable!("I think this is unreachable"),
Expect { .. } => unreachable!("I think this is unreachable"),
If {
cond_var,
@ -4746,16 +4619,15 @@ pub fn with_hole<'a>(
hole,
)
}
roc_can::expr::Expr::AbilityMember(_, specialization_id, _) => {
let proc_name = env.abilities_store.get_resolved(specialization_id).expect(
"Ability specialization is unknown - code generation cannot proceed!",
);
roc_can::expr::Expr::AbilityMember(member, specialization_id, _) => {
let specialization_proc_name =
late_resolve_ability_specialization(env, member, specialization_id, fn_var);
call_by_name(
env,
procs,
fn_var,
proc_name,
specialization_proc_name,
loc_args,
layout_cache,
assigned,
@ -4802,7 +4674,7 @@ pub fn with_hole<'a>(
// re-use that symbol, and don't define its value again
let mut result;
use ReuseSymbol::*;
match can_reuse_symbol(env, procs, &loc_expr.value) {
match can_reuse_symbol(env, procs, &loc_expr.value, fn_var) {
LocalFunction(_) => {
unreachable!("if this was known to be a function, we would not be here")
}
@ -5248,6 +5120,51 @@ pub fn with_hole<'a>(
}
}
#[inline(always)]
fn late_resolve_ability_specialization<'a>(
env: &mut Env<'a, '_>,
member: Symbol,
specialization_id: SpecializationId,
specialization_var: Variable,
) -> Symbol {
if let Some(spec_symbol) = env.abilities_store.get_resolved(specialization_id) {
// Fast path: specialization is monomorphic, was found during solving.
spec_symbol
} else if let Content::Structure(FlatType::Func(_, lambda_set, _)) =
env.subs.get_content_without_compacting(specialization_var)
{
// Fast path: the member is a function, so the lambda set will tell us the
// specialization.
use roc_types::subs::LambdaSet;
let LambdaSet {
solved,
unspecialized,
recursion_var: _,
} = env.subs.get_lambda_set(*lambda_set);
debug_assert!(unspecialized.is_empty());
let mut iter_lambda_set = solved.iter_all();
debug_assert_eq!(iter_lambda_set.len(), 1);
let spec_symbol_index = iter_lambda_set.next().unwrap().0;
env.subs[spec_symbol_index]
} else {
// Otherwise, resolve by checking the able var.
let specialization = resolve_ability_specialization(
env.subs,
env.abilities_store,
member,
specialization_var,
)
.expect("Ability specialization is unknown - code generation cannot proceed!");
match specialization {
Resolved::Specialization(symbol) => symbol,
Resolved::NeedsGenerated => {
todo_abilities!("Generate impls for structural types")
}
}
}
}
#[allow(clippy::too_many_arguments)]
fn construct_closure_data<'a, I>(
env: &mut Env<'a, '_>,
@ -5933,39 +5850,43 @@ pub fn from_can<'a>(
stmt
}
Expect(condition, rest) => {
let rest = from_can(env, variable, rest.value, procs, layout_cache);
let bool_layout = Layout::Builtin(Builtin::Bool);
Expect {
loc_condition,
loc_continuation,
lookups_in_cond,
} => {
let rest = from_can(env, variable, loc_continuation.value, procs, layout_cache);
let cond_symbol = env.unique_symbol();
let op = LowLevel::ExpectTrue;
let call_type = CallType::LowLevel {
op,
update_mode: env.next_update_mode_id(),
};
let arguments = env.arena.alloc([cond_symbol]);
let call = self::Call {
call_type,
arguments,
let lookups = Vec::from_iter_in(lookups_in_cond.iter().map(|t| t.0), env.arena);
let mut layouts = Vec::with_capacity_in(lookups_in_cond.len(), env.arena);
for (_, var) in lookups_in_cond {
let res_layout = layout_cache.from_var(env.arena, var, env.subs);
let layout = return_on_layout_error!(env, res_layout);
layouts.push(layout);
}
let mut stmt = Stmt::Expect {
condition: cond_symbol,
region: loc_condition.region,
lookups: lookups.into_bump_slice(),
layouts: layouts.into_bump_slice(),
remainder: env.arena.alloc(rest),
};
let rest = Stmt::Let(
env.unique_symbol(),
Expr::Call(call),
bool_layout,
env.arena.alloc(rest),
);
with_hole(
stmt = with_hole(
env,
condition.value,
loc_condition.value,
variable,
procs,
layout_cache,
cond_symbol,
env.arena.alloc(rest),
)
env.arena.alloc(stmt),
);
stmt
}
LetRec(defs, cont, _cycle_mark) => {
@ -6306,6 +6227,26 @@ fn substitute_in_stmt_help<'a>(
}
}
Expect {
condition,
region,
lookups,
layouts,
remainder,
} => {
// TODO should we substitute in the lookups?
match substitute_in_stmt_help(arena, remainder, subs) {
Some(cont) => Some(arena.alloc(Expect {
condition: *condition,
region: *region,
lookups,
layouts,
remainder: cont,
})),
None => None,
}
}
Jump(id, args) => {
let mut did_change = false;
let new_args = Vec::from_iter_in(
@ -6978,15 +6919,15 @@ fn can_reuse_symbol<'a>(
env: &mut Env<'a, '_>,
procs: &Procs<'a>,
expr: &roc_can::expr::Expr,
expr_var: Variable,
) -> ReuseSymbol {
use roc_can::expr::Expr::*;
use ReuseSymbol::*;
let symbol = match expr {
AbilityMember(_, specialization_id, _) => env
.abilities_store
.get_resolved(*specialization_id)
.expect("Specialization must be known!"),
AbilityMember(member, specialization_id, _) => {
late_resolve_ability_specialization(env, *member, *specialization_id, expr_var)
}
Var(symbol) => *symbol,
_ => return NotASymbol,
};
@ -7021,7 +6962,7 @@ fn possible_reuse_symbol_or_specialize<'a>(
expr: &roc_can::expr::Expr,
var: Variable,
) -> Symbol {
match can_reuse_symbol(env, procs, expr) {
match can_reuse_symbol(env, procs, expr, var) {
ReuseSymbol::Value(symbol) => {
procs
.symbol_specializations
@ -7333,7 +7274,7 @@ fn assign_to_symbol<'a>(
result: Stmt<'a>,
) -> Stmt<'a> {
use ReuseSymbol::*;
match can_reuse_symbol(env, procs, &loc_arg.value) {
match can_reuse_symbol(env, procs, &loc_arg.value, arg_var) {
Imported(original) | LocalFunction(original) | UnspecializedExpr(original) => {
// for functions we must make sure they are specialized correctly
specialize_symbol(

View File

@ -1698,7 +1698,16 @@ fn layout_from_lambda_set<'a>(
let subs::LambdaSet {
solved,
recursion_var,
unspecialized,
} = lset;
if !unspecialized.is_empty() {
internal_error!(
"unspecialized lambda sets remain during layout generation for {:?}",
roc_types::subs::SubsFmtContent(&Content::LambdaSet(lset), env.subs)
);
}
match recursion_var.into_variable() {
None => {
let labels = solved.unsorted_lambdas(env.subs);
@ -2250,7 +2259,7 @@ where
for (index, &(tag_name, arguments)) in tags_list.into_iter().enumerate() {
// reserve space for the tag discriminant
if matches!(nullable, Some((i, _)) if i as usize == index) {
if matches!(nullable, Some((i, _)) if i as usize == index) {
debug_assert!(arguments.is_empty());
continue;
}

View File

@ -276,8 +276,11 @@ impl LambdaSet {
let subs::LambdaSet {
solved,
recursion_var: _,
unspecialized: _,
} = lset;
// TODO: handle unspecialized
debug_assert!(
!solved.is_empty(),
"lambda set must contain atleast the function itself"
@ -696,8 +699,11 @@ impl Layout {
let subs::LambdaSet {
solved,
recursion_var,
unspecialized: _,
} = lset;
// TODO: handle unspecialized lambda set
match recursion_var.into_variable() {
Some(rec_var) => {
let rec_var = subs.get_root_key_without_compacting(rec_var);

View File

@ -206,5 +206,4 @@ enum FirstOrder {
Or,
Not,
Hash,
ExpectTrue,
}

View File

@ -192,6 +192,32 @@ fn function_s<'a, 'i>(
arena.alloc(new_refcounting)
}
}
Expect {
condition,
region,
lookups,
layouts,
remainder,
} => {
let continuation: &Stmt = *remainder;
let new_continuation = function_s(env, w, c, continuation);
if std::ptr::eq(continuation, new_continuation) || continuation == new_continuation {
stmt
} else {
let new_refcounting = Expect {
condition: *condition,
region: *region,
lookups,
layouts,
remainder: new_continuation,
};
arena.alloc(new_refcounting)
}
}
Ret(_) | Jump(_, _) | RuntimeError(_) => stmt,
}
}
@ -388,6 +414,40 @@ fn function_d_main<'a, 'i>(
(arena.alloc(refcounting), found)
}
}
Expect {
condition,
region,
lookups,
layouts,
remainder,
} => {
let (b, found) = function_d_main(env, x, c, remainder);
if found || *condition != x {
let refcounting = Expect {
condition: *condition,
region: *region,
lookups,
layouts,
remainder: b,
};
(arena.alloc(refcounting), found)
} else {
let b = try_function_s(env, x, c, b);
let refcounting = Expect {
condition: *condition,
region: *region,
lookups,
layouts,
remainder: b,
};
(arena.alloc(refcounting), found)
}
}
Join {
id,
parameters,
@ -540,6 +600,26 @@ fn function_r<'a, 'i>(env: &mut Env<'a, 'i>, stmt: &'a Stmt<'a>) -> &'a Stmt<'a>
arena.alloc(Refcounting(*modify_rc, b))
}
Expect {
condition,
region,
lookups,
layouts,
remainder,
} => {
let b = function_r(env, remainder);
let expect = Expect {
condition: *condition,
region: *region,
lookups,
layouts,
remainder: b,
};
arena.alloc(expect)
}
Ret(_) | Jump(_, _) | RuntimeError(_) => {
// terminals
stmt
@ -570,6 +650,11 @@ fn has_live_var<'a>(jp_live_vars: &JPLiveVarMap, stmt: &'a Stmt<'a>, needle: Sym
Refcounting(modify_rc, cont) => {
modify_rc.get_symbol() == needle || has_live_var(jp_live_vars, cont, needle)
}
Expect {
condition,
remainder,
..
} => *condition == needle || has_live_var(jp_live_vars, remainder, needle),
Join {
id,
parameters,
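
`function_s` above only allocates a fresh `Expect` when the rewritten continuation actually differs, checking `std::ptr::eq` before falling back to structural equality. A minimal sketch of that sharing-preserving rewrite pattern on a toy tree:

```rust
use bumpalo::Bump;

enum Node<'a> {
    Leaf(i64),
    Wrap(&'a Node<'a>),
}

// Replace Leaf(0) with Leaf(1), reusing every unchanged subtree so callers
// can detect "no change" with a cheap pointer comparison.
fn zeros_to_ones<'a>(arena: &'a Bump, node: &'a Node<'a>) -> &'a Node<'a> {
    match node {
        Node::Leaf(0) => arena.alloc(Node::Leaf(1)),
        Node::Leaf(_) => node,
        Node::Wrap(inner) => {
            let new_inner = zeros_to_ones(arena, *inner);
            if std::ptr::eq(*inner, new_inner) {
                node // subtree untouched: keep the original allocation
            } else {
                arena.alloc(Node::Wrap(new_inner))
            }
        }
    }
}
```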

View File

@ -191,6 +191,23 @@ fn insert_jumps<'a>(
None => None,
},
Expect {
condition,
region,
lookups,
layouts,
remainder,
} => match insert_jumps(arena, remainder, goal_id, needle) {
Some(cont) => Some(arena.alloc(Expect {
condition: *condition,
region: *region,
lookups,
layouts,
remainder: cont,
})),
None => None,
},
Ret(_) => None,
Jump(_, _) => None,
RuntimeError(_) => None,

View File

@ -2,9 +2,9 @@
// See github.com/rtfeldman/roc/issues/800 for discussion of the large_enum_variant check.
#![allow(clippy::large_enum_variant)]
use strum_macros::EnumIter;
use strum_macros::{EnumCount, EnumIter};
#[derive(Debug, Clone, Copy)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TargetInfo {
pub architecture: Architecture,
}
@ -65,13 +65,15 @@ pub enum PtrWidth {
Bytes8 = 8,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumIter)]
/// These should be sorted alphabetically!
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, EnumIter, EnumCount)]
#[repr(u8)]
pub enum Architecture {
X86_64,
X86_32,
Aarch64,
Aarch32,
Aarch64,
Wasm32,
X86_32,
X86_64,
}
impl Architecture {
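
The enum comment asks for alphabetical ordering, presumably because the derived `Ord` depends on variant order. A hypothetical test (not in the diff) that enforces the invariant via the `EnumIter` derive added here:

```rust
#[cfg(test)]
mod architecture_order {
    use super::Architecture;
    use strum::IntoEnumIterator;

    #[test]
    fn variants_are_sorted_alphabetically() {
        let names: Vec<String> = Architecture::iter()
            .map(|arch| format!("{:?}", arch))
            .collect();
        let mut sorted = names.clone();
        sorted.sort();
        assert_eq!(names, sorted, "Architecture variants must stay alphabetical");
    }
}
```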

View File

@ -625,15 +625,14 @@ pub fn resolve_ability_specialization(
.member_def(ability_member)
.expect("Not an ability member symbol");
// Figure out the ability we're resolving in a temporary subs snapshot.
let snapshot = subs.snapshot();
let signature_var = member_def
.signature_var()
.unwrap_or_else(|| internal_error!("Signature var not resolved for {:?}", ability_member));
let signature_var = member_def.signature_var();
instantiate_rigids(subs, signature_var);
let (_, must_implement_ability) = unify(subs, specialization_var, signature_var, Mode::EQ)
.expect_success(
let (_vars, must_implement_ability, _lambda_sets_to_specialize) =
unify(subs, specialization_var, signature_var, Mode::EQ).expect_success(
"If resolving a specialization, the specialization must be known to typecheck.",
);

View File

@ -1,12 +1,13 @@
use crate::solve::{self, Aliases};
use roc_can::abilities::{AbilitiesStore, SolvedSpecializations};
use roc_can::abilities::{AbilitiesStore, ResolvedSpecializations};
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints};
use roc_can::expr::PendingDerives;
use roc_can::module::RigidVariables;
use roc_collections::all::MutMap;
use roc_collections::VecMap;
use roc_module::symbol::Symbol;
use roc_types::solved_types::Solved;
use roc_types::subs::{StorageSubs, Subs, Variable};
use roc_types::subs::{ExposedTypesStorageSubs, StorageSubs, Subs, Variable};
use roc_types::types::Alias;
#[derive(Debug)]
@ -27,9 +28,8 @@ pub struct SolvedModule {
pub exposed_vars_by_symbol: Vec<(Symbol, Variable)>,
/// Used when importing this module into another module
pub stored_vars_by_symbol: Vec<(Symbol, Variable)>,
pub storage_subs: StorageSubs,
pub solved_specializations: SolvedSpecializations,
pub solved_specializations: ResolvedSpecializations,
pub exposed_types: ExposedTypesStorageSubs,
}
pub fn run_solve(
@ -76,18 +76,36 @@ pub fn run_solve(
(solved_subs, solved_env, problems, abilities_store)
}
/// Copies exposed types and all ability specializations, which may be implicitly exposed.
pub fn exposed_types_storage_subs(
solved_subs: &mut Solved<Subs>,
exposed_vars_by_symbol: &[(Symbol, Variable)],
) -> (StorageSubs, Vec<(Symbol, Variable)>) {
solved_specializations: &ResolvedSpecializations,
) -> ExposedTypesStorageSubs {
let subs = solved_subs.inner_mut();
let mut storage_subs = StorageSubs::new(Subs::new());
let mut stored_vars_by_symbol = Vec::with_capacity(exposed_vars_by_symbol.len());
let mut stored_vars_by_symbol = VecMap::with_capacity(exposed_vars_by_symbol.len());
let mut stored_specialization_lambda_set_vars =
VecMap::with_capacity(solved_specializations.len());
for (symbol, var) in exposed_vars_by_symbol.iter() {
let new_var = storage_subs.import_variable_from(subs, *var).variable;
stored_vars_by_symbol.push((*symbol, new_var));
stored_vars_by_symbol.insert(*symbol, new_var);
}
(storage_subs, stored_vars_by_symbol)
for (_, member_specialization) in solved_specializations.iter() {
for (_, &specialization_lset_var) in member_specialization.specialization_lambda_sets.iter()
{
let new_var = storage_subs
.import_variable_from(subs, specialization_lset_var)
.variable;
stored_specialization_lambda_set_vars.insert(specialization_lset_var, new_var);
}
}
ExposedTypesStorageSubs {
storage_subs,
stored_vars_by_symbol,
stored_specialization_lambda_set_vars,
}
}
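
`exposed_types_storage_subs` now returns one bundle with everything an importing module needs. A sketch of its shape as implied by the construction above (the real definition lives in `roc_types::subs` and may differ; treat the field types as assumptions):

```rust
use roc_collections::VecMap;
use roc_module::symbol::Symbol;
use roc_types::subs::{StorageSubs, Variable};

// Assumed shape, inferred from the insert calls above.
pub struct ExposedTypesStorageSubsSketch {
    pub storage_subs: StorageSubs,
    /// exposed symbol -> its variable imported into `storage_subs`
    pub stored_vars_by_symbol: VecMap<Symbol, Variable>,
    /// home lambda set variable -> its variable imported into `storage_subs`
    pub stored_specialization_lambda_set_vars: VecMap<Variable, Variable>,
}
```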

View File

@ -9,6 +9,7 @@ use roc_can::constraint::{Constraints, Cycle, LetConstraint, OpportunisticResolv
use roc_can::expected::{Expected, PExpected};
use roc_can::expr::PendingDerives;
use roc_collections::all::MutMap;
use roc_collections::{VecMap, VecSet};
use roc_debug_flags::dbg_do;
#[cfg(debug_assertions)]
use roc_debug_flags::ROC_VERIFY_RIGID_LET_GENERALIZED;
@ -19,13 +20,14 @@ use roc_problem::can::CycleEntry;
use roc_region::all::{Loc, Region};
use roc_types::solved_types::Solved;
use roc_types::subs::{
self, AliasVariables, Content, Descriptor, FlatType, Mark, OptVariable, Rank, RecordFields,
Subs, SubsIndex, SubsSlice, UnionLabels, UnionLambdas, UnionTags, Variable, VariableSubsSlice,
self, AliasVariables, Content, Descriptor, FlatType, GetSubsSlice, LambdaSet, Mark,
OptVariable, Rank, RecordFields, Subs, SubsIndex, SubsSlice, UlsOfVar, UnionLabels,
UnionLambdas, UnionTags, Variable, VariableSubsSlice,
};
use roc_types::types::Type::{self, *};
use roc_types::types::{
gather_fields_unsorted_iter, AliasCommon, AliasKind, Category, ErrorType, OptAbleType,
OptAbleVar, PatternCategory, Reason, TypeExtension,
OptAbleVar, PatternCategory, Reason, TypeExtension, Uls,
};
use roc_unify::unify::{unify, Mode, Obligated, Unified::*};
@ -79,6 +81,18 @@ use roc_unify::unify::{unify, Mode, Obligated, Unified::*};
// Ranks are used to limit the number of type variables considered for generalization. Only those inside
// of the let (so those used in inferring the type of `\x -> x`) are considered.
/// What phase in the compiler is reaching out to solve types.
/// This is important to distinguish subtle differences in the behavior of the solving algorithm.
#[derive(Clone, Copy)]
pub enum Phase {
/// The regular type-solving phase.
/// Here we can assume that some information is still unknown, and react to that.
Solve,
/// Calls into solve during later phases of compilation, namely monomorphization.
/// Here we expect all information is known.
Late,
}
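
A minimal sketch of how a caller picks a `Phase`; the wrapper is illustrative, not part of this commit, and `compact_lambda_sets_of_vars` is defined later in this file:

// During module solving, a missing specialization was already reported as an
// error, so compaction skips it; after solving (e.g. during monomorphization),
// a missing specialization is an internal error.
fn compact_deferred(
    subs: &mut Subs,
    arena: &Bump,
    pools: &mut Pools,
    abilities_store: &AbilitiesStore,
    deferred: UlsOfVar,
    module_is_solved: bool,
) {
    let phase = if module_is_solved { Phase::Late } else { Phase::Solve };
    compact_lambda_sets_of_vars(subs, arena, pools, abilities_store, deferred, phase);
}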
#[derive(Debug, Clone)]
pub enum TypeError {
BadExpr(Region, Category, ErrorType, Expected<ErrorType>),
@ -425,7 +439,7 @@ impl Env {
const DEFAULT_POOLS: usize = 8;
#[derive(Clone, Debug)]
pub(crate) struct Pools(Vec<Vec<Variable>>);
pub struct Pools(Vec<Vec<Variable>>);
impl Default for Pools {
fn default() -> Self {
@ -442,6 +456,10 @@ impl Pools {
self.0.len()
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn get_mut(&mut self, rank: Rank) -> &mut Vec<Variable> {
match self.0.get_mut(rank.into_usize()) {
Some(reference) => reference,
@ -526,6 +544,10 @@ fn run_in_place(
let pending_derives = PendingDerivesTable::new(subs, aliases, pending_derives);
let mut deferred_obligations = DeferredObligations::new(pending_derives);
// Because we don't know what ability specializations are available until the entire module is
// solved, we must defer solving unspecialized lambda sets until then.
let mut deferred_uls_to_resolve = UlsOfVar::default();
let state = solve(
&arena,
constraints,
@ -538,6 +560,7 @@ fn run_in_place(
constraint,
abilities_store,
&mut deferred_obligations,
&mut deferred_uls_to_resolve,
);
// Now that the module has been solved, we can run through and check all
@ -546,6 +569,15 @@ fn run_in_place(
let (obligation_problems, _derived) = deferred_obligations.check_all(subs, abilities_store);
problems.extend(obligation_problems);
compact_lambda_sets_of_vars(
subs,
&arena,
&mut pools,
abilities_store,
deferred_uls_to_resolve,
Phase::Solve,
);
state.env
}
@ -598,6 +630,7 @@ fn solve(
constraint: &Constraint,
abilities_store: &mut AbilitiesStore,
deferred_obligations: &mut DeferredObligations,
deferred_uls_to_resolve: &mut UlsOfVar,
) -> State {
let initial = Work::Constraint {
env: &Env::default(),
@ -658,6 +691,7 @@ fn solve(
abilities_store,
problems,
deferred_obligations,
deferred_uls_to_resolve,
*symbol,
*loc_var,
);
@ -763,6 +797,7 @@ fn solve(
abilities_store,
problems,
deferred_obligations,
deferred_uls_to_resolve,
*symbol,
*loc_var,
);
@ -814,6 +849,7 @@ fn solve(
Success {
vars,
must_implement_ability,
lambda_sets_to_specialize,
} => {
introduce(subs, rank, pools, &vars);
if !must_implement_ability.is_empty() {
@ -822,6 +858,7 @@ fn solve(
AbilityImplError::BadExpr(*region, category.clone(), actual),
);
}
deferred_uls_to_resolve.union(lambda_sets_to_specialize);
state
}
@ -866,9 +903,12 @@ fn solve(
vars,
// ERROR NOT REPORTED
must_implement_ability: _,
lambda_sets_to_specialize,
} => {
introduce(subs, rank, pools, &vars);
deferred_uls_to_resolve.union(lambda_sets_to_specialize);
state
}
Failure(vars, _actual_type, _expected_type, _bad_impls) => {
@ -921,6 +961,7 @@ fn solve(
Success {
vars,
must_implement_ability,
lambda_sets_to_specialize,
} => {
introduce(subs, rank, pools, &vars);
if !must_implement_ability.is_empty() {
@ -933,6 +974,7 @@ fn solve(
),
);
}
deferred_uls_to_resolve.union(lambda_sets_to_specialize);
state
}
@ -998,6 +1040,7 @@ fn solve(
Success {
vars,
must_implement_ability,
lambda_sets_to_specialize,
} => {
introduce(subs, rank, pools, &vars);
if !must_implement_ability.is_empty() {
@ -1006,6 +1049,7 @@ fn solve(
AbilityImplError::BadPattern(*region, category.clone(), actual),
);
}
deferred_uls_to_resolve.union(lambda_sets_to_specialize);
state
}
@ -1160,6 +1204,7 @@ fn solve(
Success {
vars,
must_implement_ability,
lambda_sets_to_specialize,
} => {
introduce(subs, rank, pools, &vars);
if !must_implement_ability.is_empty() {
@ -1172,6 +1217,7 @@ fn solve(
),
);
}
deferred_uls_to_resolve.union(lambda_sets_to_specialize);
state
}
@ -1264,6 +1310,7 @@ fn solve(
Success {
vars,
must_implement_ability,
lambda_sets_to_specialize,
} => {
subs.commit_snapshot(snapshot);
@ -1272,6 +1319,8 @@ fn solve(
internal_error!("Didn't expect ability vars to land here");
}
deferred_uls_to_resolve.union(lambda_sets_to_specialize);
// Case 1: unify error types, but don't check exhaustiveness.
// Case 2: run exhaustiveness to check for redundant branches.
should_check_exhaustiveness = !already_have_error;
@ -1492,16 +1541,15 @@ fn check_ability_specialization(
abilities_store: &mut AbilitiesStore,
problems: &mut Vec<TypeError>,
deferred_obligations: &mut DeferredObligations,
deferred_uls_to_resolve: &mut UlsOfVar,
symbol: Symbol,
symbol_loc_var: Loc<Variable>,
) {
// If the symbol specializes an ability member, we need to make sure that the
// inferred type for the specialization actually aligns with the expected
// implementation.
if let Some((root_symbol, root_data)) = abilities_store.root_name_and_def(symbol) {
let root_signature_var = root_data
.signature_var()
.unwrap_or_else(|| internal_error!("Signature var not resolved for {:?}", root_symbol));
if let Some((ability_member, root_data)) = abilities_store.root_name_and_def(symbol) {
let root_signature_var = root_data.signature_var();
let parent_ability = root_data.parent_ability;
// Check if they unify - if they don't, then the claimed specialization isn't really one,
@ -1520,6 +1568,7 @@ fn check_ability_specialization(
Success {
vars,
must_implement_ability,
lambda_sets_to_specialize,
} => {
let specialization_type =
type_implementing_specialization(&must_implement_ability, parent_ability);
@ -1531,13 +1580,20 @@ fn check_ability_specialization(
subs.commit_snapshot(snapshot);
introduce(subs, rank, pools, &vars);
let (other_lambda_sets_to_specialize, specialization_lambda_sets) =
find_specialization_lambda_sets(
subs,
opaque,
ability_member,
lambda_sets_to_specialize,
);
deferred_uls_to_resolve.union(other_lambda_sets_to_specialize);
let specialization_region = symbol_loc_var.region;
let specialization = MemberSpecialization {
symbol,
region: specialization_region,
};
let specialization =
MemberSpecialization::new(symbol, specialization_lambda_sets);
abilities_store.register_specialization_for_type(
root_symbol,
ability_member,
opaque,
specialization,
);
@ -1567,7 +1623,7 @@ fn check_ability_specialization(
region: symbol_loc_var.region,
typ,
ability: parent_ability,
member: root_symbol,
member: ability_member,
};
problems.push(problem);
@ -1585,13 +1641,13 @@ fn check_ability_specialization(
let (actual_type, _problems) = subs.var_to_error_type(symbol_loc_var.value);
let reason = Reason::GeneralizedAbilityMemberSpecialization {
member_name: root_symbol,
member_name: ability_member,
def_region: root_data.region,
};
let problem = TypeError::BadExpr(
symbol_loc_var.region,
Category::AbilityMemberSpecialization(root_symbol),
Category::AbilityMemberSpecialization(ability_member),
actual_type,
Expected::ForReason(reason, expected_type, symbol_loc_var.region),
);
@ -1606,14 +1662,14 @@ fn check_ability_specialization(
introduce(subs, rank, pools, &vars);
let reason = Reason::InvalidAbilityMemberSpecialization {
member_name: root_symbol,
member_name: ability_member,
def_region: root_data.region,
unimplemented_abilities,
};
let problem = TypeError::BadExpr(
symbol_loc_var.region,
Category::AbilityMemberSpecialization(root_symbol),
Category::AbilityMemberSpecialization(ability_member),
actual_type,
Expected::ForReason(reason, expected_type, symbol_loc_var.region),
);
@ -1630,6 +1686,241 @@ fn check_ability_specialization(
}
}
/// Finds the lambda sets in an ability member specialization.
///
/// Suppose we have
///
/// Default has default : {} -[[] + a:default:1]-> a | a has Default
///
/// A := {}
/// default = \{} -[[closA]]-> @A {}
///
/// Now after solving the `default` specialization we have unified it with the ability signature,
/// yielding
///
/// {} -[[closA] + A:default:1]-> A
///
/// But really, what we want is to only keep around the original lambda sets, and associate
/// `A:default:1` to resolve to the lambda set `[[closA]]`. There might be other unspecialized lambda
/// sets in the lambda sets for this implementation, which we need to account for as well; that is,
/// it may really be `[[closA] + v123:otherAbilityMember:4 + ...]`.
#[inline(always)]
fn find_specialization_lambda_sets(
subs: &mut Subs,
opaque: Symbol,
ability_member: Symbol,
uls: UlsOfVar,
) -> (UlsOfVar, VecMap<u8, Variable>) {
// unspecialized lambda sets that don't belong to our specialization, and should be resolved
// later.
let mut leftover_uls = UlsOfVar::default();
let mut specialization_lambda_sets: VecMap<u8, Variable> = VecMap::with_capacity(uls.len());
for (spec_var, lambda_sets) in uls.drain() {
if !matches!(subs.get_content_without_compacting(spec_var), Content::Alias(name, _, _, AliasKind::Opaque) if *name == opaque)
{
// These lambda sets aren't resolved to the current specialization, they need to be
// solved at a later time.
leftover_uls.extend(spec_var, lambda_sets);
continue;
}
for lambda_set in lambda_sets {
let &LambdaSet {
solved,
recursion_var,
unspecialized,
} = match subs.get_content_without_compacting(lambda_set) {
Content::LambdaSet(lambda_set) => lambda_set,
_ => internal_error!("Not a lambda set"),
};
// Figure out the unspecialized lambda set that corresponds to our specialization
// (`A:default:1` in the example), and those that need to stay part of the lambda set.
let mut split_index_and_region = None;
let uls_slice = subs.get_subs_slice(unspecialized).to_owned();
for (i, &Uls(var, _sym, region)) in uls_slice.iter().enumerate() {
if var == spec_var {
debug_assert!(split_index_and_region.is_none());
debug_assert!(_sym == ability_member, "unspecialized lambda set var is the same as the specialization, but points to a different ability member");
split_index_and_region = Some((i, region));
}
}
let (split_index, specialized_lset_region) =
split_index_and_region.expect("no unspecialized lambda set found");
let (uls_before, uls_after) =
(&uls_slice[0..split_index], &uls_slice[split_index + 1..]);
let new_unspecialized = SubsSlice::extend_new(
&mut subs.unspecialized_lambda_sets,
uls_before.iter().chain(uls_after.iter()).copied(),
);
let new_lambda_set_content = Content::LambdaSet(LambdaSet {
solved,
recursion_var,
unspecialized: new_unspecialized,
});
subs.set_content(lambda_set, new_lambda_set_content);
let old_specialized =
specialization_lambda_sets.insert(specialized_lset_region, lambda_set);
debug_assert!(
old_specialized.is_none(),
"Specialization of lambda set already exists"
);
}
}
(leftover_uls, specialization_lambda_sets)
}
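
For intuition with the `Default` example in the doc comment: the returned map associates region `1` (the marker in `A:default:1`) with the variable holding `[[closA]]`. A sketch of reading it back, with a helper name of our choosing:

// Sketch: fetch the lambda set a specialization resolved for a given region
// marker, e.g. region 1 of `A:default:1`.
fn specialized_lambda_set_at(
    specialization_lambda_sets: &VecMap<u8, Variable>,
    region: u8,
) -> Option<Variable> {
    specialization_lambda_sets.get(&region).copied()
}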
pub fn compact_lambda_sets_of_vars(
subs: &mut Subs,
arena: &Bump,
pools: &mut Pools,
abilities_store: &AbilitiesStore,
uls_of_var: UlsOfVar,
phase: Phase,
) {
let mut seen = VecSet::default();
for (_, lambda_sets) in uls_of_var.drain() {
for lset in lambda_sets {
let root_lset = subs.get_root_key_without_compacting(lset);
if seen.contains(&root_lset) {
continue;
}
compact_lambda_set(subs, arena, pools, abilities_store, root_lset, phase);
seen.insert(root_lset);
}
}
}
fn compact_lambda_set(
subs: &mut Subs,
arena: &Bump,
pools: &mut Pools,
abilities_store: &AbilitiesStore,
this_lambda_set: Variable,
phase: Phase,
) {
let LambdaSet {
solved,
recursion_var,
unspecialized,
} = subs.get_lambda_set(this_lambda_set);
let target_rank = subs.get_rank(this_lambda_set);
if unspecialized.is_empty() {
return;
}
let mut new_unspecialized = vec![];
let mut specialized_to_unify_with = Vec::with_capacity(1);
for uls_index in unspecialized.into_iter() {
let uls @ Uls(var, member, region) = subs[uls_index];
use Content::*;
let opaque = match subs.get_content_without_compacting(var) {
FlexAbleVar(_, _) => {
/* not specialized yet */
new_unspecialized.push(uls);
continue;
}
Structure(_) | Alias(_, _, _, AliasKind::Structural) => {
// TODO: figure out a convention for references to structural types in the
// unspecialized lambda set. This may very well happen, for example
//
// Default has default : {} -> a | a has Default
//
// {a, b} = default {}
// # ^^^^^^^ {} -[{a: t1, b: t2}:default:1]
new_unspecialized.push(uls);
continue;
}
Alias(opaque, _, _, AliasKind::Opaque) => opaque,
Error => {
/* skip */
continue;
}
RigidVar(..)
| RigidAbleVar(..)
| FlexVar(..)
| RecursionVar { .. }
| LambdaSet(..)
| RangedNumber(_, _) => {
internal_error!("unexpected")
}
};
let opt_specialization = abilities_store.get_specialization(member, *opaque);
let specialized_lambda_set = match (phase, opt_specialization) {
(Phase::Solve, None) => {
// doesn't specialize, we'll have reported an error for this
continue;
}
(Phase::Late, None) => {
internal_error!(
"expected to know a specialization for {:?}#{:?}, but it wasn't found",
opaque,
member
);
}
(_, Some(specialization)) => *specialization
.specialization_lambda_sets
.get(&region)
.expect("lambda set region not resolved"),
};
// Ensure the specialization lambda set is already compacted.
if subs.get_root_key(specialized_lambda_set) != subs.get_root_key(this_lambda_set) {
compact_lambda_set(
subs,
arena,
pools,
abilities_store,
specialized_lambda_set,
phase,
);
}
// Ensure the specialization lambda set we'll unify with is not a generalized one, but one
// at the rank of the lambda set being compacted.
let copy_specialized_lambda_set =
deep_copy_var_in(subs, target_rank, pools, specialized_lambda_set, arena);
specialized_to_unify_with.push(copy_specialized_lambda_set);
}
let new_unspecialized_slice =
SubsSlice::extend_new(&mut subs.unspecialized_lambda_sets, new_unspecialized);
let partial_compacted_lambda_set = Content::LambdaSet(LambdaSet {
solved,
recursion_var,
unspecialized: new_unspecialized_slice,
});
subs.set_content(this_lambda_set, partial_compacted_lambda_set);
for other_specialized in specialized_to_unify_with.into_iter() {
let (vars, must_implement_ability, lambda_sets_to_specialize) =
unify(subs, this_lambda_set, other_specialized, Mode::EQ)
.expect_success("lambda sets don't unify");
introduce(subs, subs.get_rank(this_lambda_set), pools, &vars);
debug_assert!(
must_implement_ability.is_empty(),
"didn't expect abilities instantiated in this position"
);
debug_assert!(
lambda_sets_to_specialize.is_empty(),
"didn't expect more lambda sets in this position"
);
}
}
#[derive(Debug)]
enum LocalDefVarsVec<T> {
Stack(arrayvec::ArrayVec<T, 32>),
@ -1883,17 +2174,20 @@ fn type_to_variable<'a>(
// We may figure out the lambda set is recursive during solving, but it never
// is to begin with.
recursion_var: OptVariable::NONE,
unspecialized: SubsSlice::default(),
});
register_with_known_var(subs, destination, rank, pools, content)
}
UnspecializedLambdaSet(..) => {
// TODO: instantiate properly!
let union_lambdas =
UnionLambdas::from_slices(SubsSlice::new(0, 0), SubsSlice::new(0, 0));
UnspecializedLambdaSet(uls) => {
let unspecialized = SubsSlice::extend_new(
&mut subs.unspecialized_lambda_sets,
std::iter::once(*uls),
);
let content = Content::LambdaSet(subs::LambdaSet {
solved: union_lambdas,
unspecialized,
solved: UnionLabels::default(),
recursion_var: OptVariable::NONE,
});
@ -2555,8 +2849,9 @@ fn check_for_infinite_type(
&Content::LambdaSet(subs::LambdaSet {
solved,
recursion_var: _,
unspecialized,
}) => {
subs.mark_lambda_set_recursive(recursive, solved);
subs.mark_lambda_set_recursive(recursive, solved, unspecialized);
}
_other => circular_error(subs, problems, symbol, &loc_var),
@ -2903,6 +3198,7 @@ fn adjust_rank_content(
LambdaSet(subs::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
let mut rank = group_rank;
@ -2914,6 +3210,11 @@ fn adjust_rank_content(
}
}
for uls_index in *unspecialized {
let Uls(var, _, _) = subs[uls_index];
rank = rank.max(adjust_rank(subs, young_mark, visit_mark, group_rank, var));
}
if let (true, Some(rec_var)) = (cfg!(debug_assertions), recursion_var.into_variable()) {
// THEORY: unlike the situation for recursion vars under recursive tag unions,
// recursive vars inside lambda sets can't escape into higher let-generalized regions
@ -3092,6 +3393,7 @@ fn instantiate_rigids_help(subs: &mut Subs, max_rank: Rank, initial: Variable) {
LambdaSet(subs::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
for slice_index in solved.variables() {
let slice = subs.variable_slices[slice_index.index as usize];
@ -3101,6 +3403,10 @@ fn instantiate_rigids_help(subs: &mut Subs, max_rank: Rank, initial: Variable) {
if let Some(rec_var) = recursion_var.into_variable() {
stack.push(rec_var);
}
for Uls(var, _, _) in subs.get_subs_slice(*unspecialized) {
stack.push(*var);
}
}
&RangedNumber(typ, _) => {
stack.push(typ);
@ -3364,15 +3670,33 @@ fn deep_copy_var_help(
LambdaSet(subs::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
let lambda_set_var = copy;
let new_solved = copy_union!(solved);
let new_rec_var = recursion_var.map(|v| work!(v));
let new_unspecialized = SubsSlice::reserve_uls_slice(subs, unspecialized.len());
for (new_uls_index, uls_index) in
(new_unspecialized.into_iter()).zip(unspecialized.into_iter())
{
let Uls(var, sym, region) = subs[uls_index];
let new_var = work!(var);
deep_copy_uls_precondition(subs, var, new_var);
subs[new_uls_index] = Uls(new_var, sym, region);
subs.uls_of_var.add(new_var, lambda_set_var);
}
subs.set_content_unchecked(
copy,
lambda_set_var,
LambdaSet(subs::LambdaSet {
solved: new_solved,
recursion_var: new_rec_var,
unspecialized: new_unspecialized,
}),
);
}
@ -3388,6 +3712,26 @@ fn deep_copy_var_help(
initial_copy
}
#[inline(always)]
fn deep_copy_uls_precondition(subs: &Subs, original_var: Variable, new_var: Variable) {
if cfg!(debug_assertions) {
let content = subs.get_content_without_compacting(original_var);
debug_assert!(
matches!(
content,
Content::FlexAbleVar(..) | Content::RigidAbleVar(..)
),
"var in unspecialized lamba set is not bound to an ability, it is {:?}",
roc_types::subs::SubsFmtContent(content, subs)
);
debug_assert!(
original_var != new_var,
"unspecialized lamba set var was not instantiated"
);
}
}
#[inline(always)]
fn register(subs: &mut Subs, rank: Rank, pools: &mut Pools, content: Content) -> Variable {
let descriptor = Descriptor {

View File

@ -12,14 +12,14 @@ mod solve_expr {
use crate::helpers::with_larger_debug_stack;
use lazy_static::lazy_static;
use regex::Regex;
use roc_can::traverse::{find_ability_member_at, find_type_at};
use roc_can::traverse::{find_ability_member_and_owning_type_at, find_type_at};
use roc_load::LoadedModule;
use roc_module::symbol::{Interns, ModuleId};
use roc_problem::can::Problem;
use roc_region::all::{LineColumn, LineColumnRegion, LineInfo, Region};
use roc_reporting::report::{can_problem, type_problem, RocDocAllocator};
use roc_solve::solve::TypeError;
use roc_types::pretty_print::name_and_print_var;
use roc_types::pretty_print::{name_and_print_var, DebugPrint};
use std::path::PathBuf;
// HELPERS
@ -177,7 +177,7 @@ mod solve_expr {
debug_assert!(exposed_to_host.len() == 1);
let (_symbol, variable) = exposed_to_host.into_iter().next().unwrap();
let actual_str = name_and_print_var(variable, subs, home, &interns);
let actual_str = name_and_print_var(variable, subs, home, &interns, DebugPrint::NOTHING);
Ok((type_problems, can_problems, actual_str))
}
@ -235,7 +235,7 @@ mod solve_expr {
assert_eq!(actual, expected.to_string());
}
fn infer_queries(src: &str, expected: &[&'static str]) {
fn infer_queries_help(src: &str, expected: &[&'static str], print_only_under_alias: bool) {
let (
LoadedModule {
module_id: home,
@ -275,28 +275,34 @@ mod solve_expr {
let end = region.end().offset;
let text = &src[start as usize..end as usize];
let var = find_type_at(region, &decls)
.unwrap_or_else(|| panic!("No type for {} ({:?})!", &text, region));
.unwrap_or_else(|| panic!("No type for {:?} ({:?})!", &text, region));
let actual_str = name_and_print_var(var, subs, home, &interns);
let actual_str = name_and_print_var(
var,
subs,
home,
&interns,
DebugPrint {
print_lambda_sets: true,
print_only_under_alias,
},
);
let elaborated = match find_ability_member_at(region, &decls) {
Some((member, specialization_id)) => {
let qual = match abilities_store.get_resolved(specialization_id) {
Some(specialization) => {
abilities_store
.iter_specializations()
.find(|(_, ms)| ms.symbol == specialization)
.unwrap()
.0
.1
}
None => abilities_store.member_def(member).unwrap().parent_ability,
};
let qual_str = qual.as_str(&interns);
format!("{}#{} : {}", qual_str, text, actual_str)
}
None => format!("{} : {}", text, actual_str),
};
let elaborated =
match find_ability_member_and_owning_type_at(region, &decls, &abilities_store) {
Some((spec_type, spec_symbol)) => {
format!(
"{}#{}({}) : {}",
spec_type.as_str(&interns),
text,
spec_symbol.ident_id().index(),
actual_str
)
}
None => {
format!("{} : {}", text, actual_str)
}
};
solved_queries.push(elaborated);
}
@ -304,6 +310,15 @@ mod solve_expr {
assert_eq!(solved_queries, expected)
}
macro_rules! infer_queries {
($program:expr, $queries:expr $(,)?) => {
infer_queries_help($program, $queries, false)
};
($program:expr, $queries:expr, print_only_under_alias=true $(,)?) => {
infer_queries_help($program, $queries, true)
};
}
fn check_inferred_abilities<'a, I>(src: &'a str, expected_specializations: I)
where
I: IntoIterator<Item = (&'a str, &'a str)>,
@ -6153,7 +6168,7 @@ mod solve_expr {
#[test]
fn intermediate_branch_types() {
infer_queries(
infer_queries!(
indoc!(
r#"
app "test" provides [foo] to "./platform"
@ -6281,7 +6296,7 @@ mod solve_expr {
#[test]
fn encoder() {
infer_queries(
infer_queries!(
indoc!(
r#"
app "test" provides [myU8Bytes] to "./platform"
@ -6318,8 +6333,8 @@ mod solve_expr {
"#
),
&[
"u8 : U8 -> Encoder Linear",
"toEncoder : MyU8 -> Encoder fmt | fmt has Format",
"Linear#u8(22) : U8 -[[u8(22)]]-> Encoder Linear",
"MyU8#toEncoder(23) : MyU8 -[[toEncoder(23)]]-> Encoder fmt | fmt has Format",
"myU8Bytes : List U8",
],
)
@ -6327,7 +6342,7 @@ mod solve_expr {
#[test]
fn decoder() {
infer_queries(
infer_queries!(
indoc!(
r#"
app "test" provides [myU8] to "./platform"
@ -6379,8 +6394,8 @@ mod solve_expr {
"#
),
&[
"u8 : Decoder U8 Linear",
"decoder : Decoder MyU8 fmt | fmt has DecoderFormatting",
"Linear#u8(27) : Decoder U8 Linear",
"MyU8#decoder(28) : Decoder MyU8 fmt | fmt has DecoderFormatting",
"myU8 : Result MyU8 DecodeError",
],
)
@ -6409,7 +6424,7 @@ mod solve_expr {
#[test]
fn static_specialization() {
infer_queries(
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
@ -6426,7 +6441,7 @@ mod solve_expr {
a
"#
),
&["A#default : {} -> A"],
&["A#default(5) : {} -[[default(5)]]-> A"],
)
}
@ -6458,7 +6473,7 @@ mod solve_expr {
#[test]
fn encode_record() {
infer_queries(
infer_queries!(
indoc!(
r#"
app "test"
@ -6469,13 +6484,15 @@ mod solve_expr {
# ^^^^^^^^^
"#
),
&["Encoding#toEncoder : { a : Str } -> Encoder fmt | fmt has EncoderFormatting"],
&[
"Encoding#toEncoder(2) : { a : Str } -[[] + { a : Str }:toEncoder(2):1]-> Encoder fmt | fmt has EncoderFormatting",
],
)
}
#[test]
fn encode_record_with_nested_custom_impl() {
infer_queries(
infer_queries!(
indoc!(
r#"
app "test"
@ -6489,7 +6506,315 @@ mod solve_expr {
# ^^^^^^^^^
"#
),
&["Encoding#toEncoder : { a : A } -> Encoder fmt | fmt has EncoderFormatting"],
&["Encoding#toEncoder(2) : { a : A } -[[] + { a : A }:toEncoder(2):1]-> Encoder fmt | fmt has EncoderFormatting"],
)
}
#[test]
fn resolve_lambda_set_generalized_ability_alias() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Id has id : a -> a | a has Id
A := {}
id = \@A {} -> @A {}
#^^{-1}
main =
alias1 = id
# ^^
alias2 = alias1
# ^^^^^^
a : A
a = alias2 (@A {})
# ^^^^^^
a
"#
),
&[
"A#id(5) : A -[[id(5)]]-> A",
"Id#id(4) : a -[[] + a:id(4):1]-> a | a has Id",
"alias1 : a -[[] + a:id(4):1]-> a | a has Id",
"alias2 : A -[[id(5)]]-> A",
],
)
}
#[test]
fn resolve_lambda_set_ability_chain() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Id1 has id1 : a -> a | a has Id1
Id2 has id2 : a -> a | a has Id2
A := {}
id1 = \@A {} -> @A {}
#^^^{-1}
id2 = \@A {} -> id1 (@A {})
#^^^{-1} ^^^
main =
a : A
a = id2 (@A {})
# ^^^
a
"#
),
&[
"A#id1(8) : A -[[id1(8)]]-> A",
//
"A#id2(9) : A -[[id2(9)]]-> A",
"A#id1(8) : A -[[id1(8)]]-> A",
//
"A#id2(9) : A -[[id2(9)]]-> A",
],
)
}
#[test]
fn resolve_lambda_set_branches_ability_vs_non_ability() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Id has id : a -> a | a has Id
A := {}
id = \@A {} -> @A {}
#^^{-1}
idNotAbility = \x -> x
#^^^^^^^^^^^^{-1}
main =
choice : [T, U]
idChoice =
#^^^^^^^^{-1}
when choice is
T -> id
U -> idNotAbility
idChoice (@A {})
#^^^^^^^^{-1}
"#
),
&[
"A#id(5) : A -[[id(5)]]-> A",
"idNotAbility : a -[[idNotAbility(6)]]-> a",
"idChoice : a -[[idNotAbility(6)] + a:id(4):1]-> a | a has Id",
"idChoice : A -[[id(5), idNotAbility(6)]]-> A",
],
)
}
#[test]
fn resolve_lambda_set_branches_same_ability() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Id has id : a -> a | a has Id
A := {}
id = \@A {} -> @A {}
#^^{-1}
main =
choice : [T, U]
idChoice =
#^^^^^^^^{-1}
when choice is
T -> id
U -> id
idChoice (@A {})
#^^^^^^^^{-1}
"#
),
&[
"A#id(5) : A -[[id(5)]]-> A",
"idChoice : a -[[] + a:id(4):1]-> a | a has Id",
"idChoice : A -[[id(5)]]-> A",
],
)
}
#[test]
fn resolve_unspecialized_lambda_set_behind_alias() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Thunk a : {} -> a
Id has id : a -> Thunk a | a has Id
A := {}
id = \@A {} -> \{} -> @A {}
#^^{-1}
main =
alias = id
# ^^
a : A
a = (alias (@A {})) {}
# ^^^^^
a
"#
),
&[
"A#id(7) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
"Id#id(6) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
"alias : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
],
print_only_under_alias = true,
)
}
#[test]
fn resolve_unspecialized_lambda_set_behind_opaque() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Thunk a := {} -> a
Id has id : a -> Thunk a | a has Id
A := {}
id = \@A {} -> @Thunk (\{} -> @A {})
#^^{-1}
main =
thunk = id (@A {})
@Thunk it = thunk
it {}
#^^{-1}
"#
),
&[
"A#id(7) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
"it : {} -[[8(8)]]-> {}",
],
print_only_under_alias = true,
)
}
#[test]
fn resolve_two_unspecialized_lambda_sets_in_one_lambda_set() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Thunk a : {} -> a
Id has id : a -> Thunk a | a has Id
A := {}
id = \@A {} -> \{} -> @A {}
#^^{-1}
main =
a : A
a = (id (@A {})) {}
# ^^
a
"#
),
&[
"A#id(7) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
"A#id(7) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
],
print_only_under_alias = true,
)
}
#[test]
fn resolve_recursive_ability_lambda_set() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Diverge has diverge : a -> a | a has Diverge
A := {}
diverge = \@A {} -> diverge (@A {})
#^^^^^^^{-1} ^^^^^^^
main =
a : A
a = diverge (@A {})
# ^^^^^^^
a
"#
),
&[
"A#diverge(5) : A -[[diverge(5)]]-> A",
"Diverge#diverge(4) : A -[[diverge(5)]]-> A",
//
"A#diverge(5) : A -[[diverge(5)]]-> A",
],
)
}
#[test]
fn resolve_mutually_recursive_ability_lambda_sets() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Bounce has
ping : a -> a | a has Bounce
pong : a -> a | a has Bounce
A := {}
ping = \@A {} -> pong (@A {})
#^^^^{-1} ^^^^
pong = \@A {} -> ping (@A {})
#^^^^{-1} ^^^^
main =
a : A
a = ping (@A {})
# ^^^^
a
"#
),
&[
"A#ping(7) : A -[[ping(7)]]-> A",
"Bounce#pong(6) : A -[[pong(8)]]-> A",
//
"A#pong(8) : A -[[pong(8)]]-> A",
"A#ping(7) : A -[[ping(7)]]-> A",
//
"A#ping(7) : A -[[ping(7)]]-> A",
],
)
}
}

View File

@ -64,9 +64,6 @@ fn hash_specialization_multiple_add() {
}
#[test]
#[ignore = r#"This currently fails because the lambda set under `aliasedHash`
is unbound, since the `hash` prototype's lambda set is unbound.
Unspecialized lambda sets will solve this!"#]
#[cfg(any(feature = "gen-llvm", feature = "gen-wasm"))]
fn alias_member_specialization() {
assert_evals_to!(

View File

@ -1,15 +1,11 @@
#[cfg(feature = "gen-llvm")]
use crate::helpers::llvm::assert_evals_to;
#[cfg(feature = "gen-llvm")]
use crate::helpers::llvm::assert_expect_failed;
#[cfg(feature = "gen-llvm")]
use crate::helpers::llvm::assert_non_opt_evals_to;
#[cfg(feature = "gen-dev")]
use crate::helpers::dev::assert_evals_to;
// #[cfg(feature = "gen-dev")]
// use crate::helpers::dev::assert_expect_failed;
// #[cfg(feature = "gen-dev")]
// use crate::helpers::dev::assert_evals_to as assert_llvm_evals_to;
// #[cfg(feature = "gen-dev")]
// use crate::helpers::dev::assert_evals_to as assert_non_opt_evals_to;
@ -19,8 +15,6 @@ use crate::helpers::wasm::assert_evals_to;
#[cfg(feature = "gen-wasm")]
use crate::helpers::wasm::assert_evals_to as assert_non_opt_evals_to;
// #[cfg(feature = "gen-wasm")]
// use crate::helpers::dev::assert_expect_failed;
// #[cfg(feature = "gen-wasm")]
// use crate::helpers::wasm::assert_evals_to as assert_llvm_evals_to;
// #[cfg(feature = "gen-wasm")]
// use crate::helpers::wasm::assert_evals_to as assert_non_opt_evals_to;
@ -2539,23 +2533,6 @@ fn call_invalid_layout() {
);
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
#[should_panic(expected = "Failed with 1 failures. Failures: ")]
fn expect_fail() {
assert_expect_failed!(
indoc!(
r#"
expect 1 == 2
3
"#
),
3,
i64
);
}
#[test]
#[cfg(any(feature = "gen-llvm"))]
fn increment_or_double_closure() {

View File

@ -70,7 +70,7 @@ impl FromWasmerMemory for RocStr {
&memory_bytes[big_elem_ptr..][..big_length]
};
unsafe { RocStr::from_slice(slice) }
unsafe { RocStr::from_slice_unchecked(slice) }
}
}

View File

@ -90,7 +90,7 @@ use roc_std::{RocList, RocStr};
// "#
// ),
// RocStr::from_slice(b"JJJJJJJJJJJJJJJJJJJJJJJJJ"),
// RocStr::from_slice_unchecked(b"JJJJJJJJJJJJJJJJJJJJJJJJJ"),
// RocStr
// );
// }
@ -108,7 +108,7 @@ use roc_std::{RocList, RocStr};
// _ -> ""
// "#
// ),
// RocStr::from_slice(b"JJJ"),
// RocStr::from_slice_unchecked(b"JJJ"),
// RocStr
// );
// }
@ -122,8 +122,8 @@ use roc_std::{RocList, RocStr};
// "#
// ),
// RocList::from_slice(&[
// RocStr::from_slice(b"01234567789abcdefghi"),
// RocStr::from_slice(b"01234567789abcdefghi")
// RocStr::from_slice_unchecked(b"01234567789abcdefghi"),
// RocStr::from_slice_unchecked(b"01234567789abcdefghi")
// ]),
// RocList<RocStr>
// );
@ -135,8 +135,8 @@ use roc_std::{RocList, RocStr};
// "#
// ),
// RocList::from_slice(&[
// RocStr::from_slice(b"01234567789abcdefghi "),
// RocStr::from_slice(b" 01234567789abcdefghi")
// RocStr::from_slice_unchecked(b"01234567789abcdefghi "),
// RocStr::from_slice_unchecked(b" 01234567789abcdefghi")
// ]),
// RocList<RocStr>
// );
@ -151,9 +151,9 @@ use roc_std::{RocList, RocStr};
// "#
// ),
// RocList::from_slice(&[
// RocStr::from_slice(b"J"),
// RocStr::from_slice(b"J"),
// RocStr::from_slice(b"J")
// RocStr::from_slice_unchecked(b"J"),
// RocStr::from_slice_unchecked(b"J"),
// RocStr::from_slice_unchecked(b"J")
// ]),
// RocList<RocStr>
// );
@ -169,7 +169,7 @@ use roc_std::{RocList, RocStr};
// "than the delimiter which happens to be very very long"
// "#
// ),
// RocList::from_slice(&[RocStr::from_slice(b"string to split is shorter")]),
// RocList::from_slice(&[RocStr::from_slice_unchecked(b"string to split is shorter")]),
// RocList<RocStr>
// );
// }
@ -182,7 +182,7 @@ use roc_std::{RocList, RocStr};
// Str.split "" ""
// "#
// ),
// RocList::from_slice(&[RocStr::from_slice(b"")]),
// RocList::from_slice(&[RocStr::from_slice_unchecked(b"")]),
// RocList<RocStr>
// );
// }
@ -195,7 +195,7 @@ use roc_std::{RocList, RocStr};
// Str.split "a," ","
// "#
// ),
// RocList::from_slice(&[RocStr::from_slice(b"a"), RocStr::from_slice(b"")]),
// RocList::from_slice(&[RocStr::from_slice_unchecked(b"a"), RocStr::from_slice_unchecked(b"")]),
// RocList<RocStr>
// )
// }
@ -224,9 +224,9 @@ use roc_std::{RocList, RocStr};
// "#
// ),
// RocList::from_slice(&[
// RocStr::from_slice(b"1"),
// RocStr::from_slice(b"2"),
// RocStr::from_slice(b"")
// RocStr::from_slice_unchecked(b"1"),
// RocStr::from_slice_unchecked(b"2"),
// RocStr::from_slice_unchecked(b"")
// ]),
// RocList<RocStr>
// );
@ -243,9 +243,9 @@ use roc_std::{RocList, RocStr};
// "#
// ),
// RocList::from_slice(&[
// RocStr::from_slice(b"3"),
// RocStr::from_slice(b"4"),
// RocStr::from_slice(b"")
// RocStr::from_slice_unchecked(b"3"),
// RocStr::from_slice_unchecked(b"4"),
// RocStr::from_slice_unchecked(b"")
// ]),
// RocList<RocStr>
// );
@ -261,7 +261,7 @@ use roc_std::{RocList, RocStr};
// "Second string that is also fairly long. Two long strings test things that might not appear with short strings."
// "#
// ),
// RocStr::from_slice(b"First string that is fairly long. Longer strings make for different errors. Second string that is also fairly long. Two long strings test things that might not appear with short strings."),
// RocStr::from_slice_unchecked(b"First string that is fairly long. Longer strings make for different errors. Second string that is also fairly long. Two long strings test things that might not appear with short strings."),
// RocStr
// );
// }
@ -498,7 +498,7 @@ fn str_starts_with_false_small_str() {
// Err _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("a".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("a".as_bytes()),
// roc_std::RocStr
// );
// }
@ -513,7 +513,7 @@ fn str_starts_with_false_small_str() {
// Err _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("abc~".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("abc~".as_bytes()),
// roc_std::RocStr
// );
// }
@ -528,7 +528,7 @@ fn str_starts_with_false_small_str() {
// Err _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("∆".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("∆".as_bytes()),
// roc_std::RocStr
// );
// }
@ -543,7 +543,7 @@ fn str_starts_with_false_small_str() {
// Err _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("∆œ¬".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("∆œ¬".as_bytes()),
// roc_std::RocStr
// );
// }
@ -558,7 +558,7 @@ fn str_starts_with_false_small_str() {
// Err _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("💖".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("💖".as_bytes()),
// roc_std::RocStr
// );
// }
@ -573,7 +573,7 @@ fn str_starts_with_false_small_str() {
// Err _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("💖🤠🚀".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("💖🤠🚀".as_bytes()),
// roc_std::RocStr
// );
// }
@ -588,7 +588,7 @@ fn str_starts_with_false_small_str() {
// Err _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("💖b∆".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("💖b∆".as_bytes()),
// roc_std::RocStr
// );
// }
@ -607,7 +607,7 @@ fn str_starts_with_false_small_str() {
// _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("a".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("a".as_bytes()),
// roc_std::RocStr
// );
// }
@ -626,7 +626,7 @@ fn str_starts_with_false_small_str() {
// _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("a".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("a".as_bytes()),
// roc_std::RocStr
// );
// }
@ -645,7 +645,7 @@ fn str_starts_with_false_small_str() {
// _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("a".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("a".as_bytes()),
// roc_std::RocStr
// );
// }
@ -664,7 +664,7 @@ fn str_starts_with_false_small_str() {
// _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("a".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("a".as_bytes()),
// roc_std::RocStr
// );
// }
@ -683,7 +683,7 @@ fn str_starts_with_false_small_str() {
// _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("a".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("a".as_bytes()),
// roc_std::RocStr
// );
// }
@ -702,7 +702,7 @@ fn str_starts_with_false_small_str() {
// _ -> ""
// "#
// ),
// roc_std::RocStr::from_slice("a".as_bytes()),
// roc_std::RocStr::from_slice_unchecked("a".as_bytes()),
// roc_std::RocStr
// );
// }
@ -744,7 +744,7 @@ fn str_equality() {
// printExpr expr
// "#
// ),
// RocStr::from_slice(b"Add (Add (Val 3) (Val 1)) (Add (Val 1) (Var 1))"),
// RocStr::from_slice_unchecked(b"Add (Add (Val 3) (Val 1)) (Add (Val 1) (Var 1))"),
// RocStr
// );
// }

View File

@ -1,8 +1,8 @@
use crate::subs::{
self, AliasVariables, Content, FlatType, GetSubsSlice, Label, Subs, SubsIndex, UnionLabels,
UnionTags, Variable,
UnionTags, UnsortedUnionLabels, Variable,
};
use crate::types::{name_type_var, RecordField};
use crate::types::{name_type_var, RecordField, Uls};
use roc_collections::all::MutMap;
use roc_error_macros::internal_error;
use roc_module::ident::{Lowercase, TagName};
@ -48,9 +48,22 @@ macro_rules! write_parens {
};
}
pub struct DebugPrint {
pub print_lambda_sets: bool,
pub print_only_under_alias: bool,
}
impl DebugPrint {
pub const NOTHING: DebugPrint = DebugPrint {
print_lambda_sets: false,
print_only_under_alias: false,
};
}
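
A sketch of opting into lambda set printing, as the solver tests above do; it assumes `name_and_print_var` below and a helper name of our choosing:

// Sketch: render a type with lambda sets visible, e.g. "A -[[id(5)]]-> A".
fn print_with_lambda_sets(
    var: Variable,
    subs: &mut Subs,
    home: ModuleId,
    interns: &Interns,
) -> String {
    let debug = DebugPrint {
        print_lambda_sets: true,
        print_only_under_alias: false,
    };
    name_and_print_var(var, subs, home, interns, debug)
}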
struct Env<'a> {
home: ModuleId,
interns: &'a Interns,
debug: DebugPrint,
}
/// How many times a root variable appeared in Subs.
@ -234,6 +247,7 @@ fn find_names_needed(
LambdaSet(subs::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
for slice_index in solved.variables() {
let slice = subs[slice_index];
@ -243,6 +257,11 @@ fn find_names_needed(
}
}
for uls_index in unspecialized.into_iter() {
let Uls(var, _, _) = subs[uls_index];
find_names_needed(var, subs, roots, root_appearances, names_taken);
}
if let Some(rec_var) = recursion_var.into_variable() {
find_names_needed(rec_var, subs, roots, root_appearances, names_taken);
}
@ -364,9 +383,14 @@ fn content_to_string(
home: ModuleId,
interns: &Interns,
named_result: NamedResult,
debug_print: DebugPrint,
) -> String {
let mut buf = String::new();
let env = Env { home, interns };
let env = Env {
home,
interns,
debug: debug_print,
};
let mut ctx = Context {
able_variables: vec![],
recursion_structs_to_expand: named_result.recursion_structs_to_expand,
@ -391,10 +415,11 @@ pub fn name_and_print_var(
subs: &mut Subs,
home: ModuleId,
interns: &Interns,
debug_print: DebugPrint,
) -> String {
let named_result = name_all_type_vars(var, subs);
let content = subs.get_content_without_compacting(var);
content_to_string(content, subs, home, interns, named_result)
content_to_string(content, subs, home, interns, named_result, debug_print)
}
pub fn get_single_arg<'a>(subs: &'a Subs, args: &'a AliasVariables) -> &'a Content {
@ -470,7 +495,7 @@ fn write_content<'a>(
}
},
Structure(flat_type) => write_flat_type(env, ctx, flat_type, subs, buf, parens),
Alias(symbol, args, _actual, _kind) => {
Alias(symbol, args, actual, _kind) => {
let write_parens = parens == Parens::InTypeParam && !args.is_empty();
match *symbol {
@ -528,6 +553,11 @@ fn write_content<'a>(
write_parens,
),
_ if env.debug.print_only_under_alias => write_parens!(write_parens, buf, {
let content = subs.get_content_without_compacting(*actual);
write_content(env, ctx, content, subs, buf, parens)
}),
_ => write_parens!(write_parens, buf, {
write_symbol(env, *symbol, buf);
@ -546,15 +576,70 @@ fn write_content<'a>(
roc_debug_flags::dbg_do!(roc_debug_flags::ROC_PRETTY_PRINT_ALIAS_CONTENTS, {
buf.push_str("[[ but really ");
let content = subs.get_content_without_compacting(*_actual);
let content = subs.get_content_without_compacting(*actual);
write_content(env, ctx, content, subs, buf, parens);
buf.push_str("]]");
});
}),
}
}
LambdaSet(_) => {
// lambda sets never exposed to the user
LambdaSet(subs::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
debug_assert!(env.debug.print_lambda_sets);
buf.push_str("[[");
let print_symbol = |symbol: &Symbol| {
format!(
"{}({})",
symbol.as_str(env.interns),
symbol.ident_id().index(),
)
};
write_sorted_tags2(
env,
ctx,
subs,
buf,
solved.unsorted_lambdas(subs),
print_symbol,
);
buf.push(']');
if let Some(rec_var) = recursion_var.into_variable() {
buf.push_str(" as ");
write_content(
env,
ctx,
subs.get_content_without_compacting(rec_var),
subs,
buf,
parens,
)
}
for Uls(var, member, region) in subs.get_subs_slice(*unspecialized) {
buf.push_str(" + ");
write_content(
env,
ctx,
subs.get_content_without_compacting(*var),
subs,
buf,
Parens::Unnecessary,
);
buf.push(':');
buf.push_str(&print_symbol(member));
buf.push(':');
buf.push_str(&region.to_string());
}
buf.push(']');
}
RangedNumber(typ, _range_vars) => write_content(
env,
@ -672,19 +757,19 @@ fn write_ext_content<'a>(
}
}
fn write_sorted_tags2<'a>(
fn write_sorted_tags2<'a, L>(
env: &Env,
ctx: &mut Context<'a>,
subs: &'a Subs,
buf: &mut String,
tags: &UnionTags,
ext_var: Variable,
) -> ExtContent<'a> {
// Sort the fields so they always end up in the same order.
let (tags, new_ext_var) = tags.unsorted_tags_and_ext(subs, ext_var);
tags: UnsortedUnionLabels<L>,
label_to_string: impl Fn(&L) -> String,
) where
L: Label + Ord,
{
let mut sorted_fields = tags.tags;
sorted_fields.sort_by(|(a, _), (b, _)| a.as_ident_str().cmp(&b.as_ident_str()));
sorted_fields.sort_by(|(a, _), (b, _)| a.cmp(b));
let mut any_written_yet = false;
@ -695,7 +780,7 @@ fn write_sorted_tags2<'a>(
any_written_yet = true;
}
buf.push_str(label.as_ident_str().as_str());
buf.push_str(&label_to_string(label));
for var in vars {
buf.push(' ');
@ -709,8 +794,6 @@ fn write_sorted_tags2<'a>(
);
}
}
ExtContent::from_var(subs, new_ext_var)
}
fn write_sorted_tags<'a>(
@ -788,10 +871,11 @@ fn write_flat_type<'a>(
),
EmptyRecord => buf.push_str(EMPTY_RECORD),
EmptyTagUnion => buf.push_str(EMPTY_TAG_UNION),
Func(args, _closure, ret) => write_fn(
Func(args, closure, ret) => write_fn(
env,
ctx,
subs.get_subs_slice(*args),
*closure,
*ret,
subs,
buf,
@ -863,11 +947,20 @@ fn write_flat_type<'a>(
TagUnion(tags, ext_var) => {
buf.push('[');
let ext_content = write_sorted_tags2(env, ctx, subs, buf, tags, *ext_var);
// Sort the tags so they always end up in the same order.
let (tags, new_ext_var) = tags.unsorted_tags_and_ext(subs, *ext_var);
write_sorted_tags2(env, ctx, subs, buf, tags, |tag| tag.0.as_str().to_string());
buf.push(']');
write_ext_content(env, ctx, subs, buf, ext_content, parens)
write_ext_content(
env,
ctx,
subs,
buf,
ExtContent::from_var(subs, new_ext_var),
parens,
)
}
FunctionOrTagUnion(tag_name, _, ext_var) => {
@ -885,11 +978,19 @@ fn write_flat_type<'a>(
RecursiveTagUnion(rec_var, tags, ext_var) => {
buf.push('[');
let ext_content = write_sorted_tags2(env, ctx, subs, buf, tags, *ext_var);
let (tags, new_ext_var) = tags.unsorted_tags_and_ext(subs, *ext_var);
write_sorted_tags2(env, ctx, subs, buf, tags, |tag| tag.0.as_str().to_string());
buf.push(']');
write_ext_content(env, ctx, subs, buf, ext_content, parens);
write_ext_content(
env,
ctx,
subs,
buf,
ExtContent::from_var(subs, new_ext_var),
parens,
);
buf.push_str(" as ");
write_content(
@ -965,7 +1066,12 @@ pub fn resolve_lambda_set(subs: &Subs, mut var: Variable) -> ResolvedLambdaSet {
Content::LambdaSet(subs::LambdaSet {
solved,
recursion_var: _,
unspecialized,
}) => {
debug_assert!(
unspecialized.is_empty(),
"unspecialized lambda sets left over during resolution"
);
push_union(subs, solved, &mut set);
return ResolvedLambdaSet::Set(set);
}
@ -1056,10 +1162,12 @@ fn write_apply<'a>(
}
}
#[allow(clippy::too_many_arguments)]
fn write_fn<'a>(
env: &Env,
ctx: &mut Context<'a>,
args: &[Variable],
closure: Variable,
ret: Variable,
subs: &'a Subs,
buf: &mut String,
@ -1089,7 +1197,21 @@ fn write_fn<'a>(
);
}
buf.push_str(" -> ");
if !env.debug.print_lambda_sets {
buf.push_str(" -> ");
} else {
buf.push_str(" -");
write_content(
env,
ctx,
subs.get_content_without_compacting(closure),
subs,
buf,
parens,
);
buf.push_str("-> ");
}
write_content(
env,
ctx,

View File

@ -1,8 +1,9 @@
#![deny(unsafe_op_in_unsafe_fn)]
use crate::types::{
name_type_var, AliasKind, ErrorType, Problem, RecordField, RecordFieldsError, TypeExt,
name_type_var, AliasKind, ErrorType, Problem, RecordField, RecordFieldsError, TypeExt, Uls,
};
use roc_collections::all::{ImMap, ImSet, MutSet, SendMap};
use roc_collections::{VecMap, VecSet};
use roc_error_macros::internal_error;
use roc_module::ident::{Lowercase, TagName, Uppercase};
use roc_module::symbol::Symbol;
@ -75,6 +76,7 @@ struct SubsHeader {
field_names: u64,
record_fields: u64,
variable_slices: u64,
unspecialized_lambda_sets: u64,
exposed_vars_by_symbol: u64,
}
@ -92,6 +94,7 @@ impl SubsHeader {
field_names: subs.field_names.len() as u64,
record_fields: subs.record_fields.len() as u64,
variable_slices: subs.variable_slices.len() as u64,
unspecialized_lambda_sets: subs.unspecialized_lambda_sets.len() as u64,
exposed_vars_by_symbol: exposed_vars_by_symbol as u64,
}
}
@ -138,6 +141,7 @@ impl Subs {
written = Self::serialize_field_names(&self.field_names, writer, written)?;
written = Self::serialize_slice(&self.record_fields, writer, written)?;
written = Self::serialize_slice(&self.variable_slices, writer, written)?;
written = Self::serialize_slice(&self.unspecialized_lambda_sets, writer, written)?;
written = Self::serialize_slice(exposed_vars_by_symbol, writer, written)?;
Ok(written)
@ -221,6 +225,8 @@ impl Subs {
Self::deserialize_slice(bytes, header.record_fields as usize, offset);
let (variable_slices, offset) =
Self::deserialize_slice(bytes, header.variable_slices as usize, offset);
let (unspecialized_lambda_sets, offset) =
Self::deserialize_slice(bytes, header.unspecialized_lambda_sets as usize, offset);
let (exposed_vars_by_symbol, _) =
Self::deserialize_slice(bytes, header.exposed_vars_by_symbol as usize, offset);
@ -233,8 +239,10 @@ impl Subs {
field_names,
record_fields: record_fields.to_vec(),
variable_slices: variable_slices.to_vec(),
unspecialized_lambda_sets: unspecialized_lambda_sets.to_vec(),
tag_name_cache: Default::default(),
problems: Default::default(),
uls_of_var: Default::default(),
},
exposed_vars_by_symbol,
)
@ -300,6 +308,52 @@ impl Subs {
}
}
/// Mapping of variables to [Content::LambdaSet]s containing unspecialized lambda sets depending on
/// that variable.
#[derive(Clone, Default, Debug)]
pub struct UlsOfVar(VecMap<Variable, VecSet<Variable>>);
impl UlsOfVar {
pub fn add(&mut self, var: Variable, dependent_lambda_set: Variable) -> bool {
// NOTE: this adds the var directly without following unification links.
// [Subs::remove_dependent_unspecialized_lambda_sets] follows unifications when removing.
let set = self.0.get_or_insert(var, Default::default);
set.insert(dependent_lambda_set)
}
pub fn extend(
&mut self,
var: Variable,
dependent_lambda_sets: impl IntoIterator<Item = Variable>,
) {
// NOTE: this adds the var directly without following unification links.
// [Subs::remove_dependent_unspecialized_lambda_sets] follows unifications when removing.
let set = self.0.get_or_insert(var, Default::default);
set.extend(dependent_lambda_sets);
}
pub fn union(&mut self, other: Self) {
for (key, lset) in other.drain() {
self.extend(key, lset);
}
}
/// NOTE: this does not follow unification links.
pub fn drain(self) -> impl Iterator<Item = (Variable, impl Iterator<Item = Variable>)> {
self.0
.into_iter()
.map(|(v, set): (Variable, VecSet<Variable>)| (v, set.into_iter()))
}
pub fn len(&self) -> usize {
self.0.len()
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
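
A hedged sketch of the bookkeeping this map supports; both helper names are ours:

// Record that some lambda sets are waiting on `able_var` to be specialized.
fn defer_lambda_sets(
    uls_of_var: &mut UlsOfVar,
    able_var: Variable,
    lambda_sets: impl IntoIterator<Item = Variable>,
) {
    uls_of_var.extend(able_var, lambda_sets);
}

// Fold the deferred map of a nested scope into the enclosing one.
fn merge_deferred(outer: &mut UlsOfVar, inner: UlsOfVar) {
    outer.union(inner);
}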
#[derive(Clone)]
pub struct Subs {
utable: UnificationTable,
@ -309,8 +363,10 @@ pub struct Subs {
pub field_names: Vec<Lowercase>,
pub record_fields: Vec<RecordField<()>>,
pub variable_slices: Vec<VariableSubsSlice>,
pub unspecialized_lambda_sets: Vec<Uls>,
pub tag_name_cache: TagNameCache,
pub problems: Vec<Problem>,
pub uls_of_var: UlsOfVar,
}
#[derive(Debug, Clone, Default)]
@ -408,6 +464,20 @@ impl std::ops::IndexMut<SubsIndex<Symbol>> for Subs {
}
}
impl std::ops::Index<SubsIndex<Uls>> for Subs {
type Output = Uls;
fn index(&self, index: SubsIndex<Uls>) -> &Self::Output {
&self.unspecialized_lambda_sets[index.index as usize]
}
}
impl std::ops::IndexMut<SubsIndex<Uls>> for Subs {
fn index_mut(&mut self, index: SubsIndex<Uls>) -> &mut Self::Output {
&mut self.unspecialized_lambda_sets[index.index as usize]
}
}
impl std::ops::IndexMut<SubsIndex<Lowercase>> for Subs {
fn index_mut(&mut self, index: SubsIndex<Lowercase>) -> &mut Self::Output {
&mut self.field_names[index.index as usize]
@ -567,6 +637,17 @@ impl SubsSlice<TagName> {
}
}
impl SubsSlice<Uls> {
pub fn reserve_uls_slice(subs: &mut Subs, length: usize) -> Self {
let start = subs.unspecialized_lambda_sets.len() as u32;
subs.unspecialized_lambda_sets
.extend(std::iter::repeat(Uls(Variable::NULL, Symbol::UNDERSCORE, 0)).take(length));
Self::new(start, length as u16)
}
}
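
A sketch of the reserve-then-fill pattern this enables, mirroring `deep_copy_var_help` earlier in this diff; the helper name is ours:

// Reserve space for the entries, then write each one through the
// IndexMut<SubsIndex<Uls>> impl above.
fn store_uls_slice(subs: &mut Subs, entries: &[Uls]) -> SubsSlice<Uls> {
    let slice = SubsSlice::reserve_uls_slice(subs, entries.len());
    for (index, &uls) in slice.into_iter().zip(entries) {
        subs[index] = uls;
    }
    slice
}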
impl<T> SubsIndex<T> {
pub const fn new(start: u32) -> Self {
Self {
@ -640,6 +721,12 @@ impl GetSubsSlice<Symbol> for Subs {
}
}
impl GetSubsSlice<Uls> for Subs {
fn get_subs_slice(&self, subs_slice: SubsSlice<Uls>) -> &[Uls] {
subs_slice.get_slice(&self.unspecialized_lambda_sets)
}
}
impl fmt::Debug for Subs {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f)?;
@ -711,6 +798,7 @@ fn subs_fmt_content(this: &Content, subs: &Subs, f: &mut fmt::Formatter) -> fmt:
Content::LambdaSet(LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
write!(f, "LambdaSet([")?;
@ -727,7 +815,14 @@ fn subs_fmt_content(this: &Content, subs: &Subs, f: &mut fmt::Formatter) -> fmt:
write!(f, ", ")?;
}
write!(f, "<{:?}>])", recursion_var)
write!(f, "]")?;
if let Some(rec_var) = recursion_var.into_variable() {
write!(f, " as <{:?}>", rec_var)?;
}
for uls in subs.get_subs_slice(*unspecialized) {
write!(f, " + {:?}", uls)?;
}
write!(f, ")")
}
Content::RangedNumber(typ, range) => {
write!(f, "RangedNumber({:?}, {:?})", typ, range)
@ -829,6 +924,29 @@ fn subs_fmt_flat_type(this: &FlatType, subs: &Subs, f: &mut fmt::Formatter) -> f
}
}
#[cfg(debug_assertions)]
pub struct DebugUtable<'a>(pub &'a Subs);
#[cfg(debug_assertions)]
impl std::fmt::Debug for DebugUtable<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("UnificationTable {\n")?;
for v in 0..self.0.utable.len() {
f.write_fmt(format_args!(" {} => ", v))?;
let var = unsafe { Variable::from_index(v as u32) };
let root = self.0.utable.root_key_without_compacting(var);
if root == var {
let desc = self.0.utable.get_descriptor(root);
let fmt_content = crate::subs::SubsFmtContent(&desc.content, self.0);
f.write_fmt(format_args!("{:?} at {}\n", fmt_content, desc.rank))?;
} else {
f.write_fmt(format_args!("{}\n", root.index()))?;
}
}
f.write_str("}")
}
}
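
A sketch of using this formatter while debugging, assuming a `subs` in scope:

// Dump the whole unification table: roots print their content and rank,
// non-roots print the index of their root.
#[cfg(debug_assertions)]
fn dump_utable(subs: &Subs) {
    eprintln!("{:?}", DebugUtable(subs));
}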
#[derive(Debug)]
pub struct VarStore {
next: u32,
@ -1518,8 +1636,10 @@ impl Subs {
// store an empty slice at the first position
// used for "TagOrFunction"
variable_slices: vec![VariableSubsSlice::default()],
unspecialized_lambda_sets: Vec::new(),
tag_name_cache: Default::default(),
problems: Vec::new(),
uls_of_var: Default::default(),
};
subs.utable.reserve(capacity);
@ -1765,12 +1885,18 @@ impl Subs {
self.set_content(recursive, Content::Structure(flat_type));
}
pub fn mark_lambda_set_recursive(&mut self, recursive: Variable, solved_lambdas: UnionLambdas) {
pub fn mark_lambda_set_recursive(
&mut self,
recursive: Variable,
solved_lambdas: UnionLambdas,
unspecialized_lambdas: SubsSlice<Uls>,
) {
let (rec_var, new_tags) = self.mark_union_recursive_help(recursive, solved_lambdas);
let new_lambda_set = Content::LambdaSet(LambdaSet {
solved: new_tags,
recursion_var: OptVariable::from(rec_var),
unspecialized: unspecialized_lambdas,
});
self.set_content(recursive, new_lambda_set);
@ -1880,6 +2006,28 @@ impl Subs {
pub fn vars_since_snapshot(&mut self, snapshot: &Snapshot) -> core::ops::Range<Variable> {
self.utable.vars_since_snapshot(snapshot)
}
pub fn get_lambda_set(&self, lambda_set: Variable) -> LambdaSet {
match self.get_content_without_compacting(lambda_set) {
Content::LambdaSet(lambda_set) => *lambda_set,
_ => internal_error!("not a lambda set"),
}
}
pub fn remove_dependent_unspecialized_lambda_sets(
&mut self,
var: Variable,
) -> impl Iterator<Item = Variable> + '_ {
let utable = &self.utable;
let root_var = utable.root_key_without_compacting(var);
self.uls_of_var
.0
.drain_filter(move |cand_var, _| {
utable.root_key_without_compacting(*cand_var) == root_var
})
.flat_map(|(_, lambda_set_vars)| lambda_set_vars.into_iter())
}
}
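
A sketch of the intended use, with a helper name of our choosing: once `var` unifies with a concrete type, drain the lambda sets that were waiting on it and queue them for compaction:

// Collect the lambda sets that depended on `var`, ready to be handed to
// compact_lambda_sets_of_vars.
fn take_dependent_lambda_sets(subs: &mut Subs, var: Variable) -> Vec<Variable> {
    subs.remove_dependent_unspecialized_lambda_sets(var).collect()
}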
#[inline(always)]
@ -2059,6 +2207,8 @@ pub struct LambdaSet {
///
/// However, we don't know if a lambda set is recursive or not until type inference.
pub recursion_var: OptVariable,
/// Lambdas we won't know until an ability specialization is resolved.
pub unspecialized: SubsSlice<Uls>,
}
#[derive(Clone, Copy, Debug, Default)]
@ -2935,6 +3085,7 @@ fn occurs(
LambdaSet(self::LambdaSet {
solved,
recursion_var,
unspecialized: _,
}) => {
let mut new_seen = seen.to_owned();
new_seen.push(root_var);
@ -2945,6 +3096,9 @@ fn occurs(
}
}
// unspecialized lambda vars excluded because they are not explicitly part of the
// type (they only matter after being resolved).
occurs_union(subs, root_var, &new_seen, include_recursion_var, solved)
}
RangedNumber(typ, _range_vars) => {
@ -3122,15 +3276,21 @@ fn explicit_substitute(
LambdaSet(self::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
// NOTE recursion_var is not substituted, verify that this is correct!
let new_solved = explicit_substitute_union(subs, from, to, solved, seen);
for Uls(v, _, _) in subs.get_subs_slice(unspecialized) {
debug_assert!(*v != from, "unspecialized lambda set vars should never occur in a position where they need to be explicitly substituted.");
}
subs.set_content(
in_var,
LambdaSet(self::LambdaSet {
solved: new_solved,
recursion_var,
unspecialized,
}),
);
@ -3239,12 +3399,18 @@ fn get_var_names(
LambdaSet(self::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
let taken_names = get_var_names_union(subs, solved, taken_names);
match recursion_var.into_variable() {
let mut taken_names = match recursion_var.into_variable() {
Some(v) => get_var_names(subs, v, taken_names),
None => taken_names,
};
for uls_index in unspecialized {
let Uls(v, _, _) = subs[uls_index];
taken_names = get_var_names(subs, v, taken_names);
}
taken_names
}
RangedNumber(typ, _) => get_var_names(subs, typ, taken_names),
@ -3483,10 +3649,7 @@ fn content_to_err_type(
ErrorType::Alias(symbol, err_args, Box::new(err_type), kind)
}
LambdaSet(self::LambdaSet {
solved: _,
recursion_var: _r,
}) => {
LambdaSet(self::LambdaSet { .. }) => {
// Don't print lambda sets since we don't expect them to be exposed to the user
ErrorType::Error
}
@ -3713,6 +3876,19 @@ fn get_fresh_var_name(state: &mut ErrorTypeState) -> Lowercase {
name
}
/// Exposed types in a module, captured in a storage subs. Includes
/// - all explicitly exposed symbol variables
/// - all implicitly exposed variables, which include
/// - ability member specializations
/// - specialization lambda sets under ability member specializations
#[derive(Clone, Debug)]
pub struct ExposedTypesStorageSubs {
pub storage_subs: StorageSubs,
pub stored_vars_by_symbol: VecMap<Symbol, Variable>,
/// lambda set var in other module -> var in storage subs
pub stored_specialization_lambda_set_vars: VecMap<Variable, Variable>,
}
#[derive(Clone, Debug)]
pub struct StorageSubs {
subs: Subs,
@ -3727,6 +3903,7 @@ struct StorageSubsOffsets {
field_names: u32,
record_fields: u32,
variable_slices: u32,
unspecialized_lambda_sets: u32,
problems: u32,
}
@ -3744,7 +3921,7 @@ impl StorageSubs {
}
pub fn extend_with_variable(&mut self, source: &mut Subs, variable: Variable) -> Variable {
deep_copy_var_to(source, &mut self.subs, variable)
storage_copy_var_to(source, &mut self.subs, variable)
}
pub fn import_variable_from(&mut self, source: &mut Subs, variable: Variable) -> CopiedImport {
@ -3764,6 +3941,7 @@ impl StorageSubs {
field_names: self.subs.field_names.len() as u32,
record_fields: self.subs.record_fields.len() as u32,
variable_slices: self.subs.variable_slices.len() as u32,
unspecialized_lambda_sets: self.subs.unspecialized_lambda_sets.len() as u32,
problems: self.subs.problems.len() as u32,
};
@ -3775,6 +3953,7 @@ impl StorageSubs {
field_names: target.field_names.len() as u32,
record_fields: target.record_fields.len() as u32,
variable_slices: target.variable_slices.len() as u32,
unspecialized_lambda_sets: target.unspecialized_lambda_sets.len() as u32,
problems: target.problems.len() as u32,
};
@ -3821,6 +4000,9 @@ impl StorageSubs {
target.closure_names.extend(self.subs.closure_names);
target.field_names.extend(self.subs.field_names);
target.record_fields.extend(self.subs.record_fields);
target
.unspecialized_lambda_sets
.extend(self.subs.unspecialized_lambda_sets);
target.problems.extend(self.subs.problems);
debug_assert_eq!(
@ -3905,9 +4087,11 @@ impl StorageSubs {
LambdaSet(self::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => LambdaSet(self::LambdaSet {
solved: Self::offset_lambda_set(offsets, *solved),
recursion_var: recursion_var.map(|v| Self::offset_variable(offsets, v)),
unspecialized: Self::offset_uls_slice(offsets, *unspecialized),
}),
RangedNumber(typ, range) => RangedNumber(Self::offset_variable(offsets, *typ), *range),
Error => Content::Error,
@ -3978,6 +4162,12 @@ impl StorageSubs {
slice
}
fn offset_uls_slice(offsets: &StorageSubsOffsets, mut slice: SubsSlice<Uls>) -> SubsSlice<Uls> {
slice.start += offsets.unspecialized_lambda_sets;
slice
}
fn offset_problem(
offsets: &StorageSubsOffsets,
mut problem_index: SubsIndex<Problem>,
@ -4004,7 +4194,7 @@ fn put_scratchpad(scratchpad: bumpalo::Bump) {
});
}
pub fn deep_copy_var_to(
pub fn storage_copy_var_to(
source: &mut Subs, // mut to set the copy
target: &mut Subs,
var: Variable,
@ -4016,14 +4206,14 @@ pub fn deep_copy_var_to(
let copy = {
let visited = bumpalo::collections::Vec::with_capacity_in(256, &arena);
let mut env = DeepCopyVarToEnv {
let mut env = StorageCopyVarToEnv {
visited,
source,
target,
max_rank: rank,
};
let copy = deep_copy_var_to_help(&mut env, var);
let copy = storage_copy_var_to_help(&mut env, var);
// we have tracked all visited variables, and can now traverse them
// in one go (without looking at the UnificationTable) and clear the copy field
@ -4046,7 +4236,7 @@ pub fn deep_copy_var_to(
copy
}
struct DeepCopyVarToEnv<'a> {
struct StorageCopyVarToEnv<'a> {
visited: bumpalo::collections::Vec<'a, Variable>,
source: &'a mut Subs,
target: &'a mut Subs,
@ -4054,8 +4244,8 @@ struct DeepCopyVarToEnv<'a> {
}
#[inline(always)]
fn deep_copy_union<L: Label>(
env: &mut DeepCopyVarToEnv<'_>,
fn storage_copy_union<L: Label>(
env: &mut StorageCopyVarToEnv<'_>,
tags: UnionLabels<L>,
) -> UnionLabels<L> {
let new_variable_slices = SubsSlice::reserve_variable_slices(env.target, tags.len());
@ -4068,7 +4258,7 @@ fn deep_copy_union<L: Label>(
let it = (new_variables.indices()).zip(slice);
for (target_index, var_index) in it {
let var = env.source[var_index];
let copy_var = deep_copy_var_to_help(env, var);
let copy_var = storage_copy_var_to_help(env, var);
env.target.variables[target_index] = copy_var;
}
@ -4085,7 +4275,7 @@ fn deep_copy_union<L: Label>(
UnionLabels::from_slices(new_tag_names, new_variable_slices)
}
fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Variable {
fn storage_copy_var_to_help(env: &mut StorageCopyVarToEnv<'_>, var: Variable) -> Variable {
use Content::*;
use FlatType::*;
@ -4138,7 +4328,7 @@ fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Varia
for (target_index, var_index) in (new_arguments.indices()).zip(arguments) {
let var = env.source[var_index];
let copy_var = deep_copy_var_to_help(env, var);
let copy_var = storage_copy_var_to_help(env, var);
env.target.variables[target_index] = copy_var;
}
@ -4146,15 +4336,15 @@ fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Varia
}
Func(arguments, closure_var, ret_var) => {
let new_ret_var = deep_copy_var_to_help(env, ret_var);
let new_ret_var = storage_copy_var_to_help(env, ret_var);
let new_closure_var = deep_copy_var_to_help(env, closure_var);
let new_closure_var = storage_copy_var_to_help(env, closure_var);
let new_arguments = SubsSlice::reserve_into_subs(env.target, arguments.len());
for (target_index, var_index) in (new_arguments.indices()).zip(arguments) {
let var = env.source[var_index];
let copy_var = deep_copy_var_to_help(env, var);
let copy_var = storage_copy_var_to_help(env, var);
env.target.variables[target_index] = copy_var;
}
@ -4171,7 +4361,7 @@ fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Varia
let it = (new_variables.indices()).zip(fields.iter_variables());
for (target_index, var_index) in it {
let var = env.source[var_index];
let copy_var = deep_copy_var_to_help(env, var);
let copy_var = storage_copy_var_to_help(env, var);
env.target.variables[target_index] = copy_var;
}
@ -4195,12 +4385,12 @@ fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Varia
}
};
Record(record_fields, deep_copy_var_to_help(env, ext_var))
Record(record_fields, storage_copy_var_to_help(env, ext_var))
}
TagUnion(tags, ext_var) => {
let new_ext = deep_copy_var_to_help(env, ext_var);
let union_tags = deep_copy_union(env, tags);
let new_ext = storage_copy_var_to_help(env, ext_var);
let union_tags = storage_copy_union(env, tags);
TagUnion(union_tags, new_ext)
}
@ -4210,14 +4400,14 @@ fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Varia
env.target.tag_names.push(env.source[tag_name].clone());
FunctionOrTagUnion(new_tag_name, symbol, deep_copy_var_to_help(env, ext_var))
FunctionOrTagUnion(new_tag_name, symbol, storage_copy_var_to_help(env, ext_var))
}
RecursiveTagUnion(rec_var, tags, ext_var) => {
let union_tags = deep_copy_union(env, tags);
let union_tags = storage_copy_union(env, tags);
let new_ext = deep_copy_var_to_help(env, ext_var);
let new_rec_var = deep_copy_var_to_help(env, rec_var);
let new_ext = storage_copy_var_to_help(env, ext_var);
let new_rec_var = storage_copy_var_to_help(env, rec_var);
RecursiveTagUnion(new_rec_var, union_tags, new_ext)
}
@ -4245,7 +4435,7 @@ fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Varia
opt_name,
structure,
} => {
let new_structure = deep_copy_var_to_help(env, structure);
let new_structure = storage_copy_var_to_help(env, structure);
debug_assert!((new_structure.index() as usize) < env.target.len());
@ -4299,7 +4489,7 @@ fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Varia
(new_variables.indices()).zip(arguments.all_variables())
{
let var = env.source[var_index];
let copy_var = deep_copy_var_to_help(env, var);
let copy_var = storage_copy_var_to_help(env, var);
env.target.variables[target_index] = copy_var;
}
@ -4308,7 +4498,7 @@ fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Varia
..arguments
};
let new_real_type_var = deep_copy_var_to_help(env, real_type_var);
let new_real_type_var = storage_copy_var_to_help(env, real_type_var);
let new_content = Alias(symbol, new_arguments, new_real_type_var, kind);
env.target.set(copy, make_descriptor(new_content));
@ -4319,20 +4509,33 @@ fn deep_copy_var_to_help(env: &mut DeepCopyVarToEnv<'_>, var: Variable) -> Varia
LambdaSet(self::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
let new_solved = deep_copy_union(env, solved);
let new_rec_var = recursion_var.map(|v| deep_copy_var_to_help(env, v));
let new_solved = storage_copy_union(env, solved);
let new_rec_var = recursion_var.map(|v| storage_copy_var_to_help(env, v));
// NB: we are only copying into storage here, not instantiating like in solve::deep_copy_var.
// So no bookkeeping should be done for the new unspecialized lambda sets.
let new_unspecialized = SubsSlice::reserve_uls_slice(env.target, unspecialized.len());
for (target_index, source_index) in
(new_unspecialized.into_iter()).zip(unspecialized.into_iter())
{
let Uls(var, sym, region) = env.source[source_index];
let new_var = storage_copy_var_to_help(env, var);
env.target[target_index] = Uls(new_var, sym, region);
}
let new_content = LambdaSet(self::LambdaSet {
solved: new_solved,
recursion_var: new_rec_var,
unspecialized: new_unspecialized,
});
env.target.set(copy, make_descriptor(new_content));
copy
}
RangedNumber(typ, range) => {
let new_typ = deep_copy_var_to_help(env, typ);
let new_typ = storage_copy_var_to_help(env, typ);
let new_content = RangedNumber(new_typ, range);
@ -4769,14 +4972,27 @@ fn copy_import_to_help(env: &mut CopyImportEnv<'_>, max_rank: Rank, var: Variabl
LambdaSet(self::LambdaSet {
solved,
recursion_var,
unspecialized,
}) => {
let new_solved = copy_union(env, max_rank, solved);
let new_rec_var =
recursion_var.map(|rec_var| copy_import_to_help(env, max_rank, rec_var));
// NB: we are only copying across subs here, not instantiating like in deep_copy_var.
// So no bookkeeping should be done for the new unspecialized lambda sets.
let new_unspecialized = SubsSlice::reserve_uls_slice(env.target, unspecialized.len());
for (target_index, source_index) in
(new_unspecialized.into_iter()).zip(unspecialized.into_iter())
{
let Uls(var, sym, region) = env.source[source_index];
let new_var = copy_import_to_help(env, max_rank, var);
env.target[target_index] = Uls(new_var, sym, region);
}
let new_content = LambdaSet(self::LambdaSet {
solved: new_solved,
recursion_var: new_rec_var,
unspecialized: new_unspecialized,
});
env.target.set(copy, make_descriptor(new_content));

View File

@ -273,8 +273,14 @@ pub enum Type {
/// usage site. Unspecialized lambda sets aid us in recovering those lambda sets; when we
/// instantiate `a` with a proper type `T`, we'll know to resolve the lambda set by extracting
/// it at region "1" from the specialization of "default" for `T`.
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct Uls(Variable, Symbol, u8);
#[derive(PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
pub struct Uls(pub Variable, pub Symbol, pub u8);
impl std::fmt::Debug for Uls {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Uls({:?}:{:?}:{:?})", self.0, self.1, self.2)
}
}
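// A made-up example of this format: a Uls holding Variable(42), the symbol for an
// ability member `default`, and region 1 would render roughly as `Uls(42:default:1)`
// (modulo the exact Debug output of Variable and Symbol). The same shape is reused
// by Type::UnspecializedLambdaSet's Debug impl below.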
static mut TYPE_CLONE_COUNT: std::sync::atomic::AtomicUsize =
std::sync::atomic::AtomicUsize::new(0);
@ -648,8 +654,8 @@ impl fmt::Debug for Type {
Type::RangedNumber(typ, range_vars) => {
write!(f, "Ranged({:?}, {:?})", typ, range_vars)
}
Type::UnspecializedLambdaSet(Uls(a, mem, region)) => {
write!(f, "ULS({:?}:{:?}:{:?})", a, mem, region)
Type::UnspecializedLambdaSet(uls) => {
write!(f, "{:?}", uls)
}
}
}
@ -2780,7 +2786,10 @@ fn instantiate_lambda_sets_as_unspecialized(
type_arguments,
lambda_set_variables,
}) => {
stack.extend(lambda_set_variables.iter_mut().rev().map(|ls| &mut ls.0));
for lambda_set in lambda_set_variables.iter_mut() {
debug_assert!(matches!(lambda_set.0, Type::Variable(_)));
lambda_set.0 = new_uls();
}
stack.extend(type_arguments.iter_mut().rev());
}
Type::Alias {
@ -2790,8 +2799,11 @@ fn instantiate_lambda_sets_as_unspecialized(
actual,
kind: _,
} => {
for lambda_set in lambda_set_variables.iter_mut() {
debug_assert!(matches!(lambda_set.0, Type::Variable(_)));
lambda_set.0 = new_uls();
}
stack.push(actual);
stack.extend(lambda_set_variables.iter_mut().rev().map(|ls| &mut ls.0));
stack.extend(type_arguments.iter_mut().rev().map(|t| &mut t.typ));
}
Type::HostExposedAlias {
@ -2801,8 +2813,11 @@ fn instantiate_lambda_sets_as_unspecialized(
actual_var: _,
actual,
} => {
for lambda_set in lambda_set_variables.iter_mut() {
debug_assert!(matches!(lambda_set.0, Type::Variable(_)));
lambda_set.0 = new_uls();
}
stack.push(actual);
stack.extend(lambda_set_variables.iter_mut().rev().map(|ls| &mut ls.0));
stack.extend(type_arguments.iter_mut().rev());
}
Type::Apply(_sym, args, _region) => {

View File

@ -9,10 +9,10 @@ use roc_types::num::NumericRange;
use roc_types::subs::Content::{self, *};
use roc_types::subs::{
AliasVariables, Descriptor, ErrorTypeContext, FlatType, GetSubsSlice, LambdaSet, Mark,
OptVariable, RecordFields, Subs, SubsIndex, SubsSlice, UnionLabels, UnionLambdas, UnionTags,
Variable, VariableSubsSlice,
OptVariable, RecordFields, Subs, SubsIndex, SubsSlice, UlsOfVar, UnionLabels, UnionLambdas,
UnionTags, Variable, VariableSubsSlice,
};
use roc_types::types::{AliasKind, DoesNotImplementAbility, ErrorType, Mismatch, RecordField};
use roc_types::types::{AliasKind, DoesNotImplementAbility, ErrorType, Mismatch, RecordField, Uls};
macro_rules! mismatch {
() => {{
@ -143,18 +143,23 @@ pub enum Unified {
Success {
vars: Pool,
must_implement_ability: MustImplementConstraints,
lambda_sets_to_specialize: UlsOfVar,
},
Failure(Pool, ErrorType, ErrorType, DoesNotImplementAbility),
BadType(Pool, roc_types::types::Problem),
}
impl Unified {
pub fn expect_success(self, err_msg: &'static str) -> (Pool, MustImplementConstraints) {
pub fn expect_success(
self,
err_msg: &'static str,
) -> (Pool, MustImplementConstraints, UlsOfVar) {
match self {
Unified::Success {
vars,
must_implement_ability,
} => (vars, must_implement_ability),
lambda_sets_to_specialize,
} => (vars, must_implement_ability, lambda_sets_to_specialize),
_ => internal_error!("{}", err_msg),
}
}
@ -212,6 +217,9 @@ pub struct Outcome {
/// We defer these checks until the end of a solving phase.
/// NOTE: this vector is almost always empty!
must_implement_ability: MustImplementConstraints,
/// We defer resolution of these lambda sets to the caller of [unify].
/// See also [merge_flex_able_with_concrete].
lambda_sets_to_specialize: UlsOfVar,
}
impl Outcome {
@ -219,6 +227,8 @@ impl Outcome {
self.mismatches.extend(other.mismatches);
self.must_implement_ability
.extend(other.must_implement_ability);
self.lambda_sets_to_specialize
.union(other.lambda_sets_to_specialize);
}
}
@ -228,12 +238,14 @@ pub fn unify(subs: &mut Subs, var1: Variable, var2: Variable, mode: Mode) -> Uni
let Outcome {
mismatches,
must_implement_ability,
lambda_sets_to_specialize,
} = unify_pool(subs, &mut vars, var1, var2, mode);
if mismatches.is_empty() {
Unified::Success {
vars,
must_implement_ability,
lambda_sets_to_specialize,
}
} else {
let error_context = if mismatches.contains(&Mismatch::TypeNotInRange) {
@ -450,6 +462,7 @@ fn check_valid_range(subs: &mut Subs, var: Variable, range: NumericRange) -> Out
let outcome = Outcome {
mismatches: vec![Mismatch::TypeNotInRange],
must_implement_ability: Default::default(),
lambda_sets_to_specialize: Default::default(),
};
return outcome;
@ -595,12 +608,14 @@ fn unify_opaque(
}
FlexAbleVar(_, ability) if args.is_empty() => {
// Opaque type wins
let mut outcome = merge(subs, ctx, Alias(symbol, args, real_var, kind));
outcome.must_implement_ability.push(MustImplementAbility {
typ: Obligated::Opaque(symbol),
ability: *ability,
});
outcome
merge_flex_able_with_concrete(
subs,
ctx,
ctx.second,
*ability,
Alias(symbol, args, real_var, kind),
Obligated::Opaque(symbol),
)
}
Alias(_, _, other_real_var, AliasKind::Structural) => {
unify_pool(subs, pool, ctx.first, *other_real_var, ctx.mode)
@ -673,13 +688,15 @@ fn unify_structure(
outcome
}
FlexAbleVar(_, ability) => {
let mut outcome = merge(subs, ctx, Structure(*flat_type));
let must_implement_ability = MustImplementAbility {
typ: Obligated::Adhoc(ctx.first),
ability: *ability,
};
outcome.must_implement_ability.push(must_implement_ability);
outcome
// Structure wins
merge_flex_able_with_concrete(
subs,
ctx,
ctx.second,
*ability,
Structure(*flat_type),
Obligated::Adhoc(ctx.first),
)
}
// _name has an underscore because it's unused in --release builds
RigidVar(_name) => {
@ -815,10 +832,12 @@ fn unify_lambda_set_help(
let LambdaSet {
solved: solved1,
recursion_var: rec1,
unspecialized: uls1,
} = lset1;
let LambdaSet {
solved: solved2,
recursion_var: rec2,
unspecialized: uls2,
} = lset2;
debug_assert!(
@ -893,10 +912,34 @@ fn unify_lambda_set_help(
(None, None) => OptVariable::NONE,
};
// Combine the unspecialized lambda sets as needed. Note that we don't need to update the
// bookkeeping of variable -> lambda set to be resolved, because if we had v1 -> lset1, and
// now lset1 ~ lset2, then afterward either lset1 still resolves to itself or re-points to
// lset2. In either case the merged unspecialized lambda sets will be there.
let merged_unspecialized = match (uls1.is_empty(), uls2.is_empty()) {
(true, true) => SubsSlice::default(),
(false, true) => uls1,
(true, false) => uls2,
(false, false) => {
let mut all_uls = (subs.get_subs_slice(uls1).iter())
.chain(subs.get_subs_slice(uls2))
.map(|&Uls(var, sym, region)| {
// Take the root key to deduplicate
Uls(subs.get_root_key_without_compacting(var), sym, region)
})
.collect::<Vec<_>>();
all_uls.sort();
all_uls.dedup();
SubsSlice::extend_new(&mut subs.unspecialized_lambda_sets, all_uls)
}
};
let new_solved = UnionLabels::insert_into_subs(subs, all_lambdas);
let new_lambda_set = Content::LambdaSet(LambdaSet {
solved: new_solved,
recursion_var,
unspecialized: merged_unspecialized,
});
merge(subs, ctx, new_lambda_set)
@ -1258,7 +1301,7 @@ where
let input1_len = it1.size_hint().0;
let input2_len = it2.size_hint().0;
let max_common = input1_len.min(input2_len);
let max_common = std::cmp::min(input1_len, input2_len);
let mut result = Separate {
only_in_1: Vec::with_capacity(input1_len),
@ -1523,8 +1566,9 @@ fn maybe_mark_union_recursive(subs: &mut Subs, union_var: Variable) {
LambdaSet(self::LambdaSet {
solved,
recursion_var: OptVariable::NONE,
unspecialized,
}) => {
subs.mark_lambda_set_recursive(v, solved);
subs.mark_lambda_set_recursive(v, solved, unspecialized);
continue 'outer;
}
_ => { /* fall through */ }
@ -2070,12 +2114,14 @@ fn unify_flex_able(
Alias(name, args, _real_var, AliasKind::Opaque) => {
if args.is_empty() {
// Opaque type wins
let mut outcome = merge(subs, ctx, *other);
outcome.must_implement_ability.push(MustImplementAbility {
typ: Obligated::Opaque(*name),
merge_flex_able_with_concrete(
subs,
ctx,
ctx.first,
ability,
});
outcome
*other,
Obligated::Opaque(*name),
)
} else {
mismatch!("FlexAble vs Opaque with type vars")
}
@ -2083,18 +2129,50 @@ fn unify_flex_able(
Structure(_) | Alias(_, _, _, AliasKind::Structural) | RangedNumber(..) => {
// Structural type wins.
let mut outcome = merge(subs, ctx, *other);
outcome.must_implement_ability.push(MustImplementAbility {
typ: Obligated::Adhoc(ctx.second),
merge_flex_able_with_concrete(
subs,
ctx,
ctx.first,
ability,
});
outcome
*other,
Obligated::Adhoc(ctx.second),
)
}
Error => merge(subs, ctx, Error),
}
}
fn merge_flex_able_with_concrete(
subs: &mut Subs,
ctx: &Context,
flex_able_var: Variable,
ability: Symbol,
concrete_content: Content,
concrete_obligation: Obligated,
) -> Outcome {
let mut outcome = merge(subs, ctx, concrete_content);
let must_implement_ability = MustImplementAbility {
typ: concrete_obligation,
ability,
};
outcome.must_implement_ability.push(must_implement_ability);
    // Figure out which, if any, lambda sets should be specialized thanks to the flex able
    // var being instantiated. As much as we would love to do that here, we can't, because
    // we might be in the middle of solving a module and not yet have resolved all available
    // ability implementations! Instead we record it in the [Outcome] and let our caller do
    // the resolution.
//
// If we ever organize ability implementations so that they are well-known before any other
// unification is done, they can be solved in-band here!
let uls_of_concrete = subs.remove_dependent_unspecialized_lambda_sets(flex_able_var);
outcome
.lambda_sets_to_specialize
.extend(flex_able_var, uls_of_concrete);
outcome
}
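// A sketch of the caller-side resolution this defers to (illustrative only, not
// code from this commit; the non-success arms are elided):
//
//     match unify(subs, actual, expected, mode) {
//         Unified::Success { vars, must_implement_ability, lambda_sets_to_specialize } => {
//             // once all ability implementations are known, each lambda set recorded
//             // in `lambda_sets_to_specialize` can be specialized against the concrete
//             // type its flex able var was instantiated to
//         }
//         _ => { /* mismatch / bad type handling elided */ }
//     }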
#[inline(always)]
fn unify_recursion(
subs: &mut Subs,
@ -2246,6 +2324,7 @@ fn unify_function_or_tag_union_and_func(
let lambda_set_content = LambdaSet(self::LambdaSet {
solved: union_tags,
recursion_var: OptVariable::NONE,
unspecialized: SubsSlice::default(),
});
let tag_lambda_set = register(

View File

@ -756,7 +756,7 @@ fn doc_url<'a>(
}
} else {
match interns.module_ids.get_id(&module_name.into()) {
Some(&module_id) => {
Some(module_id) => {
// You can do qualified lookups on your own module, e.g.
// if I'm in the Foo module, I can do a `Foo.bar` lookup.
if module_id == home {
@ -772,7 +772,7 @@ fn doc_url<'a>(
// This is not the home module
match dep_idents
.get(&module_id)
.and_then(|exposed_ids| exposed_ids.get_id(&ident.into()))
.and_then(|exposed_ids| exposed_ids.get_id(ident))
{
Some(_) => {
// This is a valid symbol for this dependency,

View File

@ -60,6 +60,7 @@ use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol;
use roc_region::all::Region;
use roc_types::pretty_print::name_and_print_var;
use roc_types::pretty_print::DebugPrint;
use roc_types::solved_types::Solved;
use roc_types::subs::{Subs, VarStore, Variable};
use snafu::OptionExt;
@ -462,8 +463,13 @@ impl<'a> EdModel<'a> {
let subs = solved.inner_mut();
let pretty_var =
name_and_print_var(var, subs, self.module.env.home, &self.loaded_module.interns);
let pretty_var = name_and_print_var(
var,
subs,
self.module.env.home,
&self.loaded_module.interns,
DebugPrint::NOTHING,
);
PoolStr::new(&pretty_var, self.module.env.pool)
}

View File

@ -54,7 +54,7 @@ size_t roc_str_len(struct RocStr str) {
}
}
extern void roc__mainForHost_1_exposed_generic(*RocStr);
extern void roc__mainForHost_1_exposed_generic(struct RocStr *string);
int main() {

View File

@ -3,6 +3,7 @@
use core::ffi::c_void;
use roc_std::RocStr;
use std::ffi::CStr;
use std::mem::ManuallyDrop;
use std::os::raw::c_char;
extern "C" {
@ -56,7 +57,10 @@ pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut
#[no_mangle]
pub extern "C" fn rust_main() -> i32 {
unsafe {
let mut roc_str = RocStr::default();
// ManuallyDrop must be used here in order to prevent the RocStr from
// getting dropped as soon as it's no longer referenced anywhere, which
// happens earlier than the libc::write that receives a pointer to its data.
let mut roc_str = ManuallyDrop::new(RocStr::default());
roc_main(&mut roc_str);
let len = roc_str.len();
@ -65,6 +69,8 @@ pub extern "C" fn rust_main() -> i32 {
if libc::write(1, str_bytes, len) < 0 {
panic!("Writing to stdout failed!");
}
ManuallyDrop::drop(&mut roc_str)
}
// Exit code

View File

@ -23,7 +23,7 @@ use roc_repl_eval::{ReplApp, ReplAppMemory};
use roc_reporting::report::DEFAULT_PALETTE;
use roc_std::RocStr;
use roc_target::TargetInfo;
use roc_types::pretty_print::name_and_print_var;
use roc_types::pretty_print::{name_and_print_var, DebugPrint};
const BLUE: &str = "\u{001b}[36m";
const PINK: &str = "\u{001b}[35m";
@ -227,7 +227,8 @@ fn gen_and_eval_llvm<'a>(
let main_fn_var = *main_fn_var;
// pretty-print the expr type string for later.
let expr_type_str = name_and_print_var(main_fn_var, &mut subs, home, &interns);
let expr_type_str =
name_and_print_var(main_fn_var, &mut subs, home, &interns, DebugPrint::NOTHING);
let content = subs.get_content_without_compacting(main_fn_var);
let (_, main_fn_layout) = match procedures.keys().find(|(s, _)| *s == main_fn_symbol) {

View File

@ -12,7 +12,7 @@ use roc_repl_eval::{
};
use roc_reporting::report::DEFAULT_PALETTE_HTML;
use roc_target::TargetInfo;
use roc_types::pretty_print::name_and_print_var;
use roc_types::pretty_print::{name_and_print_var, DebugPrint};
use crate::{js_create_app, js_get_result_and_memory, js_run_app};
@ -184,7 +184,13 @@ pub async fn entrypoint_from_js(src: String) -> Result<String, String> {
let main_fn_var = *main_fn_var;
// pretty-print the expr type string for later.
let expr_type_str = name_and_print_var(main_fn_var, &mut subs, module_id, &interns);
let expr_type_str = name_and_print_var(
main_fn_var,
&mut subs,
module_id,
&interns,
DebugPrint::NOTHING,
);
let content = subs.get_content_without_compacting(main_fn_var);
let (_, main_fn_layout) = match procedures.keys().find(|(s, _)| *s == main_fn_symbol) {

View File

@ -37,3 +37,9 @@ fi
BINDGEN_FILE="roc_repl_wasm.js"
echo 'var __wbg_star0 = { now: Date.now };' > $WWW_ROOT/$BINDGEN_FILE
grep -v '^import' repl_wasm/pkg/$BINDGEN_FILE >> $WWW_ROOT/$BINDGEN_FILE
# Copy static files
if [[ $WWW_ROOT != repl_www/public ]]
then
cp -r repl_www/public/* $WWW_ROOT
fi

View File

@ -10299,4 +10299,53 @@ All branches in an `if` must have the same type!
),
)
}
#[test]
fn issue_1755() {
new_report_problem_as(
"issue_1755",
indoc!(
r#"
Handle := {}
await : Result a err, (a -> Result b err) -> Result b err
open : {} -> Result Handle *
close : Handle -> Result {} *
withOpen : (Handle -> Result {} *) -> Result {} *
withOpen = \callback ->
handle <- await (open {})
{} <- await (callback handle)
close handle
withOpen
"#
),
indoc!(
r#"
TYPE MISMATCH /code/proj/Main.roc
Something is off with the body of the `withOpen` definition:
10 withOpen : (Handle -> Result {} *) -> Result {} *
11 withOpen = \callback ->
12> handle <- await (open {})
13> {} <- await (callback handle)
14> close handle
The type annotation on `withOpen` says this `await` call should have the
type:
Result {} *
However, the type of this `await` call is connected to another type in a
way that isn't reflected in this annotation.
Tip: Any connection between types must use a named type variable, not
a `*`! Maybe the annotation on `withOpen` should have a named type
variable in place of the `*`?
"#
),
)
}
}

14
roc_std/Cargo.lock generated
View File

@ -20,6 +20,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "arrayvec"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
[[package]]
name = "cfg-if"
version = "1.0.0"
@ -193,13 +199,21 @@ checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
name = "roc_std"
version = "0.1.0"
dependencies = [
"arrayvec",
"indoc",
"libc",
"pretty_assertions",
"quickcheck",
"quickcheck_macros",
"static_assertions",
]
[[package]]
name = "static_assertions"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f406d6ee68db6796e11ffd7b4d171864c58b7451e79ef9460ea33c287a1f89a7"
[[package]]
name = "syn"
version = "1.0.86"

View File

@ -10,6 +10,7 @@ version = "0.1.0"
[dependencies]
static_assertions = "0.1"
arrayvec = "0.7.2"
[dev-dependencies]
indoc = "1.0.3"
@ -21,3 +22,4 @@ libc = "0.2.106"
[features]
default = ["platform"]
platform = []
no_std = []

View File

@ -1,19 +1,24 @@
#![crate_type = "lib"]
// #![no_std]
#![cfg_attr(feature = "no_std", no_std)]
use core::cmp::Ordering;
use core::ffi::c_void;
use core::fmt;
use core::fmt::{self, Debug};
use core::hash::{Hash, Hasher};
use core::mem::{ManuallyDrop, MaybeUninit};
use core::ops::Drop;
use core::str;
use std::hash::{Hash, Hasher};
use std::io::Write;
use arrayvec::ArrayString;
mod roc_box;
mod roc_list;
mod roc_str;
mod storage;
pub use roc_box::RocBox;
pub use roc_list::RocList;
pub use roc_str::RocStr;
pub use roc_str::{InteriorNulError, RocStr};
pub use storage::Storage;
// A list of C functions that are being imported
@ -27,17 +32,23 @@ extern "C" {
alignment: u32,
) -> *mut c_void;
pub fn roc_dealloc(ptr: *mut c_void, alignment: u32);
pub fn roc_panic(c_ptr: *mut c_void, tag_id: u32);
pub fn roc_memcpy(dst: *mut c_void, src: *mut c_void, n: usize) -> *mut c_void;
pub fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut c_void;
}
/// # Safety
/// This is only marked unsafe to typecheck without warnings in the rest of the code here.
#[cfg(not(feature = "platform"))]
#[no_mangle]
pub unsafe extern "C" fn roc_alloc(_size: usize, _alignment: u32) -> *mut c_void {
unimplemented!("It is not valid to call roc alloc from within the compiler. Please use the \"platform\" feature if this is a platform.")
}
/// # Safety
/// This is only marked unsafe to typecheck without warnings in the rest of the code here.
#[cfg(not(feature = "platform"))]
#[no_mangle]
pub unsafe extern "C" fn roc_realloc(
_ptr: *mut c_void,
_new_size: usize,
@ -46,13 +57,37 @@ pub unsafe extern "C" fn roc_realloc(
) -> *mut c_void {
unimplemented!("It is not valid to call roc realloc from within the compiler. Please use the \"platform\" feature if this is a platform.")
}
/// # Safety
/// This is only marked unsafe to typecheck without warnings in the rest of the code here.
#[cfg(not(feature = "platform"))]
#[no_mangle]
pub unsafe extern "C" fn roc_dealloc(_ptr: *mut c_void, _alignment: u32) {
unimplemented!("It is not valid to call roc dealloc from within the compiler. Please use the \"platform\" feature if this is a platform.")
}
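/// # Safety
/// This is only marked unsafe to typecheck without warnings in the rest of the code here.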
#[cfg(not(feature = "platform"))]
#[no_mangle]
pub unsafe extern "C" fn roc_panic(c_ptr: *mut c_void, tag_id: u32) {
unimplemented!("It is not valid to call roc panic from within the compiler. Please use the \"platform\" feature if this is a platform.")
}
/// # Safety
/// This is only marked unsafe to typecheck without warnings in the rest of the code here.
#[cfg(not(feature = "platform"))]
#[no_mangle]
pub fn roc_memcpy(_dst: *mut c_void, _src: *mut c_void, _n: usize) -> *mut c_void {
unimplemented!("It is not valid to call roc memcpy from within the compiler. Please use the \"platform\" feature if this is a platform.")
}
/// # Safety
/// This is only marked unsafe to typecheck without warnings in the rest of the code here.
#[cfg(not(feature = "platform"))]
#[no_mangle]
pub fn roc_memset(_dst: *mut c_void, _c: i32, _n: usize) -> *mut c_void {
unimplemented!("It is not valid to call roc memset from within the compiler. Please use the \"platform\" feature if this is a platform.")
}
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RocOrder {
@ -71,15 +106,23 @@ pub struct RocResult<T, E> {
tag: RocResultTag,
}
impl<T, E> core::fmt::Debug for RocResult<T, E>
impl<T, E> Debug for RocResult<T, E>
where
T: core::fmt::Debug,
E: core::fmt::Debug,
T: Debug,
E: Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.as_result_of_refs() {
Ok(payload) => write!(f, "RocOk({:?})", payload),
Err(payload) => write!(f, "RocErr({:?})", payload),
Ok(payload) => {
f.write_str("RocOk(")?;
payload.fmt(f)?;
f.write_str(")")
}
Err(payload) => {
f.write_str("RocErr(")?;
payload.fmt(f)?;
f.write_str(")")
}
}
}
}
@ -306,73 +349,62 @@ impl RocDec {
self.0
}
fn to_str_helper(&self, bytes: &mut [u8; Self::MAX_STR_LENGTH]) -> usize {
fn to_str_helper(self, string: &mut ArrayString<{ Self::MAX_STR_LENGTH }>) -> &str {
use std::fmt::Write;
if self.as_i128() == 0 {
write!(&mut bytes[..], "{}", "0").unwrap();
return 1;
return "0";
}
let is_negative = (self.as_i128() < 0) as usize;
static_assertions::const_assert!(Self::DECIMAL_PLACES + 1 == 19);
// The :019 in the following write! is computed as Self::DECIMAL_PLACES + 1. If you change
// Self::DECIMAL_PLACES, this assert should remind you to change that format string as
// well.
//
// Self::DECIMAL_PLACES, this assert should remind you to change that format string as well.
static_assertions::const_assert!(Self::DECIMAL_PLACES + 1 == 19);
// By using the :019 format, we're guaranteeing that numbers less than 1, say 0.01234
// get their leading zeros placed in bytes for us. i.e. bytes = b"0012340000000000000"
write!(&mut bytes[..], "{:019}", self.as_i128()).unwrap();
// get their leading zeros placed in bytes for us. i.e. `string = b"0012340000000000000"`
write!(string, "{:019}", self.as_i128()).unwrap();
// If self represents 1234.5678, then bytes is b"1234567800000000000000".
let mut i = Self::MAX_STR_LENGTH - 1;
// Find the last place where we have actual data.
while bytes[i] == 0 {
i = i - 1;
}
// At this point i is 21 because bytes[21] is the final '0' in b"1234567800000000000000".
let is_negative = self.as_i128() < 0;
let decimal_location = string.len() - Self::DECIMAL_PLACES + (is_negative as usize);
let decimal_location = i - Self::DECIMAL_PLACES + 1 + is_negative;
// decimal_location = 4
// skip trailing zeros
let last_nonzero_byte = string.trim_end_matches('0').len();
while bytes[i] == ('0' as u8) && i >= decimal_location {
bytes[i] = 0;
i = i - 1;
}
// Now i = 7, because bytes[7] = '8', and bytes = b"12345678"
if i < decimal_location {
if last_nonzero_byte < decimal_location {
// This means that we've removed trailing zeros and are left with an integer. Our
// convention is to print these without a decimal point or trailing zeros, so we're done.
return i + 1;
string.truncate(decimal_location);
return string.as_str();
}
let ret = i + 1;
while i >= decimal_location {
bytes[i + 1] = bytes[i];
i = i - 1;
}
bytes[i + 1] = bytes[i];
// Now i = 4, and bytes = b"123455678"
// otherwise, we're dealing with a fraction, and need to insert the decimal dot
bytes[decimal_location] = '.' as u8;
// Finally bytes = b"1234.5678"
// truncate all extra zeros off
string.truncate(last_nonzero_byte);
ret + 1
// push a dummy character so we have space for the decimal dot
string.push('$');
// Safety: at any time, the string only contains ascii characters, so it is always valid utf8
let bytes = unsafe { string.as_bytes_mut() };
// shift the fractional part by one
bytes.copy_within(decimal_location..last_nonzero_byte, decimal_location + 1);
// and put in the decimal dot in the right place
bytes[decimal_location] = b'.';
string.as_str()
}
pub fn to_str(&self) -> RocStr {
let mut bytes = [0 as u8; Self::MAX_STR_LENGTH];
let last_idx = self.to_str_helper(&mut bytes);
unsafe { RocStr::from_slice(&bytes[0..last_idx]) }
RocStr::from(self.to_str_helper(&mut ArrayString::new()))
}
}
impl fmt::Display for RocDec {
fn fmt(&self, fmtr: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut bytes = [0 as u8; Self::MAX_STR_LENGTH];
let last_idx = self.to_str_helper(&mut bytes);
let result = unsafe { str::from_utf8_unchecked(&bytes[0..last_idx]) };
write!(fmtr, "{}", result)
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.to_str_helper(&mut ArrayString::new()))
}
}
@ -394,52 +426,37 @@ impl From<I128> for i128 {
impl fmt::Debug for I128 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let i128: i128 = (*self).into();
i128.fmt(f)
i128::from(*self).fmt(f)
}
}
impl fmt::Display for I128 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let i128: i128 = (*self).into();
i128.fmt(f)
Debug::fmt(&i128::from(*self), f)
}
}
impl PartialEq for I128 {
fn eq(&self, other: &Self) -> bool {
let i128_self: i128 = (*self).into();
let i128_other: i128 = (*other).into();
i128_self.eq(&i128_other)
i128::from(*self).eq(&i128::from(*other))
}
}
impl PartialOrd for I128 {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
let i128_self: i128 = (*self).into();
let i128_other: i128 = (*other).into();
i128_self.partial_cmp(&i128_other)
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
i128::from(*self).partial_cmp(&i128::from(*other))
}
}
impl Ord for I128 {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
let i128_self: i128 = (*self).into();
let i128_other: i128 = (*other).into();
i128_self.cmp(&i128_other)
fn cmp(&self, other: &Self) -> Ordering {
i128::from(*self).cmp(&i128::from(*other))
}
}
impl Hash for I128 {
fn hash<H: Hasher>(&self, state: &mut H) {
let i128: i128 = (*self).into();
i128.hash(state);
i128::from(*self).hash(state);
}
}
@ -461,51 +478,36 @@ impl From<U128> for u128 {
impl fmt::Debug for U128 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let u128: u128 = (*self).into();
u128.fmt(f)
u128::from(*self).fmt(f)
}
}
impl fmt::Display for U128 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let u128: u128 = (*self).into();
u128.fmt(f)
Debug::fmt(&u128::from(*self), f)
}
}
impl PartialEq for U128 {
fn eq(&self, other: &Self) -> bool {
let u128_self: u128 = (*self).into();
let u128_other: u128 = (*other).into();
u128_self.eq(&u128_other)
u128::from(*self).eq(&u128::from(*other))
}
}
impl PartialOrd for U128 {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
let u128_self: u128 = (*self).into();
let u128_other: u128 = (*other).into();
u128_self.partial_cmp(&u128_other)
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
u128::from(*self).partial_cmp(&u128::from(*other))
}
}
impl Ord for U128 {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
let u128_self: u128 = (*self).into();
let u128_other: u128 = (*other).into();
u128_self.cmp(&u128_other)
fn cmp(&self, other: &Self) -> Ordering {
u128::from(*self).cmp(&u128::from(*other))
}
}
impl Hash for U128 {
fn hash<H: Hasher>(&self, state: &mut H) {
let u128: u128 = (*self).into();
u128.hash(state);
u128::from(*self).hash(state);
}
}

168
roc_std/src/roc_box.rs Normal file
View File

@ -0,0 +1,168 @@
#![deny(unsafe_op_in_unsafe_fn)]
use crate::{roc_alloc, roc_dealloc, storage::Storage};
use core::{
cell::Cell,
cmp::{self, Ordering},
fmt::Debug,
mem,
ops::Deref,
ptr::{self, NonNull},
};
#[repr(C)]
pub struct RocBox<T> {
contents: NonNull<T>,
}
impl<T> RocBox<T> {
pub fn new(contents: T) -> Self {
let alignment = Self::alloc_alignment();
let bytes = mem::size_of::<T>() + alignment;
let ptr = unsafe { roc_alloc(bytes, alignment as u32) };
if ptr.is_null() {
todo!("Call roc_panic with the info that an allocation failed.");
}
// Initialize the reference count.
let refcount_one = Storage::new_reference_counted();
unsafe { ptr.cast::<Storage>().write(refcount_one) };
let contents = unsafe {
let contents_ptr = ptr.cast::<u8>().add(alignment).cast::<T>();
*contents_ptr = contents;
// We already verified that the original alloc pointer was non-null,
// and this one is the alloc pointer with `alignment` bytes added to it,
// so it should be non-null too.
NonNull::new_unchecked(contents_ptr)
};
Self { contents }
}
#[inline(always)]
fn alloc_alignment() -> usize {
mem::align_of::<T>().max(mem::align_of::<Storage>())
}
pub fn into_inner(self) -> T {
unsafe { ptr::read(self.contents.as_ptr() as *mut T) }
}
fn storage(&self) -> &Cell<Storage> {
let alignment = Self::alloc_alignment();
unsafe {
&*self
.contents
.as_ptr()
.cast::<u8>()
.sub(alignment)
.cast::<Cell<Storage>>()
}
}
}
impl<T> Deref for RocBox<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
unsafe { self.contents.as_ref() }
}
}
impl<T, U> PartialEq<RocBox<U>> for RocBox<T>
where
T: PartialEq<U>,
{
fn eq(&self, other: &RocBox<U>) -> bool {
self.deref() == other.deref()
}
}
impl<T> Eq for RocBox<T> where T: Eq {}
impl<T, U> PartialOrd<RocBox<U>> for RocBox<T>
where
T: PartialOrd<U>,
{
fn partial_cmp(&self, other: &RocBox<U>) -> Option<cmp::Ordering> {
let self_contents = unsafe { self.contents.as_ref() };
let other_contents = unsafe { other.contents.as_ref() };
self_contents.partial_cmp(other_contents)
}
}
impl<T> Ord for RocBox<T>
where
T: Ord,
{
fn cmp(&self, other: &Self) -> Ordering {
let self_contents = unsafe { self.contents.as_ref() };
let other_contents = unsafe { other.contents.as_ref() };
self_contents.cmp(other_contents)
}
}
impl<T> Debug for RocBox<T>
where
T: Debug,
{
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.deref().fmt(f)
}
}
impl<T> Clone for RocBox<T> {
fn clone(&self) -> Self {
let storage = self.storage();
let mut new_storage = storage.get();
// Increment the reference count
if !new_storage.is_readonly() {
new_storage.increment_reference_count();
storage.set(new_storage);
}
Self {
contents: self.contents,
}
}
}
impl<T> Drop for RocBox<T> {
fn drop(&mut self) {
let storage = self.storage();
let contents = self.contents;
// Decrease the list's reference count.
let mut new_storage = storage.get();
let needs_dealloc = new_storage.decrease();
if needs_dealloc {
unsafe {
// Drop the stored contents.
let contents_ptr = contents.as_ptr();
mem::drop::<T>(ptr::read(contents_ptr));
let alignment = Self::alloc_alignment();
// Release the memory.
roc_dealloc(
contents.as_ptr().cast::<u8>().sub(alignment).cast(),
alignment as u32,
);
}
} else if !new_storage.is_readonly() {
// Write the storage back.
storage.set(new_storage);
}
}
}

View File

@ -3,12 +3,13 @@
use core::{
cell::Cell,
cmp::{self, Ordering},
ffi::c_void,
fmt::Debug,
hash::Hash,
intrinsics::copy_nonoverlapping,
mem::{self, ManuallyDrop},
ops::Deref,
ptr,
ptr::NonNull,
ptr::{self, NonNull},
};
use crate::{roc_alloc, roc_dealloc, roc_realloc, storage::Storage};
@ -21,14 +22,61 @@ pub struct RocList<T> {
}
impl<T> RocList<T> {
#[inline(always)]
fn alloc_alignment() -> u32 {
mem::align_of::<T>().max(mem::align_of::<Storage>()) as u32
}
pub fn empty() -> Self {
RocList {
Self {
elements: None,
length: 0,
capacity: 0,
}
}
/// Create an empty RocList with enough space preallocated to store
/// the requested number of elements.
pub fn with_capacity(num_elems: usize) -> Self {
Self {
elements: Some(Self::elems_with_capacity(num_elems)),
length: 0,
capacity: num_elems,
}
}
/// Used for both roc_alloc and roc_realloc - given the number of elements,
/// returns the number of bytes needed to allocate, taking into account both the
/// size of the elements as well as the size of Storage.
fn alloc_bytes(num_elems: usize) -> usize {
mem::size_of::<Storage>() + (num_elems * mem::size_of::<T>())
}
fn elems_with_capacity(num_elems: usize) -> NonNull<ManuallyDrop<T>> {
let alloc_ptr = unsafe { roc_alloc(Self::alloc_bytes(num_elems), Self::alloc_alignment()) };
Self::elems_from_allocation(NonNull::new(alloc_ptr).unwrap_or_else(|| {
todo!("Call roc_panic with the info that an allocation failed.");
}))
}
fn elems_from_allocation(allocation: NonNull<c_void>) -> NonNull<ManuallyDrop<T>> {
let alloc_ptr = allocation.as_ptr();
unsafe {
let elem_ptr = Self::elem_ptr_from_alloc_ptr(alloc_ptr).cast::<ManuallyDrop<T>>();
// Initialize the reference count.
alloc_ptr
.cast::<Storage>()
.write(Storage::new_reference_counted());
// The original alloc pointer was non-null, and this one is the alloc pointer
// with `alignment` bytes added to it, so it should be non-null too.
NonNull::new_unchecked(elem_ptr)
}
}
pub fn len(&self) -> usize {
self.length
}
@ -41,21 +89,144 @@ impl<T> RocList<T> {
self.len() == 0
}
/// Note that there is no way to convert directly to a Vec.
///
/// This is because RocList values are not allocated using the system allocator, so
/// handing off any heap-allocated bytes to a Vec would not work because its Drop
/// implementation would try to free those bytes using the wrong allocator.
///
/// Instead, if you want a Rust Vec, you need to do a fresh allocation and copy the
/// bytes over - in other words, calling this `as_slice` method and then calling `to_vec`
/// on that.
pub fn as_slice(&self) -> &[T] {
&*self
}
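// A sketch of that two-step conversion (editor's illustration; requires T: Clone,
// and `list` is a hypothetical RocList<T>):
//
//     let vec: Vec<T> = list.as_slice().to_vec();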
#[inline(always)]
fn elements_and_storage(&self) -> Option<(NonNull<ManuallyDrop<T>>, &Cell<Storage>)> {
let elements = self.elements?;
let storage = unsafe { &*elements.as_ptr().cast::<Cell<Storage>>().sub(1) };
let storage = unsafe { &*self.ptr_to_allocation().cast::<Cell<Storage>>() };
Some((elements, storage))
}
pub(crate) fn storage(&self) -> Option<Storage> {
self.elements_and_storage()
.map(|(_, storage)| storage.get())
}
/// Useful for doing memcpy on the elements. Returns NULL if list is empty.
pub(crate) unsafe fn ptr_to_first_elem(&self) -> *const T {
unsafe { core::mem::transmute(self.elements) }
}
/// Useful for doing memcpy on the underlying allocation. Returns NULL if list is empty.
pub(crate) unsafe fn ptr_to_allocation(&self) -> *mut c_void {
unsafe {
self.ptr_to_first_elem()
.cast::<u8>()
.sub(Self::alloc_alignment() as usize) as *mut _
}
}
unsafe fn elem_ptr_from_alloc_ptr(alloc_ptr: *mut c_void) -> *mut c_void {
unsafe {
alloc_ptr
.cast::<u8>()
.add(Self::alloc_alignment() as usize)
.cast()
}
}
}
impl<T> RocList<T>
where
T: Clone,
{
/// Increase a RocList's capacity by at least the requested number of elements (possibly more).
///
    /// May replace self with a new RocList, if the provided one was not unique.
pub fn reserve(&mut self, num_elems: usize) {
let new_len = num_elems + self.length;
let new_elems;
let old_elements_ptr;
match self.elements_and_storage() {
Some((elements, storage)) => {
if storage.get().is_unique() {
unsafe {
let old_alloc = self.ptr_to_allocation();
// Try to reallocate in-place.
let new_alloc = roc_realloc(
old_alloc,
Self::alloc_bytes(new_len),
Self::alloc_bytes(self.capacity),
Self::alloc_alignment(),
);
if new_alloc == old_alloc {
// We successfully reallocated in-place; we're done!
return;
} else {
// We got back a different allocation; copy the existing elements
// into it. We don't need to increment their refcounts because
// the existing allocation that referenced them is now gone and
// no longer references them.
new_elems = Self::elems_from_allocation(
NonNull::new(new_alloc).unwrap_or_else(|| {
todo!("Reallocation failed");
}),
);
}
// Note that realloc automatically deallocates the old allocation,
// so we don't need to call roc_dealloc here.
}
} else {
// Make a new allocation
new_elems = Self::elems_with_capacity(new_len);
old_elements_ptr = elements.as_ptr();
unsafe {
// Copy the old elements to the new allocation.
copy_nonoverlapping(old_elements_ptr, new_elems.as_ptr(), self.length);
}
// Decrease the current allocation's reference count.
let mut new_storage = storage.get();
if !new_storage.is_readonly() {
let needs_dealloc = new_storage.decrease();
if needs_dealloc {
// Unlike in Drop, do *not* decrement the refcounts of all the elements!
// The new allocation is referencing them, so instead of incrementing them all
// just to decrement them again here, we neither increment nor decrement them.
unsafe {
roc_dealloc(self.ptr_to_allocation(), Self::alloc_alignment());
}
} else {
// Write the storage back.
storage.set(new_storage);
}
}
}
}
None => {
// This is an empty list, so `reserve` is the same as `with_capacity`.
self.update_to(Self::with_capacity(new_len));
return;
}
}
self.update_to(Self {
elements: Some(new_elems),
length: self.length,
capacity: new_len,
});
}
pub fn from_slice(slice: &[T]) -> Self {
let mut list = Self::empty();
list.extend_from_slice(slice);
@ -64,27 +235,37 @@ where
pub fn extend_from_slice(&mut self, slice: &[T]) {
// TODO: Can we do better for ZSTs? Alignment might be a problem.
if slice.is_empty() {
return;
}
let alignment = cmp::max(mem::align_of::<T>(), mem::align_of::<Storage>());
let elements_offset = alignment;
let new_size = elements_offset + mem::size_of::<T>() * (self.len() + slice.len());
let new_ptr = if let Some((elements, storage)) = self.elements_and_storage() {
let new_len = self.len() + slice.len();
let non_null_elements = if let Some((elements, storage)) = self.elements_and_storage() {
// Decrement the list's reference count.
let mut copy = storage.get();
let is_unique = copy.decrease();
if is_unique {
// If the memory is not shared, we can reuse the memory.
let old_size = elements_offset + mem::size_of::<T>() * self.len();
unsafe {
let ptr = elements.as_ptr().cast::<u8>().sub(alignment).cast();
roc_realloc(ptr, new_size, old_size, alignment as u32).cast()
// If we have enough capacity, we can add to the existing elements in-place.
if self.capacity() >= slice.len() {
elements
} else {
// There wasn't enough capacity, so we need a new allocation.
// Since this is a unique RocList, we can use realloc here.
let new_ptr = unsafe {
roc_realloc(
storage.as_ptr().cast(),
Self::alloc_bytes(new_len),
Self::alloc_bytes(self.capacity),
Self::alloc_alignment(),
)
};
self.capacity = new_len;
Self::elems_from_allocation(NonNull::new(new_ptr).unwrap_or_else(|| {
todo!("Reallocation failed");
}))
}
} else {
if !copy.is_readonly() {
@ -93,49 +274,19 @@ where
}
// Allocate new memory.
let new_ptr = unsafe { roc_alloc(new_size, alignment as u32) };
let new_elements = unsafe {
new_ptr
.cast::<u8>()
.add(alignment)
.cast::<ManuallyDrop<T>>()
};
// Initialize the reference count.
unsafe {
let storage_ptr = new_elements.cast::<Storage>().sub(1);
storage_ptr.write(Storage::new_reference_counted());
}
let new_elements = Self::elems_with_capacity(slice.len());
// Copy the old elements to the new allocation.
unsafe {
copy_nonoverlapping(elements.as_ptr(), new_elements, self.length);
copy_nonoverlapping(elements.as_ptr(), new_elements.as_ptr(), self.length);
}
new_ptr
new_elements
}
} else {
// Allocate new memory.
let new_ptr = unsafe { roc_alloc(new_size, alignment as u32) };
let new_elements = unsafe { new_ptr.cast::<u8>().add(elements_offset).cast::<T>() };
// Initialize the reference count.
unsafe {
let storage_ptr = new_elements.cast::<Storage>().sub(1);
storage_ptr.write(Storage::new_reference_counted());
}
new_ptr
Self::elems_with_capacity(slice.len())
};
let elements = unsafe {
new_ptr
.cast::<u8>()
.add(elements_offset)
.cast::<ManuallyDrop<T>>()
};
let non_null_elements = NonNull::new(elements).unwrap();
self.elements = Some(non_null_elements);
let elements = self.elements.unwrap().as_ptr();
@ -159,6 +310,16 @@ where
self.capacity = self.length
}
/// Replace self with a new version, without letting `drop` run in between.
fn update_to(&mut self, mut updated: Self) {
// We want to replace `self` with `updated` in a way that makes sure
// `self`'s `drop` never runs. This is the proper way to do that:
// swap them, and then forget the "updated" one (which is now pointing
// to the original allocation).
mem::swap(self, &mut updated);
mem::forget(updated);
}
}
impl<T> Deref for RocList<T> {
@ -274,27 +435,21 @@ impl<T> Drop for RocList<T> {
if let Some((elements, storage)) = self.elements_and_storage() {
// Decrease the list's reference count.
let mut new_storage = storage.get();
let needs_dealloc = new_storage.decrease();
if needs_dealloc {
unsafe {
// Drop the stored elements.
for index in 0..self.len() {
let elem_ptr = elements.as_ptr().add(index);
if !new_storage.is_readonly() {
let needs_dealloc = new_storage.decrease();
mem::drop::<T>(ManuallyDrop::take(&mut *elem_ptr));
if needs_dealloc {
unsafe {
// Drop the stored elements.
for index in 0..self.len() {
ManuallyDrop::drop(&mut *elements.as_ptr().add(index));
}
// Release the memory.
roc_dealloc(self.ptr_to_allocation(), Self::alloc_alignment());
}
let alignment = cmp::max(mem::align_of::<T>(), mem::align_of::<Storage>());
// Release the memory.
roc_dealloc(
elements.as_ptr().cast::<u8>().sub(alignment).cast(),
alignment as u32,
);
}
} else {
if !new_storage.is_readonly() {
} else {
// Write the storage back.
storage.set(new_storage);
}
@ -358,3 +513,19 @@ impl<T> Drop for IntoIter<T> {
}
}
}
impl<T: Hash> Hash for RocList<T> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
// This is the same as Rust's Vec implementation, which
// just delegates to the slice implementation. It's a bit surprising
// that Hash::hash_slice doesn't automatically incorporate the length,
// but the slice implementation indeed does explicitly call self.len().hash(state);
//
// To verify, click the "source" links for:
// Vec: https://doc.rust-lang.org/std/vec/struct.Vec.html#impl-Hash
// slice: https://doc.rust-lang.org/std/primitive.slice.html#impl-Hash
self.len().hash(state);
Hash::hash_slice(self.as_slice(), state);
}
}

View File

@ -1,18 +1,51 @@
#![deny(unsafe_op_in_unsafe_fn)]
use core::{
cmp,
convert::TryFrom,
fmt::Debug,
mem::{size_of, ManuallyDrop},
fmt,
hash::{self, Hash},
mem::{self, size_of, ManuallyDrop},
ops::{Deref, DerefMut},
ptr,
};
use std::hash::Hash;
#[cfg(not(feature = "no_std"))]
use std::ffi::{CStr, CString};
use crate::RocList;
#[repr(transparent)]
pub struct RocStr(RocStrInner);
fn with_stack_bytes<F, E, T>(length: usize, closure: F) -> T
where
F: FnOnce(*mut E) -> T,
{
use crate::{roc_alloc, roc_dealloc};
use core::mem::MaybeUninit;
if length < RocStr::TEMP_STR_MAX_STACK_BYTES {
// TODO: once https://doc.rust-lang.org/std/mem/union.MaybeUninit.html#method.uninit_array
// has become stabilized, use that here in order to do a precise
// stack allocation instead of always over-allocating to 64B.
let mut bytes: MaybeUninit<[u8; RocStr::TEMP_STR_MAX_STACK_BYTES]> = MaybeUninit::uninit();
closure(bytes.as_mut_ptr() as *mut E)
} else {
let align = core::mem::align_of::<E>() as u32;
// The string is too long to stack-allocate, so
// do a heap allocation and then free it afterwards.
let ptr = unsafe { roc_alloc(length, align) } as *mut E;
let answer = closure(ptr);
// Free the heap allocation.
unsafe { roc_dealloc(ptr.cast(), align) };
answer
}
}
impl RocStr {
pub const SIZE: usize = core::mem::size_of::<Self>();
pub const MASK: u8 = 0b1000_0000;
@ -28,8 +61,8 @@ impl RocStr {
/// # Safety
///
/// `slice` must be valid UTF-8.
pub unsafe fn from_slice(slice: &[u8]) -> Self {
if let Some(small_string) = unsafe { SmallString::try_from(slice) } {
pub unsafe fn from_slice_unchecked(slice: &[u8]) -> Self {
if let Some(small_string) = unsafe { SmallString::try_from_utf8_bytes(slice) } {
Self(RocStrInner { small_string })
} else {
let heap_allocated = RocList::from_slice(slice);
@ -51,6 +84,13 @@ impl RocStr {
}
}
pub fn capacity(&self) -> usize {
match self.as_enum_ref() {
RocStrInnerRef::HeapAllocated(roc_list) => roc_list.capacity(),
RocStrInnerRef::SmallString(_) => SmallString::CAPACITY,
}
}
pub fn len(&self) -> usize {
match self.as_enum_ref() {
RocStrInnerRef::HeapAllocated(h) => h.len(),
@ -62,9 +102,417 @@ impl RocStr {
self.len() == 0
}
/// Note that there is no way to convert directly to a String.
///
/// This is because RocStr values are not allocated using the system allocator, so
/// handing off any heap-allocated bytes to a String would not work because its Drop
/// implementation would try to free those bytes using the wrong allocator.
///
/// Instead, if you want a Rust String, you need to do a fresh allocation and copy the
/// bytes over - in other words, calling this `as_str` method and then calling `to_string`
/// on that.
pub fn as_str(&self) -> &str {
&*self
}
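// A sketch of the analogous two-step conversion (editor's illustration; `roc_str`
// is a hypothetical RocStr):
//
//     let string: String = roc_str.as_str().to_string();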
/// Create an empty RocStr with enough space preallocated to store
/// the requested number of bytes.
pub fn with_capacity(bytes: usize) -> Self {
if bytes <= SmallString::CAPACITY {
RocStr(RocStrInner {
small_string: SmallString::empty(),
})
} else {
// The requested capacity won't fit in a small string; we need to go big.
RocStr(RocStrInner {
heap_allocated: ManuallyDrop::new(RocList::with_capacity(bytes)),
})
}
}
/// Increase a RocStr's capacity by at least the requested number of bytes (possibly more).
///
    /// May replace self with a new RocStr, if the provided one was not unique.
pub fn reserve(&mut self, bytes: usize) {
if self.is_small_str() {
let small_str = unsafe { self.0.small_string };
let target_cap = small_str.len() + bytes;
if target_cap > SmallString::CAPACITY {
// The requested capacity won't fit in a small string; we need to go big.
let mut roc_list = RocList::with_capacity(target_cap);
roc_list.extend_from_slice(small_str.as_bytes());
*self = RocStr(RocStrInner {
heap_allocated: ManuallyDrop::new(roc_list),
});
}
} else {
let mut roc_list = unsafe { ManuallyDrop::take(&mut self.0.heap_allocated) };
roc_list.reserve(bytes);
let mut updated = RocStr(RocStrInner {
heap_allocated: ManuallyDrop::new(roc_list),
});
mem::swap(self, &mut updated);
mem::forget(updated);
}
}
/// Returns the index of the first interior \0 byte in the string, or None if there are none.
fn first_nul_byte(&self) -> Option<usize> {
match self.as_enum_ref() {
RocStrInnerRef::HeapAllocated(roc_list) => roc_list.iter().position(|byte| *byte == 0),
RocStrInnerRef::SmallString(small_string) => small_string.first_nul_byte(),
}
}
// If the string is under this many bytes, the with_terminator family
// of methods will allocate the terminated string on the stack when
// the RocStr is non-unique.
const TEMP_STR_MAX_STACK_BYTES: usize = 64;
/// Like calling with_utf8_terminator passing \0 for the terminator,
/// except it can fail because a RocStr may contain \0 characters,
/// which a nul-terminated string must not.
pub fn utf8_nul_terminated<T, F: Fn(*mut u8, usize) -> T>(
self,
func: F,
) -> Result<T, InteriorNulError> {
if let Some(pos) = self.first_nul_byte() {
Err(InteriorNulError { pos, roc_str: self })
} else {
Ok(self.with_utf8_terminator(b'\0', func))
}
}
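// A sketch of handling the failure case (editor's illustration; hypothetical caller):
//
//     match roc_str.utf8_nul_terminated(|ptr, len| { /* use ptr as a C string */ }) {
//         Ok(answer) => answer,
//         Err(InteriorNulError { pos, .. }) => panic!("interior nul at byte {}", pos),
//     }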
/// Turn this RocStr into a UTF-8 `*mut u8`, terminate it with the given character
/// (commonly either `b'\n'` or b`\0`) and then provide access to that
/// `*mut u8` (as well as its length) for the duration of a given function. This is
/// designed to be an efficient way to turn a `RocStr` received from an application into
/// either the nul-terminated UTF-8 `char*` needed by UNIX syscalls, or into a
/// newline-terminated string to write to stdout or stderr (for a "println"-style effect).
///
/// **NOTE:** The length passed to the function is the same value that `RocStr::len` will
/// return; it does not count the terminator. So to convert it to a nul-terminated slice
/// of Rust bytes (for example), call `slice::from_raw_parts` passing the given length + 1.
///
/// This operation achieves efficiency by reusing allocated bytes from the RocStr itself,
/// and sometimes allocating on the stack. It does not allocate on the heap when given a
/// a small string or a string with unique refcount, but may allocate when given a large
/// string with non-unique refcount. (It will do a stack allocation if the string is under
/// 64 bytes; the stack allocation will only live for the duration of the called function.)
///
/// If the given (owned) RocStr is unique, this can overwrite the underlying bytes
/// to terminate the string in-place. Small strings have an extra byte at the end
/// where the length is stored, which can be replaced with the terminator. Heap-allocated
/// strings can have excess capacity which can hold a terminator, or if they have no
/// excess capacity, all the bytes can be shifted over the refcount in order to free up
/// a `usize` worth of free space at the end - which can easily fit a 1-byte terminator.
pub fn with_utf8_terminator<T, F: Fn(*mut u8, usize) -> T>(self, terminator: u8, func: F) -> T {
// Note that this function does not use with_terminator because it can be
// more efficient than that - due to knowing that it's already in UTF-8 and always
// has room for a 1-byte terminator in the existing allocation (either in the refcount
// bytes, or, in a small string, in the length at the end of the string).
let terminate = |alloc_ptr: *mut u8, len: usize| unsafe {
*(alloc_ptr.add(len)) = terminator;
func(alloc_ptr, len)
};
match self.as_enum_ref() {
RocStrInnerRef::HeapAllocated(roc_list) => {
unsafe {
match roc_list.storage() {
Some(storage) if storage.is_unique() => {
// The backing RocList was unique, so we can mutate it in-place.
let len = roc_list.len();
let ptr = if len < roc_list.capacity() {
// We happen to have excess capacity already, so we will be able
// to write the terminator into the first byte of excess capacity.
roc_list.ptr_to_first_elem() as *mut u8
} else {
// We always have an allocation that's even bigger than necessary,
// because the refcount bytes take up more than the 1 byte needed for
// the terminator. We just need to shift the bytes over on top
// of the refcount.
let alloc_ptr = roc_list.ptr_to_allocation() as *mut u8;
// First, copy the bytes over the original allocation - effectively
// shifting everything over by one `usize`. Now we no longer have a
// refcount (but the terminated string won't use that anyway), but we do
// have a free `usize` at the end.
//
// IMPORTANT: Must use ptr::copy instead of ptr::copy_nonoverlapping
// because the regions definitely overlap!
ptr::copy(roc_list.ptr_to_first_elem() as *mut u8, alloc_ptr, len);
alloc_ptr
};
terminate(ptr, len)
}
Some(_) => {
let len = roc_list.len();
// The backing list was not unique, so we can't mutate it in-place.
// Ask for `len + 1` bytes to store the original string and the terminator.
with_stack_bytes(len + 1, |alloc_ptr: *mut u8| {
let alloc_ptr = alloc_ptr as *mut u8;
let elem_ptr = roc_list.ptr_to_first_elem() as *mut u8;
// memcpy the bytes into the stack allocation
ptr::copy_nonoverlapping(elem_ptr, alloc_ptr, len);
terminate(alloc_ptr, len)
})
}
None => {
// The backing list was empty.
//
// No need to do a heap allocation for an empty string - we
// can just do a stack allocation that will live for the
// duration of the function.
func([terminator].as_mut_ptr(), 0)
}
}
}
}
RocStrInnerRef::SmallString(small_str) => {
let mut bytes = small_str.bytes;
// Even if the small string is at capacity, there will be room to write
// a terminator in the byte that's used to store the length.
terminate(bytes.as_mut_ptr() as *mut u8, small_str.len())
}
}
}
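// A sketch of the "println"-style use described above, assuming a host
// that links libc (fd 1 is stdout; `len` does not count the newline):
//
//     roc_str.with_utf8_terminator(b'\n', |ptr, len| unsafe {
//         libc::write(1, ptr as *const libc::c_void, len + 1)
//     });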
/// Like calling with_utf16_terminator passing \0 for the terminator,
/// except it can fail because a RocStr may contain \0 characters,
/// which a nul-terminated string must not.
pub fn utf16_nul_terminated<T, F: Fn(*mut u16, usize) -> T>(
self,
func: F,
) -> Result<T, InteriorNulError> {
if let Some(pos) = self.first_nul_byte() {
Err(InteriorNulError { pos, roc_str: self })
} else {
Ok(self.with_utf16_terminator(0, func))
}
}
/// Turn this RocStr into a nul-terminated UTF-16 `*mut u16` and then provide access to
/// that `*mut u16` (as well as its length) for the duration of a given function. This is
/// designed to be an efficient way to turn a RocStr received from an application into
/// the nul-terminated UTF-16 `wchar_t*` needed by Windows API calls.
///
/// **NOTE:** The length passed to the function is the same value that `RocStr::len` will
/// return; it does not count the terminator. So to convert it to a nul-terminated
/// slice of Rust `u16`s, call `slice::from_raw_parts` passing the given length + 1.
///
/// This operation achieves efficiency by reusing allocated bytes from the RocStr itself,
/// and sometimes allocating on the stack. It does not allocate on the heap when given
/// a small string or a string with unique refcount, but may allocate when given a large
/// string with non-unique refcount. (It will do a stack allocation if the string is under
/// 64 bytes; the stack allocation will only live for the duration of the called function.)
///
/// Because this works on an owned RocStr, it's able to overwrite the underlying bytes
/// to nul-terminate the string in-place. Small strings have an extra byte at the end
/// where the length is stored, which can become 0 for nul-termination. Heap-allocated
/// strings can have excess capacity which can hold a terminator, or if they have no
/// excess capacity, all the bytes can be shifted over the refcount in order to free up
/// a `usize` worth of free space at the end - which can easily fit a terminator.
///
/// Unlike `utf16_nul_terminated`, this operation itself cannot fail, because an
/// arbitrary terminator is allowed even when the RocStr contains \0 characters.
pub fn with_utf16_terminator<T, F: Fn(*mut u16, usize) -> T>(
self,
terminator: u16,
func: F,
) -> T {
self.with_terminator(terminator, |dest_ptr: *mut u16, str_slice: &str| {
// Translate UTF-8 source bytes into UTF-16 and write them into the destination.
for (index, wchar) in str_slice.encode_utf16().enumerate() {
unsafe {
*(dest_ptr.add(index)) = wchar;
}
}
func(dest_ptr, str_slice.len())
})
}
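// A sketch mirroring the with_terminator tests below: inspect the UTF-16
// buffer and check the terminator (note that `len` is the UTF-8 byte
// length, an upper bound on the number of UTF-16 units written):
//
//     roc_str.with_utf16_terminator(0, |ptr, len| {
//         let units = unsafe { core::slice::from_raw_parts(ptr, len + 1) };
//         assert_eq!(units[len], 0); // the terminator
//     });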
pub fn with_windows_path<T, F: Fn(*mut u16, usize) -> T>(
self,
func: F,
) -> Result<T, InteriorNulError> {
if let Some(pos) = self.first_nul_byte() {
Err(InteriorNulError { pos, roc_str: self })
} else {
let answer = self.with_terminator(0u16, |dest_ptr: *mut u16, str_slice: &str| {
// Translate UTF-8 source bytes into UTF-16 and write them into the destination.
for (index, mut wchar) in str_slice.encode_utf16().enumerate() {
// Replace slashes with backslashes
if wchar == '/' as u16 {
wchar = '\\' as u16
};
unsafe {
*(dest_ptr.add(index)) = wchar;
}
}
func(dest_ptr, str_slice.len())
});
Ok(answer)
}
}
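// A usage sketch for `with_windows_path` (all values illustrative):
//
//     let answer = RocStr::from("foo/bar").with_windows_path(|ptr, len| {
//         let units = unsafe { core::slice::from_raw_parts(ptr, len + 1) };
//         // Slashes become backslashes and a 0 terminator is appended.
//         assert_eq!(units, "foo\\bar\0".encode_utf16().collect::<Vec<u16>>());
//     });
//     assert_eq!(answer, Ok(()));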
/// Generic version of with_utf8_terminator and with_utf16_terminator. The given function will be
/// passed a pointer to elements of type E. The pointer will have enough room to hold
/// one element for each byte of the given `&str`'s length, plus the terminator element.
///
/// The terminator will be written right after the end of the space for the other elements,
/// but all the memory in that space before the terminator will be uninitialized. This means
/// if you want to do something like copy the contents of the `&str` into there, that will
/// need to be done explicitly.
///
/// The terminator is always written - even if there are no other elements present before it.
/// (In such a case, the `&str` argument will be empty and the `*mut E` will point directly
/// to the terminator).
///
/// One use for this is to convert slashes to backslashes in Windows paths;
/// this function provides the most efficient way to do that, because no extra
/// iteration pass is necessary; the conversion can be done after each translation
/// of a UTF-8 character to UTF-16. Here's how that would look:
///
/// use roc_std::{RocStr, InteriorNulError};
///
/// pub fn with_windows_path<T, F: Fn(*mut u16, usize) -> T>(
/// roc_str: RocStr,
/// func: F,
/// ) -> Result<T, InteriorNulError> {
/// let answer = roc_str.with_terminator(0u16, |dest_ptr: *mut u16, str_slice: &str| {
/// // Translate UTF-8 source bytes into UTF-16 and write them into the destination.
/// for (index, mut wchar) in str_slice.encode_utf16().enumerate() {
/// // Replace slashes with backslashes
/// if wchar == '/' as u16 {
/// wchar = '\\' as u16
/// };
///
/// unsafe {
/// *(dest_ptr.add(index)) = wchar;
/// }
/// }
///
/// func(dest_ptr, str_slice.len())
/// });
///
/// Ok(answer)
/// }
pub fn with_terminator<E: Copy, A, F: Fn(*mut E, &str) -> A>(
self,
terminator: E,
func: F,
) -> A {
use crate::Storage;
use core::mem::align_of;
let terminate = |alloc_ptr: *mut E, str_slice: &str| unsafe {
*(alloc_ptr.add(str_slice.len())) = terminator;
func(alloc_ptr, str_slice)
};
// When we don't have an existing allocation that can work, fall back on this.
// It uses either a stack allocation, or, if that would be too big, a heap allocation.
let fallback = |str_slice: &str| {
// We need 1 extra elem for the terminator. It must be an elem,
// not a byte, because we'll be providing a pointer to elems.
let needed_bytes = (str_slice.len() + 1) * size_of::<E>();
with_stack_bytes(needed_bytes, |alloc_ptr: *mut E| {
terminate(alloc_ptr, str_slice)
})
};
match self.as_enum_ref() {
RocStrInnerRef::HeapAllocated(roc_list) => {
let len = roc_list.len();
unsafe {
match roc_list.storage() {
Some(storage) if storage.is_unique() => {
// The backing RocList was unique, so we can mutate it in-place.
// We need 1 extra elem for the terminator. It must be an elem,
// not a byte, because we'll be providing a pointer to elems.
let needed_bytes = (len + 1) * size_of::<E>();
// We can use not only the capacity on the heap, but also
// the bytes originally used for the refcount.
let available_bytes = roc_list.capacity() + size_of::<Storage>();
if needed_bytes < available_bytes {
debug_assert!(align_of::<Storage>() >= align_of::<E>());
// We happen to have sufficient excess capacity already,
// so we will be able to write the new elements as well as
// the terminator into the existing allocation.
let ptr = roc_list.ptr_to_allocation() as *mut E;
let answer = terminate(ptr, self.as_str());
// We cannot rely on the RocStr::drop implementation, because
// it tries to use the refcount - which we just overwrote
// with string bytes.
mem::forget(self);
crate::roc_dealloc(ptr.cast(), mem::align_of::<E>() as u32);
answer
} else {
// We didn't have sufficient excess capacity already,
// so we need to do either a new stack allocation or a new
// heap allocation.
fallback(self.as_str())
}
}
Some(_) => {
// The backing list was not unique, so we can't mutate it in-place.
fallback(self.as_str())
}
None => {
// The backing list was empty.
//
// No need to do a heap allocation for an empty string - we
// can just do a stack allocation that will live for the
// duration of the function.
func([terminator].as_mut_ptr() as *mut E, "")
}
}
}
}
RocStrInnerRef::SmallString(small_str) => {
let len = small_str.len();
// We need 1 extra elem for the terminator. It must be an elem,
// not a byte, because we'll be providing a pointer to elems.
let needed_bytes = (len + 1) * size_of::<E>();
let available_bytes = size_of::<SmallString>();
if needed_bytes < available_bytes {
terminate(small_str.bytes.as_ptr() as *mut E, self.as_str())
} else {
fallback(self.as_str())
}
}
}
}
}
impl Deref for RocStr {
@ -78,6 +526,35 @@ impl Deref for RocStr {
}
}
/// This can fail because a CStr may contain invalid UTF-8 characters
#[cfg(not(feature = "no_std"))]
impl TryFrom<&CStr> for RocStr {
type Error = core::str::Utf8Error;
fn try_from(c_str: &CStr) -> Result<Self, Self::Error> {
c_str.to_str().map(RocStr::from)
}
}
/// This can fail because a CString may contain invalid UTF-8 characters
#[cfg(not(feature = "no_std"))]
impl TryFrom<CString> for RocStr {
type Error = core::str::Utf8Error;
fn try_from(c_string: CString) -> Result<Self, Self::Error> {
c_string.to_str().map(RocStr::from)
}
}
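// A usage sketch for the conversions above (valid UTF-8, so they succeed):
//
//     use std::ffi::CString;
//
//     let c_string = CString::new("hello").unwrap();
//     let roc_str = RocStr::try_from(c_string).unwrap();
//     assert_eq!(roc_str.as_str(), "hello");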
#[cfg(not(feature = "no_std"))]
/// Like https://doc.rust-lang.org/std/ffi/struct.NulError.html but
/// only for interior nuls, not for missing nul terminators.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InteriorNulError {
pub pos: usize,
pub roc_str: RocStr,
}
impl Default for RocStr {
fn default() -> Self {
Self::empty()
@ -86,7 +563,7 @@ impl Default for RocStr {
impl From<&str> for RocStr {
fn from(s: &str) -> Self {
unsafe { Self::from_slice(s.as_bytes()) }
unsafe { Self::from_slice_unchecked(s.as_bytes()) }
}
}
@ -99,18 +576,24 @@ impl PartialEq for RocStr {
impl Eq for RocStr {}
impl PartialOrd for RocStr {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
self.as_str().partial_cmp(other.as_str())
}
}
impl Ord for RocStr {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
fn cmp(&self, other: &Self) -> cmp::Ordering {
self.as_str().cmp(other.as_str())
}
}
impl Debug for RocStr {
impl fmt::Debug for RocStr {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.deref().fmt(f)
}
}
impl fmt::Display for RocStr {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
self.deref().fmt(f)
}
@ -168,7 +651,7 @@ impl SmallString {
/// # Safety
///
/// `slice` must be valid UTF-8.
unsafe fn try_from(slice: &[u8]) -> Option<Self> {
unsafe fn try_from_utf8_bytes(slice: &[u8]) -> Option<Self> {
// Check the size of the slice.
let len_as_u8 = u8::try_from(slice.len()).ok()?;
if (len_as_u8 as usize) > Self::CAPACITY {
@ -191,6 +674,17 @@ impl SmallString {
fn len(&self) -> usize {
usize::from(self.len & !RocStr::MASK)
}
/// Returns the index of the first interior \0 byte in the string, or None if there are none.
fn first_nul_byte(&self) -> Option<usize> {
for (index, byte) in self.bytes[0..self.len()].iter().enumerate() {
if *byte == 0 {
return Some(index);
}
}
None
}
}
impl Deref for SmallString {
@ -210,7 +704,7 @@ impl DerefMut for SmallString {
}
impl Hash for RocStr {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
fn hash<H: hash::Hasher>(&self, state: &mut H) {
self.as_str().hash(state)
}
}

View File

@ -1,6 +1,14 @@
use core::num::NonZeroIsize;
const REFCOUNT_1: isize = isize::MIN;
/// # Safety
///
/// isize::MIN is definitely not zero. This can become
/// https://doc.rust-lang.org/std/num/struct.NonZeroIsize.html#associatedconstant.MIN
/// once it has been stabilized.
const REFCOUNT_1: NonZeroIsize = unsafe { NonZeroIsize::new_unchecked(isize::MIN) };
const _ASSERT_STORAGE_SIZE: () =
assert!(std::mem::size_of::<isize>() == std::mem::size_of::<Storage>());
#[derive(Clone, Copy, Debug)]
pub enum Storage {
@ -10,7 +18,7 @@ pub enum Storage {
impl Storage {
pub fn new_reference_counted() -> Self {
Self::ReferenceCounted(NonZeroIsize::new(REFCOUNT_1).unwrap())
Self::ReferenceCounted(REFCOUNT_1)
}
/// Increment the reference count.
@ -37,11 +45,11 @@ impl Storage {
match self {
Storage::Readonly => false,
Storage::ReferenceCounted(rc) => {
let rc_as_isize = rc.get();
if rc_as_isize == REFCOUNT_1 {
if *rc == REFCOUNT_1 {
true
} else {
*rc = NonZeroIsize::new(rc_as_isize - 1).unwrap();
*rc = NonZeroIsize::new(rc.get() - 1).expect("A reference count was decremented all the way to zero, which should never happen.");
false
}
}
@ -51,4 +59,8 @@ impl Storage {
pub fn is_readonly(&self) -> bool {
matches!(self, Self::Readonly)
}
pub fn is_unique(&self) -> bool {
matches!(self, Self::ReferenceCounted(REFCOUNT_1))
}
}
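// A minimal sketch of the lifecycle using only the API above: a fresh
// reference-counted allocation starts at REFCOUNT_1, i.e. unique.
//
//     let storage = Storage::new_reference_counted();
//     assert!(storage.is_unique());
//     assert!(!storage.is_readonly());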

View File

@ -7,6 +7,8 @@ extern crate roc_std;
use core::ffi::c_void;
const ROC_SMALL_STR_CAPACITY: usize = core::mem::size_of::<roc_std::RocStr>() - 1;
#[no_mangle]
pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
libc::malloc(size)
@ -27,10 +29,37 @@ pub unsafe extern "C" fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
libc::free(c_ptr)
}
#[cfg(test)]
#[no_mangle]
pub unsafe extern "C" fn roc_panic(c_ptr: *mut c_void, tag_id: u32) {
use std::ffi::CStr;
use std::os::raw::c_char;
match tag_id {
0 => {
let c_str = CStr::from_ptr(c_ptr as *const c_char);
let string = c_str.to_str().unwrap();
panic!("roc_panic during test: {}", string);
}
_ => todo!(),
}
}
#[cfg(test)]
#[no_mangle]
pub unsafe extern "C" fn roc_memcpy(dst: *mut c_void, src: *mut c_void, n: usize) -> *mut c_void {
libc::memcpy(dst, src, n)
}
#[cfg(test)]
#[no_mangle]
pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut c_void {
libc::memset(dst, c, n)
}
#[cfg(test)]
mod test_roc_std {
use roc_std::RocResult;
use roc_std::RocStr;
use roc_std::{RocBox, RocDec, RocList, RocResult, RocStr};
fn roc_str_byte_representation(string: &RocStr) -> [u8; RocStr::SIZE] {
unsafe { core::mem::transmute_copy(string) }
@ -86,9 +115,45 @@ mod test_roc_std {
#[test]
fn empty_string_capacity() {
let string = RocStr::from("");
let string = RocStr::empty();
assert_eq!(string.capacity(), 0);
assert_eq!(string.capacity(), super::ROC_SMALL_STR_CAPACITY);
}
#[test]
fn reserve_small_str() {
let mut roc_str = RocStr::empty();
roc_str.reserve(42);
assert_eq!(roc_str.capacity(), 42);
}
#[test]
fn reserve_big_str() {
let mut roc_str = RocStr::empty();
roc_str.reserve(5000);
assert_eq!(roc_str.capacity(), 5000);
}
#[test]
fn reserve_small_list() {
let mut roc_list = RocList::<RocStr>::empty();
roc_list.reserve(42);
assert_eq!(roc_list.capacity(), 42);
}
#[test]
fn reserve_big_list() {
let mut roc_list = RocList::<RocStr>::empty();
roc_list.reserve(5000);
assert_eq!(roc_list.capacity(), 5000);
}
#[test]
@ -123,4 +188,133 @@ mod test_roc_std {
assert!(!roc_result.is_ok());
assert!(roc_result.is_err());
}
#[test]
fn create_roc_box() {
let contents = 42i32;
let roc_box = RocBox::new(contents);
assert_eq!(roc_box.into_inner(), contents)
}
#[test]
fn roc_dec_fmt() {
assert_eq!(
format!("{}", RocDec::MIN),
"-1701411834604692317316.87303715884105728"
);
let half = RocDec::from_str("0.5").unwrap();
assert_eq!(format!("{}", half), "0.5");
let ten = RocDec::from_str("10").unwrap();
assert_eq!(format!("{}", ten), "10");
let example = RocDec::from_str("1234.5678").unwrap();
assert_eq!(format!("{}", example), "1234.5678");
}
}
#[cfg(test)]
mod with_terminator {
use core::slice;
use roc_std::RocStr;
use std::ffi::CStr;
fn verify_temp_c(string: &str, excess_capacity: usize) {
let mut roc_str = RocStr::from(string);
println!("-------------1--------------");
if excess_capacity > 0 {
roc_str.reserve(excess_capacity);
}
// utf8_nul_terminated
{
println!("-------------2--------------");
let answer = roc_str.clone().utf8_nul_terminated(|ptr, len| {
println!("-------------3--------------");
let bytes = unsafe { slice::from_raw_parts(ptr, len + 1) };
println!("-------------4--------------");
let c_str = CStr::from_bytes_with_nul(bytes).unwrap();
println!("-------------5--------------");
assert_eq!(c_str.to_str(), Ok(string));
println!("-------------6--------------");
42
});
assert_eq!(Ok(42), answer);
}
// utf16_nul_terminated
{
let answer = roc_str.utf16_nul_terminated(|ptr, len| {
let bytes: &[u16] = unsafe { slice::from_raw_parts(ptr.cast(), len + 1) };
// Verify that it's nul-terminated
assert_eq!(bytes[len], 0);
let decoded = String::from_utf16(&bytes[0..len]).unwrap();
assert_eq!(decoded.as_str(), string);
42
});
assert_eq!(Ok(42), answer);
}
}
#[test]
fn empty_string() {
verify_temp_c("", 0);
}
/// e.g. "a" or "abc" or "abcdefg" etc.
fn string_for_len(len: usize) -> String {
let first_index: usize = 97; // start with ASCII lowercase "a"
let bytes: Vec<u8> = (0..len)
.map(|index| {
let letter = (index % 26) + first_index;
letter.try_into().unwrap()
})
.collect();
assert_eq!(bytes.len(), len);
// The bytes should contain no nul characters.
assert!(bytes.iter().all(|byte| *byte != 0));
String::from_utf8(bytes).unwrap()
}
#[test]
fn small_strings() {
for len in 1..=super::ROC_SMALL_STR_CAPACITY {
verify_temp_c(&string_for_len(len), 0);
}
}
#[test]
fn no_excess_capacity() {
// This is small enough that it should be a stack allocation for UTF-8
verify_temp_c(&string_for_len(33), 0);
// This is big enough that it should be a heap allocation for UTF-8 and UTF-16
verify_temp_c(&string_for_len(65), 0);
}
#[test]
fn with_excess_capacity() {
println!("Start!");
// We should be able to use the excess capacity for all of these.
verify_temp_c(&string_for_len(33), 1); // TODO why isn't this unique?! ohh because I CLONED IT
println!("Success!");
// verify_temp_c(&string_for_len(33), 33);
// verify_temp_c(&string_for_len(65), 1);
// verify_temp_c(&string_for_len(65), 64);
}
}

View File

@ -13,7 +13,7 @@ cp -r public/ build/
# grab the source code and copy it to Netlify's server; if it's not there, fail the build.
pushd build
wget https://github.com/rtfeldman/elm-css/files/8037422/roc-source-code.zip
wget https://github.com/rtfeldman/elm-css/files/8849069/roc-source-code.zip
popd
pushd ..