Merge branch 'trunk' of github.com:rtfeldman/roc into build-nix

Anton-4 2022-07-26 17:24:25 +02:00
commit b791729046
232 changed files with 10322 additions and 6735 deletions


@ -19,15 +19,18 @@ jobs:
with:
clean: "true"
- name: check formatting with rustfmt
run: nix develop -c cargo fmt --all -- --check
- name: check code style with clippy
run: nix develop -c cargo clippy --workspace --tests -- --deny warnings
- name: check code style with clippy --release
run: cargo clippy --workspace --tests --release -- --deny warnings
# this needs to be done after clippy because of code generation in wasi-libc-sys
- name: check formatting with rustfmt
run: nix develop -c cargo fmt --all -- --check
- name: execute tests with --release
run: nix develop -c cargo test --locked --release
# we run the llvm wasm tests only on this machine because it is fast and wasm should be cross-platform
- name: execute llvm wasm tests with --release
run: nix develop -c cargo test-gen-llvm-wasm --locked --release


@ -85,3 +85,4 @@ Hashi364 <49736221+Kiyoshi364@users.noreply.github.com>
Jared Forsyth <jared@jaredforsyth.com>
Patrick Kilgore <git@pck.email>
Marten/Qqwy <w-m@wmcode.nl>
Christoph Rüßler <christoph.ruessler@mailbox.org>


@ -41,6 +41,8 @@ Use `cargo run help` to see all subcommands.
To use the `repl` subcommand, execute `cargo run repl`.
Use `cargo build` to build the whole project.
Read the instructions [here](devtools/README.md) to make nix work well with your development tools (vscode, vim, rust-analyzer...)
#### Extra tips
If you want to load all dependencies automatically whenever you `cd` into `roc`, check out [direnv](https://direnv.net/).
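For example (a sketch, assuming direnv ≥ 2.29 and the repository's nix flake): a `.envrc` at the repo root containing just `use flake`, followed by a one-time `direnv allow`, loads the nix dev shell automatically whenever you `cd` in.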

Cargo.lock generated

@ -3314,37 +3314,6 @@ dependencies = [
"libc",
]
[[package]]
name = "roc-bindgen"
version = "0.1.0"
dependencies = [
"bumpalo",
"clap 3.2.11",
"cli_utils",
"ctor",
"dircpy",
"fnv",
"indexmap",
"indoc",
"pretty_assertions",
"roc_builtins",
"roc_can",
"roc_collections",
"roc_error_macros",
"roc_load",
"roc_module",
"roc_mono",
"roc_reporting",
"roc_std",
"roc_target",
"roc_test_utils",
"roc_types",
"strum",
"strum_macros",
"target-lexicon",
"tempfile",
]
[[package]]
name = "roc_alias_analysis"
version = "0.1.0"
@ -3475,6 +3444,7 @@ dependencies = [
"roc_error_macros",
"roc_fmt",
"roc_gen_llvm",
"roc_glue",
"roc_linker",
"roc_load",
"roc_module",
@ -3716,6 +3686,7 @@ dependencies = [
"roc_error_macros",
"roc_module",
"roc_mono",
"roc_region",
"roc_std",
"roc_target",
"target-lexicon",
@ -3736,6 +3707,36 @@ dependencies = [
"roc_target",
]
[[package]]
name = "roc_glue"
version = "0.1.0"
dependencies = [
"bumpalo",
"clap 3.2.11",
"cli_utils",
"dircpy",
"fnv",
"indexmap",
"indoc",
"pretty_assertions",
"roc_builtins",
"roc_can",
"roc_collections",
"roc_error_macros",
"roc_load",
"roc_module",
"roc_mono",
"roc_reporting",
"roc_std",
"roc_target",
"roc_test_utils",
"roc_types",
"strum",
"strum_macros",
"target-lexicon",
"tempfile",
]
[[package]]
name = "roc_highlight"
version = "0.1.0"
@ -3920,6 +3921,7 @@ dependencies = [
"roc_module",
"roc_mono",
"roc_parse",
"roc_region",
"roc_repl_eval",
"roc_reporting",
"roc_std",
@ -4026,6 +4028,7 @@ dependencies = [
"arrayvec 0.7.2",
"bumpalo",
"indoc",
"insta",
"lazy_static",
"pretty_assertions",
"regex",
@ -4784,6 +4787,7 @@ dependencies = [
"either",
"indoc",
"inkwell 0.1.0",
"lazy_static",
"libc",
"libloading",
"roc_build",
@ -4791,6 +4795,8 @@ dependencies = [
"roc_can",
"roc_collections",
"roc_constrain",
"roc_debug_flags",
"roc_error_macros",
"roc_gen_dev",
"roc_gen_llvm",
"roc_gen_wasm",


@ -33,7 +33,7 @@ members = [
"crates/vendor/inkwell",
"crates/vendor/pathfinding",
"crates/vendor/pretty",
"crates/bindgen",
"crates/glue",
"crates/editor",
"crates/ast",
"crates/cli",


@ -4,7 +4,7 @@ Roc is a language for making delightful software.
The [tutorial](TUTORIAL.md) is the best place to learn about how to use the language - it assumes no prior knowledge of Roc or similar languages. (If you already know [Elm](https://elm-lang.org/), then [Roc for Elm Programmers](https://github.com/rtfeldman/roc/blob/trunk/roc-for-elm-programmers.md) may be of interest.)
There's also a folder of [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) - the [CLI example](https://github.com/rtfeldman/roc/tree/trunk/examples/cli) in particular is a reasonable starting point to build on.
There's also a folder of [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) - the [CLI form example](https://github.com/rtfeldman/roc/tree/trunk/examples/interactive/form.roc) in particular is a reasonable starting point to build on.
If you have a specific question, the [FAQ](FAQ.md) might have an answer, although [Roc Zulip chat](https://roc.zulipchat.com) is overall the best place to ask questions and get help! It's also where we discuss [ideas](https://roc.zulipchat.com/#narrow/stream/304641-ideas) for the language. If you want to get involved in contributing to the language, Zulip is also a great place to ask about good first projects.
@ -83,7 +83,7 @@ The core Roc language and standard library include no I/O operations, which give
## Project Goals
Roc is in relatively early stages of development. It's currently possible to build both platforms and applications (see the [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) folder for some examples that aren't particularly organized at the moment), although [documentation](https://github.com/rtfeldman/roc/tree/trunk/compiler/builtins/roc) is in even earlier stages than the compiler itself.
Roc is in relatively early stages of development. It's currently possible to build both platforms and applications (see the [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) folder for some examples that aren't particularly organized at the moment), although [documentation](https://github.com/rtfeldman/roc/tree/trunk/crates/compiler/builtins/roc) is in even earlier stages than the compiler itself.
Besides the above language design, a separate goal is for Roc to ship with an ambitiously boundary-pushing graphical editor. Not like "an IDE," but rather something that makes people say "I have never seen anything remotely like this outside of Bret Victor demos."


@ -126,7 +126,7 @@ pub fn constrain_expr<'a>(
region,
));
and_constraints.push(Eq(num_type, expected, Category::Float, region));
and_constraints.push(Eq(num_type, expected, Category::Frac, region));
let defs_constraint = And(and_constraints);
@ -2705,7 +2705,7 @@ pub mod test_constrain {
A _ -> Z
"#
),
"[A [M, N]*] -> [X, Y, Z]*",
"[A [M, N]] -> [X, Y, Z]*",
)
}


@ -277,7 +277,7 @@ fn to_pending_def<'a>(
Type(TypeDef::Opaque { .. }) => todo_opaques!(),
Type(TypeDef::Ability { .. }) => todo_abilities!(),
Value(AstValueDef::Expect(_)) => todo!(),
Value(AstValueDef::Expect { .. }) => todo!(),
SpaceBefore(sub_def, _) | SpaceAfter(sub_def, _) => {
to_pending_def(env, sub_def, scope, pattern_type)


@ -171,10 +171,7 @@ pub fn to_pattern2<'a>(
malformed_pattern(env, problem, region)
}
Underscore(_) => match pattern_type {
WhenBranch | FunctionArg => Pattern2::Underscore,
TopLevelDef | DefExpr => underscore_in_def(env, region),
},
Underscore(_) => Pattern2::Underscore,
FloatLiteral(ref string) => match pattern_type {
WhenBranch => match finish_parsing_float(string) {
@ -591,16 +588,6 @@ fn unsupported_pattern<'a>(
Pattern2::UnsupportedPattern(region)
}
fn underscore_in_def<'a>(env: &mut Env<'a>, region: Region) -> Pattern2 {
use roc_problem::can::BadPattern;
env.problem(Problem::UnsupportedPattern(
BadPattern::UnderscoreInDef,
region,
));
Pattern2::UnsupportedPattern(region)
}
pub(crate) fn flatten_str_literal(pool: &mut Pool, literal: &StrLiteral<'_>) -> Pattern2 {
use roc_parse::ast::StrLiteral::*;


@ -1084,7 +1084,8 @@ fn type_to_union_tags<'a>(
let ext = {
let (it, ext) =
roc_types::types::gather_tags_unsorted_iter(subs, UnionTags::default(), temp_ext_var);
roc_types::types::gather_tags_unsorted_iter(subs, UnionTags::default(), temp_ext_var)
.expect("not a tag union");
tag_vars.extend(it.map(|(n, v)| (n.clone(), v)));
tag_vars.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));


@ -1,40 +0,0 @@
use std::io;
static TEMPLATE: &[u8] = include_bytes!("../templates/template.c");
pub fn write_template(writer: &mut impl io::Write) -> io::Result<()> {
writer.write_all(TEMPLATE)?;
Ok(())
}
// pub fn write_bindings(_writer: &mut impl io::Write) -> io::Result<()> {
// extern struct RocStr roc__mainForHost_1_exposed();
// int main() {
// struct RocStr str = roc__mainForHost_1_exposed();
// // Determine str_len and the str_bytes pointer,
// // taking into account the small string optimization.
// size_t str_len = roc_str_len(str);
// char* str_bytes;
// if (is_small_str(str)) {
// str_bytes = (char*)&str;
// } else {
// str_bytes = str.bytes;
// }
// // Write to stdout
// if (write(1, str_bytes, str_len) >= 0) {
// // Writing succeeded!
// return 0;
// } else {
// printf("Error writing to stdout: %s\n", strerror(errno));
// return 1;
// }
// }
// Ok(())
// }


@ -1,18 +0,0 @@
use std::io;
static TEMPLATE: &[u8] = include_bytes!("../templates/template.zig");
pub fn write_template(writer: &mut impl io::Write) -> io::Result<()> {
writer.write_all(TEMPLATE)?;
Ok(())
}
pub fn write_bindings(_writer: &mut impl io::Write) -> io::Result<()> {
// extern "C" {
// #[link_name = "roc__mainForHost_1_exposed"]
// fn roc_main() -> RocStr;
// }
Ok(())
}


@ -1,113 +0,0 @@
use clap::Parser;
use roc_bindgen::bindgen_rs;
use roc_bindgen::load::load_types;
use roc_load::Threading;
use std::ffi::OsStr;
use std::fs::File;
use std::io::{ErrorKind, Write};
use std::path::PathBuf;
use std::process;
/// Printed in error messages if you try to use an unsupported extension.
const SUPPORTED_EXTENSIONS: &str = ".c, .rs, .zig, and .json";
// TODO add an option for --targets so that you can specify
// e.g. 64-bit, 32-bit, *and* 16-bit (which can matter for alignment because of pointers)
#[derive(Debug, Parser)]
#[clap(about)]
struct Opts {
/// The path to the `platform` module .roc file
platform_module: PathBuf,
/// The output file, e.g. `test.rs`
dest: PathBuf,
}
enum OutputType {
Rust,
C,
Zig,
Json,
}
pub fn main() {
let opts = Opts::parse();
let input_path = opts.platform_module;
let output_path = opts.dest;
let output_type = match output_path.extension().and_then(OsStr::to_str) {
Some("rs") => OutputType::Rust,
Some("c") => OutputType::C,
Some("zig") => OutputType::Zig,
Some("json") => OutputType::Json,
Some(other) => {
eprintln!(
"Unsupported output file extension: \".{}\" - currently supported extensions are {}",
other,
SUPPORTED_EXTENSIONS
);
process::exit(1);
}
None => {
eprintln!("The output file path needs to have a file extension in order to tell what output format to use. Currently supported extensions are {}", SUPPORTED_EXTENSIONS);
process::exit(1);
}
};
match load_types(input_path.clone(), Threading::AllAvailable) {
Ok(types_and_targets) => {
let mut file = File::create(output_path.clone()).unwrap_or_else(|err| {
eprintln!(
"Unable to create output file {} - {:?}",
output_path.display(),
err
);
process::exit(1);
});
let mut buf;
match output_type {
OutputType::Rust => {
buf = std::str::from_utf8(bindgen_rs::HEADER).unwrap().to_string();
let body = bindgen_rs::emit(&types_and_targets);
buf.push_str(&body);
}
OutputType::C => todo!("TODO: Generate bindings for C"),
OutputType::Zig => todo!("TODO: Generate bindings for Zig"),
OutputType::Json => todo!("TODO: Generate bindings for JSON"),
};
file.write_all(buf.as_bytes()).unwrap_or_else(|err| {
eprintln!(
"Unable to write bindings to output file {} - {:?}",
output_path.display(),
err
);
process::exit(1);
});
println!(
"🎉 Generated type declarations in:\n\n\t{}",
output_path.display()
);
}
Err(err) => match err.kind() {
ErrorKind::NotFound => {
eprintln!("Platform module file not found: {}", input_path.display());
process::exit(1);
}
error => {
eprintln!(
"Error loading platform module file {} - {:?}",
input_path.display(),
error
);
process::exit(1);
}
},
}
}


@ -1,59 +0,0 @@
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
void* roc_alloc(size_t size, unsigned int alignment) { return malloc(size); }
void* roc_realloc(void* ptr, size_t new_size, size_t old_size, unsigned int alignment) {
return realloc(ptr, new_size);
}
void roc_dealloc(void* ptr, unsigned int alignment) { free(ptr); }
void roc_panic(void* ptr, unsigned int alignment) {
char* msg = (char*)ptr;
fprintf(stderr,
"Application crashed with message\n\n %s\n\nShutting down\n", msg);
exit(0);
}
void* roc_memcpy(void* dest, const void* src, size_t n) {
return memcpy(dest, src, n);
}
void* roc_memset(void* str, int c, size_t n) { return memset(str, c, n); }
///////////////////////////////////////////////////////////////////////////
//
// roc_std
//
///////////////////////////////////////////////////////////////////////////
struct RocStr {
char* bytes;
size_t len;
};
bool is_small_str(struct RocStr str) { return ((ssize_t)str.len) < 0; }
// Determine the length of the string, taking into
// account the small string optimization
size_t roc_str_len(struct RocStr str) {
char* bytes = (char*)&str;
char last_byte = bytes[sizeof(str) - 1];
char last_byte_xored = last_byte ^ 0b10000000;
size_t small_len = (size_t)(last_byte_xored);
size_t big_len = str.len;
// Avoid branch misprediction costs by always
// determining both small_len and big_len,
// so this compiles to a cmov instruction.
if (is_small_str(str)) {
return small_len;
} else {
return big_len;
}
}


@ -1,79 +0,0 @@
#![allow(non_snake_case)]
use core::ffi::c_void;
// TODO don't have these depend on the libc crate; instead, use default
// allocator, built-in memset, etc.
#[no_mangle]
pub unsafe extern "C" fn roc_alloc(size: usize, _alignment: u32) -> *mut c_void {
return libc::malloc(size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_realloc(
c_ptr: *mut c_void,
new_size: usize,
_old_size: usize,
_alignment: u32,
) -> *mut c_void {
return libc::realloc(c_ptr, new_size);
}
#[no_mangle]
pub unsafe extern "C" fn roc_dealloc(c_ptr: *mut c_void, _alignment: u32) {
return libc::free(c_ptr);
}
#[no_mangle]
pub unsafe extern "C" fn roc_panic(c_ptr: *mut c_void, tag_id: u32) {
use std::ffi::CStr;
use std::os::raw::c_char;
match tag_id {
0 => {
let slice = CStr::from_ptr(c_ptr as *const c_char);
let string = slice.to_str().unwrap();
eprintln!("Roc hit a panic: {}", string);
std::process::exit(1);
}
_ => todo!(),
}
}
#[no_mangle]
pub unsafe extern "C" fn roc_memcpy(dst: *mut c_void, src: *mut c_void, n: usize) -> *mut c_void {
libc::memcpy(dst, src, n)
}
#[no_mangle]
pub unsafe extern "C" fn roc_memset(dst: *mut c_void, c: i32, n: usize) -> *mut c_void {
libc::memset(dst, c, n)
}
////////////////////////////////////////////////////////////////////////////
//
// TODO: rust_main should be removed once we use surgical linking everywhere.
// It's just a workaround to get cargo to build an object file the way
// the non-surgical linker needs it to. The surgical linker works on
// executables, not object files, so this workaround is not needed there.
//
////////////////////////////////////////////////////////////////////////////
#[no_mangle]
pub extern "C" fn rust_main() -> i32 {
use roc_std::RocStr;
unsafe {
let roc_str = roc_main();
let len = roc_str.len();
let str_bytes = roc_str.as_bytes().as_ptr() as *const libc::c_void;
if libc::write(1, str_bytes, len) < 0 {
panic!("Writing to stdout failed!");
}
}
// Exit code
0
}


@ -1,71 +0,0 @@
const std = @import("std");
const str = @import("str");
comptime {
// This is a workaround for https://github.com/ziglang/zig/issues/8218
// which is only necessary on macOS.
//
// Once that issue is fixed, we can undo the changes in
// 177cf12e0555147faa4d436e52fc15175c2c4ff0 and go back to passing
// -fcompiler-rt in link.rs instead of doing this. Note that this
// workaround is present in many host.zig files, so make sure to undo
// it everywhere!
if (std.builtin.os.tag == .macos) {
_ = @import("compiler_rt");
}
}
const Align = 2 * @alignOf(usize);
extern fn malloc(size: usize) callconv(.C) ?*align(Align) anyopaque;
extern fn realloc(c_ptr: [*]align(Align) u8, size: usize) callconv(.C) ?*anyopaque;
extern fn free(c_ptr: [*]align(Align) u8) callconv(.C) void;
extern fn memcpy(dst: [*]u8, src: [*]u8, size: usize) callconv(.C) void;
extern fn memset(dst: [*]u8, value: i32, size: usize) callconv(.C) void;
const DEBUG: bool = false;
export fn roc_alloc(size: usize, alignment: u32) callconv(.C) ?*anyopaque {
if (DEBUG) {
var ptr = malloc(size);
const stdout = std.io.getStdOut().writer();
stdout.print("alloc: {d} (alignment {d}, size {d})\n", .{ ptr, alignment, size }) catch unreachable;
return ptr;
} else {
return malloc(size);
}
}
export fn roc_realloc(c_ptr: *anyopaque, new_size: usize, old_size: usize, alignment: u32) callconv(.C) ?*anyopaque {
if (DEBUG) {
const stdout = std.io.getStdOut().writer();
stdout.print("realloc: {d} (alignment {d}, old_size {d})\n", .{ c_ptr, alignment, old_size }) catch unreachable;
}
return realloc(@alignCast(Align, @ptrCast([*]u8, c_ptr)), new_size);
}
export fn roc_dealloc(c_ptr: *anyopaque, alignment: u32) callconv(.C) void {
if (DEBUG) {
const stdout = std.io.getStdOut().writer();
stdout.print("dealloc: {d} (alignment {d})\n", .{ c_ptr, alignment }) catch unreachable;
}
free(@alignCast(Align, @ptrCast([*]u8, c_ptr)));
}
export fn roc_panic(c_ptr: *anyopaque, tag_id: u32) callconv(.C) void {
_ = tag_id;
const stderr = std.io.getStdErr().writer();
const msg = @ptrCast([*:0]const u8, c_ptr);
stderr.print("Application crashed with message\n\n {s}\n\nShutting down\n", .{msg}) catch unreachable;
std.process.exit(0);
}
export fn roc_memcpy(dst: [*]u8, src: [*]u8, size: usize) callconv(.C) void {
return memcpy(dst, src, size);
}
export fn roc_memset(dst: [*]u8, value: i32, size: usize) callconv(.C) void {
return memset(dst, value, size);
}


@ -44,6 +44,7 @@ target-all = [
roc_collections = { path = "../compiler/collections" }
roc_can = { path = "../compiler/can" }
roc_docs = { path = "../docs" }
roc_glue = { path = "../glue" }
roc_parse = { path = "../compiler/parse" }
roc_region = { path = "../compiler/region" }
roc_module = { path = "../compiler/module" }


@ -10,7 +10,7 @@ use roc_module::symbol::{Interns, ModuleId};
use roc_mono::ir::OptLevel;
use roc_reporting::report::RenderTarget;
use roc_target::TargetInfo;
use std::time::{Duration, SystemTime};
use std::time::{Duration, Instant};
use std::{path::PathBuf, thread::JoinHandle};
use target_lexicon::Triple;
use tempfile::Builder;
@ -49,7 +49,7 @@ pub fn build_file<'a>(
target_valgrind: bool,
threading: Threading,
) -> Result<BuiltFile, LoadingProblem<'a>> {
let compilation_start = SystemTime::now();
let compilation_start = Instant::now();
let target_info = TargetInfo::from(target);
// Step 1: compile the app and generate the .o file
@ -121,7 +121,13 @@ pub fn build_file<'a>(
.exposed_to_host
.closure_types
.iter()
.map(|x| x.as_str(&loaded.interns).to_string())
.map(|x| {
format!(
"{}_{}",
x.module_string(&loaded.interns),
x.as_str(&loaded.interns)
)
})
.collect();
let preprocessed_host_path = if emit_wasm {
@ -249,7 +255,7 @@ pub fn build_file<'a>(
);
report_timing(buf, "Emit .o file", code_gen_timing.emit_o_file);
let compilation_end = compilation_start.elapsed().unwrap();
let compilation_end = compilation_start.elapsed();
let size = std::fs::metadata(&app_o_file)
.unwrap_or_else(|err| {
@ -284,7 +290,7 @@ pub fn build_file<'a>(
}
// Step 2: link the precompiled host and compiled app
let link_start = SystemTime::now();
let link_start = Instant::now();
let problems = match (linking_strategy, link_type) {
(LinkingStrategy::Surgical, _) => {
roc_linker::link_preprocessed_host(target, &host_input_path, app_o_file, &binary_path);
@ -333,13 +339,13 @@ pub fn build_file<'a>(
}
};
let linking_time = link_start.elapsed().unwrap();
let linking_time = link_start.elapsed();
if emit_timings {
println!("Finished linking in {} ms\n", linking_time.as_millis());
}
let total_time = compilation_start.elapsed().unwrap();
let total_time = compilation_start.elapsed();
Ok(BuiltFile {
binary_path,
@ -369,7 +375,7 @@ fn spawn_rebuild_thread(
println!("🔨 Rebuilding host...");
}
let rebuild_host_start = SystemTime::now();
let rebuild_host_start = Instant::now();
if !precompiled {
match linking_strategy {
@ -410,7 +416,7 @@ fn spawn_rebuild_thread(
// Copy preprocessed host to executable location.
std::fs::copy(preprocessed_host_path, binary_path.as_path()).unwrap();
}
let rebuild_host_end = rebuild_host_start.elapsed().unwrap();
let rebuild_host_end = rebuild_host_start.elapsed();
rebuild_host_end.as_millis()
})
@ -423,7 +429,7 @@ pub fn check_file(
emit_timings: bool,
threading: Threading,
) -> Result<(program::Problems, Duration), LoadingProblem> {
let compilation_start = SystemTime::now();
let compilation_start = Instant::now();
// only used for generating errors. We don't do code generation, so hardcoding should be fine
// we need monomorphization for `when` exhaustiveness checking
@ -474,7 +480,7 @@ pub fn check_file(
}
}
let compilation_end = compilation_start.elapsed().unwrap();
let compilation_end = compilation_start.elapsed();
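// Sketch (not from this diff) of why SystemTime was swapped for Instant throughout this file:
// Instant is a monotonic clock and its elapsed() is infallible, while SystemTime::elapsed()
// returns a Result because the wall clock can move backwards, hence the removed .unwrap() calls.
//
//     use std::time::{Duration, Instant};
//
//     fn time_it(work: impl FnOnce()) -> Duration {
//         let start = Instant::now();
//         work();
//         start.elapsed() // no Result to unwrap
//     }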
if emit_timings {
println!(


@ -12,7 +12,7 @@ use roc_load::{Expectations, LoadingProblem, Threading};
use roc_module::symbol::{Interns, ModuleId};
use roc_mono::ir::OptLevel;
use roc_region::all::Region;
use roc_repl_cli::expect_mono_module_to_dylib;
use roc_repl_cli::{expect_mono_module_to_dylib, ToplevelExpect};
use roc_target::TargetInfo;
use std::env;
use std::ffi::{CString, OsStr};
@ -43,6 +43,7 @@ pub const CMD_CHECK: &str = "check";
pub const CMD_VERSION: &str = "version";
pub const CMD_FORMAT: &str = "format";
pub const CMD_TEST: &str = "test";
pub const CMD_GLUE: &str = "glue";
pub const FLAG_DEBUG: &str = "debug";
pub const FLAG_DEV: &str = "dev";
@ -59,6 +60,7 @@ pub const FLAG_VALGRIND: &str = "valgrind";
pub const FLAG_CHECK: &str = "check";
pub const ROC_FILE: &str = "ROC_FILE";
pub const ROC_DIR: &str = "ROC_DIR";
pub const GLUE_FILE: &str = "GLUE_FILE";
pub const DIRECTORY_OR_FILES: &str = "DIRECTORY_OR_FILES";
pub const ARGS_FOR_APP: &str = "ARGS_FOR_APP";
@ -171,7 +173,7 @@ pub fn build_app<'a>() -> Command<'a> {
)
)
.subcommand(Command::new(CMD_TEST)
.about("Run all top-level `expect`s in a root module and any modules it imports.")
.about("Run all top-level `expect`s in a main module and any modules it imports.")
.arg(flag_optimize.clone())
.arg(flag_max_threads.clone())
.arg(flag_opt_size.clone())
@ -183,7 +185,7 @@ pub fn build_app<'a>() -> Command<'a> {
.arg(flag_valgrind.clone())
.arg(
Arg::new(ROC_FILE)
.help("The .roc file for the root module")
.help("The .roc file for the main module")
.allow_invalid_utf8(true)
.required(false)
.default_value(DEFAULT_ROC_FILENAME)
@ -246,6 +248,21 @@ pub fn build_app<'a>() -> Command<'a> {
.allow_invalid_utf8(true)
)
)
.subcommand(Command::new(CMD_GLUE)
.about("Generate glue code between a platform's Roc API and its host language.")
.arg(
Arg::new(ROC_FILE)
.help("The .roc file for the platform module")
.allow_invalid_utf8(true)
.required(true)
)
.arg(
Arg::new(GLUE_FILE)
.help("The filename for the generated glue code. Currently, this must be a .rs file because only Rust glue generation is supported so far.")
.allow_invalid_utf8(true)
.required(true)
)
)
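// Hypothetical invocation (paths invented for illustration):
//   roc glue my_platform/main.roc my_platform/glue.rs
// i.e. the platform's .roc module first, then the .rs output file; as the help text
// above notes, only Rust glue generation is supported so far.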
.trailing_var_arg(true)
.arg(flag_optimize)
.arg(flag_max_threads.clone())
@ -404,7 +421,7 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
let arena = &bumpalo::Bump::new();
let interns = arena.alloc(interns);
use roc_gen_llvm::run_jit_function;
use roc_gen_llvm::try_run_jit_function;
let mut failed = 0;
let mut passed = 0;
@ -424,18 +441,45 @@ pub fn test(matches: &ArgMatches, triple: Triple) -> io::Result<i32> {
);
for expect in expects {
// clear the state
libc::memset(shared_ptr.cast(), 0, SHM_SIZE as _);
*((shared_ptr as *mut usize).add(1)) = 16;
run_jit_function!(lib, expect, (), |v: ()| v);
let result: Result<(), String> = try_run_jit_function!(lib, expect.name, (), |v: ()| v);
let shared_memory_ptr: *const u8 = shared_ptr.cast();
let buffer = std::slice::from_raw_parts(shared_memory_ptr, SHM_SIZE as _);
let buffer =
std::slice::from_raw_parts(shared_memory_ptr.add(16), SHM_SIZE as usize - 16);
if buffer.iter().any(|b| *b != 0) {
if let Err(roc_panic_message) = result {
failed += 1;
render_expect_failure(arena, &mut expectations, interns, shared_memory_ptr);
render_expect_panic(
arena,
expect,
&roc_panic_message,
&mut expectations,
interns,
);
println!();
} else if buffer.iter().any(|b| *b != 0) {
failed += 1;
let count = *(shared_ptr as *const usize).add(0);
let mut offset = 16;
for _ in 0..count {
offset += render_expect_failure(
arena,
Some(expect),
&mut expectations,
interns,
shared_memory_ptr.add(offset),
);
println!();
}
} else {
passed += 1;
}
@ -699,8 +743,8 @@ pub fn build(
std::mem::forget(bytes);
x
} else {
println!(
"\x1B[{}m{}\x1B[39m {} and \x1B[{}m{}\x1B[39m {} found in {} ms.\n\nYou can run the program anyway with: \x1B[32mroc run {}\x1B[39m",
let mut output = format!(
"\x1B[{}m{}\x1B[39m {} and \x1B[{}m{}\x1B[39m {} found in {} ms.\n\nYou can run the program anyway with \x1B[32mroc run",
if problems.errors == 0 {
32 // green
} else {
@ -724,8 +768,15 @@ pub fn build(
"warnings"
},
total_time.as_millis(),
filename.to_string_lossy()
);
// If you're running "main.roc" then you can just do `roc run`
// to re-run the program.
if filename != DEFAULT_ROC_FILENAME {
output.push(' ');
output.push_str(&filename.to_string_lossy());
}
println!("{}\x1B[39m", output);
Ok(problems.exit_code())
}
@ -1001,7 +1052,13 @@ unsafe fn roc_run_native_debug(
let shared_memory_ptr: *const u8 = shared_ptr.cast();
render_expect_failure(arena, &mut expectations, interns, shared_memory_ptr);
render_expect_failure(
arena,
None,
&mut expectations,
interns,
shared_memory_ptr,
);
}
_ => println!("received signal {}", sig),
}
@ -1011,12 +1068,64 @@ unsafe fn roc_run_native_debug(
}
}
fn render_expect_panic<'a>(
_arena: &'a Bump,
expect: ToplevelExpect,
message: &str,
expectations: &mut VecMap<ModuleId, Expectations>,
interns: &'a Interns,
) {
use roc_reporting::report::Report;
use roc_reporting::report::RocDocAllocator;
use ven_pretty::DocAllocator;
let module_id = expect.symbol.module_id();
let data = expectations.get_mut(&module_id).unwrap();
// TODO cache these line offsets?
let path = &data.path;
let filename = data.path.to_owned();
let file_string = std::fs::read_to_string(path).unwrap();
let src_lines: Vec<_> = file_string.lines().collect();
let line_info = roc_region::all::LineInfo::new(&file_string);
let line_col_region = line_info.convert_region(expect.region);
let alloc = RocDocAllocator::new(&src_lines, module_id, interns);
let doc = alloc.stack([
alloc.text("This expectation crashed while running:"),
alloc.region(line_col_region),
alloc.text("The crash reported this message:"),
alloc.text(message),
]);
let report = Report {
title: "EXPECT FAILED".into(),
doc,
filename,
severity: roc_reporting::report::Severity::RuntimeError,
};
let mut buf = String::new();
report.render(
roc_reporting::report::RenderTarget::ColorTerminal,
&mut buf,
&alloc,
&roc_reporting::report::DEFAULT_PALETTE,
);
println!("{}", buf);
}
fn render_expect_failure<'a>(
arena: &'a Bump,
expect: Option<ToplevelExpect>,
expectations: &mut VecMap<ModuleId, Expectations>,
interns: &'a Interns,
start: *const u8,
) {
) -> usize {
use roc_reporting::report::Report;
use roc_reporting::report::RocDocAllocator;
use ven_pretty::DocAllocator;
@ -1031,8 +1140,6 @@ fn render_expect_failure<'a>(
let module_id: ModuleId = unsafe { std::mem::transmute(module_id_bytes) };
let data = expectations.get_mut(&module_id).unwrap();
let current = data.expectations.get(&region).unwrap();
let subs = arena.alloc(&mut data.subs);
// TODO cache these line offsets?
let path = &data.path;
@ -1041,13 +1148,34 @@ fn render_expect_failure<'a>(
let src_lines: Vec<_> = file_string.lines().collect();
let line_info = roc_region::all::LineInfo::new(&file_string);
let line_col_region = line_info.convert_region(region);
let display_region = match expect {
Some(expect) => {
if !expect.region.contains(&region) {
// this is an expect outside of a toplevel expect,
// likely in some function we called
region
} else {
Region::across_all([&expect.region, &region])
}
}
None => region,
};
let line_col_region = line_info.convert_region(display_region);
let alloc = RocDocAllocator::new(&src_lines, module_id, interns);
// 8 bytes for region, 4 for module id
let start_offset = 12;
let current = match data.expectations.get(&region) {
None => {
invalid_regions(alloc, filename, line_info, region);
return 0;
}
Some(current) => current,
};
let subs = arena.alloc(&mut data.subs);
let (symbols, variables): (Vec<_>, Vec<_>) = current.iter().map(|(a, b)| (*a, *b)).unzip();
let error_types: Vec<_> = variables
@ -1058,7 +1186,7 @@ fn render_expect_failure<'a>(
})
.collect();
let expressions = roc_repl_expect::get_values(
let (offset, expressions) = roc_repl_expect::get_values(
target_info,
arena,
subs,
@ -1124,6 +1252,44 @@ fn render_expect_failure<'a>(
);
println!("{}", buf);
offset
}
fn invalid_regions(
alloc: roc_reporting::report::RocDocAllocator,
filename: PathBuf,
line_info: roc_region::all::LineInfo,
region: Region,
) {
use ven_pretty::DocAllocator;
let line_col_region = line_info.convert_region(region);
let doc = alloc.stack([
alloc.text("Internal expect failure"),
alloc.region(line_col_region),
]);
let report = roc_reporting::report::Report {
title: "EXPECT FAILED".into(),
doc,
filename,
severity: roc_reporting::report::Severity::RuntimeError,
};
let mut buf = String::new();
report.render(
roc_reporting::report::RenderTarget::ColorTerminal,
&mut buf,
&alloc,
&roc_reporting::report::DEFAULT_PALETTE,
);
println!("{}", buf);
panic!();
}
#[cfg(target_os = "linux")]


@ -2,11 +2,12 @@ use roc_build::link::LinkType;
use roc_cli::build::check_file;
use roc_cli::{
build_app, format, test, BuildConfig, FormatMode, Target, CMD_BUILD, CMD_CHECK, CMD_DOCS,
CMD_EDIT, CMD_FORMAT, CMD_REPL, CMD_RUN, CMD_TEST, CMD_VERSION, DIRECTORY_OR_FILES, FLAG_CHECK,
FLAG_LIB, FLAG_NO_LINK, FLAG_TARGET, FLAG_TIME, ROC_FILE,
CMD_EDIT, CMD_FORMAT, CMD_GLUE, CMD_REPL, CMD_RUN, CMD_TEST, CMD_VERSION, DIRECTORY_OR_FILES,
FLAG_CHECK, FLAG_LIB, FLAG_NO_LINK, FLAG_TARGET, FLAG_TIME, GLUE_FILE, ROC_FILE,
};
use roc_docs::generate_docs_html;
use roc_error_macros::user_error;
use roc_glue;
use roc_load::{LoadingProblem, Threading};
use std::fs::{self, FileType};
use std::io;
@ -64,6 +65,18 @@ fn main() -> io::Result<()> {
Ok(1)
}
}
Some((CMD_GLUE, matches)) => {
let input_path = Path::new(matches.value_of_os(ROC_FILE).unwrap());
let output_path = Path::new(matches.value_of_os(GLUE_FILE).unwrap());
if Some("rs") == output_path.extension().and_then(OsStr::to_str) {
roc_glue::generate(input_path, output_path)
} else {
eprintln!("Currently, `roc glue` only supports generating Rust glue files (with the .rs extension). In the future, the plan is to decouple `roc glue` from any particular output format, by having it accept a second .roc file which gets executed as a plugin to generate glue code for any desired language. However, this has not yet been implemented, and for now only .rs is supported.");
Ok(1)
}
}
Some((CMD_BUILD, matches)) => {
let target: Target = matches.value_of_t(FLAG_TARGET).unwrap_or_default();


@ -400,14 +400,17 @@ mod cli_run {
expected_ending:"Which platform am I running on now?\n",
use_valgrind: true,
},
platformSwitchingC:"platform-switching/c-platform" => Example {
filename: "rocLovesC.roc",
executable_filename: "rocLovesC",
stdin: &[],
input_file: None,
expected_ending:"Roc <3 C!\n",
use_valgrind: true,
},
// We exclude the C platform-switching example
// because the main platform-switching example runs the C platform.
// If we don't, a race condition leads to test flakiness.
// platformSwitchingC:"platform-switching/c-platform" => Example {
// filename: "rocLovesC.roc",
// executable_filename: "rocLovesC",
// stdin: &[],
// input_file: None,
// expected_ending:"Roc <3 C!\n",
// use_valgrind: true,
// },
platformSwitchingRust:"platform-switching/rust-platform" => Example {
filename: "rocLovesRust.roc",
executable_filename: "rocLovesRust",
@ -816,7 +819,10 @@ mod cli_run {
// Some platform-switching examples live in nested directories
if example_dir_name == "platform-switching" {
for sub_dir in [
"c-platform",
// We exclude the C platform-switching example
// because the main platform-switching example runs the C platform.
// If we don't, a race condition leads to test flakiness.
// "c-platform",
"rust-platform",
"swift-platform",
"web-assembly-platform",


@ -65,22 +65,18 @@ where
run_with_stdin(&roc_binary_path, args, stdin_vals)
}
pub fn run_bindgen<I, S>(args: I) -> Out
pub fn run_glue<I, S>(args: I) -> Out
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
run_with_stdin(&path_to_bindgen_binary(), args, &[])
run_with_stdin(&path_to_roc_binary(), args, &[])
}
pub fn path_to_roc_binary() -> PathBuf {
path_to_binary("roc")
}
pub fn path_to_bindgen_binary() -> PathBuf {
path_to_binary("roc-bindgen")
}
pub fn path_to_binary(binary_name: &str) -> PathBuf {
// Adapted from https://github.com/volta-cli/volta/blob/cefdf7436a15af3ce3a38b8fe53bb0cfdb37d3dd/tests/acceptance/support/sandbox.rs#L680
// by the Volta Contributors - license information can be found in


@ -1006,7 +1006,27 @@ fn lowlevel_spec(
let old_value = builder.add_bag_get(block, bag)?;
let new_list = with_new_heap_cell(builder, block, bag)?;
builder.add_make_tuple(block, &[new_list, old_value])
// depending on the types, the list or value will come first in the struct
let fields = match layout {
Layout::Struct { field_layouts, .. } => field_layouts,
_ => unreachable!(),
};
match fields {
[Layout::Builtin(Builtin::List(_)), Layout::Builtin(Builtin::List(_))] => {
// field name is the tie breaker, list is first in
// { list : List a, value : a }
builder.add_make_tuple(block, &[new_list, old_value])
}
[Layout::Builtin(Builtin::List(_)), _] => {
builder.add_make_tuple(block, &[new_list, old_value])
}
[_, Layout::Builtin(Builtin::List(_))] => {
builder.add_make_tuple(block, &[old_value, new_list])
}
_ => unreachable!(),
}
}
ListSwap => {
let list = env.symbols[&arguments[0]];


@ -5,7 +5,7 @@ use roc_module::symbol::{Interns, ModuleId};
use roc_mono::ir::OptLevel;
use roc_region::all::LineInfo;
use std::path::{Path, PathBuf};
use std::time::{Duration, SystemTime};
use std::time::{Duration, Instant};
use roc_collections::all::MutMap;
#[cfg(feature = "target-wasm32")]
@ -199,7 +199,7 @@ pub fn gen_from_mono_module_llvm(
use inkwell::module::Linkage;
use inkwell::targets::{FileType, RelocMode};
let code_gen_start = SystemTime::now();
let code_gen_start = Instant::now();
// Generate the binary
let target_info = roc_target::TargetInfo::from(target);
@ -292,8 +292,8 @@ pub fn gen_from_mono_module_llvm(
// Uncomment this to see the module's optimized LLVM instruction output:
// env.module.print_to_stderr();
let code_gen = code_gen_start.elapsed().unwrap();
let emit_o_file_start = SystemTime::now();
let code_gen = code_gen_start.elapsed();
let emit_o_file_start = Instant::now();
// annotate the LLVM IR output with debug info
// so errors are reported with the line number of the LLVM source
@ -389,7 +389,7 @@ pub fn gen_from_mono_module_llvm(
}
}
let emit_o_file = emit_o_file_start.elapsed().unwrap();
let emit_o_file = emit_o_file_start.elapsed();
CodeGenTiming {
code_gen,
@ -442,7 +442,7 @@ fn gen_from_mono_module_dev_wasm32(
app_o_file: &Path,
preprocessed_host_path: &Path,
) -> CodeGenTiming {
let code_gen_start = SystemTime::now();
let code_gen_start = Instant::now();
let MonomorphizedModule {
module_id,
procedures,
@ -482,8 +482,8 @@ fn gen_from_mono_module_dev_wasm32(
let final_binary_bytes =
roc_gen_wasm::build_app_binary(&env, &mut interns, host_module, procedures);
let code_gen = code_gen_start.elapsed().unwrap();
let emit_o_file_start = SystemTime::now();
let code_gen = code_gen_start.elapsed();
let emit_o_file_start = Instant::now();
// The app_o_file is actually the final binary
std::fs::write(&app_o_file, &final_binary_bytes).unwrap_or_else(|e| {
@ -494,7 +494,7 @@ fn gen_from_mono_module_dev_wasm32(
)
});
let emit_o_file = emit_o_file_start.elapsed().unwrap();
let emit_o_file = emit_o_file_start.elapsed();
CodeGenTiming {
code_gen,
@ -508,7 +508,7 @@ fn gen_from_mono_module_dev_assembly(
target: &target_lexicon::Triple,
app_o_file: &Path,
) -> CodeGenTiming {
let code_gen_start = SystemTime::now();
let code_gen_start = Instant::now();
let lazy_literals = true;
let generate_allocators = false; // provided by the platform
@ -531,15 +531,15 @@ fn gen_from_mono_module_dev_assembly(
let module_object = roc_gen_dev::build_module(&env, &mut interns, target, procedures);
let code_gen = code_gen_start.elapsed().unwrap();
let emit_o_file_start = SystemTime::now();
let code_gen = code_gen_start.elapsed();
let emit_o_file_start = Instant::now();
let module_out = module_object
.write()
.expect("failed to build output object");
std::fs::write(&app_o_file, module_out).expect("failed to write object to file");
let emit_o_file = emit_o_file_start.elapsed().unwrap();
let emit_o_file = emit_o_file_start.elapsed();
CodeGenTiming {
code_gen,


@ -143,6 +143,7 @@ comptime {
exportStrFn(str.strTrim, "trim");
exportStrFn(str.strTrimLeft, "trim_left");
exportStrFn(str.strTrimRight, "trim_right");
exportStrFn(str.strCloneTo, "clone_to");
inline for (INTEGERS) |T| {
str.exportFromInt(T, ROC_BUILTINS ++ "." ++ STR ++ ".from_int.");


@ -221,8 +221,7 @@ pub const RocStr = extern struct {
}
fn asArray(self: RocStr) [@sizeOf(RocStr)]u8 {
const as_int = @ptrToInt(&self);
const as_ptr = @intToPtr([*]u8, as_int);
const as_ptr = @ptrCast([*]const u8, &self);
const slice = as_ptr[0..@sizeOf(RocStr)];
return slice.*;
@ -2521,3 +2520,35 @@ test "getScalarUnsafe" {
try expectEqual(result.scalar, @intCast(u32, expected));
try expectEqual(result.bytesParsed, 1);
}
pub fn strCloneTo(
ptr: [*]u8,
offset: usize,
string: RocStr,
) callconv(.C) usize {
const WIDTH: usize = @sizeOf(RocStr);
if (string.isSmallStr()) {
const array: [@sizeOf(RocStr)]u8 = @bitCast([@sizeOf(RocStr)]u8, string);
var i: usize = 0;
while (i < array.len) : (i += 1) {
ptr[offset + i] = array[i];
}
return offset + WIDTH;
} else {
const slice = string.asSlice();
var relative = string;
relative.str_bytes = @intToPtr(?[*]u8, offset + WIDTH); // i.e. just after the string struct
// write the string struct
const array = relative.asArray();
@memcpy(ptr + offset, &array, WIDTH);
// write the string bytes just after the struct
@memcpy(ptr + offset + WIDTH, slice.ptr, slice.len);
return offset + WIDTH + slice.len;
}
}
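// Layout written at ptr + offset (summary of the code above): small strings are copied inline
// as the RocStr struct itself; for big strings, the RocStr header is written first with its
// bytes pointer stored as a relative offset (offset + WIDTH), and the UTF-8 bytes follow
// immediately after the header.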


@ -81,7 +81,7 @@ withCapacity = \n -> @Dict (List.withCapacity n)
get : Dict k v, k -> Result v [KeyNotFound]*
get = \@Dict list, needle ->
when List.find list (\Pair key _ -> key == needle) is
when List.findFirst list (\Pair key _ -> key == needle) is
Ok (Pair _ v) ->
Ok v
@ -94,7 +94,7 @@ walk = \@Dict list, initialState, transform ->
insert : Dict k v, k, v -> Dict k v
insert = \@Dict list, k, v ->
when List.findIndex list (\Pair key _ -> key == k) is
when List.findFirstIndex list (\Pair key _ -> key == k) is
Err NotFound ->
insertFresh (@Dict list) k v
@ -109,7 +109,7 @@ len = \@Dict list ->
remove : Dict k v, k -> Dict k v
remove = \@Dict list, key ->
when List.findIndex list (\Pair k _ -> k == key) is
when List.findFirstIndex list (\Pair k _ -> k == key) is
Err NotFound ->
@Dict list


@ -8,6 +8,7 @@ interface Json
Str,
Encode.{
Encoder,
EncoderFormatting,
custom,
appendWith,
u8,
@ -31,7 +32,28 @@ interface Json
},
]
Json := {}
Json := {} has [
EncoderFormatting {
u8,
u16,
u32,
u64,
u128,
i8,
i16,
i32,
i64,
i128,
f32,
f64,
dec,
bool,
string,
list,
record,
tag,
},
]
toUtf8 = @Json {}


@ -44,11 +44,17 @@ interface List
any,
takeFirst,
takeLast,
find,
findIndex,
findFirst,
findLast,
findFirstIndex,
findLastIndex,
sublist,
intersperse,
split,
splitFirst,
splitLast,
startsWith,
endsWith,
all,
dropIf,
sortAsc,
@ -770,30 +776,41 @@ maxHelp = \list, initial ->
## You may know a similar function named `concatMap` in other languages.
joinMap : List a, (a -> List b) -> List b
joinMap = \list, mapper ->
List.walk list [] (\state, elem -> List.concat state (mapper elem))
List.walk list [] \state, elem -> List.concat state (mapper elem)
## Returns the first element of the list satisfying a predicate function.
## If no satisfying element is found, an `Err NotFound` is returned.
find : List elem, (elem -> Bool) -> Result elem [NotFound]*
find = \array, pred ->
findFirst : List elem, (elem -> Bool) -> Result elem [NotFound]*
findFirst = \list, pred ->
callback = \_, elem ->
if pred elem then
Break elem
else
Continue {}
when List.iterate array {} callback is
Continue {} ->
Err NotFound
when List.iterate list {} callback is
Continue {} -> Err NotFound
Break found -> Ok found
Break found ->
Ok found
## Returns the last element of the list satisfying a predicate function.
## If no satisfying element is found, an `Err NotFound` is returned.
findLast : List elem, (elem -> Bool) -> Result elem [NotFound]*
findLast = \list, pred ->
callback = \_, elem ->
if pred elem then
Break elem
else
Continue {}
when List.iterateBackwards list {} callback is
Continue {} -> Err NotFound
Break found -> Ok found
## Returns the index at which the first element in the list
## satisfying a predicate function can be found.
## If no satisfying element is found, an `Err NotFound` is returned.
findIndex : List elem, (elem -> Bool) -> Result Nat [NotFound]*
findIndex = \list, matcher ->
findFirstIndex : List elem, (elem -> Bool) -> Result Nat [NotFound]*
findFirstIndex = \list, matcher ->
foundIndex = List.iterate list 0 \index, elem ->
if matcher elem then
Break index
@ -804,6 +821,21 @@ findIndex = \list, matcher ->
Break index -> Ok index
Continue _ -> Err NotFound
## Returns the index at which the last element in the list
## satisfying a predicate function can be found.
## If no satisfying element is found, an `Err NotFound` is returned.
findLastIndex : List elem, (elem -> Bool) -> Result Nat [NotFound]*
findLastIndex = \list, matches ->
foundIndex = List.iterateBackwards list (List.len list) \prevIndex, elem ->
if matches elem then
Break (prevIndex - 1)
else
Continue (prevIndex - 1)
when foundIndex is
Break index -> Ok index
Continue _ -> Err NotFound
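# For example (illustrative): List.findLastIndex [1, 2, 3, 4] Num.isOdd == Ok 2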
## Returns a subsection of the given list, beginning at the `start` index and
## including a total of `len` elements.
##
@ -843,6 +875,33 @@ intersperse = \list, sep ->
List.dropLast newList
## Returns `True` if the first list starts with the second list.
##
## If the second list is empty, this always returns `True`; every list
## is considered to "start with" an empty list.
##
## If the first list is empty, this only returns `True` if the second list is empty.
startsWith : List elem, List elem -> Bool
startsWith = \list, prefix ->
# TODO once we have seamless slices, verify that this wouldn't
# have better performance with a function like List.compareSublists
prefix == List.sublist list { start: 0, len: List.len prefix }
## Returns `True` if the first list ends with the second list.
##
## If the second list is empty, this always returns `True`; every list
## is considered to "end with" an empty list.
##
## If the first list is empty, this only returns `True` if the second list is empty.
endsWith : List elem, List elem -> Bool
endsWith = \list, suffix ->
# TODO once we have seamless slices, verify that this wouldn't
# have better performance with a function like List.compareSublists
length = List.len suffix
start = Num.subSaturated (List.len list) length
suffix == List.sublist list { start, len: length }
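# Illustrative checks, consistent with the docs above:
# List.startsWith [1, 2, 3] [1, 2] == True
# List.startsWith [1, 2, 3] [] == True
# List.endsWith [1, 2, 3] [2, 3] == True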
## Splits the list into two lists, around the given index.
##
## The returned lists are labeled `before` and `others`. The `before` list will
@ -859,6 +918,36 @@ split = \elements, userSplitIndex ->
{ before, others }
## Returns the elements before the first occurrence of a delimiter, as well as the
## remaining elements after that occurrence. If the delimiter is not found, returns `Err`.
##
## List.splitFirst [Foo, Z, Bar, Z, Baz] Z == Ok { before: [Foo], after: [Bar, Z, Baz] }
splitFirst : List elem, elem -> Result { before : List elem, after : List elem } [NotFound]*
splitFirst = \list, delimiter ->
when List.findFirstIndex list (\elem -> elem == delimiter) is
Ok index ->
before = List.sublist list { start: 0, len: index }
after = List.sublist list { start: index + 1, len: List.len list - index - 1 }
Ok { before, after }
Err NotFound -> Err NotFound
## Returns the elements before the last occurrence of a delimiter, as well as the
## remaining elements after that occurrence. If the delimiter is not found, returns `Err`.
##
## List.splitLast [Foo, Z, Bar, Z, Baz] Z == Ok { before: [Foo, Z, Bar], after: [Baz] }
splitLast : List elem, elem -> Result { before : List elem, after : List elem } [NotFound]*
splitLast = \list, delimiter ->
when List.findLastIndex list (\elem -> elem == delimiter) is
Ok index ->
before = List.sublist list { start: 0, len: index }
after = List.sublist list { start: index + 1, len: List.len list - index - 1 }
Ok { before, after }
Err NotFound -> Err NotFound
## Like [List.map], except the transformation function returns a [Result].
## If that function ever returns `Err`, [mapTry] immediately returns that `Err`.
## If it returns `Ok` for every element, [mapTry] returns `Ok` with the transformed list.
@ -895,11 +984,26 @@ iterHelp : List elem, s, (s, elem -> [Continue s, Break b]), Nat, Nat -> [Contin
iterHelp = \list, state, f, index, length ->
if index < length then
when f state (List.getUnsafe list index) is
Continue nextState ->
iterHelp list nextState f (index + 1) length
Continue nextState -> iterHelp list nextState f (index + 1) length
Break b -> Break b
else
Continue state
Break b ->
Break b
## Primitive for iterating over a List from back to front, being able to decide at every
## element whether to continue
iterateBackwards : List elem, s, (s, elem -> [Continue s, Break b]) -> [Continue s, Break b]
iterateBackwards = \list, init, func ->
iterBackwardsHelp list init func (List.len list)
## internal helper
iterBackwardsHelp : List elem, s, (s, elem -> [Continue s, Break b]), Nat -> [Continue s, Break b]
iterBackwardsHelp = \list, state, f, prevIndex ->
if prevIndex > 0 then
index = prevIndex - 1
when f state (List.getUnsafe list index) is
Continue nextState -> iterBackwardsHelp list nextState f index
Break b -> Break b
else
Continue state
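# For example (illustrative), finding the last even element:
# List.iterateBackwards [1, 2, 3, 4, 5] {} \_, n -> if Num.isEven n then Break n else Continue {}
# evaluates to Break 4.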


@ -336,6 +336,7 @@ pub const STR_GET_UNSAFE: &str = "roc_builtins.str.get_unsafe";
pub const STR_RESERVE: &str = "roc_builtins.str.reserve";
pub const STR_APPEND_SCALAR: &str = "roc_builtins.str.append_scalar";
pub const STR_GET_SCALAR_UNSAFE: &str = "roc_builtins.str.get_scalar_unsafe";
pub const STR_CLONE_TO: &str = "roc_builtins.str.clone_to";
pub const LIST_MAP: &str = "roc_builtins.list.map";
pub const LIST_MAP2: &str = "roc_builtins.list.map2";


@ -4,7 +4,10 @@ use roc_collections::{all::MutMap, VecMap, VecSet};
use roc_error_macros::internal_error;
use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::Region;
use roc_types::{subs::Variable, types::Type};
use roc_types::{
subs::Variable,
types::{MemberImpl, Type},
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MemberVariables {
@ -68,10 +71,7 @@ impl AbilityMemberData<Resolved> {
}
/// (member, specialization type) -> specialization
pub type SpecializationsMap<Phase> = VecMap<(Symbol, Symbol), MemberSpecialization<Phase>>;
pub type PendingSpecializations = SpecializationsMap<Pending>;
pub type ResolvedSpecializations = SpecializationsMap<Resolved>;
pub type ImplMap = VecMap<(Symbol, Symbol), MemberImpl>;
/// Solved lambda sets for an ability member specialization. For example, if we have
///
@ -86,15 +86,13 @@ pub type SpecializationLambdaSets = VecMap<u8, Variable>;
/// A particular specialization of an ability member.
#[derive(Debug, Clone)]
pub struct MemberSpecialization<Phase: ResolvePhase> {
pub struct MemberSpecializationInfo<Phase: ResolvePhase> {
_phase: std::marker::PhantomData<Phase>,
pub symbol: Symbol,
pub specialization_lambda_sets: SpecializationLambdaSets,
}
impl MemberSpecialization<Resolved> {
impl MemberSpecializationInfo<Resolved> {
pub fn new(symbol: Symbol, specialization_lambda_sets: SpecializationLambdaSets) -> Self {
Self {
_phase: Default::default(),
@ -111,6 +109,22 @@ static_assertions::assert_eq_size!(SpecializationId, Option<SpecializationId>);
pub enum SpecializationLambdaSetError {}
/// A key into a particular implementation of an ability member for an opaque type.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct ImplKey {
pub opaque: Symbol,
pub ability_member: Symbol,
}
/// Fully-resolved implementation of an ability member for an opaque type.
/// This is only fully known after type solving of the owning module.
#[derive(Clone, Debug)]
pub enum ResolvedImpl {
Impl(MemberSpecializationInfo<Resolved>),
Derived,
Error,
}
/// Stores information about what abilities exist in a scope, what it means to implement an
/// ability, and what types implement them.
// TODO(abilities): this should probably go on the Scope, I don't put it there for now because we
@ -120,23 +134,28 @@ pub enum SpecializationLambdaSetError {}
pub struct IAbilitiesStore<Phase: ResolvePhase> {
/// Maps an ability to the members defining it.
members_of_ability: MutMap<Symbol, Vec<Symbol>>,
/// Map of symbols that specialize an ability member to the root ability symbol name,
/// and the type the specialization claims to implement the ability for.
///
/// For example, in the program
///
/// Hash has hash : a -> U64 | a has Hash
///
/// Id := {} implements [Hash {hash: myHash}]
/// myHash = \@Id n -> n
///
/// We keep the mapping myHash->(hash, Id)
specialization_to_root: MutMap<Symbol, ImplKey>,
/// Information about all members composing abilities.
ability_members: MutMap<Symbol, AbilityMemberData<Phase>>,
/// Map of symbols that specialize an ability member to the root ability symbol name.
/// For example, for the program
/// Hash has hash : a -> U64 | a has Hash
/// ^^^^ gets the symbol "#hash"
/// hash = \@Id n -> n
/// ^^^^ gets the symbol "#hash1"
///
/// We keep the mapping #hash1->#hash
specialization_to_root: MutMap<Symbol, Symbol>,
/// Maps a tuple (member, type) specifying that `type` has an implementation of an ability
/// member `member`, to how that implementation is defined.
declared_implementations: ImplMap,
/// Maps a tuple (member, type) specifying that `type` declares an implementation of an ability
/// member `member`, to the exact symbol that implements the ability.
declared_specializations: SpecializationsMap<Phase>,
/// Information about specialized ability member implementations for a type.
specializations: MutMap<Symbol, MemberSpecializationInfo<Phase>>,
next_specialization_id: NonZeroU32,
@ -148,14 +167,15 @@ pub struct IAbilitiesStore<Phase: ResolvePhase> {
impl<Phase: ResolvePhase> Default for IAbilitiesStore<Phase> {
fn default() -> Self {
Self {
members_of_ability: Default::default(),
ability_members: Default::default(),
specialization_to_root: Default::default(),
declared_specializations: Default::default(),
next_specialization_id:
// Safety: 1 != 0
unsafe { NonZeroU32::new_unchecked(1) },
resolved_specializations: Default::default(),
members_of_ability: Default::default(),
specialization_to_root: Default::default(),
ability_members: Default::default(),
declared_implementations: Default::default(),
specializations: Default::default(),
next_specialization_id:
// Safety: 1 != 0
unsafe { NonZeroU32::new_unchecked(1) },
resolved_specializations: Default::default(),
}
}
}
@ -207,22 +227,44 @@ impl<Phase: ResolvePhase> IAbilitiesStore<Phase> {
&self.ability_members
}
#[inline(always)]
fn register_one_declared_impl(&mut self, impl_key: ImplKey, member_impl: MemberImpl) {
if let MemberImpl::Impl(specialization_symbol) = member_impl {
self.specialization_to_root
.insert(specialization_symbol, impl_key);
}
self.declared_implementations
.insert((impl_key.ability_member, impl_key.opaque), member_impl);
}
/// Records the implementations of an ability an opaque type declares to have.
///
/// Calling this function does not validate that the implementations are correctly specializing
/// in their definition, nor does it store type information about the implementations.
///
/// It is expected that during type solving, the owner of the abilities store marks the claimed
/// implementation as either a proper or erroring implementation using
/// [`Self::mark_implementation`].
pub fn register_declared_implementations(
&mut self,
implementing_type: Symbol,
// (ability member, implementation)
implementations: impl IntoIterator<Item = (Symbol, MemberImpl)>,
) {
for (member, member_impl) in implementations.into_iter() {
let impl_key = ImplKey {
opaque: implementing_type,
ability_member: member,
};
self.register_one_declared_impl(impl_key, member_impl);
}
}
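// Hypothetical usage sketch (symbol names invented): when canonicalizing
//   Id := {} has [Hash { hash: myHash }]
// the caller could do something like
//   store.register_declared_implementations(id_symbol, [(hash_member, MemberImpl::Impl(my_hash_symbol))]);
// and type solving later confirms or rejects the claim via `mark_implementation`.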
/// Returns whether a symbol is declared to specialize an ability member.
pub fn is_specialization_name(&self, symbol: Symbol) -> bool {
self.specialization_to_root.contains_key(&symbol)
}
/// Records that the symbol `specializing_symbol` claims to specialize `ability_member`; for
/// example the symbol of `hash : Id -> U64` specializing `hash : a -> U64 | a has Hash`.
pub fn register_specializing_symbol(
&mut self,
specializing_symbol: Symbol,
ability_member: Symbol,
) {
self.specialization_to_root
.insert(specializing_symbol, ability_member);
}
pub fn members_of_ability(&self, ability: Symbol) -> Option<&[Symbol]> {
self.members_of_ability.get(&ability).map(|v| v.as_ref())
}
@ -237,15 +279,23 @@ impl<Phase: ResolvePhase> IAbilitiesStore<Phase> {
id
}
/// Finds the implementation key for a symbol specializing the ability member, if it specializes any.
/// For example, suppose `hashId : Id -> U64` specializes `hash : a -> U64 | a has Hash`.
/// Calling this with `hashId` would retrieve (hash, Id).
pub fn impl_key(&self, specializing_symbol: Symbol) -> Option<&ImplKey> {
self.specialization_to_root.get(&specializing_symbol)
}
/// Creates a store from [`self`] that closes over the abilities/members given by the
/// imported `symbols`, and their specializations (if any).
pub fn closure_from_imported(&self, symbols: &VecSet<Symbol>) -> PendingAbilitiesStore {
let Self {
members_of_ability,
ability_members,
declared_specializations,
declared_implementations,
specializations,
// Covered by `declared_specializations`
// Covered by `declared_implementations`
specialization_to_root: _,
// Taking closure for a new module, so specialization IDs can be fresh
@ -292,12 +342,21 @@ impl<Phase: ResolvePhase> IAbilitiesStore<Phase> {
new.register_ability(ability, imported_member_data);
// Add any specializations of the ability's members we know about.
declared_specializations
declared_implementations
.iter()
.filter(|((member, _), _)| members.contains(member))
.for_each(|(&(member, typ), specialization)| {
new.register_specializing_symbol(specialization.symbol, member);
new.import_specialization(member, typ, specialization);
.for_each(|(&(member, typ), member_impl)| {
let impl_key = ImplKey {
ability_member: member,
opaque: typ,
};
new.register_one_declared_impl(impl_key, *member_impl);
if let MemberImpl::Impl(spec_symbol) = member_impl {
if let Some(specialization_info) = specializations.get(spec_symbol) {
new.import_specialization(specialization_info);
}
}
});
}
@ -305,20 +364,29 @@ impl<Phase: ResolvePhase> IAbilitiesStore<Phase> {
}
}
#[derive(Debug)]
pub enum MarkError {
NoDeclaredImpl,
ImplIsNotCustom,
}
impl IAbilitiesStore<Resolved> {
/// Finds the symbol name and ability member definition for a symbol specializing the ability
/// member, if it specializes any.
/// For example, suppose `hash : Id -> U64` has symbol #hash1 and specializes
/// `hash : a -> U64 | a has Hash` with symbol #hash. Calling this with #hash1 would retrieve
/// the ability member data for #hash.
pub fn root_name_and_def(
/// For example, suppose `hashId : Id -> U64` specializes `hash : a -> U64 | a has Hash`.
/// Calling this with `hashId` would retrieve the ability member data for `hash`, and what type
/// `hashId` is specializing for.
pub fn impl_key_and_def(
&self,
specializing_symbol: Symbol,
) -> Option<(Symbol, &AbilityMemberData<Resolved>)> {
let root_symbol = self.specialization_to_root.get(&specializing_symbol)?;
debug_assert!(self.ability_members.contains_key(root_symbol));
let root_data = self.ability_members.get(root_symbol).unwrap();
Some((*root_symbol, root_data))
) -> Option<(ImplKey, &AbilityMemberData<Resolved>)> {
let impl_key = self.impl_key(specializing_symbol)?;
debug_assert!(self.ability_members.contains_key(&impl_key.ability_member));
let root_data = self
.ability_members
.get(&impl_key.ability_member)
.expect("impl keys can only exist for known ability members");
Some((*impl_key, root_data))
}
/// Finds the ability member definition for a member name.
@ -326,36 +394,59 @@ impl IAbilitiesStore<Resolved> {
self.ability_members.get(&member)
}
/// Returns an iterator over pairs ((ability member, type), specialization) specifying that
/// "ability member" has a "specialization" for type "type".
pub fn iter_specializations(
/// Returns an iterator over pairs ((ability member, type), implementation) specifying that
    /// the given type has an implementation of an ability member.
pub fn iter_declared_implementations(
&self,
) -> impl Iterator<Item = ((Symbol, Symbol), &MemberSpecialization<Resolved>)> + '_ {
self.declared_specializations.iter().map(|(k, v)| (*k, v))
) -> impl Iterator<Item = ((Symbol, Symbol), &MemberImpl)> + '_ {
self.declared_implementations.iter().map(|(k, v)| (*k, v))
}
/// Retrieves the specialization of `member` for `typ`, if it exists.
pub fn get_specialization(
&self,
member: Symbol,
typ: Symbol,
) -> Option<&MemberSpecialization<Resolved>> {
self.declared_specializations.get(&(member, typ))
/// Retrieves the declared implementation of `member` for `typ`, if it exists.
pub fn get_implementation(&self, member: Symbol, typ: Symbol) -> Option<&MemberImpl> {
self.declared_implementations.get(&(member, typ))
}
/// Records a specialization of `ability_member` with specialized type `implementing_type`.
/// Entries via this function are considered a source of truth. It must be ensured that a
/// specialization is validated before being registered here.
pub fn register_specialization_for_type(
/// Marks a declared implementation as either properly specializing, or as erroring.
pub fn mark_implementation(
&mut self,
ability_member: Symbol,
implementing_type: Symbol,
specialization: MemberSpecialization<Resolved>,
) {
let old_spec = self
.declared_specializations
.insert((ability_member, implementing_type), specialization);
debug_assert!(old_spec.is_none(), "Replacing existing specialization");
typ: Symbol,
mark: Result<MemberSpecializationInfo<Resolved>, ()>,
) -> Result<(), MarkError> {
match self
.declared_implementations
.get_mut(&(ability_member, typ))
{
Some(member_impl) => match *member_impl {
MemberImpl::Impl(specialization_symbol) => {
debug_assert!(!self.specializations.contains_key(&specialization_symbol));
match mark {
Ok(specialization_info) => {
self.specializations
.insert(specialization_symbol, specialization_info);
}
Err(()) => {
// Mark the member implementation as erroring, so we know to generate a
// runtime error function as appropriate.
*member_impl = MemberImpl::Error;
}
}
Ok(())
}
MemberImpl::Derived | MemberImpl::Error => Err(MarkError::ImplIsNotCustom),
},
None => Err(MarkError::NoDeclaredImpl),
}
}
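    // A minimal usage sketch; `abilities_store`, `hash_member`, `id_opaque`, and
    // `check_result` are hypothetical stand-ins for the resolved store, the ability
    // member being checked, the opaque type, and the outcome of validating the
    // claimed specialization.
    match abilities_store.mark_implementation(hash_member, id_opaque, check_result) {
        Ok(()) => {
            // Either the verified specialization info was recorded, or the declared
            // impl was downgraded to MemberImpl::Error for runtime-error generation.
        }
        Err(MarkError::NoDeclaredImpl) => {
            // The opaque never declared an impl for this ability member.
        }
        Err(MarkError::ImplIsNotCustom) => {
            // The declared impl is derived or already erroring; nothing to record.
        }
    }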
pub fn specialization_info(
&self,
specialization_symbol: Symbol,
) -> Option<&MemberSpecializationInfo<Resolved>> {
self.specializations.get(&specialization_symbol)
}
pub fn insert_resolved(&mut self, id: SpecializationId, specialization: Symbol) {
@ -374,21 +465,43 @@ impl IAbilitiesStore<Resolved> {
}
impl IAbilitiesStore<Pending> {
pub fn import_specialization(
pub fn import_implementation(&mut self, impl_key: ImplKey, resolved_impl: &ResolvedImpl) {
let ImplKey {
opaque,
ability_member,
} = impl_key;
let member_impl = match resolved_impl {
ResolvedImpl::Impl(specialization) => {
self.import_specialization(specialization);
MemberImpl::Impl(specialization.symbol)
}
ResolvedImpl::Derived => MemberImpl::Derived,
ResolvedImpl::Error => MemberImpl::Error,
};
let old_declared_impl = self
.declared_implementations
.insert((ability_member, opaque), member_impl);
debug_assert!(
old_declared_impl.is_none(),
"Replacing existing declared impl!"
);
}
fn import_specialization(
&mut self,
ability_member: Symbol,
implementing_type: Symbol,
specialization: &MemberSpecialization<impl ResolvePhase>,
specialization: &MemberSpecializationInfo<impl ResolvePhase>,
) {
let MemberSpecialization {
let MemberSpecializationInfo {
_phase,
symbol,
specialization_lambda_sets,
} = specialization;
let old_spec = self.declared_specializations.insert(
(ability_member, implementing_type),
MemberSpecialization {
let old_spec = self.specializations.insert(
*symbol,
MemberSpecializationInfo {
_phase: Default::default(),
symbol: *symbol,
specialization_lambda_sets: specialization_lambda_sets.clone(),
@ -402,9 +515,10 @@ impl IAbilitiesStore<Pending> {
members_of_ability: other_members_of_ability,
ability_members: mut other_ability_members,
specialization_to_root,
declared_specializations,
declared_implementations,
next_specialization_id,
resolved_specializations,
specializations,
} = other;
for (ability, members) in other_members_of_ability.into_iter() {
@ -425,13 +539,18 @@ impl IAbilitiesStore<Pending> {
debug_assert!(old_root.is_none() || old_root.unwrap() == member);
}
for ((member, typ), specialization) in declared_specializations.into_iter() {
for ((member, typ), impl_) in declared_implementations.into_iter() {
let old_impl = self.declared_implementations.insert((member, typ), impl_);
debug_assert!(old_impl.is_none() || old_impl.unwrap() == impl_);
}
for (symbol, specialization_info) in specializations.into_iter() {
let old_specialization = self
.declared_specializations
.insert((member, typ), specialization.clone());
.specializations
.insert(symbol, specialization_info.clone());
debug_assert!(
old_specialization.is_none()
|| old_specialization.unwrap().symbol == specialization.symbol
|| old_specialization.unwrap().symbol == specialization_info.symbol
);
}
@ -456,9 +575,10 @@ impl IAbilitiesStore<Pending> {
members_of_ability,
ability_members,
specialization_to_root,
declared_specializations,
declared_implementations,
next_specialization_id,
resolved_specializations,
specializations,
} = self;
let ability_members = ability_members
@ -491,24 +611,22 @@ impl IAbilitiesStore<Pending> {
})
.collect();
let declared_specializations = declared_specializations
let specializations = specializations
.into_iter()
.map(
|(
key,
MemberSpecialization {
|(symbol, specialization)| {
let MemberSpecializationInfo {
_phase,
symbol,
symbol: _,
specialization_lambda_sets,
},
)| {
} = specialization;
let symbol_module = symbol.module_id();
// NOTE: this totally assumes we're dealing with subs that belong to an
// individual module, things would be badly broken otherwise
let member_specialization = if symbol_module == my_module {
internal_error!("Ability store may only be pending before module solving, \
so there shouldn't be any known module specializations at this point, but we found one for {:?}", symbol);
// MemberSpecialization::new(symbol, specialization_lambda_sets)
} else {
let specialization_lambda_sets = specialization_lambda_sets
.into_iter()
@ -523,10 +641,11 @@ impl IAbilitiesStore<Pending> {
)
})
.collect();
MemberSpecialization::new(symbol, specialization_lambda_sets)
MemberSpecializationInfo::new(symbol, specialization_lambda_sets)
};
(key, member_specialization)
},
(symbol, member_specialization)
}
)
.collect();
@ -534,9 +653,10 @@ impl IAbilitiesStore<Pending> {
members_of_ability,
ability_members,
specialization_to_root,
declared_specializations,
declared_implementations,
next_specialization_id,
resolved_specializations,
specializations,
}
}
}

View File

@ -1,6 +1,6 @@
use crate::env::Env;
use crate::procedure::References;
use crate::scope::Scope;
use crate::scope::{PendingAbilitiesInScope, Scope};
use roc_collections::{ImMap, MutSet, SendMap, VecMap, VecSet};
use roc_module::ident::{Ident, Lowercase, TagName};
use roc_module::symbol::Symbol;
@ -267,7 +267,7 @@ pub fn canonicalize_annotation(
annotation: &TypeAnnotation,
region: Region,
var_store: &mut VarStore,
pending_abilities_in_scope: &[Symbol],
pending_abilities_in_scope: &PendingAbilitiesInScope,
) -> Annotation {
let mut introduced_variables = IntroducedVariables::default();
let mut references = VecSet::default();
@ -621,11 +621,11 @@ fn can_annotation_help(
let symbol = match scope.introduce(name.value.into(), region) {
Ok(symbol) => symbol,
Err((original_region, shadow, _new_symbol)) => {
let problem = Problem::Shadowed(original_region, shadow.clone());
Err((shadowed_symbol, shadow, _new_symbol)) => {
let problem = Problem::Shadowed(shadowed_symbol.region, shadow.clone());
env.problem(roc_problem::can::Problem::Shadowing {
original_region,
original_region: shadowed_symbol.region,
shadow,
kind: ShadowKind::Variable,
});
@ -908,7 +908,7 @@ fn canonicalize_has_clause(
var_store: &mut VarStore,
introduced_variables: &mut IntroducedVariables,
clause: &Loc<roc_parse::ast::HasClause<'_>>,
pending_abilities_in_scope: &[Symbol],
pending_abilities_in_scope: &PendingAbilitiesInScope,
references: &mut VecSet<Symbol>,
) -> Result<(), Type> {
let Loc {
@ -929,7 +929,7 @@ fn canonicalize_has_clause(
let symbol = make_apply_symbol(env, ability.region, scope, module_name, ident)?;
// Ability defined locally, whose members we are constructing right now...
if !pending_abilities_in_scope.contains(&symbol)
if !pending_abilities_in_scope.contains_key(&symbol)
// or an ability that was imported from elsewhere
&& !scope.abilities_store.is_ability(symbol)
{

View File

@ -66,7 +66,7 @@ impl Constraints {
Category::StrInterpolation,
Category::If,
Category::When,
Category::Float,
Category::Frac,
Category::Int,
Category::Num,
Category::List,
@ -199,7 +199,7 @@ impl Constraints {
Category::StrInterpolation => Self::CATEGORY_STRINTERPOLATION,
Category::If => Self::CATEGORY_IF,
Category::When => Self::CATEGORY_WHEN,
Category::Float => Self::CATEGORY_FLOAT,
Category::Frac => Self::CATEGORY_FLOAT,
Category::Int => Self::CATEGORY_INT,
Category::Num => Self::CATEGORY_NUM,
Category::List => Self::CATEGORY_LIST,

View File

@ -1,6 +1,6 @@
use crate::{
def::Def,
expr::{AccessorData, ClosureData, Expr, Field, OpaqueWrapFunctionData},
expr::{AccessorData, ClosureData, Expr, Field, OpaqueWrapFunctionData, WhenBranchPattern},
pattern::{DestructType, Pattern, RecordDestruct},
};
use roc_module::{
@ -295,7 +295,16 @@ fn deep_copy_expr_help<C: CopyEnv>(env: &mut C, copied: &mut Vec<Variable>, expr
}| crate::expr::WhenBranch {
patterns: patterns
.iter()
.map(|lp| lp.map(|p| deep_copy_pattern_help(env, copied, p)))
.map(
|WhenBranchPattern {
pattern,
degenerate,
}| WhenBranchPattern {
pattern: pattern
.map(|p| deep_copy_pattern_help(env, copied, p)),
degenerate: *degenerate,
},
)
.collect(),
value: value.map(|e| go_help!(e)),
guard: guard.as_ref().map(|le| le.map(|e| go_help!(e))),

View File

@ -1,4 +1,5 @@
use crate::abilities::AbilityMemberData;
use crate::abilities::ImplKey;
use crate::abilities::MemberVariables;
use crate::abilities::PendingMemberType;
use crate::annotation::canonicalize_annotation;
@ -16,9 +17,10 @@ use crate::expr::{canonicalize_expr, Output, Recursive};
use crate::pattern::{canonicalize_def_header_pattern, BindingsFromPattern, Pattern};
use crate::procedure::References;
use crate::scope::create_alias;
use crate::scope::Scope;
use crate::scope::{PendingAbilitiesInScope, Scope};
use roc_collections::ReferenceMatrix;
use roc_collections::VecMap;
use roc_collections::VecSet;
use roc_collections::{ImSet, MutMap, SendMap};
use roc_error_macros::internal_error;
use roc_module::ident::Ident;
@ -27,7 +29,7 @@ use roc_module::symbol::IdentId;
use roc_module::symbol::ModuleId;
use roc_module::symbol::Symbol;
use roc_parse::ast;
use roc_parse::ast::AbilityMember;
use roc_parse::ast::AssignedField;
use roc_parse::ast::Defs;
use roc_parse::ast::ExtractSpaces;
use roc_parse::ast::TypeHeader;
@ -41,6 +43,7 @@ use roc_types::types::AliasCommon;
use roc_types::types::AliasKind;
use roc_types::types::AliasVar;
use roc_types::types::LambdaSet;
use roc_types::types::MemberImpl;
use roc_types::types::OptAbleType;
use roc_types::types::{Alias, Type};
use std::fmt::Debug;
@ -141,6 +144,12 @@ impl PendingValueDef<'_> {
}
}
#[derive(Debug, Clone)]
struct PendingAbilityMember<'a> {
name: Loc<Symbol>,
typ: Loc<ast::TypeAnnotation<'a>>,
}
#[derive(Debug, Clone)]
enum PendingTypeDef<'a> {
/// A structural type alias, e.g. `Ints : List Int`
@ -160,7 +169,7 @@ enum PendingTypeDef<'a> {
Ability {
name: Loc<Symbol>,
members: &'a [ast::AbilityMember<'a>],
members: Vec<PendingAbilityMember<'a>>,
},
/// An invalid alias, that is ignored in the rest of the pipeline
@ -295,7 +304,7 @@ fn canonicalize_alias<'a>(
output: &mut Output,
var_store: &mut VarStore,
scope: &mut Scope,
pending_abilities_in_scope: &[Symbol],
pending_abilities_in_scope: &PendingAbilitiesInScope,
name: Loc<Symbol>,
ann: &'a Loc<ast::TypeAnnotation<'a>>,
@ -413,6 +422,171 @@ fn canonicalize_alias<'a>(
))
}
/// Canonicalizes a claimed ability implementation like `{ eq }` or `{ eq: myEq }`.
/// Returns a mapping of the ability member to the implementation symbol.
/// If there was an error, a problem will be recorded and nothing is returned.
fn canonicalize_claimed_ability_impl<'a>(
env: &mut Env<'a>,
scope: &mut Scope,
ability: Symbol,
loc_impl: &Loc<ast::AssignedField<'a, ast::Expr<'a>>>,
) -> Result<(Symbol, Symbol), ()> {
let ability_home = ability.module_id();
match loc_impl.extract_spaces().item {
AssignedField::LabelOnly(label) => {
let label_str = label.value;
let region = label.region;
let member_symbol =
match env.qualified_lookup_with_module_id(scope, ability_home, label_str, region) {
Ok(symbol) => symbol,
Err(_) => {
env.problem(Problem::NotAnAbilityMember {
ability,
name: label_str.to_owned(),
region,
});
return Err(());
}
};
match scope.lookup_ability_member_shadow(member_symbol) {
Some(impl_symbol) => Ok((member_symbol, impl_symbol)),
None => {
env.problem(Problem::ImplementationNotFound {
member: member_symbol,
region: label.region,
});
Err(())
}
}
}
AssignedField::RequiredValue(label, _spaces, value) => {
let impl_ident = match value.value {
ast::Expr::Var { module_name, ident } => {
if module_name.is_empty() {
ident
} else {
env.problem(Problem::QualifiedAbilityImpl {
region: value.region,
});
return Err(());
}
}
_ => {
env.problem(Problem::AbilityImplNotIdent {
region: value.region,
});
return Err(());
}
};
let impl_region = value.region;
let member_symbol = match env.qualified_lookup_with_module_id(
scope,
ability_home,
label.value,
label.region,
) {
Ok(symbol) => symbol,
Err(_) => {
env.problem(Problem::NotAnAbilityMember {
ability,
name: label.value.to_owned(),
region: label.region,
});
return Err(());
}
};
let impl_symbol = match scope.lookup(&impl_ident.into(), impl_region) {
Ok(symbol) => symbol,
Err(err) => {
env.problem(Problem::RuntimeError(err));
return Err(());
}
};
Ok((member_symbol, impl_symbol))
}
AssignedField::OptionalValue(_, _, _) => {
env.problem(Problem::OptionalAbilityImpl {
ability,
region: loc_impl.region,
});
Err(())
}
AssignedField::Malformed(_) => {
// An error will already have been reported
Err(())
}
AssignedField::SpaceBefore(_, _) | AssignedField::SpaceAfter(_, _) => {
internal_error!("unreachable")
}
}
}
struct SeparatedMembers {
not_required: Vec<Symbol>,
not_implemented: Vec<Symbol>,
}
/// Partitions ability members in a `has [ Ability {...members} ]` clause into the members the
/// opaque type claims to implement but that are not part of the ability, and the members the
/// ability requires but the opaque does not implement.
fn separate_implemented_and_required_members(
implemented: VecSet<Symbol>,
required: VecSet<Symbol>,
) -> SeparatedMembers {
use std::cmp::Ordering;
let mut implemented = implemented.into_vec();
let mut required = required.into_vec();
implemented.sort();
required.sort();
let mut implemented = implemented.into_iter().peekable();
let mut required = required.into_iter().peekable();
let mut not_required = vec![];
let mut not_implemented = vec![];
loop {
// Equal => both required and implemented
// Less => implemented but not required
// Greater => required but not implemented
let ord = match (implemented.peek(), required.peek()) {
(Some(implemented), Some(required)) => Some(implemented.cmp(required)),
(Some(_), None) => Some(Ordering::Less),
(None, Some(_)) => Some(Ordering::Greater),
(None, None) => None,
};
match ord {
Some(Ordering::Less) => {
not_required.push(implemented.next().unwrap());
}
Some(Ordering::Greater) => {
not_implemented.push(required.next().unwrap());
}
Some(Ordering::Equal) => {
_ = implemented.next().unwrap();
_ = required.next().unwrap();
}
None => break,
}
}
SeparatedMembers {
not_required,
not_implemented,
}
}
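// A rough illustration; `hash_sym`, `eq_sym`, and `encode_sym` are hypothetical
// member symbols. An opaque claiming `hash` plus an extra `encode`, for an
// ability that requires `hash` and `eq`, partitions as follows.
let SeparatedMembers {
    not_required,     // [encode_sym]: claimed by the opaque, but not part of the ability
    not_implemented,  // [eq_sym]: required by the ability, but not claimed by the opaque
} = separate_implemented_and_required_members(
    [hash_sym, encode_sym].into_iter().collect(),
    [hash_sym, eq_sym].into_iter().collect(),
);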
#[inline(always)]
#[allow(clippy::too_many_arguments)]
fn canonicalize_opaque<'a>(
@ -420,7 +594,7 @@ fn canonicalize_opaque<'a>(
output: &mut Output,
var_store: &mut VarStore,
scope: &mut Scope,
pending_abilities_in_scope: &[Symbol],
pending_abilities_in_scope: &PendingAbilitiesInScope,
name: Loc<Symbol>,
ann: &'a Loc<ast::TypeAnnotation<'a>>,
@ -442,40 +616,159 @@ fn canonicalize_opaque<'a>(
if let Some(has_abilities) = has_abilities {
let has_abilities = has_abilities.value.collection();
let mut can_abilities = vec![];
let mut derived_abilities = vec![];
for has_ability in has_abilities.items {
let region = has_ability.region;
let (ability, _impls) = match has_ability.value.extract_spaces().item {
let (ability, opt_impls) = match has_ability.value.extract_spaces().item {
ast::HasAbility::HasAbility { ability, impls } => (ability, impls),
_ => internal_error!("spaces not extracted"),
};
match ability.value {
let ability_region = ability.region;
let (ability, members) = match ability.value {
ast::TypeAnnotation::Apply(module_name, ident, []) => {
match make_apply_symbol(env, region, scope, module_name, ident) {
Ok(ability) if ability.is_builtin_ability() => {
can_abilities.push(Loc::at(region, ability));
}
Ok(_) => {
// Register the problem but keep going, we may still be able to compile the
// program even if a derive is missing.
env.problem(Problem::IllegalDerive(region));
Ok(ability) => {
let opt_members = scope
.abilities_store
.members_of_ability(ability)
.map(|members| members.iter().copied().collect())
.or_else(|| pending_abilities_in_scope.get(&ability).cloned());
if let Some(members) = opt_members {
// This is an ability we already imported into the scope,
// or which is also undergoing canonicalization at the moment.
(ability, members)
} else {
env.problem(Problem::NotAnAbility(ability_region));
continue;
}
}
Err(_) => {
                        // This is a bad apply; an error will have been reported for it
// already.
continue;
}
}
}
_ => {
// Register the problem but keep going, we may still be able to compile the
// program even if a derive is missing.
env.problem(Problem::IllegalDerive(region));
// Register the problem but keep going.
env.problem(Problem::NotAnAbility(ability_region));
continue;
}
};
if let Some(impls) = opt_impls {
let mut impl_map: VecMap<Symbol, Loc<MemberImpl>> = VecMap::default();
            // First, canonicalize all the claimed implementations, building a map of ability
// member -> implementation.
for loc_impl in impls.extract_spaces().item.items {
let (member, impl_symbol) =
match canonicalize_claimed_ability_impl(env, scope, ability, loc_impl) {
Ok((member, impl_symbol)) => (member, impl_symbol),
Err(()) => continue,
};
// Did the user claim this implementation for a specialization of a different
// type? e.g.
//
// A has [Hash {hash: myHash}]
// B has [Hash {hash: myHash}]
//
// If so, that's an error and we drop the impl for this opaque type.
let member_impl = match scope.abilities_store.impl_key(impl_symbol) {
Some(ImplKey {
opaque,
ability_member,
}) => {
env.problem(Problem::OverloadedSpecialization {
overload: loc_impl.region,
original_opaque: *opaque,
ability_member: *ability_member,
});
MemberImpl::Error
}
None => MemberImpl::Impl(impl_symbol),
};
// Did the user already claim an implementation for the ability member for this
// type previously? (e.g. Hash {hash: hash1, hash: hash2})
let opt_old_impl_symbol =
impl_map.insert(member, Loc::at(loc_impl.region, member_impl));
if let Some(old_impl_symbol) = opt_old_impl_symbol {
env.problem(Problem::DuplicateImpl {
original: old_impl_symbol.region,
duplicate: loc_impl.region,
});
}
}
            // Check that the members this opaque claims to implement correspond 1-to-1 with
// the members the ability offers.
let SeparatedMembers {
not_required,
not_implemented,
} = separate_implemented_and_required_members(
impl_map.iter().map(|(member, _)| *member).collect(),
members,
);
if !not_required.is_empty() {
                // Implementing something that's not required is a recoverable error; we don't
// need to skip association of the implemented abilities. Just remove the
// unneeded members.
for sym in not_required.iter() {
impl_map.remove(sym);
}
env.problem(Problem::ImplementsNonRequired {
region,
ability,
not_required,
});
}
if !not_implemented.is_empty() {
// We'll generate runtime errors for the members that are needed but
// unspecified.
for sym in not_implemented.iter() {
impl_map.insert(*sym, Loc::at_zero(MemberImpl::Error));
}
env.problem(Problem::DoesNotImplementAbility {
region,
ability,
not_implemented,
});
}
let impls = impl_map
.into_iter()
.map(|(member, def)| (member, def.value));
scope
.abilities_store
.register_declared_implementations(name.value, impls);
} else if let Some((_, members)) = ability.derivable_ability() {
let impls = members.iter().map(|member| (*member, MemberImpl::Derived));
scope
.abilities_store
.register_declared_implementations(name.value, impls);
derived_abilities.push(Loc::at(ability_region, ability));
} else {
            // There was no record specifying which functions to use for the
            // members, and this isn't a builtin ability, so we don't
            // know how to auto-derive it.
env.problem(Problem::IllegalDerivedAbility(region));
}
}
if !can_abilities.is_empty() {
if !derived_abilities.is_empty() {
// Fresh instance of this opaque to be checked for derivability during solving.
let fresh_inst = Type::DelayedAlias(AliasCommon {
symbol: name.value,
@ -493,7 +786,8 @@ fn canonicalize_opaque<'a>(
let old = output
.pending_derives
.insert(name.value, (fresh_inst, can_abilities));
.insert(name.value, (fresh_inst, derived_abilities));
debug_assert!(old.is_none());
}
}
@ -528,19 +822,44 @@ pub(crate) fn canonicalize_defs<'a>(
// to itself won't be processed until after its def has been added to scope.
let mut pending_type_defs = Vec::with_capacity(loc_defs.type_defs.len());
let mut value_defs = Vec::with_capacity(loc_defs.value_defs.len());
let mut pending_value_defs = Vec::with_capacity(loc_defs.value_defs.len());
let mut pending_abilities_in_scope = PendingAbilitiesInScope::default();
// Convert the type defs into pending defs first, then all the value defs.
// Follow this order because we need all value symbols to fully canonicalize type defs (in case
// there are opaques that implement an ability using a value symbol). But, value symbols might
// shadow symbols defined in a local ability def.
for (_, either_index) in loc_defs.tags.iter().enumerate() {
if let Ok(type_index) = either_index.split() {
let type_def = &loc_defs.type_defs[type_index.index()];
let pending_type_def = to_pending_type_def(env, type_def, scope, pattern_type);
if let PendingTypeDef::Ability { name, members } = &pending_type_def {
pending_abilities_in_scope.insert(
name.value,
members.iter().map(|mem| mem.name.value).collect(),
);
}
pending_type_defs.push(pending_type_def);
}
}
for (index, either_index) in loc_defs.tags.iter().enumerate() {
match either_index.split() {
Ok(type_index) => {
let type_def = &loc_defs.type_defs[type_index.index()];
pending_type_defs.push(to_pending_type_def(env, type_def, scope, pattern_type));
}
Err(value_index) => {
let value_def = &loc_defs.value_defs[value_index.index()];
let region = loc_defs.regions[index];
value_defs.push(Loc::at(region, value_def));
}
if let Err(value_index) = either_index.split() {
let value_def = &loc_defs.value_defs[value_index.index()];
let region = loc_defs.regions[index];
let pending = to_pending_value_def(
env,
var_store,
value_def,
scope,
&pending_abilities_in_scope,
&mut output,
pattern_type,
);
pending_value_defs.push(Loc::at(region, pending));
}
}
@ -553,8 +872,8 @@ pub(crate) fn canonicalize_defs<'a>(
&mut output,
var_store,
scope,
&pending_abilities_in_scope,
pending_type_defs,
pattern_type,
);
// Now that we have the scope completely assembled, and shadowing resolved,
@ -564,7 +883,7 @@ pub(crate) fn canonicalize_defs<'a>(
output,
var_store,
scope,
&value_defs,
pending_value_defs,
pattern_type,
aliases,
symbols_introduced,
@ -577,7 +896,7 @@ fn canonicalize_value_defs<'a>(
mut output: Output,
var_store: &mut VarStore,
scope: &mut Scope,
value_defs: &[Loc<&'a roc_parse::ast::ValueDef<'a>>],
value_defs: Vec<Loc<PendingValue<'a>>>,
pattern_type: PatternType,
mut aliases: VecMap<Symbol, Alias>,
mut symbols_introduced: MutMap<Symbol, Region>,
@ -588,25 +907,14 @@ fn canonicalize_value_defs<'a>(
let mut pending_value_defs = Vec::with_capacity(value_defs.len());
let mut pending_expects = Vec::with_capacity(value_defs.len());
for loc_def in value_defs {
let mut new_output = Output::default();
let pending = to_pending_value_def(
env,
var_store,
loc_def.value,
scope,
&mut new_output,
pattern_type,
);
match pending {
for loc_pending_def in value_defs {
match loc_pending_def.value {
PendingValue::Def(pending_def) => {
// Record the ast::Expr for later. We'll do another pass through these
// once we have the entire scope assembled. If we were to canonicalize
// the exprs right now, they wouldn't have symbols in scope from defs
                // that would have gotten added later in the defs list!
pending_value_defs.push(pending_def);
output.union(new_output);
}
PendingValue::SignatureDefMismatch => { /* skip */ }
PendingValue::Expect(pending_expect) => {
@ -618,7 +926,17 @@ fn canonicalize_value_defs<'a>(
let mut symbol_to_index: Vec<(IdentId, u32)> = Vec::with_capacity(pending_value_defs.len());
for (def_index, pending_def) in pending_value_defs.iter().enumerate() {
for (s, r) in BindingsFromPattern::new(pending_def.loc_pattern()) {
let mut new_bindings = BindingsFromPattern::new(pending_def.loc_pattern())
.into_iter()
.peekable();
if new_bindings.peek().is_none() {
env.problem(Problem::NoIdentifiersIntroduced(
pending_def.loc_pattern().region,
));
}
for (s, r) in new_bindings {
// store the top-level defs, used to ensure that closures won't capture them
if let PatternType::TopLevelDef = pattern_type {
env.top_level_symbols.insert(s);
@ -692,8 +1010,8 @@ fn canonicalize_type_defs<'a>(
output: &mut Output,
var_store: &mut VarStore,
scope: &mut Scope,
pending_abilities_in_scope: &PendingAbilitiesInScope,
pending_type_defs: Vec<PendingTypeDef<'a>>,
pattern_type: PatternType,
) -> (VecMap<Symbol, Alias>, MutMap<Symbol, Region>) {
enum TypeDef<'a> {
Alias(
@ -707,12 +1025,10 @@ fn canonicalize_type_defs<'a>(
&'a Loc<ast::TypeAnnotation<'a>>,
Option<&'a Loc<ast::HasAbilities<'a>>>,
),
Ability(Loc<Symbol>, &'a [AbilityMember<'a>]),
Ability(Loc<Symbol>, Vec<PendingAbilityMember<'a>>),
}
let mut type_defs = MutMap::default();
let mut pending_abilities_in_scope = Vec::new();
let mut referenced_type_symbols = VecMap::default();
// Determine which idents we introduced in the course of this process.
@ -758,7 +1074,6 @@ fn canonicalize_type_defs<'a>(
referenced_type_symbols.insert(name.value, referenced_symbols);
type_defs.insert(name.value, TypeDef::Ability(name, members));
pending_abilities_in_scope.push(name.value);
}
PendingTypeDef::InvalidAlias { .. }
| PendingTypeDef::InvalidAbility { .. }
@ -780,7 +1095,7 @@ fn canonicalize_type_defs<'a>(
output,
var_store,
scope,
&pending_abilities_in_scope,
pending_abilities_in_scope,
name,
ann,
&vars,
@ -798,7 +1113,7 @@ fn canonicalize_type_defs<'a>(
output,
var_store,
scope,
&pending_abilities_in_scope,
pending_abilities_in_scope,
name,
ann,
&vars,
@ -813,7 +1128,7 @@ fn canonicalize_type_defs<'a>(
TypeDef::Ability(name, members) => {
// For now we enforce that aliases cannot reference abilities, so let's wait to
// resolve ability definitions until aliases are resolved and in scope below.
abilities.insert(name.value, (name, members));
abilities.insert(name.value, members);
}
}
}
@ -839,8 +1154,7 @@ fn canonicalize_type_defs<'a>(
var_store,
scope,
abilities,
&pending_abilities_in_scope,
pattern_type,
pending_abilities_in_scope,
);
(aliases, symbols_introduced)
@ -853,19 +1167,26 @@ fn resolve_abilities<'a>(
output: &mut Output,
var_store: &mut VarStore,
scope: &mut Scope,
abilities: MutMap<Symbol, (Loc<Symbol>, &[AbilityMember])>,
pending_abilities_in_scope: &[Symbol],
pattern_type: PatternType,
abilities: MutMap<Symbol, Vec<PendingAbilityMember>>,
pending_abilities_in_scope: &PendingAbilitiesInScope,
) {
for (loc_ability_name, members) in abilities.into_values() {
for (ability, members) in abilities {
let mut can_members = Vec::with_capacity(members.len());
for member in members {
for PendingAbilityMember {
name:
Loc {
value: member_sym,
region: member_name_region,
},
typ,
} in members
{
let member_annot = canonicalize_annotation(
env,
scope,
&member.typ.value,
member.typ.region,
&typ.value,
typ.region,
var_store,
pending_abilities_in_scope,
);
@ -875,26 +1196,6 @@ fn resolve_abilities<'a>(
output.references.insert_type_lookup(symbol);
}
let name_region = member.name.region;
let member_name = member.name.extract_spaces().item;
let member_sym = match scope.introduce(member_name.into(), name_region) {
Ok(sym) => sym,
Err((original_region, shadow, _new_symbol)) => {
env.problem(roc_problem::can::Problem::Shadowing {
original_region,
shadow,
kind: ShadowKind::Variable,
});
// Pretend the member isn't a part of the ability
continue;
}
};
if pattern_type == PatternType::TopLevelDef {
env.top_level_symbols.insert(member_sym);
}
// What variables in the annotation are bound to the parent ability, and what variables
// are bound to some other ability?
let (variables_bound_to_ability, _variables_bound_to_other_abilities): (
@ -904,7 +1205,7 @@ fn resolve_abilities<'a>(
.introduced_variables
.able
.iter()
.partition(|av| av.ability == loc_ability_name.value);
.partition(|av| av.ability == ability);
let var_bound_to_ability = match variables_bound_to_ability.as_slice() {
[one] => one.variable,
@ -913,8 +1214,8 @@ fn resolve_abilities<'a>(
// need to be a part of the ability.
env.problem(Problem::AbilityMemberMissingHasClause {
member: member_sym,
ability: loc_ability_name.value,
region: name_region,
ability,
region: member_name_region,
});
// Pretend the member isn't a part of the ability
continue;
@ -933,7 +1234,7 @@ fn resolve_abilities<'a>(
.collect();
env.problem(Problem::AbilityMemberMultipleBoundVars {
member: member_sym,
ability: loc_ability_name.value,
ability,
span_has_clauses,
bound_var_names,
});
@ -965,8 +1266,8 @@ fn resolve_abilities<'a>(
can_members.push((
member_sym,
AbilityMemberData {
parent_ability: loc_ability_name.value,
region: name_region,
parent_ability: ability,
region: member_name_region,
typ: PendingMemberType::Local {
variables,
signature,
@ -977,9 +1278,7 @@ fn resolve_abilities<'a>(
}
// Store what symbols a type must define implementations for to have this ability.
scope
.abilities_store
.register_ability(loc_ability_name.value, can_members);
scope.abilities_store.register_ability(ability, can_members);
}
}
@ -1089,8 +1388,29 @@ pub(crate) fn sort_can_defs_new(
// TODO: inefficient, but I want to make this what CanDefs contains in the future
let mut defs: Vec<_> = defs.into_iter().map(|x| x.unwrap()).collect();
// symbols are put in declarations in dependency order, from "main" up, so
//
// x = 3
// y = x + 1
//
// will get ordering [ y, x ]
let mut declarations = Declarations::with_capacity(defs.len());
// because of the ordering of declarations, expects should come first because they are
// independent, but can rely on all other top-level symbols in the module
let it = expects
.conditions
.into_iter()
.zip(expects.regions)
.zip(expects.preceding_comment);
for ((condition, region), preceding_comment) in it {
// an `expect` does not have a user-defined name, but we'll need a name to call the expectation
let name = scope.gen_unique_symbol();
declarations.push_expect(preceding_comment, name, Loc::at(region, condition));
}
for (symbol, alias) in aliases.into_iter() {
output.aliases.insert(symbol, alias);
}
@ -1255,12 +1575,6 @@ pub(crate) fn sort_can_defs_new(
}
}
for (condition, region) in expects.conditions.into_iter().zip(expects.regions) {
// an `expect` does not have a user-defined name, but we'll need a name to call the expectation
let name = scope.gen_unique_symbol();
declarations.push_expect(name, Loc::at(region, condition));
}
(declarations, output)
}
@ -1522,7 +1836,7 @@ fn canonicalize_pending_value_def<'a>(
use PendingValueDef::*;
// All abilities should be resolved by the time we're canonicalizing value defs.
let pending_abilities_in_scope = &[];
let pending_abilities_in_scope = &Default::default();
let output = match pending_def {
AnnotationOnly(_, loc_can_pattern, loc_ann) => {
@ -2036,10 +2350,38 @@ fn to_pending_type_def<'a>(
};
}
let mut named_members = Vec::with_capacity(members.len());
for member in *members {
let name_region = member.name.region;
let member_name = member.name.extract_spaces().item;
let member_sym = match scope.introduce(member_name.into(), name_region) {
Ok(sym) => sym,
Err((shadowed_symbol, shadow, _new_symbol)) => {
env.problem(roc_problem::can::Problem::Shadowing {
original_region: shadowed_symbol.region,
shadow,
kind: ShadowKind::Variable,
});
// Pretend the member isn't a part of the ability
continue;
}
};
named_members.push(PendingAbilityMember {
name: Loc::at(name_region, member_sym),
typ: member.typ,
});
if pattern_type == PatternType::TopLevelDef {
env.top_level_symbols.insert(member_sym);
}
}
PendingTypeDef::Ability {
name,
// We'll handle adding the member symbols later on when we do all value defs.
members,
members: named_members,
}
}
}
@ -2061,6 +2403,7 @@ fn to_pending_value_def<'a>(
var_store: &mut VarStore,
def: &'a ast::ValueDef<'a>,
scope: &mut Scope,
pending_abilities_in_scope: &PendingAbilitiesInScope,
output: &mut Output,
pattern_type: PatternType,
) -> PendingValue<'a> {
@ -2073,6 +2416,7 @@ fn to_pending_value_def<'a>(
env,
var_store,
scope,
pending_abilities_in_scope,
output,
pattern_type,
&loc_pattern.value,
@ -2091,6 +2435,7 @@ fn to_pending_value_def<'a>(
env,
var_store,
scope,
pending_abilities_in_scope,
output,
pattern_type,
&loc_pattern.value,
@ -2123,6 +2468,7 @@ fn to_pending_value_def<'a>(
env,
var_store,
scope,
pending_abilities_in_scope,
output,
pattern_type,
&body_pattern.value,
@ -2149,9 +2495,12 @@ fn to_pending_value_def<'a>(
}
}
Expect(condition) => PendingValue::Expect(PendingExpect {
Expect {
condition,
preceding_comment: Region::zero(),
preceding_comment,
} => PendingValue::Expect(PendingExpect {
condition,
preceding_comment: *preceding_comment,
}),
}
}

View File

@ -1,6 +1,6 @@
use crate::annotation::IntroducedVariables;
use crate::def::Def;
use crate::expr::{AnnotatedMark, ClosureData, Declarations, Expr, Recursive};
use crate::expr::{AnnotatedMark, ClosureData, Declarations, Expr, Recursive, WhenBranchPattern};
use crate::pattern::Pattern;
use crate::scope::Scope;
use roc_collections::{SendMap, VecSet};
@ -475,11 +475,15 @@ fn build_effect_after(
type_arguments,
lambda_set_variables,
};
let pattern = WhenBranchPattern {
pattern: Loc::at_zero(pattern),
degenerate: false,
};
let branches = vec![crate::expr::WhenBranch {
guard: None,
value: Loc::at_zero(force_inner_thunk_call),
patterns: vec![Loc::at_zero(pattern)],
patterns: vec![pattern],
redundant: RedundantMark::new(var_store),
}];
@ -1256,9 +1260,13 @@ fn build_effect_loop_inner_body(
let step_tag_name = TagName("Step".into());
let step_pattern = applied_tag_pattern(step_tag_name, &[new_state_symbol], var_store);
let step_pattern = WhenBranchPattern {
pattern: Loc::at_zero(step_pattern),
degenerate: false,
};
crate::expr::WhenBranch {
patterns: vec![Loc::at_zero(step_pattern)],
patterns: vec![step_pattern],
value: Loc::at_zero(force_thunk2),
guard: None,
redundant: RedundantMark::new(var_store),
@ -1268,9 +1276,13 @@ fn build_effect_loop_inner_body(
let done_branch = {
let done_tag_name = TagName("Done".into());
let done_pattern = applied_tag_pattern(done_tag_name, &[done_symbol], var_store);
let done_pattern = WhenBranchPattern {
pattern: Loc::at_zero(done_pattern),
degenerate: false,
};
crate::expr::WhenBranch {
patterns: vec![Loc::at_zero(done_pattern)],
patterns: vec![done_pattern],
value: Loc::at_zero(Expr::Var(done_symbol)),
guard: None,
redundant: RedundantMark::new(var_store),

View File

@ -58,7 +58,6 @@ impl<'a> Env<'a> {
}
}
/// Returns Err if the symbol resolved, but it was not exposed by the given module
pub fn qualified_lookup(
&mut self,
scope: &Scope,
@ -72,87 +71,10 @@ impl<'a> Env<'a> {
ident
);
let is_type_name = ident.starts_with(|c: char| c.is_uppercase());
let module_name = ModuleName::from(module_name_str);
match self.module_ids.get_id(&module_name) {
Some(module_id) => {
// You can do qualified lookups on your own module, e.g.
// if I'm in the Foo module, I can do a `Foo.bar` lookup.
if module_id == self.home {
match scope.locals.ident_ids.get_id(ident) {
Some(ident_id) => {
let symbol = Symbol::new(module_id, ident_id);
if is_type_name {
self.qualified_type_lookups.insert(symbol);
} else {
self.qualified_value_lookups.insert(symbol);
}
Ok(symbol)
}
None => {
let error = RuntimeError::LookupNotInScope(
Loc {
value: Ident::from(ident),
region,
},
scope
.locals
.ident_ids
.ident_strs()
.map(|(_, string)| string.into())
.collect(),
);
Err(error)
}
}
} else {
match self.dep_idents.get(&module_id) {
Some(exposed_ids) => match exposed_ids.get_id(ident) {
Some(ident_id) => {
let symbol = Symbol::new(module_id, ident_id);
if is_type_name {
self.qualified_type_lookups.insert(symbol);
} else {
self.qualified_value_lookups.insert(symbol);
}
Ok(symbol)
}
None => {
let exposed_values = exposed_ids
.ident_strs()
.filter(|(_, ident)| {
ident.starts_with(|c: char| c.is_lowercase())
})
.map(|(_, ident)| Lowercase::from(ident))
.collect();
Err(RuntimeError::ValueNotExposed {
module_name,
ident: Ident::from(ident),
region,
exposed_values,
})
}
},
None => Err(RuntimeError::ModuleNotImported {
module_name,
imported_modules: self
.dep_idents
.keys()
.filter_map(|module_id| self.module_ids.get_name(*module_id))
.map(|module_name| module_name.as_ref().into())
.collect(),
region,
module_exists: true,
}),
}
}
}
Some(module_id) => self.qualified_lookup_help(scope, module_id, ident, region),
None => Err(RuntimeError::ModuleNotImported {
module_name,
imported_modules: self
@ -166,6 +88,108 @@ impl<'a> Env<'a> {
}
}
pub fn qualified_lookup_with_module_id(
&mut self,
scope: &Scope,
module_id: ModuleId,
ident: &str,
region: Region,
) -> Result<Symbol, RuntimeError> {
self.qualified_lookup_help(scope, module_id, ident, region)
}
/// Returns Err if the symbol resolved, but it was not exposed by the given module
fn qualified_lookup_help(
&mut self,
scope: &Scope,
module_id: ModuleId,
ident: &str,
region: Region,
) -> Result<Symbol, RuntimeError> {
let is_type_name = ident.starts_with(|c: char| c.is_uppercase());
// You can do qualified lookups on your own module, e.g.
// if I'm in the Foo module, I can do a `Foo.bar` lookup.
if module_id == self.home {
match scope.locals.ident_ids.get_id(ident) {
Some(ident_id) => {
let symbol = Symbol::new(module_id, ident_id);
if is_type_name {
self.qualified_type_lookups.insert(symbol);
} else {
self.qualified_value_lookups.insert(symbol);
}
Ok(symbol)
}
None => {
let error = RuntimeError::LookupNotInScope(
Loc {
value: Ident::from(ident),
region,
},
scope
.locals
.ident_ids
.ident_strs()
.map(|(_, string)| string.into())
.collect(),
);
Err(error)
}
}
} else {
match self.dep_idents.get(&module_id) {
Some(exposed_ids) => match exposed_ids.get_id(ident) {
Some(ident_id) => {
let symbol = Symbol::new(module_id, ident_id);
if is_type_name {
self.qualified_type_lookups.insert(symbol);
} else {
self.qualified_value_lookups.insert(symbol);
}
Ok(symbol)
}
None => {
let exposed_values = exposed_ids
.ident_strs()
.filter(|(_, ident)| ident.starts_with(|c: char| c.is_lowercase()))
.map(|(_, ident)| Lowercase::from(ident))
.collect();
Err(RuntimeError::ValueNotExposed {
module_name: self
.module_ids
.get_name(module_id)
.expect("Module ID known, but not in the module IDs somehow")
.clone(),
ident: Ident::from(ident),
region,
exposed_values,
})
}
},
None => Err(RuntimeError::ModuleNotImported {
module_name: self
.module_ids
.get_name(module_id)
.expect("Module ID known, but not in the module IDs somehow")
.clone(),
imported_modules: self
.dep_idents
.keys()
.filter_map(|module_id| self.module_ids.get_name(*module_id))
.map(|module_name| module_name.as_ref().into())
.collect(),
region,
module_exists: true,
}),
}
}
}
pub fn problem(&mut self, problem: Problem) {
self.problems.push(problem)
}

View File

@ -260,16 +260,19 @@ pub fn sketch_when_branches(
// NB: ordering the guard pattern first seems to be better at catching
// non-exhaustive constructors in the second argument; see the paper to see if
// there is a way to improve this in general.
vec![guard_pattern, sketch_pattern(target_var, &loc_pat.value)],
vec![
guard_pattern,
sketch_pattern(target_var, &loc_pat.pattern.value),
],
)]
} else {
// Simple case
vec![sketch_pattern(target_var, &loc_pat.value)]
vec![sketch_pattern(target_var, &loc_pat.pattern.value)]
};
let row = SketchedRow {
patterns,
region: loc_pat.region,
region: loc_pat.pattern.region,
guard,
redundant_mark: *redundant,
};

View File

@ -7,7 +7,7 @@ use crate::num::{
finish_parsing_base, finish_parsing_float, finish_parsing_num, float_expr_from_result,
int_expr_from_result, num_expr_from_result, FloatBound, IntBound, NumBound,
};
use crate::pattern::{canonicalize_pattern, BindingsFromPattern, Pattern};
use crate::pattern::{canonicalize_pattern, BindingsFromPattern, Pattern, PermitShadows};
use crate::procedure::References;
use crate::scope::Scope;
use crate::traverse::{walk_expr, Visitor};
@ -243,7 +243,7 @@ impl Expr {
match self {
Self::Num(..) => Category::Num,
Self::Int(..) => Category::Int,
Self::Float(..) => Category::Float,
Self::Float(..) => Category::Frac,
Self::Str(..) => Category::Str,
Self::SingleQuote(..) => Category::Character,
Self::List { .. } => Category::List,
@ -481,9 +481,18 @@ impl Recursive {
}
}
#[derive(Clone, Debug)]
pub struct WhenBranchPattern {
pub pattern: Loc<Pattern>,
/// Degenerate branch patterns are those that don't fully bind symbols that the branch body
/// needs. For example, in `A x | B y -> x`, the `B y` pattern is degenerate.
/// Degenerate patterns emit a runtime error if reached in a program.
pub degenerate: bool,
}
#[derive(Clone, Debug)]
pub struct WhenBranch {
pub patterns: Vec<Loc<Pattern>>,
pub patterns: Vec<WhenBranchPattern>,
pub value: Loc<Expr>,
pub guard: Option<Loc<Expr>>,
/// Whether this branch is redundant in the `when` it appears in
@ -497,11 +506,13 @@ impl WhenBranch {
.patterns
.first()
.expect("when branch has no pattern?")
.pattern
.region,
&self
.patterns
.last()
.expect("when branch has no pattern?")
.pattern
.region,
)
}
@ -512,7 +523,7 @@ impl WhenBranch {
Region::across_all(
self.patterns
.iter()
.map(|p| &p.region)
.map(|p| &p.pattern.region)
.chain([self.value.region].iter()),
)
}
@ -1187,6 +1198,7 @@ fn canonicalize_closure_body<'a>(
FunctionArg,
&loc_pattern.value,
loc_pattern.region,
PermitShadows(false),
);
can_args.push((
@ -1269,6 +1281,59 @@ fn canonicalize_closure_body<'a>(
(closure_data, output)
}
enum MultiPatternVariables {
OnePattern,
MultiPattern {
bound_occurrences: VecMap<Symbol, (Region, u8)>,
},
}
impl MultiPatternVariables {
#[inline(always)]
fn new(num_patterns: usize) -> Self {
if num_patterns > 1 {
Self::MultiPattern {
bound_occurrences: VecMap::with_capacity(2),
}
} else {
Self::OnePattern
}
}
#[inline(always)]
fn add_pattern(&mut self, pattern: &Loc<Pattern>) {
match self {
MultiPatternVariables::OnePattern => {}
MultiPatternVariables::MultiPattern { bound_occurrences } => {
for (sym, region) in BindingsFromPattern::new(pattern) {
if !bound_occurrences.contains_key(&sym) {
bound_occurrences.insert(sym, (region, 0));
}
bound_occurrences.get_mut(&sym).unwrap().1 += 1;
}
}
}
}
#[inline(always)]
fn get_unbound(self) -> impl Iterator<Item = (Symbol, Region)> {
let bound_occurrences = match self {
MultiPatternVariables::OnePattern => Default::default(),
MultiPatternVariables::MultiPattern { bound_occurrences } => bound_occurrences,
};
bound_occurrences
.into_iter()
.filter_map(|(sym, (region, occurs))| {
if occurs == 1 {
Some((sym, region))
} else {
None
}
})
}
}
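// A brief sketch of how this tracker is used; `can_pattern_a_x` and
// `can_pattern_b_y` are assumed to be the canonicalized patterns of a branch
// like `A x | B y -> ...`, where each of `x` and `y` is bound by only one pattern.
let mut multi_pattern_variables = MultiPatternVariables::new(2);
multi_pattern_variables.add_pattern(&can_pattern_a_x);
multi_pattern_variables.add_pattern(&can_pattern_b_y);
for (unbound_symbol, region) in multi_pattern_variables.get_unbound() {
    // Both `x` and `y` occur exactly once across the patterns, so each is reported here.
    env.problem(Problem::NotBoundInAllPatterns {
        unbound_symbol,
        region,
    });
}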
#[inline(always)]
fn canonicalize_when_branch<'a>(
env: &mut Env<'a>,
@ -1279,9 +1344,11 @@ fn canonicalize_when_branch<'a>(
output: &mut Output,
) -> (WhenBranch, References) {
let mut patterns = Vec::with_capacity(branch.patterns.len());
let mut multi_pattern_variables = MultiPatternVariables::new(branch.patterns.len());
for (i, loc_pattern) in branch.patterns.iter().enumerate() {
        let permit_shadows = PermitShadows(i > 0); // later patterns can shadow symbols defined in the first pattern.
// TODO report symbols not bound in all patterns
for loc_pattern in branch.patterns.iter() {
let can_pattern = canonicalize_pattern(
env,
var_store,
@ -1290,9 +1357,24 @@ fn canonicalize_when_branch<'a>(
WhenBranch,
&loc_pattern.value,
loc_pattern.region,
permit_shadows,
);
patterns.push(can_pattern);
multi_pattern_variables.add_pattern(&can_pattern);
patterns.push(WhenBranchPattern {
pattern: can_pattern,
degenerate: false,
});
}
let mut some_symbols_not_bound_in_all_patterns = false;
for (unbound_symbol, region) in multi_pattern_variables.get_unbound() {
env.problem(Problem::NotBoundInAllPatterns {
unbound_symbol,
region,
});
some_symbols_not_bound_in_all_patterns = true;
}
let (value, mut branch_output) = canonicalize_expr(
@ -1319,12 +1401,33 @@ fn canonicalize_when_branch<'a>(
// Now that we've collected all the references for this branch, check to see if
// any of the new idents it defined were unused. If any were, report it.
for (symbol, region) in BindingsFromPattern::new_many(patterns.iter()) {
if !output.references.has_value_lookup(symbol) {
let mut pattern_bound_symbols_body_needs = VecSet::default();
for (symbol, region) in BindingsFromPattern::new_many(patterns.iter().map(|pat| &pat.pattern)) {
if output.references.has_value_lookup(symbol) {
pattern_bound_symbols_body_needs.insert(symbol);
} else {
env.problem(Problem::UnusedDef(symbol, region));
}
}
if some_symbols_not_bound_in_all_patterns && !pattern_bound_symbols_body_needs.is_empty() {
// There might be branches that don't bind all the symbols needed by the body; mark those
// branches degenerate.
for pattern in patterns.iter_mut() {
let bound_by_pattern: VecSet<_> = BindingsFromPattern::new(&pattern.pattern)
.map(|(sym, _)| sym)
.collect();
let binds_all_needed = pattern_bound_symbols_body_needs
.iter()
.all(|sym| bound_by_pattern.contains(sym));
if !binds_all_needed {
pattern.degenerate = true;
}
}
}
(
WhenBranch {
patterns,
@ -1465,6 +1568,13 @@ fn canonicalize_var_lookup(
output.references.insert_value_lookup(symbol);
if scope.abilities_store.is_ability_member_name(symbol) {
// Is there a shadow implementation with the same name? If so, we might be in
// the def for that shadow. In that case add a value lookup of the shadow impl,
// so that it's marked as possibly-recursive.
if let Some(shadow) = scope.get_member_shadow(symbol) {
output.references.insert_value_lookup(shadow.value);
}
AbilityMember(
symbol,
Some(scope.abilities_store.fresh_specialization_id()),
@ -2190,12 +2300,17 @@ impl Declarations {
index
}
pub fn push_expect(&mut self, name: Symbol, loc_expr: Loc<Expr>) -> usize {
pub fn push_expect(
&mut self,
preceding_comment: Region,
name: Symbol,
loc_expr: Loc<Expr>,
) -> usize {
let index = self.declarations.len();
self.declarations.push(DeclarationTag::Expectation);
self.variables.push(Variable::BOOL);
self.symbols.push(Loc::at_zero(name));
self.symbols.push(Loc::at(preceding_comment, name));
self.annotations.push(None);
self.expressions.push(loc_expr);
@ -2622,12 +2737,15 @@ struct ExpectCollector {
}
impl crate::traverse::Visitor for ExpectCollector {
fn visit_expr(&mut self, expr: &Expr, region: Region, var: Variable) {
fn visit_expr(&mut self, expr: &Expr, _region: Region, var: Variable) {
if let Expr::Expect {
lookups_in_cond, ..
lookups_in_cond,
loc_condition,
..
} = expr
{
self.expects.insert(region, lookups_in_cond.to_vec());
self.expects
.insert(loc_condition.region, lookups_in_cond.to_vec());
}
walk_expr(self, expr, var)

View File

@ -1,4 +1,4 @@
use crate::abilities::{PendingAbilitiesStore, ResolvedSpecializations};
use crate::abilities::{ImplKey, PendingAbilitiesStore, ResolvedImpl};
use crate::annotation::canonicalize_annotation;
use crate::def::{canonicalize_defs, Def};
use crate::effect_module::HostedGeneratedFunctions;
@ -103,12 +103,20 @@ impl ExposedForModule {
}
}
/// During type solving and monomorphization, a module must know how its imported ability
/// implementations are resolved: are they derived, or do they have a concrete implementation?
///
/// Unfortunately we cannot keep this information opaque, as it's important for properly
/// restoring specialization lambda sets. As such, we need to export implementation information,
/// which is the job of this structure.
pub type ResolvedImplementations = VecMap<ImplKey, ResolvedImpl>;
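// For instance, a module exporting a custom `hash` implementation for an opaque
// `Id` alongside a derived `Eq` would export roughly the following; `id_opaque`,
// `hash_member`, `eq_member`, and `hash_specialization_info` are hypothetical.
let mut resolved_implementations: ResolvedImplementations = VecMap::default();
resolved_implementations.insert(
    ImplKey { opaque: id_opaque, ability_member: hash_member },
    ResolvedImpl::Impl(hash_specialization_info),
);
resolved_implementations.insert(
    ImplKey { opaque: id_opaque, ability_member: eq_member },
    ResolvedImpl::Derived,
);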
/// The types of all exposed values/functions of a module. This includes ability member
/// specializations.
#[derive(Clone, Debug)]
pub struct ExposedModuleTypes {
pub exposed_types_storage_subs: ExposedTypesStorageSubs,
pub resolved_specializations: ResolvedSpecializations,
pub resolved_implementations: ResolvedImplementations,
}
#[derive(Debug)]
@ -434,7 +442,7 @@ pub fn canonicalize_module_defs<'a>(
.iter()
.map(|(symbol, loc_ann)| {
// We've already canonicalized the module, so there are no pending abilities.
let pending_abilities_in_scope = &[];
let pending_abilities_in_scope = &Default::default();
let ann = canonicalize_annotation(
&mut env,
@ -910,7 +918,10 @@ fn fix_values_captured_in_closure_expr(
            // patterns can contain default expressions, so we must go over them too!
for loc_pat in branch.patterns.iter_mut() {
fix_values_captured_in_closure_pattern(&mut loc_pat.value, no_capture_symbols);
fix_values_captured_in_closure_pattern(
&mut loc_pat.pattern.value,
no_capture_symbols,
);
}
if let Some(guard) = &mut branch.guard {

View File

@ -82,9 +82,15 @@ fn desugar_value_def<'a>(arena: &'a Bump, def: &'a ValueDef<'a>) -> ValueDef<'a>
body_pattern: *body_pattern,
body_expr: desugar_expr(arena, body_expr),
},
Expect(condition) => {
Expect {
condition,
preceding_comment,
} => {
let desugared_condition = &*arena.alloc(desugar_expr(arena, condition));
Expect(desugared_condition)
Expect {
condition: desugared_condition,
preceding_comment: *preceding_comment,
}
}
}
}

View File

@ -5,7 +5,7 @@ use crate::num::{
finish_parsing_base, finish_parsing_float, finish_parsing_num, FloatBound, IntBound, NumBound,
ParsedNumResult,
};
use crate::scope::Scope;
use crate::scope::{PendingAbilitiesInScope, Scope};
use roc_module::ident::{Ident, Lowercase, TagName};
use roc_module::symbol::Symbol;
use roc_parse::ast::{self, StrLiteral, StrSegment};
@ -175,10 +175,12 @@ pub enum DestructType {
Guard(Variable, Loc<Pattern>),
}
#[allow(clippy::too_many_arguments)]
pub fn canonicalize_def_header_pattern<'a>(
env: &mut Env<'a>,
var_store: &mut VarStore,
scope: &mut Scope,
pending_abilities_in_scope: &PendingAbilitiesInScope,
output: &mut Output,
pattern_type: PatternType,
pattern: &ast::Pattern<'a>,
@ -189,7 +191,11 @@ pub fn canonicalize_def_header_pattern<'a>(
match pattern {
// Identifiers that shadow ability members may appear (and may only appear) at the header of a def.
Identifier(name) => {
match scope.introduce_or_shadow_ability_member((*name).into(), region) {
match scope.introduce_or_shadow_ability_member(
pending_abilities_in_scope,
(*name).into(),
region,
) {
Ok((symbol, shadowing_ability_member)) => {
let can_pattern = match shadowing_ability_member {
// A fresh identifier.
@ -199,6 +205,7 @@ pub fn canonicalize_def_header_pattern<'a>(
}
// Likely a specialization of an ability.
Some(ability_member_name) => {
output.references.insert_bound(symbol);
output.references.insert_value_lookup(ability_member_name);
Pattern::AbilityMemberSpecialization {
ident: symbol,
@ -221,10 +228,26 @@ pub fn canonicalize_def_header_pattern<'a>(
}
}
}
_ => canonicalize_pattern(env, var_store, scope, output, pattern_type, pattern, region),
_ => canonicalize_pattern(
env,
var_store,
scope,
output,
pattern_type,
pattern,
region,
PermitShadows(false),
),
}
}
/// Allow binding of symbols that appear shadowed.
///
/// For example, in the branch `A x | B x -> ...`, both patterns bind `x`; that's not a shadow!
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct PermitShadows(pub bool);
#[allow(clippy::too_many_arguments)]
pub fn canonicalize_pattern<'a>(
env: &mut Env<'a>,
var_store: &mut VarStore,
@ -233,6 +256,7 @@ pub fn canonicalize_pattern<'a>(
pattern_type: PatternType,
pattern: &ast::Pattern<'a>,
region: Region,
permit_shadows: PermitShadows,
) -> Loc<Pattern> {
use roc_parse::ast::Pattern::*;
use PatternType::*;
@ -244,15 +268,21 @@ pub fn canonicalize_pattern<'a>(
Pattern::Identifier(symbol)
}
Err((original_region, shadow, new_symbol)) => {
env.problem(Problem::RuntimeError(RuntimeError::Shadowing {
original_region,
shadow: shadow.clone(),
kind: ShadowKind::Variable,
}));
output.references.insert_bound(new_symbol);
Err((shadowed_symbol, shadow, new_symbol)) => {
if permit_shadows.0 {
output.references.insert_bound(shadowed_symbol.value);
Pattern::Shadowed(original_region, shadow, new_symbol)
Pattern::Identifier(shadowed_symbol.value)
} else {
env.problem(Problem::RuntimeError(RuntimeError::Shadowing {
original_region: shadowed_symbol.region,
shadow: shadow.clone(),
kind: ShadowKind::Variable,
}));
output.references.insert_bound(new_symbol);
Pattern::Shadowed(shadowed_symbol.region, shadow, new_symbol)
}
}
},
Tag(name) => {
@ -283,6 +313,7 @@ pub fn canonicalize_pattern<'a>(
pattern_type,
&loc_pattern.value,
loc_pattern.region,
permit_shadows,
);
can_patterns.push((var_store.fresh(), can_pattern));
@ -354,10 +385,7 @@ pub fn canonicalize_pattern<'a>(
ptype => unsupported_pattern(env, ptype, region),
},
Underscore(_) => match pattern_type {
WhenBranch | FunctionArg => Pattern::Underscore,
TopLevelDef | DefExpr => bad_underscore(env, region),
},
Underscore(_) => Pattern::Underscore,
&NumLiteral(str) => match pattern_type {
WhenBranch => match finish_parsing_num(str) {
@ -451,6 +479,7 @@ pub fn canonicalize_pattern<'a>(
pattern_type,
sub_pattern,
region,
permit_shadows,
)
}
RecordDestructure(patterns) => {
@ -476,9 +505,9 @@ pub fn canonicalize_pattern<'a>(
},
});
}
Err((original_region, shadow, new_symbol)) => {
Err((shadowed_symbol, shadow, new_symbol)) => {
env.problem(Problem::RuntimeError(RuntimeError::Shadowing {
original_region,
original_region: shadowed_symbol.region,
shadow: shadow.clone(),
kind: ShadowKind::Variable,
}));
@ -487,8 +516,11 @@ pub fn canonicalize_pattern<'a>(
// are, we're definitely shadowed and will
// get a runtime exception as soon as we
// encounter the first bad pattern.
opt_erroneous =
Some(Pattern::Shadowed(original_region, shadow, new_symbol));
opt_erroneous = Some(Pattern::Shadowed(
shadowed_symbol.region,
shadow,
new_symbol,
));
}
};
}
@ -505,6 +537,7 @@ pub fn canonicalize_pattern<'a>(
pattern_type,
&loc_guard.value,
loc_guard.region,
permit_shadows,
);
destructs.push(Loc {
@ -544,9 +577,9 @@ pub fn canonicalize_pattern<'a>(
},
});
}
Err((original_region, shadow, new_symbol)) => {
Err((shadowed_symbol, shadow, new_symbol)) => {
env.problem(Problem::RuntimeError(RuntimeError::Shadowing {
original_region,
original_region: shadowed_symbol.region,
shadow: shadow.clone(),
kind: ShadowKind::Variable,
}));
@ -555,8 +588,11 @@ pub fn canonicalize_pattern<'a>(
// are, we're definitely shadowed and will
// get a runtime exception as soon as we
// encounter the first bad pattern.
opt_erroneous =
Some(Pattern::Shadowed(original_region, shadow, new_symbol));
opt_erroneous = Some(Pattern::Shadowed(
shadowed_symbol.region,
shadow,
new_symbol,
));
}
};
}
@ -614,16 +650,6 @@ fn unsupported_pattern(env: &mut Env, pattern_type: PatternType, region: Region)
Pattern::UnsupportedPattern(region)
}
fn bad_underscore(env: &mut Env, region: Region) -> Pattern {
use roc_problem::can::BadPattern;
env.problem(Problem::UnsupportedPattern(
BadPattern::UnderscoreInDef,
region,
));
Pattern::UnsupportedPattern(region)
}
/// When we detect a malformed pattern like `3.X` or `0b5`,
/// report it to Env and return an UnsupportedPattern runtime error pattern.
fn malformed_pattern(env: &mut Env, problem: MalformedPatternProblem, region: Region) -> Pattern {


@ -1,4 +1,4 @@
use roc_collections::VecMap;
use roc_collections::{VecMap, VecSet};
use roc_module::ident::Ident;
use roc_module::symbol::{IdentId, IdentIds, ModuleId, Symbol};
use roc_problem::can::RuntimeError;
@ -9,6 +9,9 @@ use crate::abilities::PendingAbilitiesStore;
use bitvec::vec::BitVec;
// ability -> member names
pub(crate) type PendingAbilitiesInScope = VecMap<Symbol, VecSet<Symbol>>;
#[derive(Clone, Debug)]
pub struct Scope {
/// The type aliases currently in scope
@ -27,6 +30,13 @@ pub struct Scope {
/// Identifiers that are imported (and introduced in the header)
imports: Vec<(Ident, Symbol, Region)>,
/// Shadows of an ability member, for example a local specialization of `eq` for the ability
/// member `Eq has eq : a, a -> Bool | a has Eq` gets a shadow symbol it can use for its
/// implementation.
///
/// Only one shadow of an ability member is permitted per scope.
shadows: VecMap<Symbol, Loc<Symbol>>,
/// Identifiers that are in scope, and defined in the current module
pub locals: ScopedIdentIds,
}
@ -48,6 +58,7 @@ impl Scope {
locals: ScopedIdentIds::from_ident_ids(home, initial_ident_ids),
aliases: VecMap::default(),
abilities_store: starting_abilities_store,
shadows: VecMap::default(),
imports,
}
}
@ -56,6 +67,10 @@ impl Scope {
self.lookup_str(ident.as_str(), region)
}
pub fn lookup_ability_member_shadow(&self, member: Symbol) -> Option<Symbol> {
self.shadows.get(&member).map(|loc_shadow| loc_shadow.value)
}
pub fn add_docs_imports(&mut self) {
self.imports
.push(("Dict".into(), Symbol::DICT_DICT, Region::zero()));
@ -238,7 +253,7 @@ impl Scope {
&mut self,
ident: Ident,
region: Region,
) -> Result<Symbol, (Region, Loc<Ident>, Symbol)> {
) -> Result<Symbol, (Loc<Symbol>, Loc<Ident>, Symbol)> {
self.introduce_str(ident.as_str(), region)
}
@ -246,17 +261,17 @@ impl Scope {
&mut self,
ident: &str,
region: Region,
) -> Result<Symbol, (Region, Loc<Ident>, Symbol)> {
) -> Result<Symbol, (Loc<Symbol>, Loc<Ident>, Symbol)> {
match self.introduce_help(ident, region) {
Ok(symbol) => Ok(symbol),
Err((_, original_region)) => {
Err((shadowed_symbol, original_region)) => {
let shadow = Loc {
value: Ident::from(ident),
region,
};
let symbol = self.locals.scopeless_symbol(ident, region);
Err((original_region, shadow, symbol))
Err((Loc::at(original_region, shadowed_symbol), shadow, symbol))
}
}
}
@ -288,6 +303,7 @@ impl Scope {
#[allow(clippy::type_complexity)]
pub fn introduce_or_shadow_ability_member(
&mut self,
pending_abilities_in_scope: &PendingAbilitiesInScope,
ident: Ident,
region: Region,
) -> Result<(Symbol, Option<Symbol>), (Region, Loc<Ident>, Symbol)> {
@ -297,11 +313,27 @@ impl Scope {
Err((original_symbol, original_region)) => {
let shadow_symbol = self.scopeless_symbol(ident, region);
if self.abilities_store.is_ability_member_name(original_symbol) {
self.abilities_store
.register_specializing_symbol(shadow_symbol, original_symbol);
if self.abilities_store.is_ability_member_name(original_symbol)
|| pending_abilities_in_scope
.iter()
.any(|(_, members)| members.iter().any(|m| *m == original_symbol))
{
match self.shadows.get(&original_symbol) {
Some(loc_original_shadow) => {
// Duplicate shadow of an ability member; that's illegal.
let shadow = Loc {
value: ident.clone(),
region,
};
Err((loc_original_shadow.region, shadow, shadow_symbol))
}
None => {
self.shadows
.insert(original_symbol, Loc::at(region, shadow_symbol));
Ok((shadow_symbol, Some(original_symbol)))
Ok((shadow_symbol, Some(original_symbol)))
}
}
} else {
// This is an illegal shadow.
let shadow = Loc {
@ -316,6 +348,10 @@ impl Scope {
}
}
pub fn get_member_shadow(&self, ability_member: Symbol) -> Option<&Loc<Symbol>> {
self.shadows.get(&ability_member)
}
/// Create a new symbol, but don't add it to the scope (yet)
///
/// Used for record guards like { x: Just _ } where the `x` is not added to the scope,
@ -606,13 +642,13 @@ mod test {
assert!(scope.lookup(&ident, Region::zero()).is_err());
let first = scope.introduce(ident.clone(), region1).unwrap();
let (original_region, _ident, shadow_symbol) =
let (original, _ident, shadow_symbol) =
scope.introduce(ident.clone(), region2).unwrap_err();
scope.register_debug_idents();
assert_ne!(first, shadow_symbol);
assert_eq!(original_region, region1);
assert_eq!(original.region, region1);
let lookup = scope.lookup(&ident, Region::zero()).unwrap();
@ -773,13 +809,13 @@ mod test {
scope.import(ident.clone(), symbol, region1).unwrap();
let (original_region, _ident, shadow_symbol) =
let (original, _ident, shadow_symbol) =
scope.introduce(ident.clone(), region2).unwrap_err();
scope.register_debug_idents();
assert_ne!(symbol, shadow_symbol);
assert_eq!(original_region, region1);
assert_eq!(original.region, region1);
let lookup = scope.lookup(&ident, Region::zero()).unwrap();


@ -2,7 +2,7 @@
use roc_module::{ident::Lowercase, symbol::Symbol};
use roc_region::all::{Loc, Region};
use roc_types::subs::Variable;
use roc_types::{subs::Variable, types::MemberImpl};
use crate::{
abilities::AbilitiesStore,
@ -323,9 +323,13 @@ pub fn walk_when_branch<V: Visitor>(
redundant: _,
} = branch;
patterns
.iter()
.for_each(|pat| visitor.visit_pattern(&pat.value, pat.region, pat.value.opt_var()));
patterns.iter().for_each(|pat| {
visitor.visit_pattern(
&pat.pattern.value,
pat.pattern.region,
pat.pattern.value.opt_var(),
)
});
visitor.visit_expr(&value.value, value.region, expr_var);
if let Some(guard) = guard {
visitor.visit_expr(&guard.value, guard.region, Variable::BOOL);
@ -587,8 +591,8 @@ pub fn find_ability_member_and_owning_type_at(
abilities_store: &AbilitiesStore,
) -> Option<Symbol> {
abilities_store
.iter_specializations()
.find(|(_, ms)| ms.symbol == symbol)
.iter_declared_implementations()
.find(|(_, member_impl)| matches!(member_impl, MemberImpl::Impl(sym) if *sym == symbol))
.map(|(spec, _)| spec.1)
}
}


@ -377,8 +377,8 @@ mod test_can {
let arena = Bump::new();
let CanExprOut { problems, .. } = can_expr_with(&arena, test_home(), src);
assert_eq!(problems.len(), 1);
assert!(problems.iter().all(|problem| matches!(
assert_eq!(problems.len(), 2);
assert!(problems.iter().any(|problem| matches!(
problem,
Problem::RuntimeError(RuntimeError::Shadowing { .. })
)));
@ -398,8 +398,8 @@ mod test_can {
let arena = Bump::new();
let CanExprOut { problems, .. } = can_expr_with(&arena, test_home(), src);
assert_eq!(problems.len(), 1);
assert!(problems.iter().all(|problem| matches!(
assert_eq!(problems.len(), 2);
assert!(problems.iter().any(|problem| matches!(
problem,
Problem::RuntimeError(RuntimeError::Shadowing { .. })
)));
@ -419,9 +419,9 @@ mod test_can {
let arena = Bump::new();
let CanExprOut { problems, .. } = can_expr_with(&arena, test_home(), src);
assert_eq!(problems.len(), 1);
assert_eq!(problems.len(), 2);
println!("{:#?}", problems);
assert!(problems.iter().all(|problem| matches!(
assert!(problems.iter().any(|problem| matches!(
problem,
Problem::RuntimeError(RuntimeError::Shadowing { .. })
)));


@ -118,6 +118,10 @@ impl<K: PartialEq, V> VecMap<K, V> {
(self.keys, self.values)
}
pub fn unzip_slices(&self) -> (&[K], &[V]) {
(&self.keys, &self.values)
}
/// # Safety
///
/// keys and values must have the same length, and there must not


@ -118,17 +118,17 @@ pub fn float_literal(
precision_var,
bound,
region,
Category::Float,
Category::Frac,
);
constrs.extend([
constraints.equal_types(
num_type.clone(),
ForReason(reason, num_float(Type::Variable(precision_var)), region),
Category::Float,
Category::Frac,
region,
),
constraints.equal_types(num_type, expected, Category::Float, region),
constraints.equal_types(num_type, expected, Category::Frac, region),
]);
let and_constraint = constraints.and_constraint(constrs);


@ -735,7 +735,8 @@ pub fn constrain_expr(
let mut pattern_vars = Vec::with_capacity(branches.len());
let mut pattern_headers = SendMap::default();
let mut pattern_cons = Vec::with_capacity(branches.len() + 2);
let mut branch_cons = Vec::with_capacity(branches.len());
let mut delayed_is_open_constraints = Vec::with_capacity(2);
let mut body_cons = Vec::with_capacity(branches.len());
for (index, when_branch) in branches.iter().enumerate() {
let expected_pattern = |sub_pattern, sub_region| {
@ -749,19 +750,24 @@ pub fn constrain_expr(
)
};
let (new_pattern_vars, new_pattern_headers, pattern_con, branch_con) =
constrain_when_branch_help(
constraints,
env,
region,
when_branch,
expected_pattern,
branch_expr_reason(
&expected,
HumanIndex::zero_based(index),
when_branch.value.region,
),
);
let ConstrainedBranch {
vars: new_pattern_vars,
headers: new_pattern_headers,
pattern_constraints,
is_open_constrains,
body_constraints,
} = constrain_when_branch_help(
constraints,
env,
region,
when_branch,
expected_pattern,
branch_expr_reason(
&expected,
HumanIndex::zero_based(index),
when_branch.value.region,
),
);
pattern_vars.extend(new_pattern_vars);
@ -779,9 +785,10 @@ pub fn constrain_expr(
}
pattern_headers.extend(new_pattern_headers);
pattern_cons.push(pattern_con);
pattern_cons.push(pattern_constraints);
delayed_is_open_constraints.extend(is_open_constrains);
branch_cons.push(branch_con);
body_cons.push(body_constraints);
}
// Deviation: elm adds another layer of And nesting
@ -793,6 +800,11 @@ pub fn constrain_expr(
// The return type of each branch must equal the return type of
// the entire when-expression.
// Layer on the "is-open" constraints at the very end, after we know what the branch
// types are supposed to look like without open-ness.
let is_open_constr = constraints.and_constraint(delayed_is_open_constraints);
pattern_cons.push(is_open_constr);
// After solving the condition variable with what's expected from the branch patterns,
// check it against the condition expression.
//
@ -826,7 +838,7 @@ pub fn constrain_expr(
// Solve all the pattern constraints together, introducing variables in the pattern as
// need be before solving the bodies.
let pattern_constraints = constraints.and_constraint(pattern_cons);
let body_constraints = constraints.and_constraint(branch_cons);
let body_constraints = constraints.and_constraint(body_cons);
let when_body_con = constraints.let_constraint(
[],
pattern_vars,
@ -1790,6 +1802,14 @@ fn constrain_value_def(
}
}
struct ConstrainedBranch {
vars: Vec<Variable>,
headers: VecMap<Symbol, Loc<Type>>,
pattern_constraints: Constraint,
is_open_constrains: Vec<Constraint>,
body_constraints: Constraint,
}
/// Constrain a when branch, returning the variables and headers its patterns introduce,
/// plus the pattern, is-open, and body constraints (see `ConstrainedBranch`).
/// We want to constrain all pattern constraints in a `when` before body constraints.
#[inline(always)]
@ -1800,12 +1820,7 @@ fn constrain_when_branch_help(
when_branch: &WhenBranch,
pattern_expected: impl Fn(HumanIndex, Region) -> PExpected<Type>,
expr_expected: Expected<Type>,
) -> (
Vec<Variable>,
VecMap<Symbol, Loc<Type>>,
Constraint,
Constraint,
) {
) -> ConstrainedBranch {
let ret_constraint = constrain_expr(
constraints,
env,
@ -1822,53 +1837,91 @@ fn constrain_when_branch_help(
};
for (i, loc_pattern) in when_branch.patterns.iter().enumerate() {
let pattern_expected = pattern_expected(HumanIndex::zero_based(i), loc_pattern.region);
let pattern_expected =
pattern_expected(HumanIndex::zero_based(i), loc_pattern.pattern.region);
let mut partial_state = PatternState::default();
constrain_pattern(
constraints,
env,
&loc_pattern.value,
loc_pattern.region,
&loc_pattern.pattern.value,
loc_pattern.pattern.region,
pattern_expected,
&mut state,
&mut partial_state,
);
state.vars.extend(partial_state.vars);
state.constraints.extend(partial_state.constraints);
state
.delayed_is_open_constraints
.extend(partial_state.delayed_is_open_constraints);
if i == 0 {
state.headers.extend(partial_state.headers);
} else {
// Make sure the bound variables in the patterns on the same branch agree in their types.
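// e.g. in a branch like `Ok n | Err n -> n`, both alternatives bind `n`,
// so the two bindings must unify to the same type.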
for (sym, typ1) in state.headers.iter() {
if let Some(typ2) = partial_state.headers.get(sym) {
state.constraints.push(constraints.equal_types(
typ1.value.clone(),
Expected::NoExpectation(typ2.value.clone()),
Category::When,
typ2.region,
));
}
// If the pattern doesn't bind all symbols introduced in the branch we'll have
// reported a canonicalization error, but still might reach here; that's okay.
}
// Add any variables this pattern binds that the other patterns don't bind.
// This will already have been reported as an error, but we still might be able to
// solve their types.
for (sym, ty) in partial_state.headers {
if !state.headers.contains_key(&sym) {
state.headers.insert(sym, ty);
}
}
}
}
let (pattern_constraints, body_constraints) = if let Some(loc_guard) = &when_branch.guard {
let guard_constraint = constrain_expr(
constraints,
env,
region,
&loc_guard.value,
Expected::ForReason(
Reason::WhenGuard,
Type::Variable(Variable::BOOL),
loc_guard.region,
),
);
let (pattern_constraints, delayed_is_open_constraints, body_constraints) =
if let Some(loc_guard) = &when_branch.guard {
let guard_constraint = constrain_expr(
constraints,
env,
region,
&loc_guard.value,
Expected::ForReason(
Reason::WhenGuard,
Type::Variable(Variable::BOOL),
loc_guard.region,
),
);
// must introduce the headers from the pattern before constraining the guard
state
.constraints
.append(&mut state.delayed_is_open_constraints);
let state_constraints = constraints.and_constraint(state.constraints);
let inner = constraints.let_constraint([], [], [], guard_constraint, ret_constraint);
// must introduce the headers from the pattern before constraining the guard
let delayed_is_open_constraints = state.delayed_is_open_constraints;
let state_constraints = constraints.and_constraint(state.constraints);
let inner = constraints.let_constraint([], [], [], guard_constraint, ret_constraint);
(state_constraints, inner)
} else {
state
.constraints
.append(&mut state.delayed_is_open_constraints);
let state_constraints = constraints.and_constraint(state.constraints);
(state_constraints, ret_constraint)
};
(state_constraints, delayed_is_open_constraints, inner)
} else {
let delayed_is_open_constraints = state.delayed_is_open_constraints;
let state_constraints = constraints.and_constraint(state.constraints);
(
state_constraints,
delayed_is_open_constraints,
ret_constraint,
)
};
(
state.vars,
state.headers,
ConstrainedBranch {
vars: state.vars,
headers: state.headers,
pattern_constraints,
is_open_constrains: delayed_is_open_constraints,
body_constraints,
)
}
}
fn constrain_field(


@ -289,7 +289,7 @@ pub fn constrain_pattern(
num_precision_var,
bound,
region,
Category::Float,
Category::Frac,
);
// Link the free num var with the float var and our expectation.
@ -298,7 +298,7 @@ pub fn constrain_pattern(
state.constraints.push(constraints.equal_types(
num_type.clone(), // TODO check me if something breaks!
Expected::NoExpectation(float_type),
Category::Float,
Category::Frac,
region,
));
@ -498,9 +498,6 @@ pub fn constrain_pattern(
state.vars.push(*ext_var);
state.constraints.push(whole_con);
state.constraints.push(tag_con);
state
.constraints
.append(&mut state.delayed_is_open_constraints);
}
UnwrappedOpaque {


@ -119,6 +119,11 @@ flags! {
/// Prints LLVM function verification output.
ROC_PRINT_LLVM_FN_VERIFICATION
// ===WASM Gen===
/// Writes a `final.wasm` file to /tmp
ROC_WRITE_FINAL_WASM
// ===Load===
/// Print load phases as they complete.


@ -3,7 +3,9 @@
use std::iter::once;
use roc_can::abilities::SpecializationLambdaSets;
use roc_can::expr::{AnnotatedMark, ClosureData, Expr, Field, Recursive, WhenBranch};
use roc_can::expr::{
AnnotatedMark, ClosureData, Expr, Field, Recursive, WhenBranch, WhenBranchPattern,
};
use roc_can::module::ExposedByModule;
use roc_can::pattern::Pattern;
use roc_collections::SendMap;
@ -672,6 +674,10 @@ fn to_encoder_tag_union(
.map(|(var, sym)| (*var, Loc::at_zero(Pattern::Identifier(*sym))))
.collect(),
};
let branch_pattern = WhenBranchPattern {
pattern: Loc::at_zero(pattern),
degenerate: false,
};
// whole type of the elements in [ Encode.toEncoder v1, Encode.toEncoder v2 ]
let whole_payload_encoders_var = env.subs.fresh_unnamed_flex_var();
@ -792,7 +798,7 @@ fn to_encoder_tag_union(
env.unify(this_encoder_var, whole_tag_encoders_var);
WhenBranch {
patterns: vec![Loc::at_zero(pattern)],
patterns: vec![branch_pattern],
value: Loc::at_zero(encode_tag_call),
guard: None,
redundant: RedundantMark::known_non_redundant(),


@ -158,7 +158,7 @@ impl<'a> Formattable for ValueDef<'a> {
}
Body(loc_pattern, loc_expr) => loc_pattern.is_multiline() || loc_expr.is_multiline(),
AnnotatedBody { .. } => true,
Expect(loc_expr) => loc_expr.is_multiline(),
Expect { condition, .. } => condition.is_multiline(),
}
}
@ -232,7 +232,7 @@ impl<'a> Formattable for ValueDef<'a> {
Body(loc_pattern, loc_expr) => {
fmt_body(buf, &loc_pattern.value, &loc_expr.value, indent);
}
Expect(condition) => fmt_expect(buf, condition, self.is_multiline(), indent),
Expect { condition, .. } => fmt_expect(buf, condition, self.is_multiline(), indent),
AnnotatedBody {
ann_pattern,
ann_type,


@ -540,7 +540,13 @@ impl<'a> RemoveSpaces<'a> for ValueDef<'a> {
body_pattern: arena.alloc(body_pattern.remove_spaces(arena)),
body_expr: arena.alloc(body_expr.remove_spaces(arena)),
},
Expect(a) => Expect(arena.alloc(a.remove_spaces(arena))),
Expect {
condition,
preceding_comment,
} => Expect {
condition: arena.alloc(condition.remove_spaces(arena)),
preceding_comment,
},
}
}
}


@ -8,7 +8,9 @@ use roc_collections::all::MutMap;
use roc_error_macros::internal_error;
use roc_module::symbol::{Interns, Symbol};
use roc_mono::code_gen_help::CodeGenHelp;
use roc_mono::ir::{BranchInfo, JoinPointId, Literal, Param, ProcLayout, SelfRecursive, Stmt};
use roc_mono::ir::{
BranchInfo, JoinPointId, ListLiteralElement, Literal, Param, ProcLayout, SelfRecursive, Stmt,
};
use roc_mono::layout::{Builtin, Layout, TagIdIntType, UnionLayout};
use roc_target::TargetInfo;
use std::marker::PhantomData;
@ -21,6 +23,7 @@ pub(crate) mod x86_64;
use storage::StorageManager;
const REFCOUNT_ONE: u64 = i64::MIN as u64;
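// A refcount of one is stored as isize::MIN; this constant is that value's u64 bit pattern.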
// TODO: double-check and handle over/underflow in all of the number functions.
pub trait CallConv<GeneralReg: RegTrait, FloatReg: RegTrait, ASM: Assembler<GeneralReg, FloatReg>>:
@ -1075,6 +1078,133 @@ impl<
ASM::add_reg64_reg64_imm32(&mut self.buf, dst_reg, CC::BASE_PTR_REG, offset);
}
fn create_empty_array(&mut self, sym: &Symbol) {
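// An empty list is a zeroed { ptr, len, cap } triple: three 8-byte words, hence the 24-byte stack area.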
let base_offset = self.storage_manager.claim_stack_area(sym, 24);
self.storage_manager
.with_tmp_general_reg(&mut self.buf, |_storage_manager, buf, reg| {
ASM::mov_reg64_imm64(buf, reg, 0);
ASM::mov_base32_reg64(buf, base_offset, reg);
ASM::mov_base32_reg64(buf, base_offset + 8, reg);
ASM::mov_base32_reg64(buf, base_offset + 16, reg);
});
}
fn create_array(
&mut self,
sym: &Symbol,
elem_layout: &Layout<'a>,
elems: &'a [ListLiteralElement<'a>],
) {
// Allocate
// This requires at least 8 for the refcount alignment.
let allocation_alignment = std::cmp::max(
8,
elem_layout.allocation_alignment_bytes(self.storage_manager.target_info()) as u64,
);
let elem_size = elem_layout.stack_size(self.storage_manager.target_info()) as u64;
let allocation_size = elem_size * elems.len() as u64 + allocation_alignment /* add space for refcount */;
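// e.g. for three U64 elements: 3 * 8 bytes of data + 8 bytes reserved in front for the refcount = 32 bytes.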
let u64_layout = Layout::Builtin(Builtin::Int(IntWidth::U64));
self.load_literal(
&Symbol::DEV_TMP,
&u64_layout,
&Literal::Int((allocation_size as i128).to_ne_bytes()),
);
let u32_layout = Layout::Builtin(Builtin::Int(IntWidth::U32));
self.load_literal(
&Symbol::DEV_TMP2,
&u32_layout,
&Literal::Int((allocation_alignment as i128).to_ne_bytes()),
);
self.build_fn_call(
&Symbol::DEV_TMP3,
"roc_alloc".to_string(),
&[Symbol::DEV_TMP, Symbol::DEV_TMP2],
&[u64_layout, u32_layout],
&u64_layout,
);
self.free_symbol(&Symbol::DEV_TMP);
self.free_symbol(&Symbol::DEV_TMP2);
// Fill pointer with elems
let ptr_reg = self
.storage_manager
.load_to_general_reg(&mut self.buf, &Symbol::DEV_TMP3);
// Point to first element of array.
ASM::add_reg64_reg64_imm32(&mut self.buf, ptr_reg, ptr_reg, allocation_alignment as i32);
// fill refcount at -8.
self.storage_manager.with_tmp_general_reg(
&mut self.buf,
|_storage_manager, buf, tmp_reg| {
ASM::mov_reg64_imm64(buf, tmp_reg, REFCOUNT_ONE as i64);
ASM::mov_mem64_offset32_reg64(buf, ptr_reg, -8, tmp_reg);
},
);
// Copy everything into output array.
let mut elem_offset = 0;
for elem in elems {
// TODO: this could be a lot faster when loading large lists
// if we move matching on the element layout to outside this loop.
// It also greatly bloats the code here.
// Refactor this and switch to one external match.
// We could also make loading individual literals much faster
let elem_sym = match elem {
ListLiteralElement::Symbol(sym) => sym,
ListLiteralElement::Literal(lit) => {
self.load_literal(&Symbol::DEV_TMP, elem_layout, lit);
&Symbol::DEV_TMP
}
};
// TODO: Expand to all types.
match elem_layout {
Layout::Builtin(Builtin::Int(IntWidth::I64 | IntWidth::U64)) => {
let sym_reg = self
.storage_manager
.load_to_general_reg(&mut self.buf, elem_sym);
ASM::mov_mem64_offset32_reg64(&mut self.buf, ptr_reg, elem_offset, sym_reg);
}
_ if elem_size == 0 => {}
_ if elem_size > 8 => {
let (from_offset, size) = self.storage_manager.stack_offset_and_size(elem_sym);
debug_assert!(from_offset % 8 == 0);
debug_assert!(size % 8 == 0);
debug_assert_eq!(size as u64, elem_size);
self.storage_manager.with_tmp_general_reg(
&mut self.buf,
|_storage_manager, buf, tmp_reg| {
for i in (0..size as i32).step_by(8) {
ASM::mov_reg64_base32(buf, tmp_reg, from_offset + i);
ASM::mov_mem64_offset32_reg64(buf, ptr_reg, elem_offset + i, tmp_reg);
}
},
);
}
x => todo!("copying data to list with layout, {:?}", x),
}
elem_offset += elem_size as i32;
if elem_sym == &Symbol::DEV_TMP {
self.free_symbol(elem_sym);
}
}
// Setup list on stack.
self.storage_manager.with_tmp_general_reg(
&mut self.buf,
|storage_manager, buf, tmp_reg| {
let base_offset = storage_manager.claim_stack_area(sym, 24);
ASM::mov_base32_reg64(buf, base_offset, ptr_reg);
ASM::mov_reg64_imm64(buf, tmp_reg, elems.len() as i64);
ASM::mov_base32_reg64(buf, base_offset + 8, tmp_reg);
ASM::mov_base32_reg64(buf, base_offset + 16, tmp_reg);
},
);
self.free_symbol(&Symbol::DEV_TMP3);
}
fn create_struct(&mut self, sym: &Symbol, layout: &Layout<'a>, fields: &'a [Symbol]) {
self.storage_manager
.create_struct(&mut self.buf, sym, layout, fields);


@ -594,16 +594,16 @@ impl<
let (data_size, data_alignment) =
union_layout.data_size_and_alignment(self.target_info);
let id_offset = data_size - data_alignment;
let id_builtin = union_layout.tag_id_builtin();
let discriminant = union_layout.discriminant();
let size = id_builtin.stack_size(self.target_info);
let size = discriminant.stack_size();
self.allocation_map.insert(*sym, owned_data);
self.symbol_storage_map.insert(
*sym,
Stack(ReferencedPrimitive {
base_offset: union_offset + id_offset as i32,
size,
sign_extend: matches!(id_builtin, sign_extended_int_builtins!()),
sign_extend: false, // tag ids are always unsigned
}),
);
}
@ -734,7 +734,11 @@ impl<
ASM::mov_base32_freg64(buf, to_offset, reg);
}
_ if layout.stack_size(self.target_info) == 0 => {}
_ if layout.safe_to_memcpy() && layout.stack_size(self.target_info) > 8 => {
// TODO: Verify this is always true.
// The dev backend does not deal with refcounting and does not care whether data is safe to memcpy.
// It is just temporarily storing the value because it needs to free registers.
// Later, the value will be reloaded and refcounted as needed.
_ if layout.stack_size(self.target_info) > 8 => {
let (from_offset, size) = self.stack_offset_and_size(sym);
debug_assert!(from_offset % 8 == 0);
debug_assert!(size % 8 == 0);


@ -310,6 +310,22 @@ trait Backend<'a> {
x => todo!("the call type, {:?}", x),
}
}
Expr::EmptyArray => {
self.create_empty_array(sym);
}
Expr::Array { elem_layout, elems } => {
let mut syms = bumpalo::vec![in self.env().arena];
for sym in elems.iter().filter_map(|x| match x {
ListLiteralElement::Symbol(sym) => Some(sym),
_ => None,
}) {
syms.push(*sym);
}
// TODO: This could be a huge waste.
// We probably want to call this within create_array, one element at a time.
self.load_literal_symbols(syms.into_bump_slice());
self.create_array(sym, elem_layout, elems);
}
Expr::Struct(fields) => {
self.load_literal_symbols(fields);
self.create_struct(sym, layout, fields);
@ -772,6 +788,17 @@ trait Backend<'a> {
/// load_literal sets a symbol to be equal to a literal.
fn load_literal(&mut self, sym: &Symbol, layout: &Layout<'a>, lit: &Literal<'a>);
/// create_empty_array creates an empty array with nullptr, zero length, and zero capacity.
fn create_empty_array(&mut self, sym: &Symbol);
/// create_array creates an array filling it with the specified objects.
fn create_array(
&mut self,
sym: &Symbol,
elem_layout: &Layout<'a>,
elems: &'a [ListLiteralElement<'a>],
);
/// create_struct creates a struct with the elements specified loaded into it as data.
fn create_struct(&mut self, sym: &Symbol, layout: &Layout<'a>, fields: &'a [Symbol]);


@ -420,8 +420,8 @@ fn build_proc<'a, B: Backend<'a>>(
}
}
Relocation::LinkedFunction { offset, name } => {
// If the symbol is an undefined zig builtin, we need to add it here.
if output.symbol_id(name.as_bytes()) == None && name.starts_with("roc_builtins.") {
// If the symbol is an undefined roc function, we need to add it here.
if output.symbol_id(name.as_bytes()) == None && name.starts_with("roc_") {
let builtin_symbol = Symbol {
name: name.as_bytes().to_vec(),
value: 0,


@ -16,6 +16,7 @@ roc_mono = { path = "../mono" }
roc_target = { path = "../roc_target" }
roc_std = { path = "../../roc_std", default-features = false }
roc_debug_flags = { path = "../debug_flags" }
roc_region = { path = "../region" }
morphic_lib = { path = "../../vendor/morphic_lib" }
bumpalo = { version = "3.8.0", features = ["collections"] }
inkwell = { path = "../../vendor/inkwell" }


@ -2,7 +2,7 @@
use crate::debug_info_init;
use crate::llvm::build::{
complex_bitcast_check_size, load_roc_value, struct_from_fields, to_cc_return, CCReturn, Env,
C_CALL_CONV, FAST_CALL_CONV, TAG_DATA_INDEX,
C_CALL_CONV, FAST_CALL_CONV,
};
use crate::llvm::convert::basic_type_from_layout;
use crate::llvm::refcounting::{
@ -10,11 +10,14 @@ use crate::llvm::refcounting::{
};
use inkwell::attributes::{Attribute, AttributeLoc};
use inkwell::types::{BasicType, BasicTypeEnum};
use inkwell::values::{BasicValue, BasicValueEnum, CallSiteValue, FunctionValue, InstructionValue};
use inkwell::values::{
BasicValue, BasicValueEnum, CallSiteValue, FunctionValue, InstructionValue, IntValue,
PointerValue, StructValue,
};
use inkwell::AddressSpace;
use roc_error_macros::internal_error;
use roc_module::symbol::Symbol;
use roc_mono::layout::{LambdaSet, Layout, LayoutIds, UnionLayout};
use roc_mono::layout::{Builtin, LambdaSet, Layout, LayoutIds};
use super::build::create_entry_block_alloca;
@ -36,67 +39,6 @@ pub fn call_bitcode_fn<'a, 'ctx, 'env>(
})
}
pub fn call_list_bitcode_fn<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
args: &[BasicValueEnum<'ctx>],
fn_name: &str,
) -> BasicValueEnum<'ctx> {
use bumpalo::collections::Vec;
let parent = env
.builder
.get_insert_block()
.and_then(|b| b.get_parent())
.unwrap();
let list_type = super::convert::zig_list_type(env);
let result = create_entry_block_alloca(env, parent, list_type.into(), "list_alloca");
let mut arguments: Vec<BasicValueEnum> = Vec::with_capacity_in(args.len() + 1, env.arena);
arguments.push(result.into());
arguments.extend(args);
call_void_bitcode_fn(env, &arguments, fn_name);
env.builder.build_load(result, "load_list")
}
pub fn call_str_bitcode_fn<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
args: &[BasicValueEnum<'ctx>],
fn_name: &str,
) -> BasicValueEnum<'ctx> {
use bumpalo::collections::Vec;
let parent = env
.builder
.get_insert_block()
.and_then(|b| b.get_parent())
.unwrap();
let str_type = super::convert::zig_str_type(env);
match env.target_info.ptr_width() {
roc_target::PtrWidth::Bytes4 => {
// 3 machine words actually fit into 2 registers
call_bitcode_fn(env, args, fn_name)
}
roc_target::PtrWidth::Bytes8 => {
let result =
create_entry_block_alloca(env, parent, str_type.into(), "return_str_alloca");
let mut arguments: Vec<BasicValueEnum> =
Vec::with_capacity_in(args.len() + 1, env.arena);
arguments.push(result.into());
arguments.extend(args);
call_void_bitcode_fn(env, &arguments, fn_name);
result.into()
}
}
}
pub fn call_void_bitcode_fn<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
args: &[BasicValueEnum<'ctx>],
@ -181,7 +123,15 @@ pub fn call_bitcode_fn_fixing_for_convention<'a, 'ctx, 'env>(
.try_into()
.expect("Zig bitcode return type is not a basic type!");
// when we write an i128 into this (happens in NumToInt), zig expects this pointer to
// be 16-byte aligned. Not doing so is UB and will immediately fail on CI
let cc_return_value_ptr = env.builder.build_alloca(cc_return_type, "return_value");
cc_return_value_ptr
.as_instruction()
.unwrap()
.set_alignment(16)
.unwrap();
let fixed_args: Vec<BasicValueEnum<'ctx>> = [cc_return_value_ptr.into()]
.iter()
.chain(args)
@ -220,123 +170,7 @@ const ARGUMENT_SYMBOLS: [Symbol; 8] = [
Symbol::ARG_8,
];
pub fn build_has_tag_id<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
function: FunctionValue<'ctx>,
union_layout: UnionLayout<'a>,
) -> FunctionValue<'ctx> {
let fn_name: &str = &format!("{}_has_tag_id", function.get_name().to_string_lossy());
// currently the code assumes we're dealing with a non-recursive layout
debug_assert!(matches!(union_layout, UnionLayout::NonRecursive(_)));
match env.module.get_function(fn_name) {
Some(function_value) => function_value,
None => build_has_tag_id_help(env, union_layout, fn_name),
}
}
fn build_has_tag_id_help<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
union_layout: UnionLayout<'a>,
fn_name: &str,
) -> FunctionValue<'ctx> {
let i8_ptr_type = env.context.i8_type().ptr_type(AddressSpace::Generic);
let argument_types: &[BasicTypeEnum] = &[env.context.i16_type().into(), i8_ptr_type.into()];
let block = env.builder.get_insert_block().expect("to be in a function");
let di_location = env.builder.get_current_debug_location().unwrap();
let output_type = crate::llvm::convert::zig_has_tag_id_type(env);
let function_value = crate::llvm::refcounting::build_header_help(
env,
fn_name,
output_type.into(),
argument_types,
);
// called from zig, must use C calling convention
function_value.set_call_conventions(C_CALL_CONV);
let kind_id = Attribute::get_named_enum_kind_id("alwaysinline");
debug_assert!(kind_id > 0);
let attr = env.context.create_enum_attribute(kind_id, 1);
function_value.add_attribute(AttributeLoc::Function, attr);
let entry = env.context.append_basic_block(function_value, "entry");
env.builder.position_at_end(entry);
debug_info_init!(env, function_value);
let it = function_value.get_param_iter();
let arguments =
bumpalo::collections::Vec::from_iter_in(it.take(argument_types.len()), env.arena);
for (argument, name) in arguments.iter().zip(ARGUMENT_SYMBOLS.iter()) {
argument.set_name(name.as_str(&env.interns));
}
match arguments.as_slice() {
[tag_id, tag_value_ptr] => {
let tag_type = basic_type_from_layout(env, &Layout::Union(union_layout));
let tag_value = env.builder.build_pointer_cast(
tag_value_ptr.into_pointer_value(),
tag_type.ptr_type(AddressSpace::Generic),
"load_opaque_get_tag_id",
);
let actual_tag_id = {
let tag_id_i64 = crate::llvm::build::get_tag_id(
env,
function_value,
&union_layout,
tag_value.into(),
);
env.builder.build_int_cast_sign_flag(
tag_id_i64,
env.context.i16_type(),
true,
"to_i16",
)
};
let answer = env.builder.build_int_compare(
inkwell::IntPredicate::EQ,
tag_id.into_int_value(),
actual_tag_id,
"compare",
);
let tag_data_ptr = {
let ptr = env
.builder
.build_struct_gep(tag_value, TAG_DATA_INDEX, "get_data_ptr")
.unwrap();
env.builder.build_bitcast(ptr, i8_ptr_type, "to_opaque")
};
let field_vals = [(0, answer.into()), (1, tag_data_ptr)];
let output = struct_from_fields(env, output_type, field_vals.iter().copied());
env.builder.build_return(Some(&output));
env.builder.position_at_end(block);
env.builder
.set_current_debug_location(env.context, di_location);
function_value
}
_ => unreachable!(),
}
}
pub fn build_transform_caller<'a, 'ctx, 'env>(
pub(crate) fn build_transform_caller<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
function: FunctionValue<'ctx>,
closure_data_layout: LambdaSet<'a>,
@ -433,14 +267,15 @@ fn build_transform_caller_help<'a, 'ctx, 'env>(
arguments_cast.push(argument);
}
match closure_data_layout.runtime_representation() {
Layout::Struct {
field_layouts: &[], ..
} => {
// nothing to add
match (
closure_data_layout.is_represented().is_some(),
closure_data_layout.runtime_representation(),
) {
(false, _) => {
// the function doesn't expect a closure argument, nothing to add
}
other => {
let closure_type = basic_type_from_layout(env, &other).ptr_type(AddressSpace::Generic);
(true, layout) => {
let closure_type = basic_type_from_layout(env, &layout).ptr_type(AddressSpace::Generic);
let closure_cast = env
.builder
@ -802,3 +637,330 @@ pub fn build_compare_wrapper<'a, 'ctx, 'env>(
function_value
}
enum BitcodeReturnValue<'ctx> {
List(PointerValue<'ctx>),
Str(PointerValue<'ctx>),
Basic,
}
impl<'ctx> BitcodeReturnValue<'ctx> {
fn call_and_load_64bit<'a, 'env>(
&self,
env: &Env<'a, 'ctx, 'env>,
arguments: &[BasicValueEnum<'ctx>],
fn_name: &str,
) -> BasicValueEnum<'ctx> {
match self {
BitcodeReturnValue::List(result) => {
call_void_bitcode_fn(env, arguments, fn_name);
env.builder.build_load(*result, "load_list")
}
BitcodeReturnValue::Str(result) => {
call_void_bitcode_fn(env, arguments, fn_name);
// we keep a string in the alloca
(*result).into()
}
BitcodeReturnValue::Basic => call_bitcode_fn(env, arguments, fn_name),
}
}
}
pub(crate) enum BitcodeReturns {
List,
Str,
Basic,
}
impl BitcodeReturns {
fn additional_arguments(&self) -> usize {
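// On 64-bit targets, List and Str results are written through an extra out-parameter;
// Basic returns need no extra argument.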
match self {
BitcodeReturns::List | BitcodeReturns::Str => 1,
BitcodeReturns::Basic => 0,
}
}
fn return_value_64bit<'a, 'ctx, 'env>(
&self,
env: &Env<'a, 'ctx, 'env>,
arguments: &mut bumpalo::collections::Vec<'a, BasicValueEnum<'ctx>>,
) -> BitcodeReturnValue<'ctx> {
match self {
BitcodeReturns::List => {
let list_type = super::convert::zig_list_type(env);
let parent = env
.builder
.get_insert_block()
.and_then(|b| b.get_parent())
.unwrap();
let result =
create_entry_block_alloca(env, parent, list_type.into(), "list_alloca");
arguments.push(result.into());
BitcodeReturnValue::List(result)
}
BitcodeReturns::Str => {
let str_type = super::convert::zig_str_type(env);
let parent = env
.builder
.get_insert_block()
.and_then(|b| b.get_parent())
.unwrap();
let result = create_entry_block_alloca(env, parent, str_type.into(), "str_alloca");
arguments.push(result.into());
BitcodeReturnValue::Str(result)
}
BitcodeReturns::Basic => BitcodeReturnValue::Basic,
}
}
fn call_and_load_32bit<'a, 'ctx, 'env>(
&self,
env: &Env<'a, 'ctx, 'env>,
arguments: &[BasicValueEnum<'ctx>],
fn_name: &str,
) -> BasicValueEnum<'ctx> {
let value = call_bitcode_fn(env, arguments, fn_name);
match self {
BitcodeReturns::List => {
receive_zig_roc_list_32bit(env, value.into_struct_value()).into()
}
BitcodeReturns::Str => receive_zig_roc_str_32bit(env, value.into_struct_value()).into(),
BitcodeReturns::Basic => value,
}
}
}
fn ptr_len_cap<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
value: StructValue<'ctx>,
) -> (PointerValue<'ctx>, IntValue<'ctx>, IntValue<'ctx>) {
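// On 32-bit targets zig returns lists/strings as { ptr_and_len: i64, cap: i32 }:
// the pointer lives in the low 32 bits, the length in the high 32 bits, and the capacity in the second field.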
let ptr_and_len = env
.builder
.build_extract_value(value, 0, "get_list_cap")
.unwrap()
.into_int_value();
let upper_word = {
let shift = env.builder.build_right_shift(
ptr_and_len,
env.context.i64_type().const_int(32, false),
false,
"list_ptr_shift",
);
env.builder
.build_int_cast(shift, env.context.i32_type(), "list_ptr_int")
};
let lower_word = env
.builder
.build_int_cast(ptr_and_len, env.context.i32_type(), "list_len");
let len = upper_word;
let ptr = env.builder.build_int_to_ptr(
lower_word,
env.context.i8_type().ptr_type(AddressSpace::Generic),
"list_ptr",
);
let cap = env
.builder
.build_extract_value(value, 1, "get_list_cap")
.unwrap()
.into_int_value();
(ptr, len, cap)
}
/// Converts the { i64, i32 } struct that zig returns into `list.RocList = type { i8*, i32, i32 }`
fn receive_zig_roc_list_32bit<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
value: StructValue<'ctx>,
) -> StructValue<'ctx> {
let list_type = super::convert::zig_list_type(env);
let (ptr, len, cap) = ptr_len_cap(env, value);
struct_from_fields(
env,
list_type,
[(0, ptr.into()), (1, len.into()), (2, cap.into())].into_iter(),
)
}
/// Converts the { i64, i32 } struct that zig returns into `str.RocStr = type { i8*, i32, i32 }`
fn receive_zig_roc_str_32bit<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
value: StructValue<'ctx>,
) -> StructValue<'ctx> {
let str_type = super::convert::zig_str_type(env);
let (ptr, len, cap) = ptr_len_cap(env, value);
struct_from_fields(
env,
str_type,
[(0, ptr.into()), (1, len.into()), (2, cap.into())].into_iter(),
)
}
pub(crate) fn pass_list_to_zig_64bit<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
list: BasicValueEnum<'ctx>,
) -> PointerValue<'ctx> {
let parent = env
.builder
.get_insert_block()
.and_then(|b| b.get_parent())
.unwrap();
let list_type = super::convert::zig_list_type(env);
let list_alloca = create_entry_block_alloca(env, parent, list_type.into(), "list_alloca");
env.builder.build_store(list_alloca, list);
list_alloca
}
fn pass_string_to_zig_64bit<'a, 'ctx, 'env>(
_env: &Env<'a, 'ctx, 'env>,
string: BasicValueEnum<'ctx>,
) -> PointerValue<'ctx> {
// we must pass strings by-pointer, and that is already how they are stored
string.into_pointer_value()
}
pub(crate) fn pass_list_or_string_to_zig_32bit<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
list_or_string: StructValue<'ctx>,
) -> (IntValue<'ctx>, IntValue<'ctx>) {
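// On 32-bit targets a list/string is passed to zig as two machine words:
// the length packed into the high 32 bits and the pointer into the low 32 bits of one i64, plus the capacity.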
let ptr = env
.builder
.build_extract_value(list_or_string, Builtin::WRAPPER_PTR, "list_ptr")
.unwrap()
.into_pointer_value();
let ptr = env
.builder
.build_ptr_to_int(ptr, env.context.i32_type(), "ptr_to_i32");
let len = env
.builder
.build_extract_value(list_or_string, Builtin::WRAPPER_LEN, "list_len")
.unwrap()
.into_int_value();
let cap = env
.builder
.build_extract_value(list_or_string, Builtin::WRAPPER_CAPACITY, "list_cap")
.unwrap()
.into_int_value();
let int_64_type = env.context.i64_type();
let len = env
.builder
.build_int_z_extend(len, int_64_type, "list_len_64");
let ptr = env
.builder
.build_int_z_extend(ptr, int_64_type, "list_ptr_64");
let len_shift =
env.builder
.build_left_shift(len, int_64_type.const_int(32, false), "list_len_shift");
let ptr_len = env.builder.build_or(len_shift, ptr, "list_ptr_len");
(ptr_len, cap)
}
pub(crate) fn call_str_bitcode_fn<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
strings: &[BasicValueEnum<'ctx>],
other_arguments: &[BasicValueEnum<'ctx>],
returns: BitcodeReturns,
fn_name: &str,
) -> BasicValueEnum<'ctx> {
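// Strings are passed as packed words on 32-bit targets and by-pointer on 64-bit targets;
// List/Str results come back through an out-parameter on 64-bit targets (see `BitcodeReturns`).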
use bumpalo::collections::Vec;
match env.target_info.ptr_width() {
roc_target::PtrWidth::Bytes4 => {
let mut arguments: Vec<BasicValueEnum> =
Vec::with_capacity_in(other_arguments.len() + 2 * strings.len(), env.arena);
for string in strings {
let (a, b) = pass_list_or_string_to_zig_32bit(env, string.into_struct_value());
arguments.push(a.into());
arguments.push(b.into());
}
arguments.extend(other_arguments);
returns.call_and_load_32bit(env, &arguments, fn_name)
}
roc_target::PtrWidth::Bytes8 => {
let capacity = other_arguments.len() + strings.len() + returns.additional_arguments();
let mut arguments: Vec<BasicValueEnum> = Vec::with_capacity_in(capacity, env.arena);
let return_value = returns.return_value_64bit(env, &mut arguments);
for string in strings {
arguments.push(pass_string_to_zig_64bit(env, *string).into());
}
arguments.extend(other_arguments);
return_value.call_and_load_64bit(env, &arguments, fn_name)
}
}
}
pub(crate) fn call_list_bitcode_fn<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
lists: &[StructValue<'ctx>],
other_arguments: &[BasicValueEnum<'ctx>],
returns: BitcodeReturns,
fn_name: &str,
) -> BasicValueEnum<'ctx> {
use bumpalo::collections::Vec;
match env.target_info.ptr_width() {
roc_target::PtrWidth::Bytes4 => {
let mut arguments: Vec<BasicValueEnum> =
Vec::with_capacity_in(other_arguments.len() + 2 * lists.len(), env.arena);
for list in lists {
let (a, b) = pass_list_or_string_to_zig_32bit(env, *list);
arguments.push(a.into());
arguments.push(b.into());
}
arguments.extend(other_arguments);
returns.call_and_load_32bit(env, &arguments, fn_name)
}
roc_target::PtrWidth::Bytes8 => {
let capacity = other_arguments.len() + lists.len() + returns.additional_arguments();
let mut arguments: Vec<BasicValueEnum> = Vec::with_capacity_in(capacity, env.arena);
let return_value = returns.return_value_64bit(env, &mut arguments);
for list in lists {
arguments.push(pass_list_to_zig_64bit(env, (*list).into()).into());
}
arguments.extend(other_arguments);
return_value.call_and_load_64bit(env, &arguments, fn_name)
}
}
}

File diff suppressed because it is too large


@ -1,13 +1,12 @@
#![allow(clippy::too_many_arguments)]
use crate::llvm::bitcode::{build_dec_wrapper, call_list_bitcode_fn};
use crate::llvm::bitcode::build_dec_wrapper;
use crate::llvm::build::{
allocate_with_refcount_help, cast_basic_basic, Env, RocFunctionCall, Scope,
};
use crate::llvm::convert::basic_type_from_layout;
use crate::llvm::refcounting::increment_refcount_layout;
use inkwell::builder::Builder;
use inkwell::context::Context;
use inkwell::types::{BasicType, BasicTypeEnum, PointerType};
use inkwell::types::{BasicType, PointerType};
use inkwell::values::{BasicValueEnum, FunctionValue, IntValue, PointerValue, StructValue};
use inkwell::{AddressSpace, IntPredicate};
use morphic_lib::UpdateMode;
@ -15,9 +14,22 @@ use roc_builtins::bitcode;
use roc_module::symbol::Symbol;
use roc_mono::layout::{Builtin, Layout, LayoutIds};
use super::build::{create_entry_block_alloca, load_roc_value, load_symbol, store_roc_value};
use super::bitcode::{call_list_bitcode_fn, BitcodeReturns};
use super::build::{
create_entry_block_alloca, load_roc_value, load_symbol, store_roc_value, struct_from_fields,
};
use super::convert::zig_list_type;
pub fn list_symbol_to_c_abi<'a, 'ctx, 'env>(
fn call_list_bitcode_fn_1<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
list: StructValue<'ctx>,
other_arguments: &[BasicValueEnum<'ctx>],
fn_name: &str,
) -> BasicValueEnum<'ctx> {
call_list_bitcode_fn(env, &[list], other_arguments, BitcodeReturns::List, fn_name)
}
pub(crate) fn list_symbol_to_c_abi<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
scope: &Scope<'a, 'ctx>,
symbol: Symbol,
@ -28,7 +40,7 @@ pub fn list_symbol_to_c_abi<'a, 'ctx, 'env>(
.and_then(|b| b.get_parent())
.unwrap();
let list_type = super::convert::zig_list_type(env);
let list_type = zig_list_type(env);
let list_alloca = create_entry_block_alloca(env, parent, list_type.into(), "list_alloca");
let list = load_symbol(scope, &symbol);
@ -37,25 +49,7 @@ pub fn list_symbol_to_c_abi<'a, 'ctx, 'env>(
list_alloca
}
pub fn list_to_c_abi<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
list: BasicValueEnum<'ctx>,
) -> PointerValue<'ctx> {
let parent = env
.builder
.get_insert_block()
.and_then(|b| b.get_parent())
.unwrap();
let list_type = super::convert::zig_list_type(env);
let list_alloca = create_entry_block_alloca(env, parent, list_type.into(), "list_alloca");
env.builder.build_store(list_alloca, list);
list_alloca
}
pub fn pass_update_mode<'a, 'ctx, 'env>(
pub(crate) fn pass_update_mode<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
update_mode: UpdateMode,
) -> BasicValueEnum<'ctx> {
@ -83,7 +77,7 @@ fn pass_element_as_opaque<'a, 'ctx, 'env>(
)
}
pub fn layout_width<'a, 'ctx, 'env>(
pub(crate) fn layout_width<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
@ -92,7 +86,7 @@ pub fn layout_width<'a, 'ctx, 'env>(
.into()
}
pub fn pass_as_opaque<'a, 'ctx, 'env>(
pub(crate) fn pass_as_opaque<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
ptr: PointerValue<'ctx>,
) -> BasicValueEnum<'ctx> {
@ -103,23 +97,25 @@ pub fn pass_as_opaque<'a, 'ctx, 'env>(
)
}
pub fn list_with_capacity<'a, 'ctx, 'env>(
pub(crate) fn list_with_capacity<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
capacity: IntValue<'ctx>,
element_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
call_list_bitcode_fn(
env,
&[],
&[
capacity.into(),
env.alignment_intvalue(element_layout),
layout_width(env, element_layout),
],
BitcodeReturns::List,
bitcode::LIST_WITH_CAPACITY,
)
}
pub fn list_get_unsafe<'a, 'ctx, 'env>(
pub(crate) fn list_get_unsafe<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
parent: FunctionValue<'ctx>,
@ -147,17 +143,17 @@ pub fn list_get_unsafe<'a, 'ctx, 'env>(
}
/// List.reserve : List elem, Nat -> List elem
pub fn list_reserve<'a, 'ctx, 'env>(
pub(crate) fn list_reserve<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
list: BasicValueEnum<'ctx>,
spare: BasicValueEnum<'ctx>,
element_layout: &Layout<'a>,
update_mode: UpdateMode,
) -> BasicValueEnum<'ctx> {
call_list_bitcode_fn(
call_list_bitcode_fn_1(
env,
list.into_struct_value(),
&[
list_to_c_abi(env, list).into(),
env.alignment_intvalue(element_layout),
spare,
layout_width(env, element_layout),
@ -168,16 +164,16 @@ pub fn list_reserve<'a, 'ctx, 'env>(
}
/// List.appendUnsafe : List elem, elem -> List elem
pub fn list_append_unsafe<'a, 'ctx, 'env>(
pub(crate) fn list_append_unsafe<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
original_wrapper: StructValue<'ctx>,
element: BasicValueEnum<'ctx>,
element_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
call_list_bitcode_fn(
call_list_bitcode_fn_1(
env,
original_wrapper,
&[
list_to_c_abi(env, original_wrapper.into()).into(),
pass_element_as_opaque(env, element, *element_layout),
layout_width(env, element_layout),
],
@ -186,16 +182,16 @@ pub fn list_append_unsafe<'a, 'ctx, 'env>(
}
/// List.prepend : List elem, elem -> List elem
pub fn list_prepend<'a, 'ctx, 'env>(
pub(crate) fn list_prepend<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
original_wrapper: StructValue<'ctx>,
element: BasicValueEnum<'ctx>,
element_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
call_list_bitcode_fn(
call_list_bitcode_fn_1(
env,
original_wrapper,
&[
list_to_c_abi(env, original_wrapper.into()).into(),
env.alignment_intvalue(element_layout),
pass_element_as_opaque(env, element, *element_layout),
layout_width(env, element_layout),
@ -205,7 +201,7 @@ pub fn list_prepend<'a, 'ctx, 'env>(
}
/// List.swap : List elem, Nat, Nat -> List elem
pub fn list_swap<'a, 'ctx, 'env>(
pub(crate) fn list_swap<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
original_wrapper: StructValue<'ctx>,
index_1: IntValue<'ctx>,
@ -213,10 +209,10 @@ pub fn list_swap<'a, 'ctx, 'env>(
element_layout: &Layout<'a>,
update_mode: UpdateMode,
) -> BasicValueEnum<'ctx> {
call_list_bitcode_fn(
call_list_bitcode_fn_1(
env,
original_wrapper,
&[
list_to_c_abi(env, original_wrapper.into()).into(),
env.alignment_intvalue(element_layout),
layout_width(env, element_layout),
index_1.into(),
@ -228,7 +224,7 @@ pub fn list_swap<'a, 'ctx, 'env>(
}
/// List.sublist : List elem, { start : Nat, len : Nat } -> List elem
pub fn list_sublist<'a, 'ctx, 'env>(
pub(crate) fn list_sublist<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
original_wrapper: StructValue<'ctx>,
@ -237,10 +233,10 @@ pub fn list_sublist<'a, 'ctx, 'env>(
element_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
let dec_element_fn = build_dec_wrapper(env, layout_ids, element_layout);
call_list_bitcode_fn(
call_list_bitcode_fn_1(
env,
original_wrapper,
&[
list_to_c_abi(env, original_wrapper.into()).into(),
env.alignment_intvalue(element_layout),
layout_width(env, element_layout),
start.into(),
@ -252,7 +248,7 @@ pub fn list_sublist<'a, 'ctx, 'env>(
}
/// List.dropAt : List elem, Nat -> List elem
pub fn list_drop_at<'a, 'ctx, 'env>(
pub(crate) fn list_drop_at<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
original_wrapper: StructValue<'ctx>,
@ -260,10 +256,10 @@ pub fn list_drop_at<'a, 'ctx, 'env>(
element_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
let dec_element_fn = build_dec_wrapper(env, layout_ids, element_layout);
call_list_bitcode_fn(
call_list_bitcode_fn_1(
env,
original_wrapper,
&[
list_to_c_abi(env, original_wrapper.into()).into(),
env.alignment_intvalue(element_layout),
layout_width(env, element_layout),
count.into(),
@ -274,7 +270,7 @@ pub fn list_drop_at<'a, 'ctx, 'env>(
}
/// List.replace_unsafe : List elem, Nat, elem -> { list: List elem, value: elem }
pub fn list_replace_unsafe<'a, 'ctx, 'env>(
pub(crate) fn list_replace_unsafe<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
_layout_ids: &mut LayoutIds<'a>,
list: BasicValueEnum<'ctx>,
@ -291,10 +287,10 @@ pub fn list_replace_unsafe<'a, 'ctx, 'env>(
// Assume the bounds have already been checked earlier
// (e.g. by List.replace or List.set, which wrap List.#replaceUnsafe)
let new_list = match update_mode {
UpdateMode::InPlace => call_list_bitcode_fn(
UpdateMode::InPlace => call_list_bitcode_fn_1(
env,
list.into_struct_value(),
&[
list_to_c_abi(env, list).into(),
index.into(),
pass_element_as_opaque(env, element, *element_layout),
layout_width(env, element_layout),
@ -302,10 +298,10 @@ pub fn list_replace_unsafe<'a, 'ctx, 'env>(
],
bitcode::LIST_REPLACE_IN_PLACE,
),
UpdateMode::Immutable => call_list_bitcode_fn(
UpdateMode::Immutable => call_list_bitcode_fn_1(
env,
list.into_struct_value(),
&[
list_to_c_abi(env, list).into(),
env.alignment_intvalue(element_layout),
index.into(),
pass_element_as_opaque(env, element, *element_layout),
@ -319,21 +315,28 @@ pub fn list_replace_unsafe<'a, 'ctx, 'env>(
// Load the element and returned list into a struct.
let old_element = env.builder.build_load(element_ptr, "load_element");
let result = env
.context
.struct_type(
&[super::convert::zig_list_type(env).into(), element_type],
false,
)
.const_zero();
// the list has the same alignment as a usize / ptr. The element comes first in the struct if
// its alignment is bigger than that of a list.
let element_align = element_layout.alignment_bytes(env.target_info);
let element_first = element_align > env.target_info.ptr_width() as u32;
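// e.g. for 128-bit elements (16-byte alignment) on a 64-bit target, the element is laid out before the list.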
let fields = if element_first {
[element_type, zig_list_type(env).into()]
} else {
[zig_list_type(env).into(), element_type]
};
let result = env.context.struct_type(&fields, false).const_zero();
let (list_index, element_index) = if element_first { (1, 0) } else { (0, 1) };
let result = env
.builder
.build_insert_value(result, new_list, 0, "insert_list")
.build_insert_value(result, new_list, list_index, "insert_list")
.unwrap();
env.builder
.build_insert_value(result, old_element, 1, "insert_value")
.build_insert_value(result, old_element, element_index, "insert_value")
.unwrap()
.into_struct_value()
.into()
@ -352,7 +355,7 @@ fn bounds_check_comparison<'ctx>(
}
/// List.len : List * -> Nat
pub fn list_len<'ctx>(
pub(crate) fn list_len<'ctx>(
builder: &Builder<'ctx>,
wrapper_struct: StructValue<'ctx>,
) -> IntValue<'ctx> {
@ -363,7 +366,7 @@ pub fn list_len<'ctx>(
}
/// List.capacity : List * -> Nat
pub fn list_capacity<'ctx>(
pub(crate) fn list_capacity<'ctx>(
builder: &Builder<'ctx>,
wrapper_struct: StructValue<'ctx>,
) -> IntValue<'ctx> {
@ -373,18 +376,41 @@ pub fn list_capacity<'ctx>(
.into_int_value()
}
pub(crate) fn destructure<'ctx>(
builder: &Builder<'ctx>,
wrapper_struct: StructValue<'ctx>,
) -> (PointerValue<'ctx>, IntValue<'ctx>, IntValue<'ctx>) {
let length = builder
.build_extract_value(wrapper_struct, Builtin::WRAPPER_LEN, "list_len")
.unwrap()
.into_int_value();
let capacity = builder
.build_extract_value(wrapper_struct, Builtin::WRAPPER_CAPACITY, "list_cap")
.unwrap()
.into_int_value();
// a `*mut u8` pointer
let generic_ptr = builder
.build_extract_value(wrapper_struct, Builtin::WRAPPER_PTR, "read_list_ptr")
.unwrap()
.into_pointer_value();
(generic_ptr, length, capacity)
}
/// List.sortWith : List a, (a, a -> Ordering) -> List a
pub fn list_sort_with<'a, 'ctx, 'env>(
pub(crate) fn list_sort_with<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
roc_function_call: RocFunctionCall<'ctx>,
compare_wrapper: PointerValue<'ctx>,
list: BasicValueEnum<'ctx>,
element_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
call_list_bitcode_fn(
call_list_bitcode_fn_1(
env,
list.into_struct_value(),
&[
list_to_c_abi(env, list).into(),
compare_wrapper.into(),
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
@ -397,22 +423,22 @@ pub fn list_sort_with<'a, 'ctx, 'env>(
}
/// List.map : List before, (before -> after) -> List after
pub fn list_map<'a, 'ctx, 'env>(
pub(crate) fn list_map<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
roc_function_call: RocFunctionCall<'ctx>,
list: BasicValueEnum<'ctx>,
element_layout: &Layout<'a>,
return_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
call_list_bitcode_fn(
call_list_bitcode_fn_1(
env,
list.into_struct_value(),
&[
list_to_c_abi(env, list).into(),
roc_function_call.caller.into(),
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
roc_function_call.data_is_owned.into(),
env.alignment_intvalue(element_layout),
env.alignment_intvalue(return_layout),
layout_width(env, element_layout),
layout_width(env, return_layout),
],
@ -420,7 +446,7 @@ pub fn list_map<'a, 'ctx, 'env>(
)
}
pub fn list_map2<'a, 'ctx, 'env>(
pub(crate) fn list_map2<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
roc_function_call: RocFunctionCall<'ctx>,
@ -435,9 +461,8 @@ pub fn list_map2<'a, 'ctx, 'env>(
call_list_bitcode_fn(
env,
&[list1.into_struct_value(), list2.into_struct_value()],
&[
list_to_c_abi(env, list1).into(),
list_to_c_abi(env, list2).into(),
roc_function_call.caller.into(),
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
@ -449,11 +474,12 @@ pub fn list_map2<'a, 'ctx, 'env>(
dec_a.as_global_value().as_pointer_value().into(),
dec_b.as_global_value().as_pointer_value().into(),
],
BitcodeReturns::List,
bitcode::LIST_MAP2,
)
}
pub fn list_map3<'a, 'ctx, 'env>(
pub(crate) fn list_map3<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
roc_function_call: RocFunctionCall<'ctx>,
@ -472,9 +498,11 @@ pub fn list_map3<'a, 'ctx, 'env>(
call_list_bitcode_fn(
env,
&[
list_to_c_abi(env, list1).into(),
list_to_c_abi(env, list2).into(),
list_to_c_abi(env, list3).into(),
list1.into_struct_value(),
list2.into_struct_value(),
list3.into_struct_value(),
],
&[
roc_function_call.caller.into(),
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
@ -488,11 +516,12 @@ pub fn list_map3<'a, 'ctx, 'env>(
dec_b.as_global_value().as_pointer_value().into(),
dec_c.as_global_value().as_pointer_value().into(),
],
BitcodeReturns::List,
bitcode::LIST_MAP3,
)
}
pub fn list_map4<'a, 'ctx, 'env>(
pub(crate) fn list_map4<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
roc_function_call: RocFunctionCall<'ctx>,
@ -514,10 +543,12 @@ pub fn list_map4<'a, 'ctx, 'env>(
call_list_bitcode_fn(
env,
&[
list_to_c_abi(env, list1).into(),
list_to_c_abi(env, list2).into(),
list_to_c_abi(env, list3).into(),
list_to_c_abi(env, list4).into(),
list1.into_struct_value(),
list2.into_struct_value(),
list3.into_struct_value(),
list4.into_struct_value(),
],
&[
roc_function_call.caller.into(),
pass_as_opaque(env, roc_function_call.data),
roc_function_call.inc_n_data.into(),
@ -533,106 +564,31 @@ pub fn list_map4<'a, 'ctx, 'env>(
dec_c.as_global_value().as_pointer_value().into(),
dec_d.as_global_value().as_pointer_value().into(),
],
BitcodeReturns::List,
bitcode::LIST_MAP4,
)
}
/// List.concat : List elem, List elem -> List elem
pub fn list_concat<'a, 'ctx, 'env>(
pub(crate) fn list_concat<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
first_list: BasicValueEnum<'ctx>,
second_list: BasicValueEnum<'ctx>,
list1: BasicValueEnum<'ctx>,
list2: BasicValueEnum<'ctx>,
element_layout: &Layout<'a>,
) -> BasicValueEnum<'ctx> {
call_list_bitcode_fn(
env,
&[list1.into_struct_value(), list2.into_struct_value()],
&[
list_to_c_abi(env, first_list).into(),
list_to_c_abi(env, second_list).into(),
env.alignment_intvalue(element_layout),
layout_width(env, element_layout),
],
BitcodeReturns::List,
bitcode::LIST_CONCAT,
)
}
pub fn decrementing_elem_loop<'ctx, LoopFn>(
builder: &Builder<'ctx>,
ctx: &'ctx Context,
parent: FunctionValue<'ctx>,
ptr: PointerValue<'ctx>,
len: IntValue<'ctx>,
index_name: &str,
mut loop_fn: LoopFn,
) -> PointerValue<'ctx>
where
LoopFn: FnMut(IntValue<'ctx>, BasicValueEnum<'ctx>),
{
decrementing_index_loop(builder, ctx, parent, len, index_name, |index| {
// The pointer to the element in the list
let elem_ptr = unsafe { builder.build_in_bounds_gep(ptr, &[index], "load_index") };
let elem = builder.build_load(elem_ptr, "get_elem");
loop_fn(index, elem);
})
}
// a for-loop from the back to the front
fn decrementing_index_loop<'ctx, LoopFn>(
builder: &Builder<'ctx>,
ctx: &'ctx Context,
parent: FunctionValue<'ctx>,
end: IntValue<'ctx>,
index_name: &str,
mut loop_fn: LoopFn,
) -> PointerValue<'ctx>
where
LoopFn: FnMut(IntValue<'ctx>),
{
// constant 1i64
let one = ctx.i64_type().const_int(1, false);
// allocate a stack slot for the current index
let index_alloca = builder.build_alloca(ctx.i64_type(), index_name);
// we assume `end` is the length of the list
// the final index is therefore `end - 1`
let end_index = builder.build_int_sub(end, one, "end_index");
builder.build_store(index_alloca, end_index);
let loop_bb = ctx.append_basic_block(parent, "loop");
builder.build_unconditional_branch(loop_bb);
builder.position_at_end(loop_bb);
let current_index = builder
.build_load(index_alloca, index_name)
.into_int_value();
let next_index = builder.build_int_sub(current_index, one, "nextindex");
builder.build_store(index_alloca, next_index);
// The body of the loop
loop_fn(current_index);
// #index >= 0
let condition = builder.build_int_compare(
IntPredicate::SGE,
next_index,
ctx.i64_type().const_zero(),
"bounds_check",
);
let after_loop_bb = ctx.append_basic_block(parent, "after_outer_loop_1");
builder.build_conditional_branch(condition, loop_bb, after_loop_bb);
builder.position_at_end(after_loop_bb);
index_alloca
}
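// Illustrative sketch (not from the Roc sources): the plain-Rust shape of the loop that
// the IR built above expresses, assuming `end` is the list length and is nonzero.
// The function name is made up for illustration.
fn decrementing_loop_sketch(end: u64, mut body: impl FnMut(u64)) {
    // visits end - 1, end - 2, ..., 1, 0 — i.e. the list from back to front
    let mut index = end;
    while index > 0 {
        index -= 1;
        body(index);
    }
}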
pub fn incrementing_elem_loop<'a, 'ctx, 'env, LoopFn>(
pub(crate) fn incrementing_elem_loop<'a, 'ctx, 'env, LoopFn>(
env: &Env<'a, 'ctx, 'env>,
parent: FunctionValue<'ctx>,
element_layout: Layout<'a>,
@ -663,7 +619,7 @@ where
// This helper simulates a basic for loop, where
// an index increments up from 0 to some end value
pub fn incrementing_index_loop<'a, 'ctx, 'env, LoopFn>(
pub(crate) fn incrementing_index_loop<'a, 'ctx, 'env, LoopFn>(
env: &Env<'a, 'ctx, 'env>,
parent: FunctionValue<'ctx>,
end: IntValue<'ctx>,
@ -710,61 +666,17 @@ where
index_alloca
}
pub fn build_basic_phi2<'a, 'ctx, 'env, PassFn, FailFn>(
pub(crate) fn empty_polymorphic_list<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
parent: FunctionValue<'ctx>,
comparison: IntValue<'ctx>,
mut build_pass: PassFn,
mut build_fail: FailFn,
ret_type: BasicTypeEnum<'ctx>,
) -> BasicValueEnum<'ctx>
where
PassFn: FnMut() -> BasicValueEnum<'ctx>,
FailFn: FnMut() -> BasicValueEnum<'ctx>,
{
let builder = env.builder;
let context = env.context;
// build blocks
let then_block = context.append_basic_block(parent, "then");
let else_block = context.append_basic_block(parent, "else");
let cont_block = context.append_basic_block(parent, "branchcont");
builder.build_conditional_branch(comparison, then_block, else_block);
// build then block
builder.position_at_end(then_block);
let then_val = build_pass();
builder.build_unconditional_branch(cont_block);
let then_block = builder.get_insert_block().unwrap();
// build else block
builder.position_at_end(else_block);
let else_val = build_fail();
builder.build_unconditional_branch(cont_block);
let else_block = builder.get_insert_block().unwrap();
// emit merge block
builder.position_at_end(cont_block);
let phi = builder.build_phi(ret_type, "branch");
phi.add_incoming(&[(&then_val, then_block), (&else_val, else_block)]);
phi.as_basic_value()
}
pub fn empty_polymorphic_list<'a, 'ctx, 'env>(env: &Env<'a, 'ctx, 'env>) -> BasicValueEnum<'ctx> {
let struct_type = super::convert::zig_list_type(env);
) -> BasicValueEnum<'ctx> {
let struct_type = zig_list_type(env);
// The pointer should be null (aka zero) and the length should be zero,
// so the whole struct should be a const_zero
BasicValueEnum::StructValue(struct_type.const_zero())
}
pub fn load_list<'ctx>(
pub(crate) fn load_list<'ctx>(
builder: &Builder<'ctx>,
wrapper_struct: StructValue<'ctx>,
ptr_type: PointerType<'ctx>,
@ -779,7 +691,7 @@ pub fn load_list<'ctx>(
(length, ptr)
}
pub fn load_list_ptr<'ctx>(
pub(crate) fn load_list_ptr<'ctx>(
builder: &Builder<'ctx>,
wrapper_struct: StructValue<'ctx>,
ptr_type: PointerType<'ctx>,
@ -794,7 +706,7 @@ pub fn load_list_ptr<'ctx>(
cast_basic_basic(builder, generic_ptr.into(), ptr_type.into()).into_pointer_value()
}
pub fn allocate_list<'a, 'ctx, 'env>(
pub(crate) fn allocate_list<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
elem_layout: &Layout<'a>,
number_of_elements: IntValue<'ctx>,
@ -812,48 +724,27 @@ pub fn allocate_list<'a, 'ctx, 'env>(
allocate_with_refcount_help(env, basic_type, alignment_bytes, number_of_data_bytes)
}
pub fn store_list<'a, 'ctx, 'env>(
pub(crate) fn store_list<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
pointer_to_first_element: PointerValue<'ctx>,
len: IntValue<'ctx>,
) -> BasicValueEnum<'ctx> {
let builder = env.builder;
) -> StructValue<'ctx> {
let ptr = pass_as_opaque(env, pointer_to_first_element);
let cap = len;
let struct_type = super::convert::zig_list_type(env);
// Store the pointer
let mut struct_val = builder
.build_insert_value(
struct_type.get_undef(),
pass_as_opaque(env, pointer_to_first_element),
Builtin::WRAPPER_PTR,
"insert_ptr_store_list",
)
.unwrap();
// Store the length
struct_val = builder
.build_insert_value(struct_val, len, Builtin::WRAPPER_LEN, "insert_len")
.unwrap();
// Store the capacity
struct_val = builder
.build_insert_value(
struct_val,
len,
Builtin::WRAPPER_CAPACITY,
"insert_capacity",
)
.unwrap();
builder.build_bitcast(
struct_val.into_struct_value(),
super::convert::zig_list_type(env),
"cast_collection",
struct_from_fields(
env,
zig_list_type(env),
[
(Builtin::WRAPPER_PTR as usize, ptr),
(Builtin::WRAPPER_LEN as usize, len.into()),
(Builtin::WRAPPER_CAPACITY as usize, cap.into()),
]
.into_iter(),
)
}
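// Illustrative sketch only: a host-side mirror of the (pointer, length, capacity) wrapper
// that store_list fills in above. The struct and field names are made up; the real field
// order is given by the Builtin::WRAPPER_PTR / WRAPPER_LEN / WRAPPER_CAPACITY indices.
#[repr(C)]
struct ListWrapperSketch {
    elements: *mut u8, // WRAPPER_PTR: opaque pointer to the first element
    length: usize,     // WRAPPER_LEN: number of initialized elements
    capacity: usize,   // WRAPPER_CAPACITY: set to `len` here, so no spare capacity
}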
pub fn decref<'a, 'ctx, 'env>(
pub(crate) fn decref<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
wrapper_struct: StructValue<'ctx>,
alignment: u32,

View File

@ -1,73 +1,15 @@
use crate::llvm::bitcode::{call_bitcode_fn, call_str_bitcode_fn};
use crate::llvm::build::{Env, Scope};
use inkwell::builder::Builder;
use inkwell::values::{BasicValueEnum, IntValue, PointerValue, StructValue};
use crate::llvm::build::Env;
use inkwell::values::{BasicValueEnum, PointerValue, StructValue};
use inkwell::AddressSpace;
use roc_builtins::bitcode::{self, FloatWidth, IntWidth};
use roc_module::symbol::Symbol;
use roc_mono::layout::{Builtin, Layout};
use roc_builtins::bitcode;
use roc_mono::layout::Layout;
use roc_target::PtrWidth;
use super::build::{create_entry_block_alloca, load_symbol};
use super::bitcode::{call_str_bitcode_fn, BitcodeReturns};
pub static CHAR_LAYOUT: Layout = Layout::u8();
pub fn str_symbol_to_c_abi<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
scope: &Scope<'a, 'ctx>,
symbol: Symbol,
) -> PointerValue<'ctx> {
let string = load_symbol(scope, &symbol);
str_to_c_abi(env, string)
}
pub fn str_to_c_abi<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
value: BasicValueEnum<'ctx>,
) -> PointerValue<'ctx> {
let parent = env
.builder
.get_insert_block()
.and_then(|b| b.get_parent())
.unwrap();
let str_type = super::convert::zig_str_type(env);
let string_alloca = create_entry_block_alloca(env, parent, str_type.into(), "str_alloca");
env.builder.build_store(string_alloca, value);
string_alloca
}
pub fn destructure<'ctx>(
builder: &Builder<'ctx>,
wrapper_struct: StructValue<'ctx>,
) -> (PointerValue<'ctx>, IntValue<'ctx>) {
let length = builder
.build_extract_value(wrapper_struct, Builtin::WRAPPER_LEN, "list_len")
.unwrap()
.into_int_value();
// a `*mut u8` pointer
let generic_ptr = builder
.build_extract_value(wrapper_struct, Builtin::WRAPPER_PTR, "read_list_ptr")
.unwrap()
.into_pointer_value();
(generic_ptr, length)
}
/// Str.fromInt : Int -> Str
pub fn str_from_int<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
value: IntValue<'ctx>,
int_width: IntWidth,
) -> BasicValueEnum<'ctx> {
call_str_bitcode_fn(env, &[value.into()], &bitcode::STR_FROM_INT[int_width])
}
pub fn decode_from_utf8_result<'a, 'ctx, 'env>(
pub(crate) fn decode_from_utf8_result<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
pointer: PointerValue<'ctx>,
) -> StructValue<'ctx> {
@ -103,17 +45,8 @@ pub fn decode_from_utf8_result<'a, 'ctx, 'env>(
}
}
/// Str.fromFloat : Float * -> Str
pub fn str_from_float<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
float: BasicValueEnum<'ctx>,
float_width: FloatWidth,
) -> BasicValueEnum<'ctx> {
call_str_bitcode_fn(env, &[float], &bitcode::STR_FROM_FLOAT[float_width])
}
/// Dec.toStr : Dec -> Str
pub fn dec_to_str<'a, 'ctx, 'env>(
pub(crate) fn dec_to_str<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
dec: BasicValueEnum<'ctx>,
) -> BasicValueEnum<'ctx> {
@ -131,16 +64,24 @@ pub fn dec_to_str<'a, 'ctx, 'env>(
call_str_bitcode_fn(
env,
&[],
&[right_bits.into(), left_bits.into()],
BitcodeReturns::Str,
bitcode::DEC_TO_STR,
)
}
/// Str.equal : Str, Str -> Bool
pub fn str_equal<'a, 'ctx, 'env>(
pub(crate) fn str_equal<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
value1: BasicValueEnum<'ctx>,
value2: BasicValueEnum<'ctx>,
) -> BasicValueEnum<'ctx> {
call_bitcode_fn(env, &[value1, value2], bitcode::STR_EQUAL)
call_str_bitcode_fn(
env,
&[value1, value2],
&[],
BitcodeReturns::Basic,
bitcode::STR_EQUAL,
)
}

View File

@ -2,9 +2,10 @@ use crate::llvm::build::Env;
use bumpalo::collections::Vec;
use inkwell::context::Context;
use inkwell::types::{BasicType, BasicTypeEnum, FloatType, IntType, StructType};
use inkwell::values::StructValue;
use inkwell::AddressSpace;
use roc_builtins::bitcode::{FloatWidth, IntWidth};
use roc_mono::layout::{Builtin, Layout, UnionLayout};
use roc_mono::layout::{round_up_to_alignment, Builtin, Layout, UnionLayout};
use roc_target::TargetInfo;
fn basic_type_from_record<'a, 'ctx, 'env>(
@ -56,36 +57,40 @@ pub fn basic_type_from_union_layout<'a, 'ctx, 'env>(
) -> BasicTypeEnum<'ctx> {
use UnionLayout::*;
let tag_id_type = basic_type_from_layout(env, &union_layout.tag_id_layout());
match union_layout {
NonRecursive(tags) => {
let data = block_of_memory_slices(env.context, tags, env.target_info);
env.context.struct_type(&[data, tag_id_type], false).into()
//
RocUnion::tagged_from_slices(env.context, tags, env.target_info)
.struct_type()
.into()
}
Recursive(tags)
| NullableWrapped {
other_tags: tags, ..
} => {
let data = block_of_memory_slices(env.context, tags, env.target_info);
if union_layout.stores_tag_id_as_data(env.target_info) {
env.context
.struct_type(&[data, tag_id_type], false)
RocUnion::tagged_from_slices(env.context, tags, env.target_info)
.struct_type()
.ptr_type(AddressSpace::Generic)
.into()
} else {
data.ptr_type(AddressSpace::Generic).into()
RocUnion::untagged_from_slices(env.context, tags, env.target_info)
.struct_type()
.ptr_type(AddressSpace::Generic)
.into()
}
}
NullableUnwrapped { other_fields, .. } => {
let block = block_of_memory_slices(env.context, &[other_fields], env.target_info);
block.ptr_type(AddressSpace::Generic).into()
RocUnion::untagged_from_slices(env.context, &[other_fields], env.target_info)
.struct_type()
.ptr_type(AddressSpace::Generic)
.into()
}
NonNullableUnwrapped(fields) => {
let block = block_of_memory_slices(env.context, &[fields], env.target_info);
block.ptr_type(AddressSpace::Generic).into()
RocUnion::untagged_from_slices(env.context, &[fields], env.target_info)
.struct_type()
.ptr_type(AddressSpace::Generic)
.into()
}
}
}
@ -185,68 +190,197 @@ pub fn float_type_from_float_width<'a, 'ctx, 'env>(
}
}
pub fn block_of_memory_slices<'ctx>(
context: &'ctx Context,
layouts: &[&[Layout<'_>]],
target_info: TargetInfo,
) -> BasicTypeEnum<'ctx> {
let mut union_size = 0;
for tag in layouts {
let mut total = 0;
for layout in tag.iter() {
total += layout.stack_size(target_info);
fn alignment_type(context: &Context, alignment: u32) -> BasicTypeEnum {
match alignment {
0 => context.struct_type(&[], false).into(),
1 => context.i8_type().into(),
2 => context.i16_type().into(),
4 => context.i32_type().into(),
8 => context.i64_type().into(),
16 => context.i128_type().into(),
_ => unimplemented!("weird alignment: {alignment}"),
}
}
#[derive(Debug, Clone, Copy)]
enum TagType {
I8,
I16,
}
#[derive(Debug, Clone, Copy)]
pub(crate) struct RocUnion<'ctx> {
struct_type: StructType<'ctx>,
data_align: u32,
data_width: u32,
tag_type: Option<TagType>,
}
impl<'ctx> RocUnion<'ctx> {
pub const TAG_ID_INDEX: u32 = 2;
pub const TAG_DATA_INDEX: u32 = 1;
fn new(
context: &'ctx Context,
_target_info: TargetInfo,
data_align: u32,
data_width: u32,
tag_type: Option<TagType>,
) -> Self {
let bytes = round_up_to_alignment(data_width, data_align);
let byte_array_type = context.i8_type().array_type(bytes).as_basic_type_enum();
let alignment_array_type = alignment_type(context, data_align)
.array_type(0)
.as_basic_type_enum();
let struct_type = if let Some(tag_type) = tag_type {
let tag_width = match tag_type {
TagType::I8 => 1,
TagType::I16 => 2,
};
let tag_padding = round_up_to_alignment(tag_width, data_align) - tag_width;
let tag_padding_type = context
.i8_type()
.array_type(tag_padding)
.as_basic_type_enum();
context.struct_type(
&[
alignment_array_type,
byte_array_type,
match tag_type {
TagType::I8 => context.i8_type().into(),
TagType::I16 => context.i16_type().into(),
},
tag_padding_type,
],
false,
)
} else {
context.struct_type(&[alignment_array_type, byte_array_type], false)
};
Self {
struct_type,
data_align,
data_width,
tag_type,
}
}
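    // Worked example (illustrative only, not from the sources): for a tag union with
    // data_width = 16, data_align = 8 and an I8 tag id, the constructor above builds
    //
    //   bytes       = round_up_to_alignment(16, 8)    = 16
    //   tag_padding = round_up_to_alignment(1, 8) - 1 = 7
    //   struct_type = { [0 x i64], [16 x i8], i8, [7 x i8] }
    //
    // i.e. a zero-length i64 array that only forces 8-byte alignment, a 16-byte data
    // buffer, the one-byte tag id, and padding so the total size stays a multiple of
    // the alignment.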
pub fn struct_type(&self) -> StructType<'ctx> {
self.struct_type
}
pub fn tagged_from_slices(
context: &'ctx Context,
layouts: &[&[Layout<'_>]],
target_info: TargetInfo,
) -> Self {
let tag_type = match layouts.len() {
0..=255 => TagType::I8,
_ => TagType::I16,
};
let (data_width, data_align) =
Layout::stack_size_and_alignment_slices(layouts, target_info);
Self::new(context, target_info, data_align, data_width, Some(tag_type))
}
pub fn untagged_from_slices(
context: &'ctx Context,
layouts: &[&[Layout<'_>]],
target_info: TargetInfo,
) -> Self {
let (data_width, data_align) =
Layout::stack_size_and_alignment_slices(layouts, target_info);
Self::new(context, target_info, data_align, data_width, None)
}
pub fn tag_alignment(&self) -> u32 {
let tag_id_alignment = match self.tag_type {
None => 0,
Some(TagType::I8) => 1,
Some(TagType::I16) => 2,
};
self.data_align.max(tag_id_alignment)
}
pub fn tag_width(&self) -> u32 {
let tag_id_width = match self.tag_type {
None => 0,
Some(TagType::I8) => 1,
Some(TagType::I16) => 2,
};
let mut width = self.data_width;
// add padding between data and the tag id
width = round_up_to_alignment(width, tag_id_width);
// add tag id
width += tag_id_width;
// add padding after the tag id
width = round_up_to_alignment(width, self.tag_alignment());
width
}
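    // Continuing the example above (data_width = 16, data_align = 8, I8 tag id;
    // illustrative numbers only), tag_width computes
    //
    //   width = 16                                  // the data
    //   width = round_up_to_alignment(16, 1) = 16   // no padding needed before a 1-byte tag
    //   width = 16 + 1 = 17                         // plus the tag id itself
    //   width = round_up_to_alignment(17, 8) = 24   // padded back up to tag_alignment
    //
    // which matches the 24-byte LLVM struct built in RocUnion::new.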
pub fn as_struct_value<'a, 'env>(
&self,
env: &Env<'a, 'ctx, 'env>,
data: StructValue<'ctx>,
tag_id: Option<usize>,
) -> StructValue<'ctx> {
debug_assert_eq!(tag_id.is_some(), self.tag_type.is_some());
let tag_alloca = env.builder.build_alloca(self.struct_type(), "tag_alloca");
let data_buffer = env
.builder
.build_struct_gep(tag_alloca, Self::TAG_DATA_INDEX, "data_buffer")
.unwrap();
let cast_pointer = env.builder.build_pointer_cast(
data_buffer,
data.get_type().ptr_type(AddressSpace::Generic),
"to_data_ptr",
);
// NOTE: the data may be smaller than the buffer, so there might be uninitialized
// bytes in the buffer. We should never touch those, but e.g. valgrind might not
// realize that. If that comes up, the solution is to just fill it with zeros
env.builder.build_store(cast_pointer, data);
// set the tag id
//
// NOTE: setting the tag id initially happened before writing the data into it.
// That turned out to expose UB. More info at https://github.com/rtfeldman/roc/issues/3554
if let Some(tag_id) = tag_id {
let tag_id_type = match self.tag_type.unwrap() {
TagType::I8 => env.context.i8_type(),
TagType::I16 => env.context.i16_type(),
};
let tag_id_ptr = env
.builder
.build_struct_gep(tag_alloca, Self::TAG_ID_INDEX, "tag_id_ptr")
.unwrap();
let tag_id = tag_id_type.const_int(tag_id as u64, false);
env.builder.build_store(tag_id_ptr, tag_id);
}
union_size = union_size.max(total);
}
block_of_memory_help(context, union_size)
}
pub fn block_of_memory<'ctx>(
context: &'ctx Context,
layout: &Layout<'_>,
target_info: TargetInfo,
) -> BasicTypeEnum<'ctx> {
// TODO make this dynamic
let mut union_size = layout.stack_size(target_info);
if let Layout::Union(UnionLayout::NonRecursive { .. }) = layout {
union_size -= target_info.ptr_width() as u32;
}
block_of_memory_help(context, union_size)
}
fn block_of_memory_help(context: &Context, union_size: u32) -> BasicTypeEnum<'_> {
// The memory layout of Union is a bit tricky.
// We have tags with different memory layouts, that are part of the same type.
// For llvm, all tags must have the same memory layout.
//
// So, we convert all tags to a layout of bytes of some size.
// It turns out that encoding to i64 for as many elements as possible is
// a nice optimization, the remainder is encoded as bytes.
let num_i64 = union_size / 8;
let num_i8 = union_size % 8;
let i8_array_type = context.i8_type().array_type(num_i8).as_basic_type_enum();
let i64_array_type = context.i64_type().array_type(num_i64).as_basic_type_enum();
if num_i64 == 0 {
// The object fits perfectly in some number of i8s
context.struct_type(&[i8_array_type], false).into()
} else if num_i8 == 0 {
// The object fits perfectly in some number of i64s
// (i.e. the size is a multiple of 8 bytes)
context.struct_type(&[i64_array_type], false).into()
} else {
// There are some trailing bytes at the end
let i8_array_type = context.i8_type().array_type(num_i8).as_basic_type_enum();
context
.struct_type(&[i64_array_type, i8_array_type], false)
.into()
env.builder
.build_load(tag_alloca, "load_tag")
.into_struct_value()
}
}

View File

@ -0,0 +1,342 @@
use crate::llvm::bitcode::call_bitcode_fn;
use crate::llvm::build::Env;
use crate::llvm::build_list::{self, incrementing_elem_loop};
use crate::llvm::convert::basic_type_from_layout;
use inkwell::builder::Builder;
use inkwell::types::BasicType;
use inkwell::values::{BasicValueEnum, IntValue, PointerValue};
use inkwell::AddressSpace;
use roc_builtins::bitcode;
use roc_module::symbol::Symbol;
use roc_mono::layout::{Builtin, Layout, LayoutIds, UnionLayout};
use roc_region::all::Region;
use super::build::{load_symbol_and_layout, Scope};
fn pointer_at_offset<'ctx>(
bd: &Builder<'ctx>,
ptr: PointerValue<'ctx>,
offset: IntValue<'ctx>,
) -> PointerValue<'ctx> {
unsafe { bd.build_gep(ptr, &[offset], "offset_ptr") }
}
/// Writes the module and region into the buffer
fn write_header<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
ptr: PointerValue<'ctx>,
mut offset: IntValue<'ctx>,
condition: Symbol,
region: Region,
) -> IntValue<'ctx> {
let region_start = env
.context
.i32_type()
.const_int(region.start().offset as _, false);
let region_end = env
.context
.i32_type()
.const_int(region.end().offset as _, false);
let module_id: u32 = unsafe { std::mem::transmute(condition.module_id()) };
let module_id = env.context.i32_type().const_int(module_id as _, false);
offset = build_copy(env, ptr, offset, region_start.into());
offset = build_copy(env, ptr, offset, region_end.into());
offset = build_copy(env, ptr, offset, module_id.into());
offset
}
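// Illustrative sketch of the header that write_header lays down for each expect frame
// (labels are made up; the widths follow the i32 constants built above):
//
//   [ region_start : u32 ][ region_end : u32 ][ module_id : u32 ]  then the cloned lookups
//
// build_copy advances `offset` past each field, so the returned offset points at the
// byte right after the header.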
/// Read the first two 32-bit values from the shared memory,
/// representing the total number of expect frames and the next free position
fn read_state<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
ptr: PointerValue<'ctx>,
) -> (IntValue<'ctx>, IntValue<'ctx>) {
let ptr_type = env.ptr_int().ptr_type(AddressSpace::Generic);
let ptr = env.builder.build_pointer_cast(ptr, ptr_type, "");
let one = env.ptr_int().const_int(1, false);
let offset_ptr = pointer_at_offset(env.builder, ptr, one);
let count = env.builder.build_load(ptr, "load_count");
let offset = env.builder.build_load(offset_ptr, "load_offset");
(count.into_int_value(), offset.into_int_value())
}
fn write_state<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
ptr: PointerValue<'ctx>,
count: IntValue<'ctx>,
offset: IntValue<'ctx>,
) {
let ptr_type = env.ptr_int().ptr_type(AddressSpace::Generic);
let ptr = env.builder.build_pointer_cast(ptr, ptr_type, "");
let one = env.ptr_int().const_int(1, false);
let offset_ptr = pointer_at_offset(env.builder, ptr, one);
env.builder.build_store(ptr, count);
env.builder.build_store(offset_ptr, offset);
}
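// Illustrative sketch only: a host-side mirror of the two state words that read_state and
// write_state access at the start of the shared buffer. The struct and field names are
// made up; the field widths follow env.ptr_int() as used above.
#[repr(C)]
struct ExpectStateSketch {
    count: usize,  // number of expect frames recorded so far
    offset: usize, // next free byte position in the shared buffer
}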
pub(crate) fn clone_to_shared_memory<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
scope: &Scope<'a, 'ctx>,
layout_ids: &mut LayoutIds<'a>,
condition: Symbol,
region: Region,
lookups: &[Symbol],
) {
let func = env
.module
.get_function(bitcode::UTILS_EXPECT_FAILED_START)
.unwrap();
let call_result = env
.builder
.build_call(func, &[], "call_expect_start_failed");
let original_ptr = call_result
.try_as_basic_value()
.left()
.unwrap()
.into_pointer_value();
let (count, mut offset) = read_state(env, original_ptr);
offset = write_header(env, original_ptr, offset, condition, region);
for lookup in lookups.iter() {
let (value, layout) = load_symbol_and_layout(scope, lookup);
offset = build_clone(
env,
layout_ids,
original_ptr,
offset,
value,
*layout,
WhenRecursive::Unreachable,
);
}
let one = env.ptr_int().const_int(1, false);
let new_count = env.builder.build_int_add(count, one, "inc");
write_state(env, original_ptr, new_count, offset)
}
#[derive(Clone, Debug, Copy)]
enum WhenRecursive<'a> {
Unreachable,
#[allow(dead_code)]
Loop(UnionLayout<'a>),
}
fn build_clone<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
ptr: PointerValue<'ctx>,
offset: IntValue<'ctx>,
value: BasicValueEnum<'ctx>,
layout: Layout<'a>,
when_recursive: WhenRecursive<'a>,
) -> IntValue<'ctx> {
match layout {
Layout::Builtin(builtin) => {
build_clone_builtin(env, layout_ids, ptr, offset, value, builtin, when_recursive)
}
Layout::Struct {
field_layouts: _, ..
} => {
if layout.safe_to_memcpy() {
build_copy(env, ptr, offset, value)
} else {
todo!()
}
}
Layout::LambdaSet(_) => unreachable!("cannot compare closures"),
Layout::Union(_union_layout) => {
if layout.safe_to_memcpy() {
build_copy(env, ptr, offset, value)
} else {
todo!()
}
}
/*
Layout::Boxed(inner_layout) => build_box_eq(
env,
layout_ids,
when_recursive,
lhs_layout,
inner_layout,
lhs_val,
rhs_val,
),
Layout::RecursivePointer => match when_recursive {
WhenRecursive::Unreachable => {
unreachable!("recursion pointers should never be compared directly")
}
WhenRecursive::Loop(union_layout) => {
let layout = Layout::Union(union_layout);
let bt = basic_type_from_layout(env, &layout);
// cast the i64 pointer to a pointer to block of memory
let field1_cast = env
.builder
.build_bitcast(lhs_val, bt, "i64_to_opaque")
.into_pointer_value();
let field2_cast = env
.builder
.build_bitcast(rhs_val, bt, "i64_to_opaque")
.into_pointer_value();
build_tag_eq(
env,
layout_ids,
WhenRecursive::Loop(union_layout),
&union_layout,
field1_cast.into(),
field2_cast.into(),
)
}
},
*/
_ => todo!(),
}
}
fn build_copy<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
ptr: PointerValue<'ctx>,
offset: IntValue<'ctx>,
value: BasicValueEnum<'ctx>,
) -> IntValue<'ctx> {
let ptr = unsafe {
env.builder
.build_in_bounds_gep(ptr, &[offset], "at_current_offset")
};
let ptr_type = value.get_type().ptr_type(AddressSpace::Generic);
let ptr = env
.builder
.build_pointer_cast(ptr, ptr_type, "cast_ptr_type");
env.builder.build_store(ptr, value);
let width = value.get_type().size_of().unwrap();
env.builder.build_int_add(offset, width, "new_offset")
}
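// Illustrative sketch (not from the sources): the plain-Rust shape of the IR that
// build_copy emits. `copy_value` and its parameters are made-up names; write_unaligned
// is used because the destination offset is not known to be suitably aligned.
unsafe fn copy_value<T: Copy>(buffer: *mut u8, offset: usize, value: T) -> usize {
    let dst = buffer.add(offset) as *mut T;
    dst.write_unaligned(value);
    // return the new offset, just as build_copy returns `offset + width`
    offset + core::mem::size_of::<T>()
}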
fn build_clone_builtin<'a, 'ctx, 'env>(
env: &Env<'a, 'ctx, 'env>,
layout_ids: &mut LayoutIds<'a>,
ptr: PointerValue<'ctx>,
offset: IntValue<'ctx>,
value: BasicValueEnum<'ctx>,
builtin: Builtin<'a>,
when_recursive: WhenRecursive<'a>,
) -> IntValue<'ctx> {
use Builtin::*;
match builtin {
Int(_) | Float(_) | Bool | Decimal => build_copy(env, ptr, offset, value),
Builtin::Str => {
//
call_bitcode_fn(
env,
&[ptr.into(), offset.into(), value],
bitcode::STR_CLONE_TO,
)
.into_int_value()
}
Builtin::List(elem) => {
let bd = env.builder;
let list = value.into_struct_value();
let (elements, len, _cap) = build_list::destructure(env.builder, list);
let list_width = env
.ptr_int()
.const_int(env.target_info.ptr_size() as u64 * 3, false);
let elements_offset = bd.build_int_add(offset, list_width, "new_offset");
let mut offset = offset;
// we only copy the elements we actually have (and skip extra capacity)
offset = build_copy(env, ptr, offset, elements_offset.into());
offset = build_copy(env, ptr, offset, len.into());
offset = build_copy(env, ptr, offset, len.into());
let (element_width, _element_align) = elem.stack_size_and_alignment(env.target_info);
let element_width = env.ptr_int().const_int(element_width as _, false);
let elements_width = bd.build_int_mul(element_width, len, "elements_width");
if elem.safe_to_memcpy() {
// NOTE we are not actually sure the dest is properly aligned
let dest = pointer_at_offset(bd, ptr, offset);
let src = bd.build_pointer_cast(
elements,
env.context.i8_type().ptr_type(AddressSpace::Generic),
"to_bytes_pointer",
);
bd.build_memcpy(dest, 1, src, 1, elements_width).unwrap();
bd.build_int_add(offset, elements_width, "new_offset")
} else {
let elements_start_offset = offset;
let element_type = basic_type_from_layout(env, elem);
let elements = bd.build_pointer_cast(
elements,
element_type.ptr_type(AddressSpace::Generic),
"elements",
);
let element_offset = bd.build_alloca(env.ptr_int(), "element_offset");
bd.build_store(element_offset, elements_start_offset);
let body = |_index, element| {
let current_offset = bd.build_load(element_offset, "element_offset");
let new_offset = build_clone(
env,
layout_ids,
ptr,
current_offset.into_int_value(),
element,
*elem,
when_recursive,
);
bd.build_store(element_offset, new_offset);
};
let parent = env
.builder
.get_insert_block()
.and_then(|b| b.get_parent())
.unwrap();
incrementing_elem_loop(env, parent, *elem, elements, len, "index", body);
bd.build_load(element_offset, "element_offset")
.into_int_value()
}
}
}
}
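// Illustrative sketch of the list frame written by the Builtin::List branch above
// (layout inferred from the three build_copy calls and the element loop; labels are made up):
//
//   [ elements_offset : word ][ len : word ][ len : word ]   // 3-word header; capacity is
//   [ element 0 ][ element 1 ] ...                           // written as len, since spare
//                                                            // capacity is not cloned
//
// `elements_offset` points at the element data within the same shared buffer, so a reader
// can reconstruct the list without chasing host pointers.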

View File

@ -4,5 +4,6 @@ pub mod build_list;
pub mod build_str;
pub mod compare;
pub mod convert;
mod expect;
pub mod externs;
pub mod refcounting;

View File

@ -2,10 +2,10 @@ use crate::debug_info_init;
use crate::llvm::bitcode::call_void_bitcode_fn;
use crate::llvm::build::{
add_func, cast_basic_basic, get_tag_id, tag_pointer_clear_tag_id, use_roc_value, Env,
FAST_CALL_CONV, TAG_DATA_INDEX, TAG_ID_INDEX,
FAST_CALL_CONV,
};
use crate::llvm::build_list::{incrementing_elem_loop, list_len, load_list};
use crate::llvm::convert::basic_type_from_layout;
use crate::llvm::convert::{basic_type_from_layout, RocUnion};
use bumpalo::collections::Vec;
use inkwell::basic_block::BasicBlock;
use inkwell::module::Linkage;
@ -1597,7 +1597,7 @@ fn modify_refcount_union_help<'a, 'ctx, 'env>(
// read the tag_id
let tag_id_ptr = env
.builder
.build_struct_gep(arg_ptr, TAG_ID_INDEX, "tag_id_ptr")
.build_struct_gep(arg_ptr, RocUnion::TAG_ID_INDEX, "tag_id_ptr")
.unwrap();
let tag_id = env
@ -1634,7 +1634,7 @@ fn modify_refcount_union_help<'a, 'ctx, 'env>(
debug_assert!(wrapper_type.is_struct_type());
let opaque_tag_data_ptr = env
.builder
.build_struct_gep(arg_ptr, TAG_DATA_INDEX, "field_ptr")
.build_struct_gep(arg_ptr, RocUnion::TAG_DATA_INDEX, "field_ptr")
.unwrap();
let cast_tag_data_pointer = env.builder.build_pointer_cast(

View File

@ -1,4 +1,4 @@
use std::ffi::CString;
use std::ffi::CStr;
use std::mem::MaybeUninit;
use std::os::raw::c_char;
@ -37,14 +37,9 @@ impl<T: Sized> From<RocCallResult<T>> for Result<T, String> {
match call_result.tag {
0 => Ok(unsafe { call_result.value.assume_init() }),
_ => Err({
let raw = unsafe { CString::from_raw(call_result.error_msg) };
let raw = unsafe { CStr::from_ptr(call_result.error_msg) };
let result = format!("{:?}", raw);
// make sure rust does not try to free the Roc string
std::mem::forget(raw);
result
raw.to_str().unwrap().to_owned()
}),
}
}
@ -74,16 +69,16 @@ macro_rules! run_roc_dylib {
}
#[macro_export]
macro_rules! run_jit_function {
macro_rules! try_run_jit_function {
($lib: expr, $main_fn_name: expr, $ty:ty, $transform:expr) => {{
let v: String = String::new();
run_jit_function!($lib, $main_fn_name, $ty, $transform, v)
try_run_jit_function!($lib, $main_fn_name, $ty, $transform, v)
}};
($lib: expr, $main_fn_name: expr, $ty:ty, $transform:expr, $errors:expr) => {{
run_jit_function!($lib, $main_fn_name, $ty, $transform, $errors, &[])
($lib: expr, $main_fn_name: expr, $ty:ty, $transform:expr) => {{
try_run_jit_function!($lib, $main_fn_name, $ty, $transform, &[])
}};
($lib: expr, $main_fn_name: expr, $ty:ty, $transform:expr, $errors:expr, $expect_failures:expr) => {{
($lib: expr, $main_fn_name: expr, $ty:ty, $transform:expr, $expect_failures:expr) => {{
use inkwell::context::Context;
use roc_builtins::bitcode;
use roc_gen_llvm::run_roc::RocCallResult;
@ -99,15 +94,33 @@ macro_rules! run_jit_function {
let mut main_result = MaybeUninit::uninit();
main(main_result.as_mut_ptr());
match main_result.assume_init().into() {
Ok(success) => {
// only if there are no exceptions thrown, check for errors
assert!($errors.is_empty(), "Encountered errors:\n{}", $errors);
main_result.assume_init().into()
}
}};
}
$transform(success)
}
Err(error_msg) => panic!("Roc failed with message: {}", error_msg),
#[macro_export]
macro_rules! run_jit_function {
($lib: expr, $main_fn_name: expr, $ty:ty, $transform:expr) => {{
let v: String = String::new();
run_jit_function!($lib, $main_fn_name, $ty, $transform, v)
}};
($lib: expr, $main_fn_name: expr, $ty:ty, $transform:expr, $errors:expr) => {{
run_jit_function!($lib, $main_fn_name, $ty, $transform, $errors, &[])
}};
($lib: expr, $main_fn_name: expr, $ty:ty, $transform:expr, $errors:expr, $expect_failures:expr) => {{
let result =
$crate::try_run_jit_function!($lib, $main_fn_name, $ty, $transform, $expect_failures);
match result {
Ok(success) => {
// only if there are no exceptions thrown, check for errors
assert!($errors.is_empty(), "Encountered errors:\n{}", $errors);
$transform(success)
}
Err(error_msg) => panic!("Roc failed with message: {}", error_msg),
}
}};
}

View File

@ -253,8 +253,6 @@ impl<'a> WasmBackend<'a> {
.to_symbol_string(symbol, self.interns);
let name = String::from_str_in(&name, self.env.arena).into_bump_str();
// dbg!(name);
self.proc_lookup.push(ProcLookupData {
name: symbol,
layout,
@ -1595,10 +1593,9 @@ impl<'a> WasmBackend<'a> {
// Store the tag ID (if any)
if stores_tag_id_as_data {
let id_offset =
data_offset + union_layout.data_size_without_tag_id(TARGET_INFO).unwrap();
let id_offset = data_offset + union_layout.tag_id_offset(TARGET_INFO).unwrap();
let id_align = union_layout.tag_id_builtin().alignment_bytes(TARGET_INFO);
let id_align = union_layout.discriminant().alignment_bytes();
let id_align = Align::from(id_align);
self.code_builder.get_local(local_id);
@ -1679,22 +1676,18 @@ impl<'a> WasmBackend<'a> {
};
if union_layout.stores_tag_id_as_data(TARGET_INFO) {
let id_offset = union_layout.data_size_without_tag_id(TARGET_INFO).unwrap();
let id_offset = union_layout.tag_id_offset(TARGET_INFO).unwrap();
let id_align = union_layout.tag_id_builtin().alignment_bytes(TARGET_INFO);
let id_align = union_layout.discriminant().alignment_bytes();
let id_align = Align::from(id_align);
self.storage
.load_symbols(&mut self.code_builder, &[structure]);
match union_layout.tag_id_builtin() {
Builtin::Bool | Builtin::Int(IntWidth::U8) => {
self.code_builder.i32_load8_u(id_align, id_offset)
}
Builtin::Int(IntWidth::U16) => self.code_builder.i32_load16_u(id_align, id_offset),
Builtin::Int(IntWidth::U32) => self.code_builder.i32_load(id_align, id_offset),
Builtin::Int(IntWidth::U64) => self.code_builder.i64_load(id_align, id_offset),
x => internal_error!("Unexpected layout for tag union id {:?}", x),
use roc_mono::layout::Discriminant::*;
match union_layout.discriminant() {
U0 | U1 | U8 => self.code_builder.i32_load8_u(id_align, id_offset),
U16 => self.code_builder.i32_load16_u(id_align, id_offset),
}
} else if union_layout.stores_tag_id_in_pointer(TARGET_INFO) {
self.storage

View File

@ -2078,14 +2078,23 @@ pub fn call_higher_order_lowlevel<'a>(
..
} = passed_function;
let closure_data_layout = match backend.storage.symbol_layouts[captured_environment] {
Layout::LambdaSet(lambda_set) => lambda_set.runtime_representation(),
Layout::Struct {
field_layouts: &[], ..
} => Layout::UNIT,
x => internal_error!("Closure data has an invalid layout\n{:?}", x),
};
let closure_data_exists: bool = closure_data_layout != Layout::UNIT;
let (closure_data_layout, closure_data_exists) =
match backend.storage.symbol_layouts[captured_environment] {
Layout::LambdaSet(lambda_set) => {
if lambda_set.is_represented().is_some() {
(lambda_set.runtime_representation(), true)
} else {
// Closure data is a lambda set, which *itself* has no closure data!
// The higher-order wrapper doesn't need to pass this down; that's
// handled in other ways in the IR. Here just pretend it's Unit.
(Layout::UNIT, false)
}
}
Layout::Struct {
field_layouts: &[], ..
} => (Layout::UNIT, false),
x => internal_error!("Closure data has an invalid layout\n{:?}", x),
};
// We create a wrapper around the passed function, which just unboxes the arguments.
// This allows Zig builtins to have a generic pointer-based interface.
@ -2152,21 +2161,16 @@ pub fn call_higher_order_lowlevel<'a>(
let wrapper_fn_idx =
backend.register_helper_proc(wrapper_sym, wrapper_layout, helper_proc_source);
let wrapper_fn_ptr = backend.get_fn_ptr(wrapper_fn_idx);
let inc_fn_ptr = match closure_data_layout {
Layout::Struct {
field_layouts: &[], ..
} => {
// Our code gen would ignore the Unit arg, but the Zig builtin passes a pointer for it!
// That results in an exception (type signature mismatch in indirect call).
// The workaround is to use I32 layout, treating the (ignored) pointer as an integer.
let inc_fn = backend
.get_refcount_fn_index(Layout::Builtin(Builtin::Int(IntWidth::I32)), HelperOp::Inc);
backend.get_fn_ptr(inc_fn)
}
_ => {
let inc_fn = backend.get_refcount_fn_index(closure_data_layout, HelperOp::Inc);
backend.get_fn_ptr(inc_fn)
}
let inc_fn_ptr = if !closure_data_exists {
// Our code gen would ignore the Unit arg, but the Zig builtin passes a pointer for it!
// That results in an exception (type signature mismatch in indirect call).
// The workaround is to use I32 layout, treating the (ignored) pointer as an integer.
let inc_fn = backend
.get_refcount_fn_index(Layout::Builtin(Builtin::Int(IntWidth::I32)), HelperOp::Inc);
backend.get_fn_ptr(inc_fn)
} else {
let inc_fn = backend.get_refcount_fn_index(closure_data_layout, HelperOp::Inc);
backend.get_fn_ptr(inc_fn)
};
match op {

View File

@ -14,7 +14,7 @@ use crate::wasm_module::{
linking::SymInfo, linking::WasmObjectSymbol, Align, CodeBuilder, Export, ExportType, LocalId,
Signature, ValueType, WasmModule,
};
use roc_std::{RocDec, RocList, RocOrder, RocResult, RocStr};
use roc_std::{RocDec, RocList, RocOrder, RocResult, RocStr, I128, U128};
/// Type-driven wrapper generation
pub trait Wasm32Result {
@ -186,6 +186,8 @@ wasm_result_primitive!(f64, f64_store, Align::Bytes8);
wasm_result_stack_memory!(u128);
wasm_result_stack_memory!(i128);
wasm_result_stack_memory!(U128);
wasm_result_stack_memory!(I128);
wasm_result_stack_memory!(RocDec);
impl Wasm32Result for RocStr {
@ -260,3 +262,19 @@ where
)
}
}
impl<T, U, V, W> Wasm32Result for (T, U, V, W)
where
T: Wasm32Result + Wasm32Sized,
U: Wasm32Result + Wasm32Sized,
V: Wasm32Result + Wasm32Sized,
W: Wasm32Result + Wasm32Sized,
{
fn build_wrapper_body(code_builder: &mut CodeBuilder, main_function_index: u32) {
build_wrapper_body_stack_memory(
code_builder,
main_function_index,
T::ACTUAL_WIDTH + U::ACTUAL_WIDTH + V::ACTUAL_WIDTH + W::ACTUAL_WIDTH,
)
}
}

View File

@ -1,4 +1,4 @@
use roc_std::{RocDec, RocList, RocOrder, RocResult, RocStr};
use roc_std::{RocDec, RocList, RocOrder, RocResult, RocStr, I128, U128};
pub trait Wasm32Sized: Sized {
const SIZE_OF_WASM: usize;
@ -22,9 +22,8 @@ macro_rules! wasm32_sized_primitive {
}
}
wasm32_sized_primitive!(
u8, i8, u16, i16, u32, i32, char, u64, i64, u128, i128, f32, f64, bool, RocDec, RocOrder,
);
wasm32_sized_primitive!(u8, i8, u16, i16, u32, i32, char, u64, i64, u128, i128, f32, f64, bool,);
wasm32_sized_primitive!(RocDec, RocOrder, I128, U128,);
impl Wasm32Sized for () {
const SIZE_OF_WASM: usize = 0;
@ -47,8 +46,8 @@ impl<T: Wasm32Sized> Wasm32Sized for RocList<T> {
}
impl<T: Wasm32Sized, E: Wasm32Sized> Wasm32Sized for RocResult<T, E> {
const ALIGN_OF_WASM: usize = max2(T::ALIGN_OF_WASM, E::ALIGN_OF_WASM);
const SIZE_OF_WASM: usize = max2(T::ACTUAL_WIDTH, E::ACTUAL_WIDTH) + 1;
const ALIGN_OF_WASM: usize = max(&[T::ALIGN_OF_WASM, E::ALIGN_OF_WASM]);
const SIZE_OF_WASM: usize = max(&[T::ACTUAL_WIDTH, E::ACTUAL_WIDTH]) + 1;
}
impl<T: Wasm32Sized> Wasm32Sized for &'_ T {
@ -68,22 +67,39 @@ impl Wasm32Sized for usize {
impl<T: Wasm32Sized, U: Wasm32Sized> Wasm32Sized for (T, U) {
const SIZE_OF_WASM: usize = T::SIZE_OF_WASM + U::SIZE_OF_WASM;
const ALIGN_OF_WASM: usize = max2(T::SIZE_OF_WASM, U::SIZE_OF_WASM);
const ALIGN_OF_WASM: usize = max(&[T::ALIGN_OF_WASM, U::ALIGN_OF_WASM]);
}
impl<T: Wasm32Sized, U: Wasm32Sized, V: Wasm32Sized> Wasm32Sized for (T, U, V) {
const SIZE_OF_WASM: usize = T::SIZE_OF_WASM + U::SIZE_OF_WASM + V::SIZE_OF_WASM;
const ALIGN_OF_WASM: usize = max3(T::SIZE_OF_WASM, U::SIZE_OF_WASM, V::SIZE_OF_WASM);
const ALIGN_OF_WASM: usize = max(&[T::ALIGN_OF_WASM, U::ALIGN_OF_WASM, V::ALIGN_OF_WASM]);
}
const fn max2(a: usize, b: usize) -> usize {
if a > b {
a
} else {
b
impl<T: Wasm32Sized, U: Wasm32Sized, V: Wasm32Sized, W: Wasm32Sized> Wasm32Sized for (T, U, V, W) {
const SIZE_OF_WASM: usize =
T::SIZE_OF_WASM + U::SIZE_OF_WASM + V::SIZE_OF_WASM + W::SIZE_OF_WASM;
const ALIGN_OF_WASM: usize = max(&[
T::ALIGN_OF_WASM,
U::ALIGN_OF_WASM,
V::ALIGN_OF_WASM,
W::ALIGN_OF_WASM,
]);
}
const fn max(alignments: &[usize]) -> usize {
assert!(!alignments.is_empty());
let mut largest = 0;
let mut i = 0;
while i < alignments.len() {
largest = if largest > alignments[i] {
largest
} else {
alignments[i]
};
i += 1;
}
}
const fn max3(a: usize, b: usize, c: usize) -> usize {
max2(max2(a, b), c)
largest
}
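// Usage sketch, illustrative only (these constants are not in the Roc sources): `max` is a
// const fn, so it can be evaluated at compile time, which is what the ALIGN_OF_WASM
// definitions above rely on.
const WIDEST_SKETCH: usize = max(&[1, 4, 8, 2]);
const _: () = assert!(WIDEST_SKETCH == 8);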

View File

@ -200,7 +200,7 @@ fn generate_entry_docs<'a>(
ValueDef::Body(_, _) => (),
ValueDef::Expect(_) => {
ValueDef::Expect { .. } => {
// Don't generate docs for `expect`s
}
},

View File

@ -5,12 +5,13 @@ use crossbeam::deque::{Injector, Stealer, Worker};
use crossbeam::thread;
use parking_lot::Mutex;
use roc_builtins::roc::module_source;
use roc_can::abilities::{AbilitiesStore, PendingAbilitiesStore, ResolvedSpecializations};
use roc_can::abilities::{AbilitiesStore, PendingAbilitiesStore, ResolvedImpl};
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints};
use roc_can::expr::Declarations;
use roc_can::expr::PendingDerives;
use roc_can::module::{
canonicalize_module_defs, ExposedByModule, ExposedForModule, ExposedModuleTypes, Module,
ResolvedImplementations,
};
use roc_collections::{default_hasher, BumpMap, MutMap, MutSet, VecMap, VecSet};
use roc_constrain::module::constrain_module;
@ -41,7 +42,7 @@ use roc_parse::module::module_defs;
use roc_parse::parser::{FileError, Parser, SyntaxError};
use roc_region::all::{LineInfo, Loc, Region};
use roc_reporting::report::RenderTarget;
use roc_solve::module::{Solved, SolvedModule};
use roc_solve::module::{extract_module_owned_implementations, Solved, SolvedModule};
use roc_solve::solve;
use roc_target::TargetInfo;
use roc_types::subs::{ExposedTypesStorageSubs, Subs, VarStore, Variable};
@ -61,9 +62,9 @@ use crate::work::Dependencies;
pub use crate::work::Phase;
#[cfg(target_family = "wasm")]
use crate::wasm_system_time::{Duration, SystemTime};
use crate::wasm_instant::{Duration, Instant};
#[cfg(not(target_family = "wasm"))]
use std::time::{Duration, SystemTime};
use std::time::{Duration, Instant};
/// Default name for the binary generated for an app, if an invalid one was specified.
const DEFAULT_APP_OUTPUT_PATH: &str = "app";
@ -462,7 +463,7 @@ fn start_phase<'a>(
Subs::default(),
ProcsBase::default(),
LayoutCache::new(state.target_info),
ModuleTiming::new(SystemTime::now()),
ModuleTiming::new(Instant::now()),
)
} else if state.make_specializations_pass.current_pass() == 1 {
let found_specializations = state
@ -562,7 +563,7 @@ pub struct LoadedModule {
pub exposed_aliases: MutMap<Symbol, Alias>,
pub exposed_values: Vec<Symbol>,
pub exposed_types_storage: ExposedTypesStorageSubs,
pub resolved_specializations: ResolvedSpecializations,
pub resolved_implementations: ResolvedImplementations,
pub sources: MutMap<ModuleId, (PathBuf, Box<str>)>,
pub timings: MutMap<ModuleId, ModuleTiming>,
pub documentation: MutMap<ModuleId, ModuleDocumentation>,
@ -672,7 +673,7 @@ pub struct MonomorphizedModule<'a> {
pub can_problems: MutMap<ModuleId, Vec<roc_problem::can::Problem>>,
pub type_problems: MutMap<ModuleId, Vec<solve::TypeError>>,
pub procedures: MutMap<(Symbol, ProcLayout<'a>), Proc<'a>>,
pub toplevel_expects: Vec<Symbol>,
pub toplevel_expects: VecMap<Symbol, Region>,
pub entry_point: EntryPoint<'a>,
pub exposed_to_host: ExposedToHost,
pub sources: MutMap<ModuleId, (PathBuf, Box<str>)>,
@ -754,7 +755,7 @@ enum Msg<'a> {
exposed_vars_by_symbol: Vec<(Symbol, Variable)>,
exposed_aliases_by_symbol: MutMap<Symbol, (bool, Alias)>,
exposed_types_storage: ExposedTypesStorageSubs,
resolved_specializations: ResolvedSpecializations,
resolved_implementations: ResolvedImplementations,
dep_idents: IdentIdsByModule,
documentation: MutMap<ModuleId, ModuleDocumentation>,
abilities_store: AbilitiesStore,
@ -767,7 +768,7 @@ enum Msg<'a> {
solved_subs: Solved<Subs>,
module_timing: ModuleTiming,
abilities_store: AbilitiesStore,
toplevel_expects: std::vec::Vec<Symbol>,
toplevel_expects: VecMap<Symbol, Region>,
},
MadeSpecializations {
module_id: ModuleId,
@ -855,7 +856,7 @@ struct State<'a> {
pub module_cache: ModuleCache<'a>,
pub dependencies: Dependencies<'a>,
pub procedures: MutMap<(Symbol, ProcLayout<'a>), Proc<'a>>,
pub toplevel_expects: Vec<Symbol>,
pub toplevel_expects: VecMap<Symbol, Region>,
pub exposed_to_host: ExposedToHost,
/// This is the "final" list of IdentIds, after canonicalization and constraint gen
@ -924,7 +925,7 @@ impl<'a> State<'a> {
module_cache: ModuleCache::default(),
dependencies,
procedures: MutMap::default(),
toplevel_expects: Vec::new(),
toplevel_expects: VecMap::default(),
exposed_to_host: ExposedToHost::default(),
exposed_types,
arc_modules,
@ -958,12 +959,12 @@ pub struct ModuleTiming {
// TODO pub monomorphize: Duration,
/// Total duration will always be more than the sum of the other fields, due
/// to things like state lookups in between phases, waiting on other threads, etc.
start_time: SystemTime,
end_time: SystemTime,
start_time: Instant,
end_time: Instant,
}
impl ModuleTiming {
pub fn new(start_time: SystemTime) -> Self {
pub fn new(start_time: Instant) -> Self {
ModuleTiming {
read_roc_file: Duration::default(),
parse_header: Duration::default(),
@ -979,7 +980,7 @@ impl ModuleTiming {
}
pub fn total(&self) -> Duration {
self.end_time.duration_since(self.start_time).unwrap()
self.end_time.duration_since(self.start_time)
}
/// Subtract all the other fields from total_start_to_finish
@ -997,10 +998,10 @@ impl ModuleTiming {
end_time,
} = self;
let calculate = |t: Result<Duration, _>| -> Option<Duration> {
let calculate = |d: Option<Duration>| -> Option<Duration> {
make_specializations
.iter()
.fold(t.ok(), |t, pass_time| t?.checked_sub(*pass_time))?
.fold(d, |d, pass_time| d?.checked_sub(*pass_time))?
.checked_sub(*find_specializations)?
.checked_sub(*solve)?
.checked_sub(*constrain)?
@ -1010,7 +1011,7 @@ impl ModuleTiming {
.checked_sub(*read_roc_file)
};
calculate(end_time.duration_since(*start_time)).unwrap_or_default()
calculate(Some(end_time.duration_since(*start_time))).unwrap_or_default()
}
}
@ -1186,7 +1187,7 @@ impl<'a> LoadStart<'a> {
// Load the root module synchronously; we can't proceed until we have its id.
let (root_id, root_msg) = {
let root_start_time = SystemTime::now();
let root_start_time = Instant::now();
let res_loaded = load_filename(
arena,
@ -1273,7 +1274,7 @@ impl<'a> LoadStart<'a> {
// Load the root module synchronously; we can't proceed until we have its id.
let (root_id, root_msg) = {
let root_start_time = SystemTime::now();
let root_start_time = Instant::now();
load_from_str(
arena,
@ -1513,7 +1514,7 @@ fn state_thread_step<'a>(
exposed_vars_by_symbol,
exposed_aliases_by_symbol,
exposed_types_storage,
resolved_specializations,
resolved_implementations,
dep_idents,
documentation,
abilities_store,
@ -1532,7 +1533,7 @@ fn state_thread_step<'a>(
exposed_aliases_by_symbol,
exposed_vars_by_symbol,
exposed_types_storage,
resolved_specializations,
resolved_implementations,
dep_idents,
documentation,
abilities_store,
@ -2291,7 +2292,7 @@ fn update<'a>(
loc_expects,
} => {
log!("solved types for {:?}", module_id);
module_timing.end_time = SystemTime::now();
module_timing.end_time = Instant::now();
state
.module_cache
@ -2362,7 +2363,7 @@ fn update<'a>(
exposed_vars_by_symbol: solved_module.exposed_vars_by_symbol,
exposed_aliases_by_symbol: solved_module.aliases,
exposed_types_storage: solved_module.exposed_types,
resolved_specializations: solved_module.solved_specializations,
resolved_implementations: solved_module.solved_implementations,
dep_idents,
documentation,
abilities_store,
@ -2381,7 +2382,7 @@ fn update<'a>(
module_id,
ExposedModuleTypes {
exposed_types_storage_subs: solved_module.exposed_types,
resolved_specializations: solved_module.solved_specializations,
resolved_implementations: solved_module.solved_implementations,
},
);
@ -2843,7 +2844,7 @@ fn finish(
exposed_aliases_by_symbol: MutMap<Symbol, Alias>,
exposed_vars_by_symbol: Vec<(Symbol, Variable)>,
exposed_types_storage: ExposedTypesStorageSubs,
resolved_specializations: ResolvedSpecializations,
resolved_implementations: ResolvedImplementations,
dep_idents: IdentIdsByModule,
documentation: MutMap<ModuleId, ModuleDocumentation>,
abilities_store: AbilitiesStore,
@ -2890,7 +2891,7 @@ fn finish(
exposed_values,
exposed_to_host: exposed_vars_by_symbol.into_iter().collect(),
exposed_types_storage,
resolved_specializations,
resolved_implementations,
sources,
timings: state.timings,
documentation,
@ -2907,18 +2908,18 @@ fn load_platform_module<'a>(
module_ids: Arc<Mutex<PackageModuleIds<'a>>>,
ident_ids_by_module: SharedIdentIdsByModule,
) -> Result<Msg<'a>, LoadingProblem<'a>> {
let module_start_time = SystemTime::now();
let file_io_start = SystemTime::now();
let module_start_time = Instant::now();
let file_io_start = Instant::now();
let file = fs::read(&filename);
let file_io_duration = file_io_start.elapsed().unwrap();
let file_io_duration = file_io_start.elapsed();
match file {
Ok(bytes_vec) => {
let parse_start = SystemTime::now();
let parse_start = Instant::now();
let bytes = arena.alloc(bytes_vec);
let parse_state = roc_parse::state::State::new(bytes);
let parsed = roc_parse::module::parse_header(arena, parse_state.clone());
let parse_header_duration = parse_start.elapsed().unwrap();
let parse_header_duration = parse_start.elapsed();
// Insert the first entries for this module's timings
let mut pkg_module_timing = ModuleTiming::new(module_start_time);
@ -3047,10 +3048,10 @@ fn load_module<'a>(
arc_shorthands: Arc<Mutex<MutMap<&'a str, PackageName<'a>>>>,
ident_ids_by_module: SharedIdentIdsByModule,
) -> Result<(ModuleId, Msg<'a>), LoadingProblem<'a>> {
let module_start_time = SystemTime::now();
let module_start_time = Instant::now();
let parse_start = SystemTime::now();
let parse_header_duration = parse_start.elapsed().unwrap();
let parse_start = Instant::now();
let parse_header_duration = parse_start.elapsed();
// Insert the first entries for this module's timings
let mut module_timing = ModuleTiming::new(module_start_time);
@ -3188,12 +3189,12 @@ fn parse_header<'a>(
module_ids: Arc<Mutex<PackageModuleIds<'a>>>,
ident_ids_by_module: SharedIdentIdsByModule,
src_bytes: &'a [u8],
start_time: SystemTime,
start_time: Instant,
) -> Result<(ModuleId, Msg<'a>), LoadingProblem<'a>> {
let parse_start = SystemTime::now();
let parse_start = Instant::now();
let parse_state = roc_parse::state::State::new(src_bytes);
let parsed = roc_parse::module::parse_header(arena, parse_state.clone());
let parse_header_duration = parse_start.elapsed().unwrap();
let parse_header_duration = parse_start.elapsed();
// Insert the first entries for this module's timings
let mut module_timing = ModuleTiming::new(start_time);
@ -3375,11 +3376,11 @@ fn load_filename<'a>(
opt_shorthand: Option<&'a str>,
module_ids: Arc<Mutex<PackageModuleIds<'a>>>,
ident_ids_by_module: SharedIdentIdsByModule,
module_start_time: SystemTime,
module_start_time: Instant,
) -> Result<(ModuleId, Msg<'a>), LoadingProblem<'a>> {
let file_io_start = SystemTime::now();
let file_io_start = Instant::now();
let file = fs::read(&filename);
let file_io_duration = file_io_start.elapsed().unwrap();
let file_io_duration = file_io_start.elapsed();
match file {
Ok(bytes) => parse_header(
@ -3409,10 +3410,10 @@ fn load_from_str<'a>(
src: &'a str,
module_ids: Arc<Mutex<PackageModuleIds<'a>>>,
ident_ids_by_module: SharedIdentIdsByModule,
module_start_time: SystemTime,
module_start_time: Instant,
) -> Result<(ModuleId, Msg<'a>), LoadingProblem<'a>> {
let file_io_start = SystemTime::now();
let file_io_duration = file_io_start.elapsed().unwrap();
let file_io_start = Instant::now();
let file_io_duration = file_io_start.elapsed();
parse_header(
arena,
@ -3766,7 +3767,7 @@ fn send_header_two<'a>(
{
// If we don't have an app module id (e.g. because we're doing
// `roc check myplatform.roc` or because we're doing bindgen),
// `roc check myplatform.roc` or because we're generating glue code),
// insert the `requires` symbols into the platform module's IdentIds.
//
// Otherwise, get them from the app module's IdentIds, because it
@ -3991,7 +3992,7 @@ pub fn add_imports(
match $exposed_by_module.get(&module_id) {
Some(ExposedModuleTypes {
exposed_types_storage_subs: exposed_types,
resolved_specializations: _,
resolved_implementations: _,
}) => {
let variable = match exposed_types.stored_vars_by_symbol.iter().find(|(s, _)| **s == $symbol) {
None => {
@ -4050,8 +4051,8 @@ pub fn add_imports(
// One idea is to just always assume external modules fulfill their specialization obligations
// and save lambda set resolution for mono.
for (_, module_types) in exposed_for_module.exposed_by_module.iter_all() {
for ((member, typ), specialization) in module_types.resolved_specializations.iter() {
pending_abilities.import_specialization(*member, *typ, specialization)
for (impl_key, resolved_impl) in module_types.resolved_implementations.iter() {
pending_abilities.import_implementation(*impl_key, resolved_impl);
}
}
@ -4081,7 +4082,7 @@ pub fn add_imports(
|ctx, module, lset_var| match ctx.exposed_by_module.get(&module) {
Some(ExposedModuleTypes {
exposed_types_storage_subs: exposed_types,
resolved_specializations: _,
resolved_implementations: _,
}) => {
let var = exposed_types
.stored_specialization_lambda_set_vars
@ -4112,7 +4113,7 @@ fn run_solve_solve(
derived_module: SharedDerivedModule,
) -> (
Solved<Subs>,
ResolvedSpecializations,
ResolvedImplementations,
Vec<(Symbol, Variable)>,
Vec<solve::TypeError>,
AbilitiesStore,
@ -4148,7 +4149,7 @@ fn run_solve_solve(
solve_aliases.insert(*name, alias.clone());
}
let (solved_subs, solved_specializations, exposed_vars_by_symbol, problems, abilities_store) = {
let (solved_subs, solved_implementations, exposed_vars_by_symbol, problems, abilities_store) = {
let module_id = module.module_id;
let (solved_subs, solved_env, problems, abilities_store) = roc_solve::module::run_solve(
@ -4164,19 +4165,17 @@ fn run_solve_solve(
derived_module,
);
// Figure out what specializations belong to this module
let solved_specializations: ResolvedSpecializations = abilities_store
.iter_specializations()
.filter(|((member, typ), _)| {
// This module solved this specialization if either the member or the type comes from the
// module.
member.module_id() == module_id || typ.module_id() == module_id
})
.map(|(key, specialization)| (key, specialization.clone()))
.collect();
let solved_implementations =
extract_module_owned_implementations(module_id, &abilities_store);
let is_specialization_symbol =
|sym| solved_specializations.values().any(|ms| ms.symbol == sym);
let is_specialization_symbol = |sym| {
solved_implementations
.values()
.any(|resolved_impl| match resolved_impl {
ResolvedImpl::Impl(specialization) => specialization.symbol == sym,
ResolvedImpl::Derived | ResolvedImpl::Error => false,
})
};
// Expose anything that is explicitly exposed by the header, or is a specialization of an
// ability.
@ -4187,7 +4186,7 @@ fn run_solve_solve(
(
solved_subs,
solved_specializations,
solved_implementations,
exposed_vars_by_symbol,
problems,
abilities_store,
@ -4196,7 +4195,7 @@ fn run_solve_solve(
(
solved_subs,
solved_specializations,
solved_implementations,
exposed_vars_by_symbol,
problems,
abilities_store,
@ -4218,7 +4217,7 @@ fn run_solve<'a>(
cached_subs: CachedSubs,
derived_module: SharedDerivedModule,
) -> Msg<'a> {
let solve_start = SystemTime::now();
let solve_start = Instant::now();
let module_id = module.module_id;
@ -4229,7 +4228,7 @@ fn run_solve<'a>(
let loc_expects = std::mem::take(&mut module.loc_expects);
let module = module;
let (solved_subs, solved_specializations, exposed_vars_by_symbol, problems, abilities_store) = {
let (solved_subs, solved_implementations, exposed_vars_by_symbol, problems, abilities_store) = {
if module_id.is_builtin() {
match cached_subs.lock().remove(&module_id) {
None => run_solve_solve(
@ -4271,7 +4270,7 @@ fn run_solve<'a>(
module_id,
&mut solved_subs,
&exposed_vars_by_symbol,
&solved_specializations,
&solved_implementations,
&abilities_store,
);
@ -4279,13 +4278,13 @@ fn run_solve<'a>(
exposed_vars_by_symbol,
problems,
aliases,
solved_specializations,
solved_implementations,
exposed_types,
};
// Record the final timings
let solve_end = SystemTime::now();
module_timing.solve = solve_end.duration_since(solve_start).unwrap();
let solve_end = Instant::now();
module_timing.solve = solve_end.duration_since(solve_start);
// Send the subs to the main thread for processing,
Msg::SolvedTypes {
@ -4363,7 +4362,7 @@ fn canonicalize_and_constrain<'a>(
parsed: ParsedModule<'a>,
skip_constraint_gen: bool,
) -> CanAndCon {
let canonicalize_start = SystemTime::now();
let canonicalize_start = Instant::now();
let ParsedModule {
module_id,
@ -4412,9 +4411,9 @@ fn canonicalize_and_constrain<'a>(
_after
);
let canonicalize_end = SystemTime::now();
let canonicalize_end = Instant::now();
module_timing.canonicalize = canonicalize_end.duration_since(canonicalize_start).unwrap();
module_timing.canonicalize = canonicalize_end.duration_since(canonicalize_start);
// Generate documentation information
// TODO: store timing information?
@ -4522,7 +4521,7 @@ fn canonicalize_and_constrain<'a>(
fn parse<'a>(arena: &'a Bump, header: ModuleHeader<'a>) -> Result<Msg<'a>, LoadingProblem<'a>> {
let mut module_timing = header.module_timing;
let parse_start = SystemTime::now();
let parse_start = Instant::now();
let source = header.parse_state.original_bytes();
let parse_state = header.parse_state;
let parsed_defs = match module_defs().parse(arena, parse_state) {
@ -4536,9 +4535,9 @@ fn parse<'a>(arena: &'a Bump, header: ModuleHeader<'a>) -> Result<Msg<'a>, Loadi
// Record the parse end time once, to avoid checking the time a second time
// immediately afterward (for the beginning of canonicalization).
let parse_end = SystemTime::now();
let parse_end = Instant::now();
module_timing.parse_body = parse_end.duration_since(parse_start).unwrap();
module_timing.parse_body = parse_end.duration_since(parse_start);
let imported_modules = header.imported_modules;
@ -4632,7 +4631,7 @@ fn make_specializations<'a>(
exposed_by_module: &ExposedByModule,
derived_module: SharedDerivedModule,
) -> Msg<'a> {
let make_specializations_start = SystemTime::now();
let make_specializations_start = Instant::now();
let mut update_mode_ids = UpdateModeIds::new();
// do the thing
let mut mono_env = roc_mono::ir::Env {
@ -4676,12 +4675,10 @@ fn make_specializations<'a>(
// Turn `Bytes.Decode.IdentId(238)` into `Bytes.Decode.238`, we rely on this in mono tests
mono_env.home.register_debug_idents(mono_env.ident_ids);
let make_specializations_end = SystemTime::now();
module_timing.make_specializations.push(
make_specializations_end
.duration_since(make_specializations_start)
.unwrap(),
);
let make_specializations_end = Instant::now();
module_timing
.make_specializations
.push(make_specializations_end.duration_since(make_specializations_start));
Msg::MadeSpecializations {
module_id: home,
@ -4712,10 +4709,10 @@ fn build_pending_specializations<'a>(
abilities_store: AbilitiesStore,
derived_module: SharedDerivedModule,
) -> Msg<'a> {
let find_specializations_start = SystemTime::now();
let find_specializations_start = Instant::now();
let mut module_thunks = bumpalo::collections::Vec::new_in(arena);
let mut toplevel_expects = std::vec::Vec::new();
let mut toplevel_expects = VecMap::default();
let mut procs_base = ProcsBase {
partial_procs: BumpMap::default(),
@ -5014,7 +5011,13 @@ fn build_pending_specializations<'a>(
is_self_recursive: false,
};
toplevel_expects.push(symbol);
// extend the region of the expect expression with the region of the preceding
// comment, so it is shown in failure/panic messages
let name_region = declarations.symbols[index].region;
let expr_region = declarations.expressions[index].region;
let region = Region::span_across(&name_region, &expr_region);
toplevel_expects.insert(symbol, region);
procs_base.partial_procs.insert(symbol, proc);
}
}
@ -5022,10 +5025,9 @@ fn build_pending_specializations<'a>(
procs_base.module_thunks = module_thunks.into_bump_slice();
let find_specializations_end = SystemTime::now();
module_timing.find_specializations = find_specializations_end
.duration_since(find_specializations_start)
.unwrap();
let find_specializations_end = Instant::now();
module_timing.find_specializations =
find_specializations_end.duration_since(find_specializations_start);
Msg::FoundSpecializations {
module_id: home,
@ -5058,7 +5060,7 @@ fn load_derived_partial_procs<'a>(
) {
debug_assert_eq!(home, ModuleId::DERIVED_GEN);
let load_derived_procs_start = SystemTime::now();
let load_derived_procs_start = Instant::now();
let mut new_module_thunks = bumpalo::collections::Vec::new_in(arena);
@ -5126,11 +5128,10 @@ fn load_derived_partial_procs<'a>(
procs_base.module_thunks = new_module_thunks.into_bump_slice();
}
let load_derived_procs_end = SystemTime::now();
let load_derived_procs_end = Instant::now();
module_timing.find_specializations = load_derived_procs_end
.duration_since(load_derived_procs_start)
.unwrap();
module_timing.find_specializations =
load_derived_procs_end.duration_since(load_derived_procs_start);
}
fn run_task<'a>(
@ -5420,11 +5421,10 @@ fn to_missing_platform_report(module_id: ModuleId, other: PlatformPath) -> Strin
}
RootIsInterface => {
let doc = alloc.stack([
alloc.reflow(r"The input file is an interface module, but only app modules can be run."),
alloc.concat([
alloc.reflow(r"I will still parse and typecheck the input file and its dependencies, "),
alloc.reflow(r"but won't output any executable."),
])
alloc.reflow(
r"The input file is an `interface` module, but only `app` modules can be run.",
),
alloc.reflow(r"Tip: You can use `roc check` or `roc test` to verify an interface module like this one."),
]);
Report {
@ -5436,11 +5436,10 @@ fn to_missing_platform_report(module_id: ModuleId, other: PlatformPath) -> Strin
}
RootIsHosted => {
let doc = alloc.stack([
alloc.reflow(r"The input file is a hosted module, but only app modules can be run."),
alloc.concat([
alloc.reflow(r"I will still parse and typecheck the input file and its dependencies, "),
alloc.reflow(r"but won't output any executable."),
])
alloc.reflow(
r"The input file is a `hosted` module, but only `app` modules can be run.",
),
alloc.reflow(r"Tip: You can use `roc check` or `roc test` to verify a hosted module like this one."),
]);
Report {
@ -5452,11 +5451,10 @@ fn to_missing_platform_report(module_id: ModuleId, other: PlatformPath) -> Strin
}
RootIsPlatformModule => {
let doc = alloc.stack([
alloc.reflow(r"The input file is a package config file, but only app modules can be run."),
alloc.concat([
alloc.reflow(r"I will still parse and typecheck the input file and its dependencies, "),
alloc.reflow(r"but won't output any executable."),
])
alloc.reflow(
r"The input file is a `platform` module, but only `app` modules can be run.",
),
alloc.reflow(r"Tip: You can use `roc check` or `roc test` to verify a platform module like this one."),
]);
Report {

View File

@ -6,4 +6,4 @@ pub mod file;
mod work;
#[cfg(target_family = "wasm")]
mod wasm_system_time;
mod wasm_instant;

View File

@ -1,24 +1,24 @@
#![cfg(target_family = "wasm")]
/*
For the Web REPL (repl_www), we build the compiler as a Wasm module.
SystemTime is the only thing in the compiler that would need a special implementation for this.
Instant is the only thing in the compiler that would need a special implementation for this.
There is a WASI implementation for it, but we are targeting the browser, not WASI!
It's possible to write browser versions of WASI's low-level ABI but we'd rather avoid it.
Instead we use these dummy implementations, which should just disappear at compile time.
*/
#[derive(Debug, Clone, Copy)]
pub struct SystemTime;
pub struct Instant;
impl SystemTime {
impl Instant {
pub fn now() -> Self {
SystemTime
Instant
}
pub fn duration_since(&self, _: SystemTime) -> Result<Duration, String> {
Ok(Duration)
pub fn duration_since(&self, _: Instant) -> Duration {
Duration
}
pub fn elapsed(&self) -> Result<Duration, String> {
Ok(Duration)
pub fn elapsed(&self) -> Duration {
Duration
}
}
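// Illustrative usage sketch (not part of this commit): because the dummy above
// mirrors the std::time::Instant methods the loader calls (now, duration_since,
// elapsed), timing code can be written once and switched per target. The exact
// cfg imports below are an assumption, not code from this file:
//
//     #[cfg(not(target_family = "wasm"))]
//     use std::time::Instant;
//     #[cfg(target_family = "wasm")]
//     use crate::wasm_instant::Instant;
//
//     let parse_start = Instant::now();
//     // ... parse the module ...
//     module_timing.parse_body = parse_start.elapsed();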

View File

@ -47,7 +47,8 @@ const SYMBOL_HAS_NICHE: () =
#[cfg(debug_assertions)]
const PRETTY_PRINT_DEBUG_SYMBOLS: bool = true;
pub const BUILTIN_ABILITIES: &[Symbol] = &[Symbol::ENCODE_ENCODING];
pub const DERIVABLE_ABILITIES: &[(Symbol, &[Symbol])] =
&[(Symbol::ENCODE_ENCODING, &[Symbol::ENCODE_TO_ENCODER])];
/// In Debug builds only, Symbol has a name() method that lets
/// you look up its name in a global intern table. This table is
@ -86,8 +87,12 @@ impl Symbol {
self.module_id().is_builtin()
}
pub fn is_builtin_ability(self) -> bool {
BUILTIN_ABILITIES.contains(&self)
pub fn is_derivable_ability(self) -> bool {
self.derivable_ability().is_some()
}
pub fn derivable_ability(self) -> Option<&'static (Symbol, &'static [Symbol])> {
DERIVABLE_ABILITIES.iter().find(|(name, _)| *name == self)
}
pub fn module_string<'a>(&self, interns: &'a Interns) -> &'a ModuleName {
@ -1265,32 +1270,37 @@ define_builtins! {
41 LIST_ANY: "any"
42 LIST_TAKE_FIRST: "takeFirst"
43 LIST_TAKE_LAST: "takeLast"
44 LIST_FIND: "find"
45 LIST_FIND_RESULT: "#find_result" // symbol used in the definition of List.find
46 LIST_SUBLIST: "sublist"
47 LIST_INTERSPERSE: "intersperse"
48 LIST_INTERSPERSE_CLOS: "#intersperseClos"
49 LIST_SPLIT: "split"
50 LIST_SPLIT_CLOS: "#splitClos"
51 LIST_ALL: "all"
52 LIST_DROP_IF: "dropIf"
53 LIST_DROP_IF_PREDICATE: "#dropIfPred"
54 LIST_SORT_ASC: "sortAsc"
55 LIST_SORT_DESC: "sortDesc"
56 LIST_SORT_DESC_COMPARE: "#sortDescCompare"
57 LIST_REPLACE: "replace"
58 LIST_IS_UNIQUE: "#isUnique"
59 LIST_FIND_INDEX: "findIndex"
60 LIST_GET_UNSAFE: "getUnsafe"
61 LIST_REPLACE_UNSAFE: "replaceUnsafe"
62 LIST_WITH_CAPACITY: "withCapacity"
63 LIST_ITERATE: "iterate"
64 LIST_UNREACHABLE: "unreachable"
65 LIST_RESERVE: "reserve"
66 LIST_APPEND_UNSAFE: "appendUnsafe"
67 LIST_SUBLIST_LOWLEVEL: "sublistLowlevel"
68 LIST_CAPACITY: "capacity"
69 LIST_MAP_TRY: "mapTry"
44 LIST_FIND_FIRST: "findFirst"
45 LIST_FIND_LAST: "findLast"
46 LIST_FIND_FIRST_INDEX: "findFirstIndex"
47 LIST_FIND_LAST_INDEX: "findLastIndex"
48 LIST_FIND_RESULT: "#find_result" // symbol used in the definition of List.findFirst
49 LIST_SUBLIST: "sublist"
50 LIST_INTERSPERSE: "intersperse"
51 LIST_INTERSPERSE_CLOS: "#intersperseClos"
52 LIST_SPLIT: "split"
53 LIST_SPLIT_FIRST: "splitFirst"
54 LIST_SPLIT_LAST: "splitLast"
55 LIST_SPLIT_CLOS: "#splitClos"
56 LIST_ALL: "all"
57 LIST_DROP_IF: "dropIf"
58 LIST_DROP_IF_PREDICATE: "#dropIfPred"
59 LIST_SORT_ASC: "sortAsc"
60 LIST_SORT_DESC: "sortDesc"
61 LIST_SORT_DESC_COMPARE: "#sortDescCompare"
62 LIST_STARTS_WITH: "startsWith"
63 LIST_ENDS_WITH: "endsWith"
64 LIST_REPLACE: "replace"
65 LIST_IS_UNIQUE: "#isUnique"
66 LIST_GET_UNSAFE: "getUnsafe"
67 LIST_REPLACE_UNSAFE: "replaceUnsafe"
68 LIST_WITH_CAPACITY: "withCapacity"
69 LIST_UNREACHABLE: "unreachable"
70 LIST_RESERVE: "reserve"
71 LIST_APPEND_UNSAFE: "appendUnsafe"
72 LIST_SUBLIST_LOWLEVEL: "sublistLowlevel"
73 LIST_CAPACITY: "capacity"
74 LIST_MAP_TRY: "mapTry"
}
7 RESULT: "Result" => {
0 RESULT_RESULT: "Result" // the Result.Result type alias

View File

@ -361,7 +361,7 @@ fn flatten<'a>(
} else {
for (index, (arg_pattern, _)) in arguments.iter().enumerate() {
let mut new_path = path.clone();
new_path.push(PathInstruction {
new_path.push(PathInstruction::TagIndex {
index: index as u64,
tag_id,
});
@ -678,6 +678,7 @@ fn to_relevant_branch_help<'a>(
..
} => {
debug_assert!(test_name == &CtorName::Tag(TagName(RECORD_TAG_NAME.into())));
let destructs_len = destructs.len();
let sub_positions = destructs.into_iter().enumerate().map(|(index, destruct)| {
let pattern = match destruct.typ {
DestructType::Guard(guard) => guard.clone(),
@ -685,10 +686,15 @@ fn to_relevant_branch_help<'a>(
};
let mut new_path = path.to_vec();
new_path.push(PathInstruction {
index: index as u64,
tag_id: *tag_id,
});
let next_instr = if destructs_len == 1 {
PathInstruction::NewType
} else {
PathInstruction::TagIndex {
index: index as u64,
tag_id: *tag_id,
}
};
new_path.push(next_instr);
(new_path, pattern)
});
@ -710,15 +716,13 @@ fn to_relevant_branch_help<'a>(
tag_id,
..
} => {
debug_assert_eq!(*tag_id, 0);
debug_assert_eq!(test_opaque_tag_name, &CtorName::Opaque(opaque));
let (argument, _) = *argument;
let mut new_path = path.to_vec();
new_path.push(PathInstruction {
index: 0,
tag_id: *tag_id,
});
new_path.push(PathInstruction::NewType);
start.push((new_path, argument));
start.extend(end);
@ -744,16 +748,22 @@ fn to_relevant_branch_help<'a>(
let tag_id = 0;
debug_assert_eq!(tag_id, *test_id);
let num_args = arguments.len();
let sub_positions =
arguments
.into_iter()
.enumerate()
.map(|(index, (pattern, _))| {
let mut new_path = path.to_vec();
new_path.push(PathInstruction {
index: index as u64,
tag_id,
});
let next_instr = if num_args == 1 {
PathInstruction::NewType
} else {
PathInstruction::TagIndex {
index: index as u64,
tag_id,
}
};
new_path.push(next_instr);
(new_path, pattern)
});
start.extend(sub_positions);
@ -809,7 +819,7 @@ fn to_relevant_branch_help<'a>(
.enumerate()
.map(|(index, (pattern, _))| {
let mut new_path = path.to_vec();
new_path.push(PathInstruction {
new_path.push(PathInstruction::TagIndex {
index: index as u64,
tag_id,
});
@ -828,7 +838,7 @@ fn to_relevant_branch_help<'a>(
.enumerate()
.map(|(index, (pattern, _))| {
let mut new_path = path.to_vec();
new_path.push(PathInstruction {
new_path.push(PathInstruction::TagIndex {
index: index as u64,
tag_id,
});
@ -972,14 +982,17 @@ fn is_irrelevant_to<'a>(selected_path: &[PathInstruction], branch: &Branch<'a>)
}
}
/// Does this pattern need a branch test?
///
/// Keep up to date with [needs_path_instruction].
fn needs_tests(pattern: &Pattern) -> bool {
use Pattern::*;
match pattern {
Identifier(_) | Underscore => false,
RecordDestructure(_, _)
| NewtypeDestructure { .. }
NewtypeDestructure { .. }
| RecordDestructure(..)
| AppliedTag { .. }
| OpaqueUnwrap { .. }
| BitLiteral { .. }
@ -1229,9 +1242,9 @@ pub fn optimize_when<'a>(
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct PathInstruction {
index: u64,
tag_id: TagIdIntType,
enum PathInstruction {
NewType,
TagIndex { index: u64, tag_id: TagIdIntType },
}
fn path_to_expr_help<'a>(
@ -1246,51 +1259,60 @@ fn path_to_expr_help<'a>(
let instructions = path;
let mut it = instructions.iter().peekable();
while let Some(PathInstruction { index, tag_id }) = it.next() {
let index = *index;
match &layout {
Layout::Union(union_layout) => {
let inner_expr = Expr::UnionAtIndex {
tag_id: *tag_id,
structure: symbol,
index,
union_layout: *union_layout,
};
let inner_layout = union_layout.layout_at(*tag_id as TagIdIntType, index as usize);
symbol = env.unique_symbol();
stores.push((symbol, inner_layout, inner_expr));
layout = inner_layout;
while let Some(path_instr) = it.next() {
match path_instr {
PathInstruction::NewType => {
// pass through
}
Layout::Struct { field_layouts, .. } => {
debug_assert!(field_layouts.len() > 1);
PathInstruction::TagIndex { index, tag_id } => {
let index = *index;
let inner_expr = Expr::StructAtIndex {
index,
field_layouts,
structure: symbol,
};
match &layout {
Layout::Union(union_layout) => {
let inner_expr = Expr::UnionAtIndex {
tag_id: *tag_id,
structure: symbol,
index,
union_layout: *union_layout,
};
let inner_layout = field_layouts[index as usize];
let inner_layout =
union_layout.layout_at(*tag_id as TagIdIntType, index as usize);
symbol = env.unique_symbol();
stores.push((symbol, inner_layout, inner_expr));
symbol = env.unique_symbol();
stores.push((symbol, inner_layout, inner_expr));
layout = inner_layout;
}
layout = inner_layout;
}
_ => {
// this MUST be an index into a single-element (hence unwrapped) record
Layout::Struct { field_layouts, .. } => {
debug_assert!(field_layouts.len() > 1);
debug_assert_eq!(index, 0, "{:?}", &layout);
debug_assert_eq!(*tag_id, 0);
debug_assert!(it.peek().is_none());
let inner_expr = Expr::StructAtIndex {
index,
field_layouts,
structure: symbol,
};
break;
let inner_layout = field_layouts[index as usize];
symbol = env.unique_symbol();
stores.push((symbol, inner_layout, inner_expr));
layout = inner_layout;
}
_ => {
// this MUST be an index into a single-element (hence unwrapped) record
debug_assert_eq!(index, 0, "{:?}", &layout);
debug_assert_eq!(*tag_id, 0);
debug_assert!(it.peek().is_none());
break;
}
}
}
}
}
@ -1343,7 +1365,7 @@ fn test_to_equality<'a>(
}),
)
}
_ => unreachable!("{:?}", (cond_layout, union)),
_ => unreachable!("{:#?}", (cond_layout, union, test_layout, path)),
}
}

View File

@ -111,9 +111,11 @@ pub fn occurring_variables(stmt: &Stmt<'_>) -> (MutSet<Symbol>, MutSet<Symbol>)
Expect {
condition,
remainder,
lookups,
..
} => {
result.insert(*condition);
result.extend(lookups.iter().copied());
stack.push(remainder);
}
@ -328,11 +330,13 @@ impl<'a> Context<'a> {
match self.vars.get(&symbol) {
Some(info) => *info,
None => {
eprintln!(
"Symbol {:?} {} has no info in self.vars",
symbol,
symbol, // self.vars
);
if cfg!(debug_assertions) {
eprintln!(
"Symbol {:?} {} has no info in self.vars",
symbol,
symbol, // self.vars
);
}
VarInfo {
persistent: true,
@ -1178,7 +1182,7 @@ impl<'a> Context<'a> {
lookups,
layouts,
} => {
let (b, b_live_vars) = self.visit_stmt(codegen, remainder);
let (b, mut b_live_vars) = self.visit_stmt(codegen, remainder);
let expect = self.arena.alloc(Stmt::Expect {
condition: *condition,
@ -1188,6 +1192,10 @@ impl<'a> Context<'a> {
remainder: b,
});
let expect = self.add_inc_before_consume_all(lookups, expect, &b_live_vars);
b_live_vars.extend(lookups.iter().copied());
(expect, b_live_vars)
}
@ -1297,9 +1305,11 @@ pub fn collect_stmt(
Expect {
condition,
remainder,
lookups,
..
} => {
vars.insert(*condition);
vars.extend(lookups.iter().copied());
collect_stmt(remainder, jp_live_vars, vars)
}

View File

@ -20,7 +20,7 @@ use roc_debug_flags::{
};
use roc_derive::SharedDerivedModule;
use roc_error_macros::{internal_error, todo_abilities};
use roc_exhaustive::{Ctor, CtorName, Guard, RenderAs, TagId};
use roc_exhaustive::{Ctor, CtorName, RenderAs, TagId};
use roc_late_solve::{resolve_ability_specialization, AbilitiesView, Resolved, UnificationFailed};
use roc_module::ident::{ForeignSymbol, Lowercase, TagName};
use roc_module::low_level::LowLevel;
@ -2533,7 +2533,7 @@ fn pattern_to_when<'a>(
body: Loc<roc_can::expr::Expr>,
) -> (Symbol, Loc<roc_can::expr::Expr>) {
use roc_can::expr::Expr::*;
use roc_can::expr::WhenBranch;
use roc_can::expr::{WhenBranch, WhenBranchPattern};
use roc_can::pattern::Pattern::*;
match &pattern.value {
@ -2580,7 +2580,10 @@ fn pattern_to_when<'a>(
region: Region::zero(),
loc_cond: Box::new(Loc::at_zero(Var(symbol))),
branches: vec![WhenBranch {
patterns: vec![pattern],
patterns: vec![WhenBranchPattern {
pattern,
degenerate: false,
}],
value: body,
guard: None,
// If this type-checked, it's non-redundant
@ -5184,7 +5187,7 @@ pub fn with_hole<'a>(
}
}
TypedHole(_) => Stmt::RuntimeError("Hit a blank"),
RuntimeError(e) => Stmt::RuntimeError(env.arena.alloc(format!("{:?}", e))),
RuntimeError(e) => Stmt::RuntimeError(env.arena.alloc(e.runtime_message())),
}
}
@ -6033,56 +6036,50 @@ fn to_opt_branches<'a>(
)> {
debug_assert!(!branches.is_empty());
let mut loc_branches = std::vec::Vec::new();
let mut opt_branches = std::vec::Vec::new();
for when_branch in branches {
let exhaustive_guard = if when_branch.guard.is_some() {
Guard::HasGuard
} else {
Guard::NoGuard
};
if when_branch.redundant.is_redundant(env.subs) {
// Don't codegen this branch since it's redundant.
continue;
}
for loc_pattern in when_branch.patterns {
match from_can_pattern(env, procs, layout_cache, &loc_pattern.value) {
match from_can_pattern(env, procs, layout_cache, &loc_pattern.pattern.value) {
Ok((mono_pattern, assignments)) => {
loc_branches.push((
Loc::at(loc_pattern.region, mono_pattern.clone()),
exhaustive_guard,
));
let loc_expr = if !loc_pattern.degenerate {
let mut loc_expr = when_branch.value.clone();
let mut loc_expr = when_branch.value.clone();
let region = loc_pattern.region;
for (symbol, variable, expr) in assignments.into_iter().rev() {
let def = roc_can::def::Def {
annotation: None,
expr_var: variable,
loc_expr: Loc::at(region, expr),
loc_pattern: Loc::at(
region,
roc_can::pattern::Pattern::Identifier(symbol),
),
pattern_vars: std::iter::once((symbol, variable)).collect(),
};
let new_expr =
roc_can::expr::Expr::LetNonRec(Box::new(def), Box::new(loc_expr));
loc_expr = Loc::at(region, new_expr);
}
let region = loc_pattern.pattern.region;
for (symbol, variable, expr) in assignments.into_iter().rev() {
let def = roc_can::def::Def {
annotation: None,
expr_var: variable,
loc_expr: Loc::at(region, expr),
loc_pattern: Loc::at(
region,
roc_can::pattern::Pattern::Identifier(symbol),
),
pattern_vars: std::iter::once((symbol, variable)).collect(),
};
let new_expr =
roc_can::expr::Expr::LetNonRec(Box::new(def), Box::new(loc_expr));
loc_expr = Loc::at(region, new_expr);
}
loc_expr
} else {
// This pattern is degenerate; when it's reached we must emit a runtime
// error.
Loc::at_zero(roc_can::expr::Expr::RuntimeError(
RuntimeError::DegenerateBranch(loc_pattern.pattern.region),
))
};
// TODO remove clone?
opt_branches.push((mono_pattern, when_branch.guard.clone(), loc_expr.value));
}
Err(runtime_error) => {
loc_branches.push((
Loc::at(loc_pattern.region, Pattern::Underscore),
exhaustive_guard,
));
// TODO remove clone?
opt_branches.push((
Pattern::Underscore,
@ -6336,17 +6333,23 @@ fn substitute_in_stmt_help<'a>(
layouts,
remainder,
} => {
// TODO should we substitute in the ModifyRc?
match substitute_in_stmt_help(arena, remainder, subs) {
Some(cont) => Some(arena.alloc(Expect {
condition: *condition,
region: *region,
lookups,
layouts,
remainder: cont,
})),
None => None,
}
let new_remainder =
substitute_in_stmt_help(arena, remainder, subs).unwrap_or(remainder);
let new_lookups = Vec::from_iter_in(
lookups.iter().map(|s| substitute(subs, *s).unwrap_or(*s)),
arena,
);
let expect = Expect {
condition: substitute(subs, *condition).unwrap_or(*condition),
region: *region,
lookups: new_lookups.into_bump_slice(),
layouts,
remainder: new_remainder,
};
Some(arena.alloc(expect))
}
Jump(id, args) => {

View File

@ -444,38 +444,28 @@ impl<'a> UnionLayout<'a> {
}
}
pub fn discriminant_size(num_tags: usize) -> IntWidth {
if num_tags <= u8::MAX as usize {
IntWidth::U8
} else if num_tags <= u16::MAX as usize {
IntWidth::U16
} else {
panic!("tag union is too big")
}
}
pub fn tag_id_builtin(&self) -> Builtin<'a> {
pub fn discriminant(&self) -> Discriminant {
match self {
UnionLayout::NonRecursive(tags) => {
let union_size = tags.len();
Builtin::Int(Self::discriminant_size(union_size))
}
UnionLayout::Recursive(tags) => {
let union_size = tags.len();
Builtin::Int(Self::discriminant_size(union_size))
}
UnionLayout::NonRecursive(tags) => Discriminant::from_number_of_tags(tags.len()),
UnionLayout::Recursive(tags) => Discriminant::from_number_of_tags(tags.len()),
UnionLayout::NullableWrapped { other_tags, .. } => {
Builtin::Int(Self::discriminant_size(other_tags.len() + 1))
Discriminant::from_number_of_tags(other_tags.len() + 1)
}
UnionLayout::NonNullableUnwrapped(_) => Builtin::Bool,
UnionLayout::NullableUnwrapped { .. } => Builtin::Bool,
UnionLayout::NonNullableUnwrapped(_) => Discriminant::from_number_of_tags(2),
UnionLayout::NullableUnwrapped { .. } => Discriminant::from_number_of_tags(1),
}
}
pub fn tag_id_layout(&self) -> Layout<'a> {
Layout::Builtin(self.tag_id_builtin())
// TODO is it beneficial to return a more specific layout?
// e.g. Layout::bool() and Layout::VOID
match self.discriminant() {
Discriminant::U0 => Layout::u8(),
Discriminant::U1 => Layout::u8(),
Discriminant::U8 => Layout::u8(),
Discriminant::U16 => Layout::u16(),
}
}
fn stores_tag_id_in_pointer_bits(tags: &[&[Layout<'a>]], target_info: TargetInfo) -> bool {
@ -561,13 +551,30 @@ impl<'a> UnionLayout<'a> {
/// Size of the data in memory, whether it's stack or heap (for non-null tag ids)
pub fn data_size_and_alignment(&self, target_info: TargetInfo) -> (u32, u32) {
let id_data_layout = if self.stores_tag_id_as_data(target_info) {
Some(self.tag_id_layout())
} else {
None
};
let (data_width, data_align) = self.data_size_and_alignment_help_match(target_info);
self.data_size_and_alignment_help_match(id_data_layout, target_info)
if self.stores_tag_id_as_data(target_info) {
use Discriminant::*;
match self.discriminant() {
U0 => (round_up_to_alignment(data_width, data_align), data_align),
U1 | U8 => (
round_up_to_alignment(data_width + 1, data_align),
data_align,
),
U16 => {
// first, round up the data so the tag id is well-aligned;
// then add the tag id width, and make sure the whole extends
// to the next alignment multiple
let tag_align = data_align.max(2);
let tag_width =
round_up_to_alignment(round_up_to_alignment(data_width, 2) + 2, tag_align);
(tag_width, tag_align)
}
}
} else {
(data_width, data_align)
}
}
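// Worked example (illustrative, not part of this change): a NonRecursive union
// with three tags whose largest payload is (U32, U8) has data_width = 8 and
// data_align = 4 after rounding, and its U8 discriminant is stored as data, so
// the result is (round_up_to_alignment(8 + 1, 4), 4) = (12, 4). A union with
// 300 tags and a single U8 payload hits the U16 arm: tag_align = max(1, 2) = 2
// and round_up_to_alignment(round_up_to_alignment(1, 2) + 2, 2) = 4, i.e. (4, 2).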
/// Size of the data before the tag_id, if it exists.
@ -577,75 +584,49 @@ impl<'a> UnionLayout<'a> {
return None;
};
Some(self.data_size_and_alignment_help_match(None, target_info).0)
Some(self.data_size_and_alignment_help_match(target_info).0)
}
fn data_size_and_alignment_help_match(
&self,
id_data_layout: Option<Layout>,
target_info: TargetInfo,
) -> (u32, u32) {
fn data_size_and_alignment_help_match(&self, target_info: TargetInfo) -> (u32, u32) {
match self {
Self::NonRecursive(tags) => {
Self::data_size_and_alignment_help(tags, id_data_layout, target_info)
}
Self::Recursive(tags) => {
Self::data_size_and_alignment_help(tags, id_data_layout, target_info)
}
Self::NonRecursive(tags) => Layout::stack_size_and_alignment_slices(tags, target_info),
Self::Recursive(tags) => Layout::stack_size_and_alignment_slices(tags, target_info),
Self::NonNullableUnwrapped(fields) => {
Self::data_size_and_alignment_help(&[fields], id_data_layout, target_info)
Layout::stack_size_and_alignment_slices(&[fields], target_info)
}
Self::NullableWrapped { other_tags, .. } => {
Self::data_size_and_alignment_help(other_tags, id_data_layout, target_info)
Layout::stack_size_and_alignment_slices(other_tags, target_info)
}
Self::NullableUnwrapped { other_fields, .. } => {
Self::data_size_and_alignment_help(&[other_fields], id_data_layout, target_info)
Layout::stack_size_and_alignment_slices(&[other_fields], target_info)
}
}
}
fn data_size_and_alignment_help(
variant_field_layouts: &[&[Layout]],
id_data_layout: Option<Layout>,
target_info: TargetInfo,
) -> (u32, u32) {
let mut size = 0;
let mut alignment_bytes = 0;
for field_layouts in variant_field_layouts {
let mut data = Layout::struct_no_name_order(field_layouts);
let fields_and_id;
if let Some(id_layout) = id_data_layout {
fields_and_id = [data, id_layout];
data = Layout::struct_no_name_order(&fields_and_id);
}
let (variant_size, variant_alignment) = data.stack_size_and_alignment(target_info);
alignment_bytes = alignment_bytes.max(variant_alignment);
size = size.max(variant_size);
pub fn tag_id_offset(&self, target_info: TargetInfo) -> Option<u32> {
match self {
UnionLayout::NonRecursive(tags)
| UnionLayout::Recursive(tags)
| UnionLayout::NullableWrapped {
other_tags: tags, ..
} => Some(Self::tag_id_offset_help(tags, target_info)),
UnionLayout::NonNullableUnwrapped(_) | UnionLayout::NullableUnwrapped { .. } => None,
}
}
(size, alignment_bytes)
fn tag_id_offset_help(layouts: &[&[Layout]], target_info: TargetInfo) -> u32 {
let (data_width, data_align) =
Layout::stack_size_and_alignment_slices(layouts, target_info);
round_up_to_alignment(data_width, data_align)
}
/// Very important to use this when doing a memcpy!
fn stack_size_without_alignment(&self, target_info: TargetInfo) -> u32 {
match self {
UnionLayout::NonRecursive(tags) => {
let id_layout = self.tag_id_layout();
let mut size = 0;
for field_layouts in tags.iter() {
let fields = Layout::struct_no_name_order(field_layouts);
let fields_and_id = [fields, id_layout];
let data = Layout::struct_no_name_order(&fields_and_id);
size = size.max(data.stack_size_without_alignment(target_info));
}
size
UnionLayout::NonRecursive(_) => {
let (width, align) = self.data_size_and_alignment(target_info);
round_up_to_alignment(width, align)
}
UnionLayout::Recursive(_)
| UnionLayout::NonNullableUnwrapped(_)
@ -655,6 +636,39 @@ impl<'a> UnionLayout<'a> {
}
}
pub enum Discriminant {
U0,
U1,
U8,
U16,
}
impl Discriminant {
pub const fn from_number_of_tags(tags: usize) -> Self {
match tags {
0 => Discriminant::U0,
1 => Discriminant::U0,
2 => Discriminant::U1,
3..=255 => Discriminant::U8,
256..=65_535 => Discriminant::U16,
_ => panic!("discriminant too large"),
}
}
pub const fn stack_size(&self) -> u32 {
match self {
Discriminant::U0 => 0,
Discriminant::U1 => 1,
Discriminant::U8 => 1,
Discriminant::U16 => 2,
}
}
pub const fn alignment_bytes(&self) -> u32 {
self.stack_size()
}
}
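// Sketch of the mapping above (illustrative, not part of this change): a
// single-tag wrapper needs no discriminant at all, two tags fit in one byte,
// and more than 255 tags widen the discriminant to two bytes.
const _: () = {
    assert!(Discriminant::from_number_of_tags(1).stack_size() == 0);
    assert!(Discriminant::from_number_of_tags(2).stack_size() == 1);
    assert!(Discriminant::from_number_of_tags(300).stack_size() == 2);
};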
/// Custom type so we can get the numeric representation of a symbol in tests (so `#UserApp.3`
/// instead of `UserApp.foo`). The pretty name is not reliable when running many tests
/// concurrently. The number does not change and will give a reliable output.
@ -800,13 +814,13 @@ impl<'a> LambdaSet<'a> {
}
pub fn is_represented(&self) -> Option<Layout<'a>> {
if let Layout::Struct {
field_layouts: &[], ..
} = self.representation
{
None
} else {
Some(*self.representation)
match self.representation {
Layout::Struct {
field_layouts: &[], ..
}
| Layout::Builtin(Builtin::Bool)
| Layout::Builtin(Builtin::Int(..)) => None,
repr => Some(*repr),
}
}
@ -1191,10 +1205,16 @@ impl<'a, 'b> Env<'a, 'b> {
}
pub const fn round_up_to_alignment(width: u32, alignment: u32) -> u32 {
if alignment != 0 && width % alignment > 0 {
width + alignment - (width % alignment)
} else {
width
match alignment {
0 => width,
1 => width,
_ => {
if width % alignment > 0 {
width + alignment - (width % alignment)
} else {
width
}
}
}
}
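// Quick sanity checks for the rounding helper (illustrative, not part of this
// change): widths are bumped to the next multiple of the alignment, while an
// alignment of 0 or 1 leaves the width untouched.
const _: () = {
    assert!(round_up_to_alignment(5, 8) == 8);
    assert!(round_up_to_alignment(8, 8) == 8);
    assert!(round_up_to_alignment(9, 4) == 12);
    assert!(round_up_to_alignment(7, 1) == 7);
    assert!(round_up_to_alignment(7, 0) == 7);
};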
@ -1497,15 +1517,15 @@ impl<'a> Layout<'a> {
})
.max();
let tag_id_builtin = variant.tag_id_builtin();
let discriminant = variant.discriminant();
match max_alignment {
Some(align) => round_up_to_alignment(
align.max(tag_id_builtin.alignment_bytes(target_info)),
tag_id_builtin.alignment_bytes(target_info),
align.max(discriminant.alignment_bytes()),
discriminant.alignment_bytes(),
),
None => {
// none of the tags had any payload, but the tag id still contains information
tag_id_builtin.alignment_bytes(target_info)
discriminant.alignment_bytes()
}
}
}
@ -1539,6 +1559,29 @@ impl<'a> Layout<'a> {
}
}
pub fn stack_size_and_alignment_slices(
slices: &[&[Self]],
target_info: TargetInfo,
) -> (u32, u32) {
let mut data_align = 1;
let mut data_width = 0;
for tag in slices {
let mut total = 0;
for layout in tag.iter() {
let (stack_size, alignment) = layout.stack_size_and_alignment(target_info);
total += stack_size;
data_align = data_align.max(alignment);
}
data_width = data_width.max(total);
}
data_width = round_up_to_alignment(data_width, data_align);
(data_width, data_align)
}
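// Worked example (illustrative, assuming a 64-bit target): for payload slices
// [[U8, U64], [U32]] the first tag sums to 1 + 8 = 9 bytes, the second to 4,
// and the largest alignment seen is 8, so the result is
// (round_up_to_alignment(9, 8), 8) = (16, 8). Note the per-tag total is a plain
// sum of field sizes; no padding is inserted between fields here.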
pub fn is_refcounted(&self) -> bool {
use self::Builtin::*;
use Layout::*;
@ -3039,7 +3082,9 @@ pub fn ext_var_is_empty_tag_union(subs: &Subs, ext_var: Variable) -> bool {
// the ext_var is empty
let mut ext_fields = std::vec::Vec::new();
match roc_types::pretty_print::chase_ext_tag_union(subs, ext_var, &mut ext_fields) {
Ok(()) | Err((_, Content::FlexVar(_) | Content::RigidVar(_))) => ext_fields.is_empty(),
Ok(()) | Err((_, Content::FlexVar(_) | Content::RigidVar(_) | Content::Error)) => {
ext_fields.is_empty()
}
Err(content) => panic!("invalid content in ext_var: {:?}", content),
}
}
@ -3258,6 +3303,23 @@ impl<'a> LayoutIds<'a> {
}
}
/// Compare two fields when sorting them for code gen.
/// This is called by both code gen and glue, so that
/// their field orderings agree.
#[inline(always)]
pub fn cmp_fields<L: Ord>(
label1: &L,
layout1: &Layout<'_>,
label2: &L,
layout2: &Layout<'_>,
target_info: TargetInfo,
) -> Ordering {
let size1 = layout1.alignment_bytes(target_info);
let size2 = layout2.alignment_bytes(target_info);
size2.cmp(&size1).then(label1.cmp(label2))
}
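// Example of the resulting order (illustrative, 64-bit target): sorting the
// record fields { a : U8, b : U64, c : U32 } with cmp_fields yields b, c, a --
// larger-aligned fields come first, and ties are broken alphabetically by label.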
#[cfg(test)]
mod test {
use super::*;
@ -3289,23 +3351,12 @@ mod test {
let layout = Layout::Union(union_layout);
let target_info = TargetInfo::default_x86_64();
assert_eq!(layout.stack_size_without_alignment(target_info), 5);
assert_eq!(layout.stack_size_without_alignment(target_info), 8);
}
#[test]
fn void_stack_size() {
let target_info = TargetInfo::default_x86_64();
assert_eq!(Layout::VOID.stack_size(target_info), 0);
}
}
/// Compare two fields when sorting them for code gen.
/// This is called by both code gen and bindgen, so that
/// their field orderings agree.
#[inline(always)]
pub fn cmp_fields<L: Ord>(
label1: &L,
layout1: &Layout<'_>,
label2: &L,
layout2: &Layout<'_>,
target_info: TargetInfo,
) -> Ordering {
let size1 = layout1.alignment_bytes(target_info);
let size2 = layout2.alignment_bytes(target_info);
size2.cmp(&size1).then(label1.cmp(label2))
}

View File

@ -331,7 +331,10 @@ pub enum ValueDef<'a> {
body_expr: &'a Loc<Expr<'a>>,
},
Expect(&'a Loc<Expr<'a>>),
Expect {
condition: &'a Loc<Expr<'a>>,
preceding_comment: Region,
},
}
#[derive(Debug, Clone, PartialEq, Default)]
@ -437,7 +440,7 @@ pub struct HasClause<'a> {
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum HasImpls<'a> {
// `{ eq: myEq }`
HasImpls(Collection<'a, Loc<AssignedField<'a, TypeAnnotation<'a>>>>),
HasImpls(Collection<'a, Loc<AssignedField<'a, Expr<'a>>>>),
// We preserve this for the formatter; canonicalization ignores it.
SpaceBefore(&'a HasImpls<'a>, &'a [CommentOrNewline<'a>]),
@ -1042,6 +1045,10 @@ impl<'a> Expr<'a> {
pub fn is_tag(&self) -> bool {
matches!(self, Expr::Tag(_))
}
pub fn is_opaque(&self) -> bool {
matches!(self, Expr::OpaqueRef(_))
}
}
macro_rules! impl_extract_spaces {
@ -1108,7 +1115,6 @@ impl_extract_spaces!(Tag);
impl_extract_spaces!(AssignedField<T>);
impl_extract_spaces!(TypeAnnotation);
impl_extract_spaces!(HasAbility);
impl_extract_spaces!(HasImpls);
impl<'a, T: Copy> ExtractSpaces<'a> for Spaced<'a, T> {
type Item = T;
@ -1161,3 +1167,45 @@ impl<'a, T: Copy> ExtractSpaces<'a> for Spaced<'a, T> {
}
}
}
impl<'a> ExtractSpaces<'a> for HasImpls<'a> {
type Item = Collection<'a, Loc<AssignedField<'a, Expr<'a>>>>;
fn extract_spaces(&self) -> Spaces<'a, Self::Item> {
match self {
HasImpls::HasImpls(inner) => Spaces {
before: &[],
item: *inner,
after: &[],
},
HasImpls::SpaceBefore(item, before) => match item {
HasImpls::HasImpls(inner) => Spaces {
before,
item: *inner,
after: &[],
},
HasImpls::SpaceBefore(_, _) => todo!(),
HasImpls::SpaceAfter(HasImpls::HasImpls(inner), after) => Spaces {
before,
item: *inner,
after,
},
HasImpls::SpaceAfter(_, _) => todo!(),
},
HasImpls::SpaceAfter(item, after) => match item {
HasImpls::HasImpls(inner) => Spaces {
before: &[],
item: *inner,
after,
},
HasImpls::SpaceBefore(HasImpls::HasImpls(inner), before) => Spaces {
before,
item: *inner,
after,
},
HasImpls::SpaceBefore(_, _) => todo!(),
HasImpls::SpaceAfter(_, _) => todo!(),
},
}
}
}

View File

@ -189,7 +189,7 @@ fn record_field_access<'a>() -> impl Parser<'a, &'a str, EExpr<'a>> {
/// In some contexts we want to parse the `_` as an expression, so it can then be turned into a
/// pattern later
fn parse_loc_term_or_underscore<'a>(
fn parse_loc_term_or_underscore_or_conditional<'a>(
min_indent: u32,
options: ExprParseOptions,
arena: &'a Bump,
@ -198,6 +198,35 @@ fn parse_loc_term_or_underscore<'a>(
one_of!(
loc_expr_in_parens_etc_help(min_indent),
loc!(specialize(EExpr::If, if_expr_help(min_indent, options))),
loc!(specialize(
EExpr::When,
when::expr_help(min_indent, options)
)),
loc!(specialize(EExpr::Str, string_literal_help())),
loc!(specialize(EExpr::SingleQuote, single_quote_literal_help())),
loc!(specialize(EExpr::Number, positive_number_literal_help())),
loc!(specialize(EExpr::Lambda, closure_help(min_indent, options))),
loc!(underscore_expression()),
loc!(record_literal_help(min_indent)),
loc!(specialize(EExpr::List, list_literal_help(min_indent))),
loc!(map_with_arena!(
assign_or_destructure_identifier(),
ident_to_expr
)),
)
.parse(arena, state)
}
/// In some contexts we want to parse the `_` as an expression, so it can then be turned into a
/// pattern later
fn parse_loc_term_or_underscore<'a>(
min_indent: u32,
options: ExprParseOptions,
arena: &'a Bump,
state: State<'a>,
) -> ParseResult<'a, Loc<Expr<'a>>, EExpr<'a>> {
one_of!(
loc_expr_in_parens_etc_help(min_indent),
loc!(specialize(EExpr::Str, string_literal_help())),
loc!(specialize(EExpr::SingleQuote, single_quote_literal_help())),
loc!(specialize(EExpr::Number, positive_number_literal_help())),
@ -279,7 +308,9 @@ fn loc_possibly_negative_or_negated_term<'a>(
Expr::UnaryOp(arena.alloc(loc_expr), Loc::at(loc_op.region, UnaryOp::Not))
}
)),
|arena, state| { parse_loc_term_or_underscore(min_indent, options, arena, state) }
|arena, state| {
parse_loc_term_or_underscore_or_conditional(min_indent, options, arena, state)
}
]
}
@ -413,7 +444,10 @@ impl<'a> ExprState<'a> {
let fail = EExpr::BadOperator(opchar, loc_op.region.start());
Err(fail)
} else if !self.expr.value.is_tag() && !self.arguments.is_empty() {
} else if !self.expr.value.is_tag()
&& !self.expr.value.is_opaque()
&& !self.arguments.is_empty()
{
let region = Region::across_all(self.arguments.iter().map(|v| &v.region));
Err(argument_error(region, loc_op.region.start()))
@ -584,6 +618,7 @@ fn parse_defs_end<'a>(
let initial = state.clone();
let mut spaces_before_current = &[] as &[_];
let spaces_before_current_start = state.pos();
let state = match space0_e(min_indent, EExpr::IndentStart).parse(arena, state) {
Err((MadeProgress, _, s)) => {
@ -625,7 +660,27 @@ fn parse_defs_end<'a>(
let end = loc_def_expr.region.end();
let region = Region::new(start, end);
let value_def = ValueDef::Expect(arena.alloc(loc_def_expr));
// drop newlines before the preceding comment
let spaces_before_start = spaces_before_current_start.offset as usize;
let spaces_before_end = start.offset as usize;
let mut spaces_before_current_start = spaces_before_current_start;
for byte in &state.original_bytes()[spaces_before_start..spaces_before_end]
{
match byte {
b' ' | b'\n' => {
spaces_before_current_start.offset += 1;
}
_ => break,
}
}
let preceding_comment = Region::new(spaces_before_current_start, start);
let value_def = ValueDef::Expect {
condition: arena.alloc(loc_def_expr),
preceding_comment,
};
defs.push_value_def(value_def, region, spaces_before_current, &[]);
global_state = state;
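// Illustrative walk-through (not part of this change): for source like
//
//     # this should never fail
//     expect 1 + 1 == 2
//
// the loop above skips the blank space before the comment, so
// `preceding_comment` spans from the `#` up to the start of `expect`, which is
// what lets failure messages show the comment text alongside the expect.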
@ -1498,7 +1553,7 @@ fn parse_expr_end<'a>(
) -> ParseResult<'a, Expr<'a>, EExpr<'a>> {
let parser = skip_first!(
crate::blankspace::check_indent(min_indent, EExpr::IndentEnd),
move |a, s| parse_loc_term(min_indent, options, a, s)
move |a, s| parse_loc_term_or_underscore(min_indent, options, a, s)
);
match parser.parse(arena, state.clone()) {
@ -2440,7 +2495,7 @@ fn list_literal_help<'a>(min_indent: u32) -> impl Parser<'a, Expr<'a>, EList<'a>
}
}
fn record_field_help<'a>(
pub fn record_value_field<'a>(
min_indent: u32,
) -> impl Parser<'a, AssignedField<'a, Expr<'a>>, ERecord<'a>> {
use AssignedField::*;
@ -2543,7 +2598,7 @@ fn record_help<'a>(
trailing_sep_by0(
word1(b',', ERecord::End),
space0_before_optional_after(
loc!(record_field_help(min_indent)),
loc!(record_value_field(min_indent)),
min_indent,
ERecord::IndentEnd,
ERecord::IndentEnd

View File

@ -666,22 +666,33 @@ pub enum ETypeAbilityImpl<'a> {
IndentColon(Position),
IndentOptional(Position),
IndentEnd(Position),
Updateable(Position),
QuestionMark(Position),
Bar(Position),
Ampersand(Position),
Expr(&'a EExpr<'a>, Position),
IndentBar(Position),
IndentAmpersand(Position),
}
impl<'a> From<ETypeRecord<'a>> for ETypeAbilityImpl<'a> {
fn from(e: ETypeRecord<'a>) -> Self {
impl<'a> From<ERecord<'a>> for ETypeAbilityImpl<'a> {
fn from(e: ERecord<'a>) -> Self {
match e {
ETypeRecord::End(p) => ETypeAbilityImpl::End(p),
ETypeRecord::Open(p) => ETypeAbilityImpl::Open(p),
ETypeRecord::Field(p) => ETypeAbilityImpl::Field(p),
ETypeRecord::Colon(p) => ETypeAbilityImpl::Colon(p),
ETypeRecord::Optional(p) => ETypeAbilityImpl::Optional(p),
ETypeRecord::Type(t, p) => ETypeAbilityImpl::Type(t, p),
ETypeRecord::Space(s, p) => ETypeAbilityImpl::Space(s, p),
ETypeRecord::IndentOpen(p) => ETypeAbilityImpl::IndentOpen(p),
ETypeRecord::IndentColon(p) => ETypeAbilityImpl::IndentColon(p),
ETypeRecord::IndentOptional(p) => ETypeAbilityImpl::IndentOptional(p),
ETypeRecord::IndentEnd(p) => ETypeAbilityImpl::IndentEnd(p),
ERecord::End(p) => ETypeAbilityImpl::End(p),
ERecord::Open(p) => ETypeAbilityImpl::Open(p),
ERecord::Field(p) => ETypeAbilityImpl::Field(p),
ERecord::Colon(p) => ETypeAbilityImpl::Colon(p),
ERecord::Space(s, p) => ETypeAbilityImpl::Space(s, p),
ERecord::IndentOpen(p) => ETypeAbilityImpl::IndentOpen(p),
ERecord::IndentColon(p) => ETypeAbilityImpl::IndentColon(p),
ERecord::IndentEnd(p) => ETypeAbilityImpl::IndentEnd(p),
ERecord::Updateable(p) => ETypeAbilityImpl::Updateable(p),
ERecord::QuestionMark(p) => ETypeAbilityImpl::QuestionMark(p),
ERecord::Bar(p) => ETypeAbilityImpl::Bar(p),
ERecord::Ampersand(p) => ETypeAbilityImpl::Ampersand(p),
ERecord::Expr(e, p) => ETypeAbilityImpl::Expr(e, p),
ERecord::IndentBar(p) => ETypeAbilityImpl::IndentBar(p),
ERecord::IndentAmpersand(p) => ETypeAbilityImpl::IndentAmpersand(p),
}
}
}

View File

@ -3,6 +3,7 @@ use crate::ast::{
Spaced, Tag, TypeAnnotation, TypeHeader,
};
use crate::blankspace::{space0_around_ee, space0_before_e, space0_e};
use crate::expr::record_value_field;
use crate::ident::lowercase_ident;
use crate::keyword;
use crate::parser::{
@ -10,7 +11,7 @@ use crate::parser::{
ETypeApply, ETypeInParens, ETypeInlineAlias, ETypeRecord, ETypeTagUnion, ParseResult, Parser,
Progress::{self, *},
};
use crate::parser::{then, ETypeAbilityImpl};
use crate::parser::{then, ERecord, ETypeAbilityImpl};
use crate::state::State;
use bumpalo::collections::vec::Vec;
use bumpalo::Bump;
@ -515,8 +516,8 @@ fn parse_has_ability<'a>(min_indent: u32) -> impl Parser<'a, HasAbility<'a>, ETy
collection_trailing_sep_e!(
word1(b'{', ETypeAbilityImpl::Open),
specialize(
|e: ETypeRecord<'_>, _| e.into(),
loc!(record_type_field(min_indent + 1))
|e: ERecord<'_>, _| e.into(),
loc!(record_value_field(min_indent + 1))
),
word1(b',', ETypeAbilityImpl::End),
word1(b'}', ETypeAbilityImpl::End),

View File

@ -0,0 +1,71 @@
Defs(
Defs {
tags: [
Index(2147483648),
],
regions: [
@0-22,
],
space_before: [
Slice(start = 0, length = 0),
],
space_after: [
Slice(start = 0, length = 0),
],
spaces: [],
type_defs: [],
value_defs: [
Body(
@0-6 Apply(
@0-6 OpaqueRef(
"@Thunk",
),
[
@7-9 Identifier(
"it",
),
],
),
@12-22 Apply(
@12-14 Var {
module_name: "",
ident: "id",
},
[
@16-21 ParensAround(
Apply(
@16-18 OpaqueRef(
"@A",
),
[
@19-21 Record(
[],
),
],
Space,
),
),
],
Space,
),
),
],
},
@23-28 SpaceBefore(
Apply(
@23-25 Var {
module_name: "",
ident: "it",
},
[
@26-28 Record(
[],
),
],
Space,
),
[
Newline,
],
),
)

View File

@ -0,0 +1,2 @@
@Thunk it = id (@A {})
it {}

View File

@ -0,0 +1,54 @@
BinOps(
[
(
@0-1 Num(
"1",
),
@2-3 Plus,
),
],
@8-53 SpaceBefore(
When(
@13-16 Tag(
"Foo",
),
[
WhenBranch {
patterns: [
@28-31 SpaceBefore(
Tag(
"Foo",
),
[
Newline,
],
),
],
value: @35-36 Num(
"2",
),
guard: None,
},
WhenBranch {
patterns: [
@45-48 SpaceBefore(
Tag(
"Bar",
),
[
Newline,
],
),
],
value: @52-53 Num(
"3",
),
guard: None,
},
],
),
[
Newline,
],
),
)

View File

@ -0,0 +1,4 @@
1 +
when Foo is
Foo -> 2
Bar -> 3

View File

@ -0,0 +1,231 @@
Defs(
Defs {
tags: [
Index(2147483648),
Index(2147483649),
Index(2147483650),
Index(2147483651),
Index(2147483652),
],
regions: [
@0-19,
@20-39,
@40-59,
@60-72,
@73-128,
],
space_before: [
Slice(start = 0, length = 0),
Slice(start = 0, length = 1),
Slice(start = 1, length = 1),
Slice(start = 2, length = 1),
Slice(start = 3, length = 1),
],
space_after: [
Slice(start = 0, length = 0),
Slice(start = 1, length = 0),
Slice(start = 2, length = 0),
Slice(start = 3, length = 0),
Slice(start = 4, length = 0),
],
spaces: [
Newline,
Newline,
Newline,
Newline,
],
type_defs: [],
value_defs: [
Body(
@0-4 Apply(
@0-4 Tag(
"Pair",
),
[
@5-6 Identifier(
"x",
),
@7-8 Underscore(
"",
),
],
),
@11-19 Apply(
@11-15 Tag(
"Pair",
),
[
@16-17 Num(
"0",
),
@18-19 Num(
"1",
),
],
Space,
),
),
Body(
@20-28 Apply(
@20-24 Tag(
"Pair",
),
[
@25-26 Underscore(
"",
),
@27-28 Identifier(
"y",
),
],
),
@31-39 Apply(
@31-35 Tag(
"Pair",
),
[
@36-37 Num(
"0",
),
@38-39 Num(
"1",
),
],
Space,
),
),
Body(
@40-48 Apply(
@40-44 Tag(
"Pair",
),
[
@45-46 Underscore(
"",
),
@47-48 Underscore(
"",
),
],
),
@51-59 Apply(
@51-55 Tag(
"Pair",
),
[
@56-57 Num(
"0",
),
@58-59 Num(
"1",
),
],
Space,
),
),
Body(
@60-61 Underscore(
"",
),
@64-72 Apply(
@64-68 Tag(
"Pair",
),
[
@69-70 Num(
"0",
),
@71-72 Num(
"1",
),
],
Space,
),
),
Body(
@73-98 Apply(
@73-77 Tag(
"Pair",
),
[
@79-87 Apply(
@79-83 Tag(
"Pair",
),
[
@84-85 Identifier(
"x",
),
@86-87 Underscore(
"",
),
],
),
@90-98 Apply(
@90-94 Tag(
"Pair",
),
[
@95-96 Underscore(
"",
),
@97-98 Identifier(
"y",
),
],
),
],
),
@102-128 Apply(
@102-106 Tag(
"Pair",
),
[
@108-116 ParensAround(
Apply(
@108-112 Tag(
"Pair",
),
[
@113-114 Num(
"0",
),
@115-116 Num(
"1",
),
],
Space,
),
),
@119-127 ParensAround(
Apply(
@119-123 Tag(
"Pair",
),
[
@124-125 Num(
"2",
),
@126-127 Num(
"3",
),
],
Space,
),
),
],
Space,
),
),
],
},
@130-131 SpaceBefore(
Num(
"0",
),
[
Newline,
Newline,
],
),
)

View File

@ -0,0 +1,7 @@
Pair x _ = Pair 0 1
Pair _ y = Pair 0 1
Pair _ _ = Pair 0 1
_ = Pair 0 1
Pair (Pair x _) (Pair _ y) = Pair (Pair 0 1) (Pair 2 3)
0

View File

@ -209,6 +209,7 @@ mod test_parse {
pass/one_minus_two.expr,
pass/one_plus_two.expr,
pass/one_spaced_def.expr,
pass/opaque_destructure_first_item_in_body.expr,
pass/opaque_has_abilities.expr,
pass/opaque_reference_expr_with_arguments.expr,
pass/opaque_reference_expr.expr,
@ -229,6 +230,7 @@ mod test_parse {
pass/parse_as_ann.expr,
pass/pattern_with_space_in_parens.expr, // https://github.com/rtfeldman/roc/issues/929
pass/plus_if.expr,
pass/plus_when.expr,
pass/pos_inf_float.expr,
pass/positive_float.expr,
pass/positive_int.expr,
@ -266,6 +268,7 @@ mod test_parse {
pass/unary_not_with_parens.expr,
pass/unary_not.expr,
pass/underscore_backpassing.expr,
pass/underscore_in_assignment_pattern.expr,
pass/var_else.expr,
pass/var_if.expr,
pass/var_is.expr,

View File

@ -16,7 +16,6 @@ pub struct CycleEntry {
#[derive(Clone, Debug, PartialEq)]
pub enum BadPattern {
UnderscoreInDef,
Unsupported(PatternType),
}
@ -130,7 +129,51 @@ pub enum Problem {
},
AbilityUsedAsType(Lowercase, Symbol, Region),
NestedSpecialization(Symbol, Region),
IllegalDerive(Region),
IllegalDerivedAbility(Region),
ImplementationNotFound {
member: Symbol,
region: Region,
},
NotAnAbilityMember {
ability: Symbol,
name: String,
region: Region,
},
OptionalAbilityImpl {
ability: Symbol,
region: Region,
},
QualifiedAbilityImpl {
region: Region,
},
AbilityImplNotIdent {
region: Region,
},
DuplicateImpl {
original: Region,
duplicate: Region,
},
NotAnAbility(Region),
ImplementsNonRequired {
region: Region,
ability: Symbol,
not_required: Vec<Symbol>,
},
DoesNotImplementAbility {
region: Region,
ability: Symbol,
not_implemented: Vec<Symbol>,
},
NotBoundInAllPatterns {
unbound_symbol: Symbol,
region: Region,
},
NoIdentifiersIntroduced(Region),
OverloadedSpecialization {
overload: Region,
original_opaque: Symbol,
ability_member: Symbol,
},
}
#[derive(Clone, Debug, PartialEq)]
@ -291,6 +334,24 @@ pub enum RuntimeError {
EmptySingleQuote(Region),
/// where 'aa'
MultipleCharsInSingleQuote(Region),
DegenerateBranch(Region),
}
impl RuntimeError {
pub fn runtime_message(self) -> String {
use RuntimeError::*;
match self {
DegenerateBranch(region) => {
format!(
"Hit a branch pattern that does not bind all symbols its body needs, at {:?}",
region
)
}
err => format!("{:?}", err),
}
}
}
#[derive(Clone, Copy, Debug, PartialEq)]

View File

@ -36,3 +36,4 @@ tempfile = "3.2.0"
bumpalo = { version = "3.8.0", features = ["collections"] }
regex = "1.5.5"
lazy_static = "1.4.0"
insta = "1.15.0"

View File

@ -86,7 +86,7 @@ impl PendingDerivesTable {
} in derives
{
debug_assert!(
ability.is_builtin_ability(),
ability.is_derivable_ability(),
"Not a builtin - should have been caught during can"
);
let derive_key = RequestedDeriveKey { opaque, ability };
@ -394,7 +394,7 @@ impl ObligationCache<'_> {
for &member in members_of_ability {
if self
.abilities_store
.get_specialization(member, opaque)
.get_implementation(member, opaque)
.is_none()
{
let root_data = self.abilities_store.member_def(member).unwrap();
@ -671,9 +671,15 @@ pub fn resolve_ability_specialization(
let resolved = match obligated {
Obligated::Opaque(symbol) => {
let specialization = abilities_store.get_specialization(ability_member, symbol)?;
Resolved::Specialization(specialization.symbol)
match abilities_store.get_implementation(ability_member, symbol)? {
roc_types::types::MemberImpl::Impl(spec_symbol) => {
Resolved::Specialization(*spec_symbol)
}
roc_types::types::MemberImpl::Derived => Resolved::NeedsGenerated,
// TODO this is not correct. We can replace `Resolved` with `MemberImpl` entirely,
// which will make this simpler.
roc_types::types::MemberImpl::Error => Resolved::Specialization(Symbol::UNDERSCORE),
}
}
Obligated::Adhoc(_) => {
// TODO: more rules need to be validated here, like is this a builtin ability?

View File

@ -1,15 +1,15 @@
use crate::solve::{self, Aliases};
use roc_can::abilities::{AbilitiesStore, ResolvedSpecializations};
use roc_can::abilities::{AbilitiesStore, ImplKey, ResolvedImpl};
use roc_can::constraint::{Constraint as ConstraintSoa, Constraints};
use roc_can::expr::PendingDerives;
use roc_can::module::{ExposedByModule, RigidVariables};
use roc_can::module::{ExposedByModule, ResolvedImplementations, RigidVariables};
use roc_collections::all::MutMap;
use roc_collections::VecMap;
use roc_derive::SharedDerivedModule;
use roc_error_macros::internal_error;
use roc_module::symbol::{ModuleId, Symbol};
use roc_types::subs::{Content, ExposedTypesStorageSubs, FlatType, StorageSubs, Subs, Variable};
use roc_types::types::Alias;
use roc_types::types::{Alias, MemberImpl};
/// A marker that a given Subs has been solved.
/// The only way to obtain a Solved<Subs> is by running the solver on it.
@ -48,7 +48,7 @@ pub struct SolvedModule {
pub exposed_vars_by_symbol: Vec<(Symbol, Variable)>,
/// Used when importing this module into another module
pub solved_specializations: ResolvedSpecializations,
pub solved_implementations: ResolvedImplementations,
pub exposed_types: ExposedTypesStorageSubs,
}
@ -108,7 +108,7 @@ pub fn exposed_types_storage_subs(
home: ModuleId,
solved_subs: &mut Solved<Subs>,
exposed_vars_by_symbol: &[(Symbol, Variable)],
solved_specializations: &ResolvedSpecializations,
solved_implementations: &ResolvedImplementations,
abilities_store: &AbilitiesStore,
) -> ExposedTypesStorageSubs {
let subs = solved_subs.inner_mut();
@ -121,31 +121,42 @@ pub fn exposed_types_storage_subs(
}
let mut stored_specialization_lambda_set_vars =
VecMap::with_capacity(solved_specializations.len());
VecMap::with_capacity(solved_implementations.len());
for (_, member_specialization) in solved_specializations.iter() {
for (_, &lset_var) in member_specialization.specialization_lambda_sets.iter() {
let specialization_lset_ambient_function_var =
subs.get_lambda_set(lset_var).ambient_function;
for (_, member_impl) in solved_implementations.iter() {
match member_impl {
ResolvedImpl::Impl(member_specialization) => {
// Export all the lambda sets and their ambient functions.
for (_, &lset_var) in member_specialization.specialization_lambda_sets.iter() {
let specialization_lset_ambient_function_var =
subs.get_lambda_set(lset_var).ambient_function;
// Import the ambient function of this specialization lambda set; that will import the
// lambda set as well. The ambient function is needed for the lambda set compaction
// algorithm.
let imported_lset_ambient_function_var = storage_subs
.import_variable_from(subs, specialization_lset_ambient_function_var)
.variable;
// Import the ambient function of this specialization lambda set; that will import the
// lambda set as well. The ambient function is needed for the lambda set compaction
// algorithm.
let imported_lset_ambient_function_var = storage_subs
.import_variable_from(subs, specialization_lset_ambient_function_var)
.variable;
let imported_lset_var = match storage_subs
.as_inner()
.get_content_without_compacting(imported_lset_ambient_function_var)
{
Content::Structure(FlatType::Func(_, lambda_set_var, _)) => *lambda_set_var,
content => internal_error!(
"ambient lambda set function import is not a function, found: {:?}",
roc_types::subs::SubsFmtContent(content, storage_subs.as_inner())
),
};
stored_specialization_lambda_set_vars.insert(lset_var, imported_lset_var);
let imported_lset_var = match storage_subs
.as_inner()
.get_content_without_compacting(imported_lset_ambient_function_var)
{
Content::Structure(FlatType::Func(_, lambda_set_var, _)) => *lambda_set_var,
content => internal_error!(
"ambient lambda set function import is not a function, found: {:?}",
roc_types::subs::SubsFmtContent(content, storage_subs.as_inner())
),
};
stored_specialization_lambda_set_vars.insert(lset_var, imported_lset_var);
}
}
ResolvedImpl::Derived => {
// nothing to do
}
ResolvedImpl::Error => {
// nothing to do
}
}
}
@ -171,3 +182,37 @@ pub fn exposed_types_storage_subs(
stored_ability_member_vars,
}
}
/// Extracts the ability member implementations owned by a solved module.
pub fn extract_module_owned_implementations(
module_id: ModuleId,
abilities_store: &AbilitiesStore,
) -> ResolvedImplementations {
abilities_store
.iter_declared_implementations()
.filter_map(|((member, typ), member_impl)| {
// This module solved this specialization if either the member or the type comes from the
// module.
if member.module_id() != module_id && typ.module_id() != module_id {
return None;
}
let impl_key = ImplKey {
opaque: typ,
ability_member: member,
};
let resolved_impl = match member_impl {
MemberImpl::Impl(impl_symbol) => {
let specialization = abilities_store.specialization_info(*impl_symbol).expect(
"declared implementations should be resolved conclusively after solving",
);
ResolvedImpl::Impl(specialization.clone())
}
MemberImpl::Derived => ResolvedImpl::Derived,
MemberImpl::Error => ResolvedImpl::Error,
};
Some((impl_key, resolved_impl))
})
.collect()
}
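// For example (names illustrative): after solving a module `Json` that defines
// an opaque type and a custom `toEncoder` implementation for it, the
// (Encode.toEncoder, Json.<opaque>) entry is kept because the opaque type's
// module is `Json`, even though the ability member lives in `Encode`;
// implementations owned entirely by other modules are filtered out.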

View File

@ -4,7 +4,7 @@ use crate::ability::{
};
use crate::module::Solved;
use bumpalo::Bump;
use roc_can::abilities::{AbilitiesStore, MemberSpecialization};
use roc_can::abilities::{AbilitiesStore, MemberSpecializationInfo};
use roc_can::constraint::Constraint::{self, *};
use roc_can::constraint::{Constraints, Cycle, LetConstraint, OpportunisticResolve};
use roc_can::expected::{Expected, PExpected};
@ -16,7 +16,7 @@ use roc_debug_flags::dbg_do;
use roc_debug_flags::{ROC_TRACE_COMPACTION, ROC_VERIFY_RIGID_LET_GENERALIZED};
use roc_derive::SharedDerivedModule;
use roc_derive_key::{DeriveError, DeriveKey};
use roc_error_macros::internal_error;
use roc_error_macros::{internal_error, todo_abilities};
use roc_module::ident::TagName;
use roc_module::symbol::{ModuleId, Symbol};
use roc_problem::can::CycleEntry;
@ -28,8 +28,8 @@ use roc_types::subs::{
};
use roc_types::types::Type::{self, *};
use roc_types::types::{
gather_fields_unsorted_iter, AliasCommon, AliasKind, Category, ErrorType, OptAbleType,
OptAbleVar, PatternCategory, Reason, TypeExtension, Uls,
gather_fields_unsorted_iter, AliasCommon, AliasKind, Category, ErrorType, MemberImpl,
OptAbleType, OptAbleVar, PatternCategory, Reason, TypeExtension, Uls,
};
use roc_unify::unify::{
unify, unify_introduced_ability_specialization, Mode, MustImplementConstraints, Obligated,
@ -1653,25 +1653,35 @@ fn open_tag_union(subs: &mut Subs, var: Variable) {
use {Content::*, FlatType::*};
let desc = subs.get(var);
if let Structure(TagUnion(tags, ext)) = desc.content {
if let Structure(EmptyTagUnion) = subs.get_content_without_compacting(ext) {
let new_ext = subs.fresh_unnamed_flex_var();
subs.set_rank(new_ext, desc.rank);
let new_union = Structure(TagUnion(tags, new_ext));
subs.set_content(var, new_union);
match desc.content {
Structure(TagUnion(tags, ext)) => {
if let Structure(EmptyTagUnion) = subs.get_content_without_compacting(ext) {
let new_ext = subs.fresh_unnamed_flex_var();
subs.set_rank(new_ext, desc.rank);
let new_union = Structure(TagUnion(tags, new_ext));
subs.set_content(var, new_union);
}
// Also open up all nested tag unions.
let all_vars = tags.variables().into_iter();
stack.extend(all_vars.flat_map(|slice| subs[slice]).map(|var| subs[var]));
}
// Also open up all nested tag unions.
let all_vars = tags.variables().into_iter();
stack.extend(all_vars.flat_map(|slice| subs[slice]).map(|var| subs[var]));
Structure(Record(fields, _)) => {
// Open up all nested tag unions.
stack.extend(subs.get_subs_slice(fields.variables()));
}
_ => {
// Everything else is not a structural type that can be opened
// (i.e. cannot be matched in a pattern-match)
}
}
// Today, an "open" constraint doesn't affect any types
// other than tag unions. Recursive tag unions are constructed
// at a later time (during occurs checks after tag unions are
// resolved), so that's not handled here either.
// NB: Handle record types here if we add presence constraints
// to their type inference as well.
}
}
@ -1695,7 +1705,8 @@ fn check_ability_specialization(
// If the symbol specializes an ability member, we need to make sure that the
// inferred type for the specialization actually aligns with the expected
// implementation.
if let Some((ability_member, root_data)) = abilities_store.root_name_and_def(symbol) {
if let Some((impl_key, root_data)) = abilities_store.impl_key_and_def(symbol) {
let ability_member = impl_key.ability_member;
let root_signature_var = root_data.signature_var();
let parent_ability = root_data.parent_ability;
@ -1716,7 +1727,7 @@ fn check_ability_specialization(
Mode::EQ,
);
match unified {
let resolved_mark = match unified {
Success {
vars,
must_implement_ability,
@ -1745,12 +1756,7 @@ fn check_ability_specialization(
let specialization_region = symbol_loc_var.region;
let specialization =
MemberSpecialization::new(symbol, specialization_lambda_sets);
abilities_store.register_specialization_for_type(
ability_member,
opaque,
specialization,
);
MemberSpecializationInfo::new(symbol, specialization_lambda_sets);
// Make sure we check that the opaque has specialized all members of the
// ability, after we finish solving the module.
@ -1764,6 +1770,8 @@ fn check_ability_specialization(
},
specialization_region,
);
Ok(specialization)
}
Some(Obligated::Adhoc(var)) => {
// This is a specialization of a structural type - never allowed.
@ -1781,6 +1789,8 @@ fn check_ability_specialization(
};
problems.push(problem);
Err(())
}
None => {
// This can happen when every ability constraint on a type variable went
@ -1807,6 +1817,8 @@ fn check_ability_specialization(
);
problems.push(problem);
Err(())
}
}
}
@ -1829,14 +1841,22 @@ fn check_ability_specialization(
);
problems.push(problem);
Err(())
}
BadType(vars, problem) => {
subs.commit_snapshot(snapshot);
introduce(subs, rank, pools, &vars);
problems.push(TypeError::BadType(problem));
Err(())
}
}
};
abilities_store
.mark_implementation(impl_key.ability_member, impl_key.opaque, resolved_mark)
.expect("marked as a custom implementation, but not recorded as such");
}
}
@ -2291,7 +2311,7 @@ fn get_specialization_lambda_set_ambient_function<P: Phase>(
let external_specialized_lset =
phase.with_module_abilities_store(opaque_home, |abilities_store| {
let opt_specialization =
abilities_store.get_specialization(ability_member, opaque);
abilities_store.get_implementation(ability_member, opaque);
match (P::IS_LATE, opt_specialization) {
(false, None) => {
// doesn't specialize, we'll have reported an error for this
@ -2304,13 +2324,20 @@ fn get_specialization_lambda_set_ambient_function<P: Phase>(
ability_member,
);
}
(_, Some(specialization)) => {
let specialized_lambda_set = *specialization
.specialization_lambda_sets
.get(&lset_region)
.expect("lambda set region not resolved");
Ok(specialized_lambda_set)
}
(_, Some(member_impl)) => match member_impl {
MemberImpl::Impl(spec_symbol) => {
let specialization =
abilities_store.specialization_info(*spec_symbol).expect("expected custom implementations to always have complete specialization info by this point");
let specialized_lambda_set = *specialization
.specialization_lambda_sets
.get(&lset_region)
.expect("lambda set region not resolved");
Ok(specialized_lambda_set)
}
MemberImpl::Derived => todo_abilities!(),
MemberImpl::Error => todo_abilities!(),
},
}
})?;
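
The dispatch above reflects the new shape of a declared member implementation: a concrete Impl carries the specialization symbol whose lambda sets were resolved during solving, while Derived and Error are still left as todo_abilities!(). Below is a self-contained sketch of that three-way dispatch using plain stand-ins (strings for symbols, small integers for lambda set regions); none of these names are the compiler's real types.

    use std::collections::HashMap;

    // Hypothetical mirror of the member-implementation dispatch above.
    enum MemberImpl {
        // A user-written implementation, identified by its specialization symbol.
        Impl(&'static str),
        // Derived by the compiler (not resolved in this sketch).
        Derived,
        // Canonicalization already reported a problem for this implementation.
        Error,
    }

    struct Specialization {
        // lambda set region -> resolved lambda set (plain numbers as stand-ins)
        specialization_lambda_sets: HashMap<u8, u32>,
    }

    fn ambient_lambda_set(
        member_impl: &MemberImpl,
        specializations: &HashMap<&'static str, Specialization>,
        lset_region: u8,
    ) -> Result<u32, ()> {
        match member_impl {
            MemberImpl::Impl(spec_symbol) => {
                let specialization = specializations
                    .get(spec_symbol)
                    .expect("custom implementations have complete specialization info by now");
                let resolved = *specialization
                    .specialization_lambda_sets
                    .get(&lset_region)
                    .expect("lambda set region not resolved");
                Ok(resolved)
            }
            // The real code still has todo_abilities!() for these two cases.
            MemberImpl::Derived | MemberImpl::Error => Err(()),
        }
    }

    fn main() {
        let specializations = HashMap::from([(
            "Id#hash",
            Specialization {
                specialization_lambda_sets: HashMap::from([(1u8, 77u32)]),
            },
        )]);

        assert_eq!(
            ambient_lambda_set(&MemberImpl::Impl("Id#hash"), &specializations, 1),
            Ok(77)
        );
        for fallback in [MemberImpl::Derived, MemberImpl::Error] {
            assert_eq!(ambient_lambda_set(&fallback, &specializations, 1), Err(()));
        }
    }
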
@ -2807,7 +2834,8 @@ fn type_to_variable<'a>(
subs,
UnionTags::default(),
temp_ext_var,
);
)
.expect("extension var could not be seen as a tag union");
for _ in it {
unreachable!("we assert that the ext var is empty; otherwise we'd already know it was a tag union!");
@ -3351,7 +3379,8 @@ fn type_to_union_tags<'a>(
subs,
UnionTags::default(),
temp_ext_var,
);
)
.expect("extension var could not be seen as tag union");
tag_vars.extend(it.map(|(n, v)| (n.clone(), v)));

View File

@ -19,7 +19,10 @@ mod solve_expr {
use roc_region::all::{LineColumn, LineColumnRegion, LineInfo, Region};
use roc_reporting::report::{can_problem, type_problem, RocDocAllocator};
use roc_solve::solve::TypeError;
use roc_types::pretty_print::{name_and_print_var, DebugPrint};
use roc_types::{
pretty_print::{name_and_print_var, DebugPrint},
types::MemberImpl,
};
use std::path::PathBuf;
// HELPERS
@ -245,7 +248,13 @@ mod solve_expr {
assert_eq!(actual, expected.to_string());
}
fn infer_queries_help(src: &str, expected: &[&'static str], print_only_under_alias: bool) {
#[derive(Default)]
struct InferOptions {
print_only_under_alias: bool,
allow_errors: bool,
}
fn infer_queries_help(src: &str, expected: impl FnOnce(&str), options: InferOptions) {
let (
LoadedModule {
module_id: home,
@ -269,12 +278,14 @@ mod solve_expr {
let (can_problems, type_problems) =
format_problems(&src, home, &interns, can_problems, type_problems);
assert!(
can_problems.is_empty(),
"Canonicalization problems: {}",
can_problems
);
assert!(type_problems.is_empty(), "Type problems: {}", type_problems);
if !options.allow_errors {
assert!(
can_problems.is_empty(),
"Canonicalization problems: {}",
can_problems
);
assert!(type_problems.is_empty(), "Type problems: {}", type_problems);
}
let queries = parse_queries(&src);
assert!(!queries.is_empty(), "No queries provided!");
@ -295,7 +306,7 @@ mod solve_expr {
&interns,
DebugPrint {
print_lambda_sets: true,
print_only_under_alias,
print_only_under_alias: options.print_only_under_alias,
},
);
subs.rollback_to(snapshot);
@ -319,15 +330,16 @@ mod solve_expr {
solved_queries.push(elaborated);
}
assert_eq!(solved_queries, expected)
let pretty_solved_queries = solved_queries.join("\n");
expected(&pretty_solved_queries);
}
macro_rules! infer_queries {
($program:expr, $queries:expr $(,)?) => {
infer_queries_help($program, $queries, false)
};
($program:expr, $queries:expr, print_only_under_alias=true $(,)?) => {
infer_queries_help($program, $queries, true)
($program:expr, @$queries:literal $($option:ident: $value:expr)*) => {
infer_queries_help($program, |golden| insta::assert_snapshot!(golden, @$queries), InferOptions {
$($option: $value,)* ..InferOptions::default()
})
};
}
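
The rewritten macro above switches the expectations to insta inline snapshots and forwards any trailing option: value pairs into InferOptions via functional-update syntax, so a test only spells out the options it changes. The sketch below isolates that options pattern with made-up names (Options, with_options!); it uses a comma-separated option list rather than the exact matcher above and leaves the insta call out.

    // Hypothetical illustration of the Default + functional-update pattern
    // used by `infer_queries!` to make every option opt-in.
    #[derive(Debug, Default, PartialEq)]
    struct Options {
        print_only_under_alias: bool,
        allow_errors: bool,
    }

    macro_rules! with_options {
        ($($field:ident: $value:expr),* $(,)?) => {
            Options {
                $($field: $value,)*
                ..Options::default()
            }
        };
    }

    fn main() {
        // No overrides: everything falls back to the derived Default.
        assert_eq!(with_options!(), Options::default());

        // Override a single field; the others keep their defaults.
        let opts = with_options!(allow_errors: true);
        assert!(opts.allow_errors && !opts.print_only_under_alias);
    }
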
@ -354,11 +366,22 @@ mod solve_expr {
panic!();
}
let known_specializations = abilities_store.iter_specializations();
let known_specializations = abilities_store.iter_declared_implementations().filter_map(
|((member, typ), member_impl)| match member_impl {
MemberImpl::Impl(impl_symbol) => {
let specialization = abilities_store.specialization_info(*impl_symbol).expect(
"declared implementations should be resolved conclusively after solving",
);
Some((member, typ, specialization.clone()))
}
MemberImpl::Derived | MemberImpl::Error => None,
},
);
use std::collections::HashSet;
let pretty_specializations = known_specializations
.into_iter()
.map(|((member, typ), _)| {
.map(|(member, typ, _)| {
let member_data = abilities_store.member_def(member).unwrap();
let member_str = member.as_str(&interns);
let ability_str = member_data.parent_ability.as_str(&interns);
@ -1484,14 +1507,13 @@ mod solve_expr {
infer_eq(
indoc!(
r#"
# technically, an empty record can be destructured
{} = {}
thunk = \{} -> 42
# technically, an empty record can be destructured
thunk = \{} -> 42
xEmpty = if thunk {} == 42 then { x: {} } else { x: {} }
xEmpty = if thunk {} == 42 then { x: {} } else { x: {} }
when xEmpty is
{ x: {} } -> {}
when xEmpty is
{ x: {} } -> {}
"#
),
"{}",
@ -5938,7 +5960,7 @@ mod solve_expr {
Hash has hash : a -> U64 | a has Hash
Id := U64
Id := U64 has [Hash {hash}]
hash = \@Id n -> n
"#
@ -5958,7 +5980,7 @@ mod solve_expr {
hash : a -> U64 | a has Hash
hash32 : a -> U32 | a has Hash
Id := U64
Id := U64 has [Hash {hash, hash32}]
hash = \@Id n -> n
hash32 = \@Id n -> Num.toU32 n
@ -5983,7 +6005,7 @@ mod solve_expr {
eq : a, a -> Bool | a has Ord
le : a, a -> Bool | a has Ord
Id := U64
Id := U64 has [Hash {hash, hash32}, Ord {eq, le}]
hash = \@Id n -> n
hash32 = \@Id n -> Num.toU32 n
@ -6011,7 +6033,7 @@ mod solve_expr {
Hash has
hash : a -> U64 | a has Hash
Id := U64
Id := U64 has [Hash {hash}]
hash : Id -> U64
hash = \@Id n -> n
@ -6031,7 +6053,7 @@ mod solve_expr {
Hash has
hash : a -> U64 | a has Hash
Id := U64
Id := U64 has [Hash {hash}]
hash : Id -> U64
"#
@ -6050,7 +6072,7 @@ mod solve_expr {
Hash has
hash : a -> U64 | a has Hash
Id := U64
Id := U64 has [Hash {hash}]
hash = \@Id n -> n
@ -6144,7 +6166,7 @@ mod solve_expr {
hashEq = \x, y -> hash x == hash y
Id := U64
Id := U64 has [Hash {hash}]
hash = \@Id n -> n
result = hashEq (@Id 100) (@Id 101)
@ -6166,11 +6188,11 @@ mod solve_expr {
mulHashes = \x, y -> hash x * hash y
Id := U64
hash = \@Id n -> n
Id := U64 has [Hash { hash: hashId }]
hashId = \@Id n -> n
Three := {}
hash = \@Three _ -> 3
Three := {} has [Hash { hash: hashThree }]
hashThree = \@Three _ -> 3
result = mulHashes (@Id 100) (@Three {})
"#
@ -6197,12 +6219,12 @@ mod solve_expr {
# ^^^^^
"#
),
&[
"ob : Bool",
"ob : Bool",
"True : [False, True]",
"False : [False, True]",
],
@r###"
ob : Bool
ob : Bool
True : [False, True]
False : [False, True]
"###
)
}
@ -6329,15 +6351,13 @@ mod solve_expr {
toBytes = \val, fmt -> appendWith [] (toEncoder val) fmt
Linear := {}
Linear := {} has [Format {u8}]
# impl Format for Linear
u8 = \n -> @Encoder (\lst, @Linear {} -> List.append lst n)
#^^{-1}
MyU8 := U8
MyU8 := U8 has [Encoding {toEncoder}]
# impl Encoding for MyU8
toEncoder = \@MyU8 n -> u8 n
#^^^^^^^^^{-1}
@ -6345,11 +6365,11 @@ mod solve_expr {
#^^^^^^^^^{-1}
"#
),
&[
"Linear#u8(22) : U8 -[[u8(22)]]-> Encoder Linear",
"MyU8#toEncoder(23) : MyU8 -[[toEncoder(23)]]-> Encoder fmt | fmt has Format",
"myU8Bytes : List U8",
],
@r###"
Linear#u8(10) : U8 -[[u8(10)]]-> Encoder Linear
MyU8#toEncoder(11) : MyU8 -[[toEncoder(11)]]-> Encoder fmt | fmt has Format
myU8Bytes : List U8
"###
)
}
@ -6383,18 +6403,16 @@ mod solve_expr {
Err e -> Err e
Linear := {}
Linear := {} has [DecoderFormatting {u8}]
# impl DecoderFormatting for Linear
u8 = @Decoder \lst, @Linear {} ->
#^^{-1}
when List.first lst is
Ok n -> { result: Ok n, rest: List.dropFirst lst }
Err _ -> { result: Err TooShort, rest: [] }
MyU8 := U8
MyU8 := U8 has [Decoding {decoder}]
# impl Decoding for MyU8
decoder = @Decoder \lst, fmt ->
#^^^^^^^{-1}
when decodeWith lst u8 fmt is
@ -6406,11 +6424,11 @@ mod solve_expr {
#^^^^{-1}
"#
),
&[
"Linear#u8(27) : Decoder U8 Linear",
"MyU8#decoder(28) : Decoder MyU8 fmt | fmt has DecoderFormatting",
"myU8 : Result MyU8 DecodeError",
],
@r#"
Linear#u8(11) : Decoder U8 Linear
MyU8#decoder(12) : Decoder MyU8 fmt | fmt has DecoderFormatting
myU8 : Result MyU8 DecodeError
"#
)
}
@ -6444,7 +6462,7 @@ mod solve_expr {
Default has default : {} -> a | a has Default
A := {}
A := {} has [Default {default}]
default = \{} -> @A {}
main =
@ -6454,7 +6472,7 @@ mod solve_expr {
a
"#
),
&["A#default(5) : {} -[[default(5)]]-> A"],
@"A#default(4) : {} -[[default(4)]]-> A"
)
}
@ -6464,10 +6482,10 @@ mod solve_expr {
indoc!(
r#"
app "test"
imports [Encode.{ toEncoder }, Json]
imports [Encode.{ Encoding, toEncoder }, Json]
provides [main] to "./platform"
HelloWorld := {}
HelloWorld := {} has [Encoding {toEncoder}]
toEncoder = \@HelloWorld {} ->
Encode.custom \bytes, fmt ->
@ -6498,9 +6516,9 @@ mod solve_expr {
# ^^^^^^^^^
"#
),
&[
"Encoding#toEncoder(2) : { a : Str } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting",
],
@r#"
"Encoding#toEncoder(2) : { a : Str } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting",
"#
)
}
@ -6521,9 +6539,9 @@ mod solve_expr {
# ^^^^^^^^^
"#
),
&[
"Encoding#toEncoder(2) : { a : A } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting",
],
@r#"
"Encoding#toEncoder(2) : { a : A } -[[#Derived.toEncoder_{a}(0)]]-> Encoder fmt | fmt has EncoderFormatting",
"#
)
}
@ -6536,7 +6554,7 @@ mod solve_expr {
Id has id : a -> a | a has Id
A := {}
A := {} has [Id {id}]
id = \@A {} -> @A {}
#^^{-1}
@ -6553,12 +6571,12 @@ mod solve_expr {
a
"#
),
&[
"A#id(5) : A -[[id(5)]]-> A",
"Id#id(4) : a -[[] + a:id(4):1]-> a | a has Id",
"alias1 : a -[[] + a:id(4):1]-> a | a has Id",
"alias2 : A -[[id(5)]]-> A",
],
@r###"
A#id(4) : A -[[id(4)]]-> A
Id#id(2) : a -[[] + a:id(2):1]-> a | a has Id
alias1 : a -[[] + a:id(2):1]-> a | a has Id
alias2 : A -[[id(4)]]-> A
"###
)
}
@ -6572,7 +6590,7 @@ mod solve_expr {
Id1 has id1 : a -> a | a has Id1
Id2 has id2 : a -> a | a has Id2
A := {}
A := {} has [Id1 {id1}, Id2 {id2}]
id1 = \@A {} -> @A {}
#^^^{-1}
@ -6587,14 +6605,12 @@ mod solve_expr {
a
"#
),
&[
"A#id1(8) : A -[[id1(8)]]-> A",
//
"A#id2(9) : A -[[id2(9)]]-> A",
"A#id1(8) : A -[[id1(8)]]-> A",
//
"A#id2(9) : A -[[id2(9)]]-> A",
],
@r###"
A#id1(6) : A -[[id1(6)]]-> A
A#id2(7) : A -[[id2(7)]]-> A
A#id1(6) : A -[[id1(6)]]-> A
A#id2(7) : A -[[id2(7)]]-> A
"###
)
}
@ -6607,7 +6623,7 @@ mod solve_expr {
Id has id : a -> a | a has Id
A := {}
A := {} has [Id {id}]
id = \@A {} -> @A {}
#^^{-1}
@ -6627,12 +6643,12 @@ mod solve_expr {
#^^^^^^^^{-1}
"#
),
&[
"A#id(5) : A -[[id(5)]]-> A",
"idNotAbility : a -[[idNotAbility(6)]]-> a",
"idChoice : a -[[idNotAbility(6)] + a:id(4):1]-> a | a has Id",
"idChoice : A -[[id(5), idNotAbility(6)]]-> A",
],
@r###"
A#id(4) : A -[[id(4)]]-> A
idNotAbility : a -[[idNotAbility(5)]]-> a
idChoice : a -[[idNotAbility(5)] + a:id(2):1]-> a | a has Id
idChoice : A -[[id(4), idNotAbility(5)]]-> A
"###
)
}
@ -6645,7 +6661,7 @@ mod solve_expr {
Id has id : a -> a | a has Id
A := {}
A := {} has [Id {id}]
id = \@A {} -> @A {}
#^^{-1}
@ -6662,11 +6678,11 @@ mod solve_expr {
#^^^^^^^^{-1}
"#
),
&[
"A#id(5) : A -[[id(5)]]-> A",
"idChoice : a -[[] + a:id(4):1]-> a | a has Id",
"idChoice : A -[[id(5)]]-> A",
],
@r#"
A#id(4) : A -[[id(4)]]-> A
idChoice : a -[[] + a:id(2):1]-> a | a has Id
idChoice : A -[[id(4)]]-> A
"#
)
}
@ -6681,7 +6697,7 @@ mod solve_expr {
Id has id : a -> Thunk a | a has Id
A := {}
A := {} has [Id {id}]
id = \@A {} -> \{} -> @A {}
#^^{-1}
@ -6696,12 +6712,12 @@ mod solve_expr {
a
"#
),
&[
"A#id(7) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
"Id#id(6) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
"alias : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
],
print_only_under_alias = true,
@r#"
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
Id#id(3) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
alias : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
"#
print_only_under_alias: true
)
}
@ -6716,7 +6732,7 @@ mod solve_expr {
Id has id : a -> Thunk a | a has Id
A := {}
A := {} has [Id {id}]
id = \@A {} -> @Thunk (\{} -> @A {})
#^^{-1}
@ -6727,11 +6743,11 @@ mod solve_expr {
#^^{-1}
"#
),
&[
"A#id(7) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
"it : {} -[[8(8)]]-> {}",
],
print_only_under_alias = true,
@r#"
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
it : {} -[[8(8)]]-> {}
"#
print_only_under_alias: true
)
}
@ -6746,7 +6762,7 @@ mod solve_expr {
Id has id : a -> Thunk a | a has Id
A := {}
A := {} has [Id {id}]
id = \@A {} -> \{} -> @A {}
#^^{-1}
@ -6758,11 +6774,11 @@ mod solve_expr {
a
"#
),
&[
"A#id(7) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
"A#id(7) : {} -[[id(7)]]-> ({} -[[8(8)]]-> {})",
],
print_only_under_alias = true,
@r#"
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
A#id(5) : {} -[[id(5)]]-> ({} -[[8(8)]]-> {})
"#
print_only_under_alias: true
)
}
@ -6775,7 +6791,9 @@ mod solve_expr {
Diverge has diverge : a -> a | a has Diverge
A := {}
A := {} has [Diverge {diverge}]
diverge : A -> A
diverge = \@A {} -> diverge (@A {})
#^^^^^^^{-1} ^^^^^^^
@ -6787,12 +6805,11 @@ mod solve_expr {
a
"#
),
&[
"A#diverge(5) : A -[[diverge(5)]]-> A",
"Diverge#diverge(4) : A -[[diverge(5)]]-> A",
//
"A#diverge(5) : A -[[diverge(5)]]-> A",
],
@r###"
A#diverge(4) : A -[[diverge(4)]]-> A
A#diverge(4) : A -[[diverge(4)]]-> A
A#diverge(4) : A -[[diverge(4)]]-> A
"###
)
}
@ -6807,7 +6824,47 @@ mod solve_expr {
ping : a -> a | a has Bounce
pong : a -> a | a has Bounce
A := {}
A := {} has [Bounce {ping, pong}]
ping : A -> A
ping = \@A {} -> pong (@A {})
#^^^^{-1} ^^^^
pong : A -> A
pong = \@A {} -> ping (@A {})
#^^^^{-1} ^^^^
main =
a : A
a = ping (@A {})
# ^^^^
a
"#
),
@r###"
A#ping(5) : A -[[ping(5)]]-> A
A#pong(6) : A -[[pong(6)]]-> A
A#pong(6) : A -[[pong(6)]]-> A
A#ping(5) : A -[[ping(5)]]-> A
A#ping(5) : A -[[ping(5)]]-> A
"###
)
}
#[test]
#[ignore = "TODO: this currently runs into trouble with ping and pong first being inferred as overly-general before recursive constraining"]
fn resolve_mutually_recursive_ability_lambda_sets_inferred() {
infer_queries!(
indoc!(
r#"
app "test" provides [main] to "./platform"
Bounce has
ping : a -> a | a has Bounce
pong : a -> a | a has Bounce
A := {} has [Bounce {ping, pong}]
ping = \@A {} -> pong (@A {})
#^^^^{-1} ^^^^
@ -6823,15 +6880,13 @@ mod solve_expr {
a
"#
),
&[
"A#ping(7) : A -[[ping(7)]]-> A",
"Bounce#pong(6) : A -[[pong(8)]]-> A",
//
"A#pong(8) : A -[[pong(8)]]-> A",
"A#ping(7) : A -[[ping(7)]]-> A",
//
"A#ping(7) : A -[[ping(7)]]-> A",
],
@r###"
A#ping(5) : A -[[ping(5)]]-> A
Bounce#pong(3) : A -[[pong(6)]]-> A
A#pong(6) : A -[[pong(6)]]-> A
A#ping(5) : A -[[ping(5)]]-> A
A#ping(5) : A -[[ping(5)]]-> A
"###
)
}
@ -6844,7 +6899,7 @@ mod solve_expr {
#^^^^^^^^^^^^^^^^^^^^^^{-1}
"#
),
&[r#"[\{} -> {}, \{} -> {}] : List ({}* -[[1(1), 2(2)]]-> {})"#],
@r#"[\{} -> {}, \{} -> {}] : List ({}* -[[1(1), 2(2)]]-> {})"#
)
}
@ -6897,13 +6952,13 @@ mod solve_expr {
{name, outerList}
"#
),
&[
"foo : [Named Str (List a)] as a",
"Named name outerList : [Named Str (List a)] as a",
"name : Str",
"outerList : List ([Named Str (List a)] as a)",
],
print_only_under_alias = true
@r#"
foo : [Named Str (List a)] as a
Named name outerList : [Named Str (List a)] as a
name : Str
outerList : List ([Named Str (List a)] as a)
"#
print_only_under_alias: true
)
}
@ -6984,11 +7039,11 @@ mod solve_expr {
#^^^{-1}
"#
),
&[
"capture : Str -[[capture(1)]]-> ({} -[[thunk(5) {}, thunk(5) Str]]-> Str)",
"capture : {} -[[capture(1)]]-> ({} -[[thunk(5) {}, thunk(5) Str]]-> Str)",
"fun : {} -[[thunk(5) {}, thunk(5) Str]]-> Str",
]
@r#"
capture : Str -[[capture(1)]]-> ({} -[[thunk(5) {}, thunk(5) Str]]-> Str)
capture : {} -[[capture(1)]]-> ({} -[[thunk(5) {}, thunk(5) Str]]-> Str)
fun : {} -[[thunk(5) {}, thunk(5) Str]]-> Str
"#
);
}
@ -7019,11 +7074,8 @@ mod solve_expr {
#^^^{-1}
"#
),
&[
"fun : {} -[[thunk(9) (({} -[[15(15)]]-> { s1 : Str })) ({ s1 : Str } -[[g(4)]]-> ({} -[[13(13) Str]]-> Str)), \
thunk(9) (({} -[[14(14)]]-> Str)) (Str -[[f(3)]]-> ({} -[[11(11)]]-> Str))]]-> Str",
],
print_only_under_alias = true,
@r#"fun : {} -[[thunk(9) (({} -[[15(15)]]-> { s1 : Str })) ({ s1 : Str } -[[g(4)]]-> ({} -[[13(13) Str]]-> Str)), thunk(9) (({} -[[14(14)]]-> Str)) (Str -[[f(3)]]-> ({} -[[11(11)]]-> Str))]]-> Str"#
print_only_under_alias: true
);
}
@ -7050,7 +7102,7 @@ mod solve_expr {
#^^^{-1}
"#
),
&["fun : {} -[[thunk(5) [A Str]*, thunk(5) { a : Str }]]-> Str",]
@r#"fun : {} -[[thunk(5) [A Str]*, thunk(5) { a : Str }]]-> Str"#
);
}
@ -7115,11 +7167,11 @@ mod solve_expr {
F has f : a -> (b -> {}) | a has F, b has G
G has g : b -> {} | b has G
Fo := {}
Fo := {} has [F {f}]
f = \@Fo {} -> g
#^{-1}
Go := {}
Go := {} has [G {g}]
g = \@Go {} -> {}
#^{-1}
@ -7128,12 +7180,12 @@ mod solve_expr {
# ^^^^^^^^^^
"#
),
&[
"Fo#f(10) : Fo -[[f(10)]]-> (b -[[] + b:g(8):1]-> {}) | b has G",
"Go#g(11) : Go -[[g(11)]]-> {}",
"Fo#f(10) : Fo -[[f(10)]]-> (Go -[[g(11)]]-> {})",
"f (@Fo {}) : Go -[[g(11)]]-> {}",
],
@r###"
Fo#f(7) : Fo -[[f(7)]]-> (b -[[] + b:g(4):1]-> {}) | b has G
Go#g(8) : Go -[[g(8)]]-> {}
Fo#f(7) : Fo -[[f(7)]]-> (Go -[[g(8)]]-> {})
f (@Fo {}) : Go -[[g(8)]]-> {}
"###
);
}
@ -7147,11 +7199,11 @@ mod solve_expr {
F has f : a -> ({} -> b) | a has F, b has G
G has g : {} -> b | b has G
Fo := {}
Fo := {} has [F {f}]
f = \@Fo {} -> g
#^{-1}
Go := {}
Go := {} has [G {g}]
g = \{} -> @Go {}
#^{-1}
@ -7164,12 +7216,12 @@ mod solve_expr {
{foo, it}
"#
),
&[
"Fo#f(10) : Fo -[[f(10)]]-> ({} -[[] + b:g(8):1]-> b) | b has G",
"Go#g(11) : {} -[[g(11)]]-> Go",
"Fo#f(10) : Fo -[[f(10)]]-> ({} -[[g(11)]]-> Go)",
"f (@Fo {}) : {} -[[g(11)]]-> Go",
],
@r###"
Fo#f(7) : Fo -[[f(7)]]-> ({} -[[] + b:g(4):1]-> b) | b has G
Go#g(8) : {} -[[g(8)]]-> Go
Fo#f(7) : Fo -[[f(7)]]-> ({} -[[g(8)]]-> Go)
f (@Fo {}) : {} -[[g(8)]]-> Go
"###
);
}
@ -7183,11 +7235,11 @@ mod solve_expr {
F has f : a -> (b -> {}) | a has F, b has G
G has g : b -> {} | b has G
Fo := {}
Fo := {} has [F {f}]
f = \@Fo {} -> g
#^{-1}
Go := {}
Go := {} has [G {g}]
g = \@Go {} -> {}
#^{-1}
@ -7198,26 +7250,26 @@ mod solve_expr {
# ^
"#
),
&[
"Fo#f(10) : Fo -[[f(10)]]-> (b -[[] + b:g(8):1]-> {}) | b has G",
"Go#g(11) : Go -[[g(11)]]-> {}",
// TODO SERIOUS: Let generalization is broken here, and this is NOT correct!!
// Two problems:
// - 1. `{}` always has its rank adjusted to the toplevel, which forces the rest
// of the type to the toplevel, but that is NOT correct here!
// - 2. During solving lambda set compaction cannot happen until an entire module
// is solved, which forces resolved-but-not-yet-compacted lambdas in
// unspecialized lambda sets to pull the rank into a lower, non-generalized
// rank. Special-casing for that is a TERRIBLE HACK that interferes very
// poorly with (1)
//
// We are BLOCKED on https://github.com/rtfeldman/roc/issues/3207 to make this work
// correctly!
// See also https://github.com/rtfeldman/roc/pull/3175, a separate, but similar problem.
"h : Go -[[g(11)]]-> {}",
"Fo#f(10) : Fo -[[f(10)]]-> (Go -[[g(11)]]-> {})",
"h : Go -[[g(11)]]-> {}",
],
// TODO SERIOUS: Let generalization is broken here, and this is NOT correct!!
// Two problems:
// - 1. `{}` always has its rank adjusted to the toplevel, which forces the rest
// of the type to the toplevel, but that is NOT correct here!
// - 2. During solving lambda set compaction cannot happen until an entire module
// is solved, which forces resolved-but-not-yet-compacted lambdas in
// unspecialized lambda sets to pull the rank into a lower, non-generalized
// rank. Special-casing for that is a TERRIBLE HACK that interferes very
// poorly with (1)
//
// We are BLOCKED on https://github.com/rtfeldman/roc/issues/3207 to make this work
// correctly!
// See also https://github.com/rtfeldman/roc/pull/3175, a separate, but similar problem.
@r###"
Fo#f(7) : Fo -[[f(7)]]-> (b -[[] + b:g(4):1]-> {}) | b has G
Go#g(8) : Go -[[g(8)]]-> {}
h : Go -[[g(8)]]-> {}
Fo#f(7) : Fo -[[f(7)]]-> (Go -[[g(8)]]-> {})
h : Go -[[g(8)]]-> {}
"###
);
}
@ -7231,11 +7283,11 @@ mod solve_expr {
F has f : a -> (b -> {}) | a has F, b has G
G has g : b -> {} | b has G
Fo := {}
Fo := {} has [F {f}]
f = \@Fo {} -> g
#^{-1}
Go := {}
Go := {} has [G {g}]
g = \@Go {} -> {}
#^{-1}
@ -7246,13 +7298,13 @@ mod solve_expr {
h
"#
),
&[
"Fo#f(10) : Fo -[[f(10)]]-> (b -[[] + b:g(8):1]-> {}) | b has G",
"Go#g(11) : Go -[[g(11)]]-> {}",
"main : b -[[] + b:g(8):1]-> {} | b has G",
"h : b -[[] + b:g(8):1]-> {} | b has G",
"Fo#f(10) : Fo -[[f(10)]]-> (b -[[] + b:g(8):1]-> {}) | b has G",
],
@r###"
Fo#f(7) : Fo -[[f(7)]]-> (b -[[] + b:g(4):1]-> {}) | b has G
Go#g(8) : Go -[[g(8)]]-> {}
main : b -[[] + b:g(4):1]-> {} | b has G
h : b -[[] + b:g(4):1]-> {} | b has G
Fo#f(7) : Fo -[[f(7)]]-> (b -[[] + b:g(4):1]-> {}) | b has G
"###
);
}
@ -7266,11 +7318,11 @@ mod solve_expr {
F has f : a, b -> ({} -> ({} -> {})) | a has F, b has G
G has g : b -> ({} -> {}) | b has G
Fo := {}
Fo := {} has [F {f}]
f = \@Fo {}, b -> \{} -> g b
#^{-1}
Go := {}
Go := {} has [G {g}]
g = \@Go {} -> \{} -> {}
#^{-1}
@ -7279,11 +7331,11 @@ mod solve_expr {
# ^
"#
),
&[
"Fo#f(10) : Fo, b -[[f(10)]]-> ({} -[[13(13) b]]-> ({} -[[] + b:g(8):2]-> {})) | b has G",
"Go#g(11) : Go -[[g(11)]]-> ({} -[[14(14)]]-> {})",
"Fo#f(10) : Fo, Go -[[f(10)]]-> ({} -[[13(13) Go]]-> ({} -[[14(14)]]-> {}))",
],
@r###"
Fo#f(7) : Fo, b -[[f(7)]]-> ({} -[[13(13) b]]-> ({} -[[] + b:g(4):2]-> {})) | b has G
Go#g(8) : Go -[[g(8)]]-> ({} -[[14(14)]]-> {})
Fo#f(7) : Fo, Go -[[f(7)]]-> ({} -[[13(13) Go]]-> ({} -[[14(14)]]-> {}))
"###
);
}
@ -7367,4 +7419,94 @@ mod solve_expr {
"List (A U8)",
);
}
#[test]
fn shared_pattern_variable_in_when_patterns() {
infer_queries!(
indoc!(
r#"
when A "" is
# ^^^^
A x | B x -> x
# ^ ^ ^
"#
),
@r###"
A "" : [A Str, B Str]
x : Str
x : Str
x : Str
"###
);
}
#[test]
fn shared_pattern_variable_in_multiple_branch_when_patterns() {
infer_queries!(
indoc!(
r#"
when A "" is
# ^^^^
A x | B x -> x
# ^ ^ ^
C x | D x -> x
# ^ ^ ^
"#
),
@r###"
A "" : [A Str, B Str, C Str, D Str]
x : Str
x : Str
x : Str
x : Str
x : Str
x : Str
"###
);
}
#[test]
fn catchall_branch_for_pattern_not_last() {
infer_queries!(
indoc!(
r#"
\x -> when x is
#^
A B _ -> ""
A _ C -> ""
"#
),
@r#"x : [A [B]* [C]*]"#
allow_errors: true
);
}
#[test]
fn catchall_branch_walk_into_nested_types() {
infer_queries!(
indoc!(
r#"
\x -> when x is
#^
{ a: A { b: B } } -> ""
_ -> ""
"#
),
@r#"x : { a : [A { b : [B]* }*]* }*"#
);
}
#[test]
fn infer_type_with_underscore_destructure_assignment() {
infer_eq_without_problem(
indoc!(
r#"
Pair x _ = Pair 0 1
x
"#
),
"Num *",
);
}
}

View File

@ -13,11 +13,14 @@ use ven_pretty::DocAllocator;
use crate::pretty_print::{pretty_print_def, Ctx};
use roc_can::{
abilities::{AbilitiesStore, ResolvedSpecializations, SpecializationLambdaSets},
abilities::{AbilitiesStore, SpecializationLambdaSets},
constraint::Constraints,
def::Def,
expr::Declarations,
module::{ExposedByModule, ExposedForModule, ExposedModuleTypes, RigidVariables},
module::{
ExposedByModule, ExposedForModule, ExposedModuleTypes, ResolvedImplementations,
RigidVariables,
},
};
use roc_collections::VecSet;
use roc_constrain::expr::constrain_decls;
@ -134,7 +137,7 @@ fn check_derived_typechecks_and_golden(
ModuleId::ENCODE,
ExposedModuleTypes {
exposed_types_storage_subs: exposed_encode_types,
resolved_specializations: ResolvedSpecializations::default(),
resolved_implementations: ResolvedImplementations::default(),
},
);
let exposed_for_module =
@ -230,7 +233,7 @@ where
mut interns,
exposed_types_storage: exposed_encode_types,
abilities_store,
resolved_specializations,
resolved_implementations,
..
} = roc_load_internal::file::load_and_typecheck_str(
&arena,
@ -256,7 +259,7 @@ where
ModuleId::ENCODE,
ExposedModuleTypes {
exposed_types_storage_subs: exposed_encode_types.clone(),
resolved_specializations,
resolved_implementations,
},
);

View File

@ -221,7 +221,7 @@ fn branch<'a>(c: &Ctx, f: &'a Arena<'a>, b: &'a WhenBranch) -> DocBuilder<'a, Ar
f.intersperse(
patterns
.iter()
.map(|lp| pattern(c, PPrec::Free, f, &lp.value)),
.map(|lp| pattern(c, PPrec::Free, f, &lp.pattern.value)),
f.text(" | "),
)
.append(match guard {

View File

@ -33,7 +33,9 @@ roc_can = { path = "../can" }
roc_parse = { path = "../parse" }
roc_build = { path = "../build", features = ["target-aarch64", "target-x86_64", "target-wasm32"] }
roc_target = { path = "../roc_target" }
roc_error_macros = { path = "../../error_macros" }
roc_std = { path = "../../roc_std" }
roc_debug_flags = {path="../debug_flags"}
bumpalo = { version = "3.8.0", features = ["collections"] }
either = "1.6.1"
libc = "0.2.106"
@ -44,6 +46,7 @@ tempfile = "3.2.0"
indoc = "1.0.3"
criterion = { git = "https://github.com/Anton-4/criterion.rs" }
wasm3 = "0.3.1"
lazy_static = "1.4.0"
[features]
default = ["gen-llvm"]

Some files were not shown because too many files have changed in this diff.