Resolve a bunnnch of merge conflicts

This commit is contained in:
Chelsea Troy 2021-11-29 23:14:29 -06:00
commit 6cf755ad8d
705 changed files with 57996 additions and 28320 deletions

4
.cargo/config Normal file
View File

@ -0,0 +1,4 @@
[alias]
test-gen-llvm = "test -p test_gen"
test-gen-dev = "test -p roc_gen_dev -p test_gen --no-default-features --features gen-dev"
test-gen-wasm = "test -p test_gen --no-default-features --features gen-wasm"

1
.envrc
View File

@ -1 +0,0 @@
use nix

View File

@ -1,32 +1,31 @@
on:
schedule:
- cron: '0 0 * * *'
- cron: '0 9 * * *'
name: Nightly Release Build
jobs:
build:
name: Test and Build
runs-on: ubuntu-latest
runs-on: [self-hosted, i5-4690K]
timeout-minutes: 90
env:
FORCE_COLOR: 1 # for earthly logging
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
- name: Earthly print version
run: earthly --version
- name: install dependencies, build, run tests, build release
run: ./ci/safe-earthly.sh +build-nightly-release
- name: Create pre-release with test_archive.tar.gz
uses: WebFreak001/deploy-nightly@v1.1.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # automatically provided by github actions
with:
profile: minimal
toolchain: stable
override: true
- run: rustup component add rustfmt
- uses: actions-rs/cargo@v1
name: cargo update (needed for Inkwell)
with:
command: update
- uses: actions-rs/cargo@v1
name: cargo test
with:
command: test
args: --release
- uses: actions-rs/cargo@v1
name: cargo build
with:
command: build
args: --release
upload_url: https://uploads.github.com/repos/rtfeldman/roc/releases/51880579/assets{?name,label}
release_id: 51880579
asset_path: ./roc_linux_x86_64.tar.gz
asset_name: roc_nightly-linux_x86_64-$$.tar.gz # $$ to inserts date (YYYYMMDD) and 6 letter commit hash
asset_content_type: application/gzip
max_releases: 3

View File

@ -8,7 +8,7 @@ env:
jobs:
spell-check:
name: spell check
runs-on: [self-hosted]
runs-on: [self-hosted, linux]
timeout-minutes: 10
env:
FORCE_COLOR: 1

View File

@ -9,7 +9,7 @@ on:
jobs:
deploy:
name: 'Deploy to Netlify'
runs-on: [self-hosted]
runs-on: [self-hosted, linux]
steps:
- uses: jsmrcaga/action-netlify-deploy@v1.6.0
with:

11
.gitignore vendored
View File

@ -2,8 +2,10 @@ target
generated-docs
zig-cache
.direnv
.envrc
*.rs.bk
*.o
*.tmp
# llvm human-readable output
*.ll
@ -26,6 +28,9 @@ editor/benches/resources/25000000_lines.roc
editor/benches/resources/50000_lines.roc
editor/benches/resources/500_lines.roc
# file editor creates when no arg is passed
new-roc-project
# rust cache (sccache folder)
sccache_dir
@ -34,3 +39,9 @@ bench-folder*
# earthly
earthly_log.txt
# created to test release
roc_linux_x86_64.tar.gz
# macOS .DS_Store files
.DS_Store

31
AUTHORS
View File

@ -28,3 +28,34 @@ Ju Liu <ju@noredink.com>
Peter Fields <pcfields@gmail.com>
Brian J. Cardiff <bcardiff@gmail.com>
Basile Henry <bjm.henry@gmail.com>
Tarjei Skjærset <tskj@tarjei.org>
Brian Hicks <brian@brianthicks.com>
Dan Gieschen Knutson <dan.knutson@gmail.com>
Joshua Hoeflich <joshuaharry411@icloud.com>
Brian Carroll <brian.carroll.ireland@gmail.com>
Kofi Gumbs <h.kofigumbs@gmail.com>
Luiz de Oliveira <luizcarlos1405@gmail.com>
Chelsea Troy <chelsea.dommert@gmail.com>
Shritesh Bhattarai <shr@ite.sh>
Kevin Sjöberg <mail@kevinsjoberg.com>
Viktor Fröberg <vikfroberg@gmail.com>
Locria Cyber <locriacyber@noreply.users.github.com>
Matthias Beyer <mail@beyermatthias.de>
Tim Whiting <tim@whitings.org>
Logan Lowder <logan.lowder@logikcull.com>
Joshua Warner <joshuawarner32@gmail.com>
Luiz Carlos L. G. de Oliveira <luizcarlos1405@gmail.com>
Oleksii Skidan <al.skidan@gmail.com>
Martin Janiczek <martin@janiczek.cz>
Eric Newbury <enewbury@users.noreply.github.com>
Ayaz Hafiz <ayaz.hafiz.1@gmail.com>
Johannes Maas <github@j-maas.de>
Takeshi Sato <doublequotation@gmail.com>
Joost Baas <joost@joostbaas.eu>
Callum Dunster <cdunster@users.noreply.github.com>
Martin Stewart <MartinSStewart@gmail.com>
James Hegedus <jthegedus@hey.com>
Cristiano Piemontese <cristiano.piemontese@vidiemme.it>
Yann Simon <yann.simon.fr@gmail.com>
Shahn Hogan <shahnhogan@hotmail.com>
Tankor Smash <tankorsmash+github@gmail.com>

View File

@ -1,13 +1,13 @@
# Building the Roc compiler from source
## Installing LLVM, Zig, valgrind, and Python 2.7
## Installing LLVM, Zig, valgrind, and Python
To build the compiler, you need these installed:
* Python 2.7 (Windows only), `python-is-python3` (Ubuntu)
* [Zig](https://ziglang.org/), see below for version
* `libxkbcommon` - macOS seems to have it already; on Ubuntu or Debian you can get it with `apt-get install libxkbcommon-dev`
* On Debian/Ubuntu `sudo apt-get install pkg-config`
* LLVM, see below for version
To run the test suite (via `cargo test`), you additionally need to install:
@ -49,16 +49,23 @@ If you want to install it manually, you can also download Zig directly [here](ht
**version: 12.0.x**
For macOS, you can install LLVM 12 using `brew install llvm@12` and then adding
`/usr/local/opt/llvm/bin` to your `PATH`. You can confirm this worked by
`/usr/local/opt/llvm@12/bin` to your `PATH`. You can confirm this worked by
running `llc --version` - it should mention "LLVM version 12.0.0" at the top.
For Ubuntu and Debian, you can use the `Automatic installation script` at [apt.llvm.org](https://apt.llvm.org):
You may also need to manually specify a prefix env var like so:
```
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
export LLVM_SYS_120_PREFIX=/usr/local/opt/llvm@12
```
For Ubuntu and Debian:
```
sudo apt -y install lsb-release software-properties-common gnupg
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
./llvm.sh 12
```
If you use this script, you'll need to add `clang` and `llvm-as` to your `PATH`.
By default, the script installs them as `llvm-as-12` and `clang-12`,
By default, the script installs them as `clang-12` and `llvm-as-12`,
respectively. You can address this with symlinks like so:
```
@ -74,29 +81,23 @@ There are also alternative installation options at http://releases.llvm.org/down
## Using Nix
:exclamation: **Our Nix setup is currently broken, you'll have to install manually for now** :exclamation:
### Install
Using [nix](https://nixos.org/download.html) is a quick way to get an environment bootstrapped with a single command.
Anyone having trouble installing the proper version of LLVM themselves might also prefer this method.
First, install nix:
If you are running ArchLinux or a derivative like Manjaro, you'll need to run `sudo sysctl -w kernel.unprivileged_userns_clone=1` before installing nix.
Install nix:
`curl -L https://nixos.org/nix/install | sh`
If MacOS and using a version >= 10.15:
`sh <(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume`
You may prefer to setup up the volume manually by following nix documentation.
> You may need to restart your terminal
You will need to start a fresh terminal session to use nix.
### Usage
Now with nix installed you just need to run one command:
Now with nix installed, you just need to run one command:
`nix-shell`
@ -112,33 +113,50 @@ You should be in a repl now. Have fun!
### Extra tips
If you plan on using `nix-shell` regularly, check out [direnv](https://direnv.net/) and [lorri](https://github.com/target/lorri). Whenever you `cd` into `roc/`, they will automatically load the Nix dependecies into your current shell, so you never have to run nix-shell directly!
If you plan on using `nix-shell` regularly, check out [direnv](https://direnv.net/) and [lorri](https://github.com/nix-community/lorri). Whenever you `cd` into `roc/`, they will automatically load the Nix dependencies into your current shell, so you never have to run nix-shell directly!
### Editor
When you want to run the editor from Ubuntu inside nix you need to install [nixGL](https://github.com/guibou/nixGL) as well:
The editor is a WIP and not ready yet to replace your favorite editor, although if you want to try it out on nix, read on.
`cargo run edit` should work from NixOS, if you use a nix-shell from inside another OS, follow the instructions below.
#### Nvidia GPU
Outside of a nix shell, execute the following:
```
nix-channel --add https://github.com/guibou/nixGL/archive/main.tar.gz nixgl && nix-channel --update
nix-env -iA nixgl.auto.nixVulkanNvidia
```
Running the editor does not work with `nix-shell --pure`.
```
nix-shell
```
460.91.03 may be different for you, type nixVulkanNvidia and press tab to autocomplete for your version.
```
nixVulkanNvidia-460.91.03 cargo run edit
```
#### Integrated Intel Graphics
:exclamation: ** Our Nix setup currently cannot run the editor with integrated intel graphics, see #1856 ** :exclamation:
Outside of a nix shell, run:
```bash
nix-shell
git clone https://github.com/guibou/nixGL
cd nixGL
```
If you have an Nvidia graphics card, run:
```
nix-env -f ./ -iA nixVulkanNvidia
```
If you have integrated Intel graphics, run:
```
nix-env -f ./ -iA nixVulkanIntel
```
Check the [nixGL repo](https://github.com/guibou/nixGL) for other configurations.
Now you should be able to run the editor:
```bash
cd roc
nixVulkanNvidia cargo run edit `# replace Nvidia with the config you chose in the previous step`
cd to the roc repo, and run (without --pure):
```
nix-shell
nixVulkanIntel cargo run edit
```
#### Other configs
Check the [nixGL repo](https://github.com/guibou/nixGL) for other graphics configurations.
## Troubleshooting
@ -189,6 +207,11 @@ on Windows. After lots of help from [**@IanMacKenzie**](https://github.com/IanMa
Once all that was done, `cargo` ran successfully for Roc!
### Build speed on WSL/WSL2
If your Roc project folder is in the Windows filesystem but you're compiling from Linux, rebuilds may be as much as 20x slower than they should be!
Disk access during linking seems to be the bottleneck. It's recommended to move your folder to the Linux filesystem.
## Use LLD for the linker
Using [`lld` for Rust's linker](https://github.com/rust-lang/rust/issues/39915#issuecomment-538049306)

2599
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -12,10 +12,8 @@ members = [
"compiler/constrain",
"compiler/unify",
"compiler/solve",
"compiler/reporting",
"compiler/fmt",
"compiler/mono",
"compiler/test_mono_macros",
"compiler/test_mono",
"compiler/load",
"compiler/gen_llvm",
@ -29,13 +27,22 @@ members = [
"vendor/pathfinding",
"vendor/pretty",
"editor",
"ast",
"cli",
"cli/cli_utils",
"code_markup",
"reporting",
"roc_std",
"utils",
"docs",
"linker",
]
exclude = [ "ci/bench-runner" ]
exclude = [
"ci/bench-runner",
# Ignore building these normally. They are only imported by tests.
# The tests will still correctly build them.
"cli_utils",
"compiler/test_mono_macros",
]
# Needed to be able to run `cargo run -p roc_cli --no-default-features` -
# see www/build.sh for more.
#

View File

@ -34,8 +34,8 @@ In the Roc community we strive to go the extra step to look out for each other.
And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make your fellow Roc programmers comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
The enforcement policies listed above apply to all official Roc venues; including official IRC channels (#rust, #rust-internals, #rust-tools, #rust-libs, #rustc, #rust-beginners, #rust-docs, #rust-community, #rust-lang, and #cargo); GitHub repositories under rust-lang, rust-lang-nursery, and rust-lang-deprecated; and all forums under rust-lang.org (users.rust-lang.org, internals.rust-lang.org). For other projects adopting the Roc Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
The enforcement policies listed above apply to all official Roc venues; including official Zulip chat (https://roc.zulipchat.com); and GitHub repositories under the roc-lang organization. If you wish to use this code of conduct (or the Rust code of conduct, on which it is based) for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
*Adapted from the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).*
[mod_team]: https://www.rust-lang.org/team.html#Moderation-team
[mod_team]: https://www.roc-lang.org/moderation

View File

@ -1,4 +1,4 @@
FROM rust:1.54-slim-bullseye
FROM rust:1.56.1-slim-bullseye
WORKDIR /earthbuild
prep-debian:
@ -8,6 +8,7 @@ install-other-libs:
FROM +prep-debian
RUN apt -y install wget git
RUN apt -y install libxcb-shape0-dev libxcb-xfixes0-dev # for editor clipboard
RUN apt -y install libasound2-dev # for editor sounds
RUN apt -y install libunwind-dev pkg-config libx11-dev zlib1g-dev
install-zig-llvm-valgrind-clippy-rustfmt:
@ -34,7 +35,7 @@ install-zig-llvm-valgrind-clippy-rustfmt:
RUN rustup component add rustfmt
# criterion
RUN cargo install cargo-criterion
# wasm
# editor
RUN apt -y install libxkbcommon-dev
# sccache
RUN apt -y install libssl-dev
@ -46,7 +47,7 @@ install-zig-llvm-valgrind-clippy-rustfmt:
copy-dirs:
FROM +install-zig-llvm-valgrind-clippy-rustfmt
COPY --dir cli compiler docs editor roc_std vendor examples linker Cargo.toml Cargo.lock ./
COPY --dir cli cli_utils compiler docs editor ast code_markup utils reporting roc_std vendor examples linker Cargo.toml Cargo.lock version.txt ./
test-zig:
FROM +install-zig-llvm-valgrind-clippy-rustfmt
@ -65,22 +66,30 @@ check-rustfmt:
RUN cargo fmt --all -- --check
check-typos:
RUN cargo install --version 1.0.11 typos-cli
COPY --dir .github ci cli compiler docs editor examples linker nightly_benches packages roc_std www *.md LEGAL_DETAILS shell.nix ./
RUN cargo install typos-cli --version 1.0.11 # version set to prevent confusion if the version is updated automatically
COPY --dir .github ci cli cli_utils compiler docs editor examples ast code_markup utils linker nightly_benches packages roc_std www *.md LEGAL_DETAILS shell.nix version.txt ./
RUN typos
test-rust:
FROM +copy-dirs
ENV RUST_BACKTRACE=1
# for race condition problem with cli test
ENV ROC_NUM_WORKERS=1
# run one of the benchmarks to make sure the host is compiled
# not pre-compiling the host can cause race conditions
RUN echo "4" | cargo run --release examples/benchmarks/NQueens.roc
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo test --release && sccache --show-stats
# run i386 (32-bit linux) cli tests
RUN echo "4" | cargo run --release -- --backend=x86_32 examples/benchmarks/NQueens.roc
cargo test --locked --release --features with_sound --workspace && sccache --show-stats
# test the dev and wasm backend: they require an explicit feature flag.
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo test --release --test cli_run i386 --features="i386-cli-run" && sccache --show-stats
cargo test --locked --release --package test_gen --no-default-features --features gen-dev && sccache --show-stats
# gen-wasm has some multithreading problems to do with the wasmer runtime. Run it single-threaded as a separate job
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo test --locked --release --package test_gen --no-default-features --features gen-wasm -- --test-threads=1 && sccache --show-stats
# run i386 (32-bit linux) cli tests
RUN echo "4" | cargo run --locked --release --features="target-x86" -- --backend=x86_32 examples/benchmarks/NQueens.roc
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo test --locked --release --features with_sound --test cli_run i386 --features="i386-cli-run" && sccache --show-stats
verify-no-git-changes:
FROM +test-rust
@ -97,11 +106,22 @@ test-all:
BUILD +test-zig
BUILD +check-rustfmt
BUILD +check-clippy
BUILD +check-typos
BUILD +test-rust
BUILD +verify-no-git-changes
# compile everything needed for benchmarks and output a self-contained folder
build-nightly-release:
FROM +test-rust
COPY --dir .git LICENSE LEGAL_DETAILS ./
# version.txt is used by the CLI: roc --version
RUN printf "nightly pre-release, built from commit " > version.txt
RUN git log --pretty=format:'%h' -n 1 >> version.txt
RUN printf " on: " >> version.txt
RUN date >> version.txt
RUN cargo build --features with_sound --release
RUN cd ./target/release && tar -czvf roc_linux_x86_64.tar.gz ./roc ../../LICENSE ../../LEGAL_DETAILS ../../examples/hello-world ../../examples/hello-rust ../../examples/hello-zig ../../compiler/builtins/bitcode/src/ ../../roc_std
SAVE ARTIFACT ./target/release/roc_linux_x86_64.tar.gz AS LOCAL roc_linux_x86_64.tar.gz
# compile everything needed for benchmarks and output a self-contained dir from which benchmarks can be run.
prep-bench-folder:
FROM +copy-dirs
ARG BENCH_SUFFIX=branch

View File

@ -2,33 +2,33 @@
Roc is a language for making delightful software.
If you already know [Elm](https://elm-lang.org/), then [Roc for Elm Programmers](https://github.com/rtfeldman/roc/blob/trunk/roc-for-elm-programmers.md) may be of interest.
The [tutorial](TUTORIAL.md) is the best place to learn about how to use the language - it assumes no prior knowledge of Roc or similar languages. (If you already know [Elm](https://elm-lang.org/), then [Roc for Elm Programmers](https://github.com/rtfeldman/roc/blob/trunk/roc-for-elm-programmers.md) may be of interest.)
If you're curious about where the language's name and logo came from,
[here's an explanation](https://github.com/rtfeldman/roc/blob/trunk/name-and-logo.md).
There's also a folder of [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) - the [CLI example](https://github.com/rtfeldman/roc/tree/trunk/examples/cli) in particular is a reasonable starting point to build on.
[Roc Zulip chat](https://roc.zulipchat.com) is the best place to ask questions and get help! It's also where we discuss [ideas](https://roc.zulipchat.com/#narrow/stream/304641-ideas) for the language. If you want to get involved in contributing to the language, Zulip is also a great place to ask about good first projects.
## State of Roc
Roc is not ready for production yet. You are likely to encounter bugs. Publishing packages or documentation is not yet supported.
Many programs can however be compiled correctly. Check out [examples](examples) and [examples/benchmarks](examples/benchmarks). There are minimal platforms for Rust, Zig, C, Swift and an HTTP server. We are hard at work to make programming in Roc a delightful experience!
## Getting started
1. [Install Rust](https://rustup.rs/)
2. [Build from source](BUILDING_FROM_SOURCE.md)
3. In a terminal, run this from the root folder:
```
cargo run repl
```
4. Check out [these tests](https://github.com/rtfeldman/roc/blob/trunk/cli/tests/repl_eval.rs) for examples of using the REPL
- [Linux x86](getting_started/linux_x86.md)
- [Windows](getting_started/windows.md)
- [Other](getting_started/other.md)
### Examples
Took a look at the `examples` folder, `examples/benchmarks` contains some larger examples.
Run examples as follows:
1. Navigate to `/examples`
2. Run with:
```
cargo run hello-world/Hello.roc
cargo run examples/hello-world/Hello.roc
```
Some examples like `examples/benchmarks/NQueens.roc` require input after running.
For NQueens, input 10 in the terminal and press enter.
For NQueens, input 10 in the terminal and press enter.
[examples/benchmarks](examples/benchmarks) contains larger examples.
## Applications and Platforms
@ -59,7 +59,7 @@ By using systems-level programming languages like C and C++, platform authors sa
Roc is designed to make the "systems-level platform, higher-level application" experience as nice as possible.
* **Application** authors code exclusively in Roc. It's a language designed for nice ergonomics. The syntax resembles Ruby or CoffeeScript, and it has a fast compiler with full type inference.
* **Platform** authors code almost exclusively in a systems-level language like C, C++, Rust, or [Zig](https://ziglang.org/), except for the thin Roc API they expose to application authors. Roc application code compiles to machine code, and production builds of Roc apps benefit from the same [LLVM](https://llvm.org/) optimizations that C++, Rust, and Zig do. Roc application authors do not need to know this lower-level code exists; all they have to interact with is the platform's API, which is exposed as an ordinary Roc API.
* **Platform** authors code almost exclusively in a systems-level language like C, C++, Rust, Swift or [Zig](https://ziglang.org/), except for the thin Roc API they expose to application authors. Roc application code compiles to machine code, and production builds of Roc apps benefit from the same [LLVM](https://llvm.org/) optimizations that C++, Rust, Swift and Zig do. Roc application authors do not need to know this lower-level code exists; all they have to interact with is the platform's API, which is exposed as an ordinary Roc API.
Every Roc application is built on top of exactly one Roc platform. There is no such thing as a Roc application that runs without a platform, and there is no default platform. You must choose one!
@ -100,5 +100,9 @@ never done anything with Rust and also never worked on a compiler, but we've
been able to find beginner-friendly projects to get people up to speed gradually.)
If you're interested in getting involved, check out
[CONTRIBUTING.md](https://github.com/rtfeldman/roc/blob/trunk/CONTRIBUTING.md) which has more info
and a link to our Zulip chat!
[CONTRIBUTING.md](https://github.com/rtfeldman/roc/blob/trunk/CONTRIBUTING.md)!
## Name and Logo
If you're curious about where the language's name and logo came from,
[here's an explanation](https://github.com/rtfeldman/roc/blob/trunk/name-and-logo.md).

1358
TUTORIAL.md Normal file

File diff suppressed because it is too large Load Diff

28
ast/Cargo.toml Normal file
View File

@ -0,0 +1,28 @@
[package]
name = "roc_ast"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
description = "AST as used by the editor and (soon) docs. In contrast to the compiler, these types do not keep track of a location in a file."
[dependencies]
roc_builtins = { path = "../compiler/builtins"}
roc_can = { path = "../compiler/can" }
roc_collections = { path = "../compiler/collections" }
roc_region = { path = "../compiler/region" }
roc_module = { path = "../compiler/module" }
roc_parse = { path = "../compiler/parse" }
roc_problem = { path = "../compiler/problem" }
roc_types = { path = "../compiler/types" }
roc_unify = { path = "../compiler/unify"}
roc_load = { path = "../compiler/load" }
arraystring = "0.3.0"
bumpalo = { version = "3.8.0", features = ["collections"] }
libc = "0.2.106"
page_size = "0.4.2"
snafu = { version = "0.6.10", features = ["backtraces"] }
ven_graph = { path = "../vendor/pathfinding" }
[dev-dependencies]
indoc = "1.0.3"

73
ast/src/ast_error.rs Normal file
View File

@ -0,0 +1,73 @@
use roc_module::{ident::Ident, module_err::ModuleError};
use roc_parse::parser::SyntaxError;
use roc_region::all::{Located, Region};
use snafu::{Backtrace, Snafu};
use crate::lang::core::ast::ASTNodeId;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum ASTError {
#[snafu(display(
"ASTNodeIdWithoutExprId: The expr_id_opt in ASTNode({:?}) was `None` but I was expecting `Some(ExprId)` .",
ast_node_id
))]
ASTNodeIdWithoutExprId {
ast_node_id: ASTNodeId,
backtrace: Backtrace,
},
#[snafu(display(
"UnexpectedASTNode: required a {} at this position, node was a {}.",
required_node_type,
encountered_node_type
))]
UnexpectedASTNode {
required_node_type: String,
encountered_node_type: String,
backtrace: Backtrace,
},
#[snafu(display(
"UnexpectedPattern2Variant: required a {} at this position, Pattern2 was a {}.",
required_pattern2,
encountered_pattern2,
))]
UnexpectedPattern2Variant {
required_pattern2: String,
encountered_pattern2: String,
backtrace: Backtrace,
},
#[snafu(display("IdentExistsError: {}", msg))]
IdentExistsError { msg: String },
WrapModuleError {
#[snafu(backtrace)]
source: ModuleError,
},
#[snafu(display("SyntaxError: {}", msg))]
SyntaxErrorNoBacktrace { msg: String },
}
pub type ASTResult<T, E = ASTError> = std::result::Result<T, E>;
impl From<ModuleError> for ASTError {
fn from(module_err: ModuleError) -> Self {
Self::WrapModuleError { source: module_err }
}
}
impl From<(Region, Located<Ident>)> for ASTError {
fn from(ident_exists_err: (Region, Located<Ident>)) -> Self {
Self::IdentExistsError {
msg: format!("{:?}", ident_exists_err),
}
}
}
impl<'a> From<SyntaxError<'a>> for ASTError {
fn from(syntax_err: SyntaxError) -> Self {
Self::SyntaxErrorNoBacktrace {
msg: format!("{:?}", syntax_err),
}
}
}

View File

@ -0,0 +1,306 @@
use roc_collections::all::MutMap;
use roc_problem::can::Problem;
use roc_region::all::{Located, Region};
use roc_types::subs::Variable;
use crate::{
lang::{
core::{
def::def::References,
expr::{
expr2::{Expr2, ExprId, WhenBranch},
expr_to_expr2::expr_to_expr2,
output::Output,
record_field::RecordField,
},
pattern::to_pattern2,
},
env::Env,
scope::Scope,
},
mem_pool::{pool_str::PoolStr, pool_vec::PoolVec, shallow_clone::ShallowClone},
};
pub(crate) enum CanonicalizeRecordProblem {
#[allow(dead_code)]
InvalidOptionalValue {
field_name: PoolStr,
field_region: Region,
record_region: Region,
},
}
enum FieldVar {
VarAndExprId(Variable, ExprId),
OnlyVar(Variable),
}
pub(crate) fn canonicalize_fields<'a>(
env: &mut Env<'a>,
scope: &mut Scope,
fields: &'a [Located<roc_parse::ast::AssignedField<'a, roc_parse::ast::Expr<'a>>>],
) -> Result<(PoolVec<RecordField>, Output), CanonicalizeRecordProblem> {
let mut can_fields: MutMap<&'a str, FieldVar> = MutMap::default();
let mut output = Output::default();
for loc_field in fields.iter() {
match canonicalize_field(env, scope, &loc_field.value) {
Ok(can_field) => {
match can_field {
CanonicalField::LabelAndValue {
label,
value_expr,
value_output,
var,
} => {
let expr_id = env.pool.add(value_expr);
let replaced =
can_fields.insert(label, FieldVar::VarAndExprId(var, expr_id));
if let Some(_old) = replaced {
// env.problems.push(Problem::DuplicateRecordFieldValue {
// field_name: label,
// field_region: loc_field.region,
// record_region: region,
// replaced_region: old.region,
// });
todo!()
}
output.references.union_mut(value_output.references);
}
CanonicalField::InvalidLabelOnly { label, var } => {
let replaced = can_fields.insert(label, FieldVar::OnlyVar(var));
if let Some(_old) = replaced {
todo!()
}
}
}
}
Err(CanonicalizeFieldProblem::InvalidOptionalValue {
field_name: _,
field_region: _,
}) => {
// env.problem(Problem::InvalidOptionalValue {
// field_name: field_name.clone(),
// field_region,
// record_region: region,
// });
// return Err(CanonicalizeRecordProblem::InvalidOptionalValue {
// field_name,
// field_region,
// record_region: region,
// });
todo!()
}
}
}
let pool_vec = PoolVec::with_capacity(can_fields.len() as u32, env.pool);
for (node_id, (string, field_var)) in pool_vec.iter_node_ids().zip(can_fields.into_iter()) {
let name = PoolStr::new(string, env.pool);
match field_var {
FieldVar::VarAndExprId(var, expr_id) => {
env.pool[node_id] = RecordField::LabeledValue(name, var, expr_id);
}
FieldVar::OnlyVar(var) => {
env.pool[node_id] = RecordField::InvalidLabelOnly(name, var);
} // TODO RecordField::LabelOnly
}
}
Ok((pool_vec, output))
}
#[allow(dead_code)]
enum CanonicalizeFieldProblem {
InvalidOptionalValue {
field_name: PoolStr,
field_region: Region,
},
}
enum CanonicalField<'a> {
LabelAndValue {
label: &'a str,
value_expr: Expr2,
value_output: Output,
var: Variable,
},
InvalidLabelOnly {
label: &'a str,
var: Variable,
}, // TODO make ValidLabelOnly
}
fn canonicalize_field<'a>(
env: &mut Env<'a>,
scope: &mut Scope,
field: &'a roc_parse::ast::AssignedField<'a, roc_parse::ast::Expr<'a>>,
) -> Result<CanonicalField<'a>, CanonicalizeFieldProblem> {
use roc_parse::ast::AssignedField::*;
match field {
// Both a label and a value, e.g. `{ name: "blah" }`
RequiredValue(label, _, loc_expr) => {
let field_var = env.var_store.fresh();
let (loc_can_expr, output) =
expr_to_expr2(env, scope, &loc_expr.value, loc_expr.region);
Ok(CanonicalField::LabelAndValue {
label: label.value,
value_expr: loc_can_expr,
value_output: output,
var: field_var,
})
}
OptionalValue(label, _, loc_expr) => Err(CanonicalizeFieldProblem::InvalidOptionalValue {
field_name: PoolStr::new(label.value, env.pool),
field_region: Region::span_across(&label.region, &loc_expr.region),
}),
// A label with no value, e.g. `{ name }` (this is sugar for { name: name })
LabelOnly(label) => {
let field_var = env.var_store.fresh();
// TODO return ValidLabel if label points to in scope variable
Ok(CanonicalField::InvalidLabelOnly {
label: label.value,
var: field_var,
})
}
SpaceBefore(sub_field, _) | SpaceAfter(sub_field, _) => {
canonicalize_field(env, scope, sub_field)
}
Malformed(_string) => {
panic!("TODO canonicalize malformed record field");
}
}
}
#[inline(always)]
pub(crate) fn canonicalize_when_branch<'a>(
env: &mut Env<'a>,
scope: &mut Scope,
branch: &'a roc_parse::ast::WhenBranch<'a>,
output: &mut Output,
) -> (WhenBranch, References) {
let patterns = PoolVec::with_capacity(branch.patterns.len() as u32, env.pool);
let original_scope = scope;
let mut scope = original_scope.shallow_clone();
// TODO report symbols not bound in all patterns
for (node_id, loc_pattern) in patterns.iter_node_ids().zip(branch.patterns.iter()) {
let (new_output, can_pattern) = to_pattern2(
env,
&mut scope,
roc_parse::pattern::PatternType::WhenBranch,
&loc_pattern.value,
loc_pattern.region,
);
output.union(new_output);
env.set_region(node_id, loc_pattern.region);
env.pool[node_id] = can_pattern;
}
let (value, mut branch_output) =
expr_to_expr2(env, &mut scope, &branch.value.value, branch.value.region);
let value_id = env.pool.add(value);
env.set_region(value_id, branch.value.region);
let guard = match &branch.guard {
None => None,
Some(loc_expr) => {
let (can_guard, guard_branch_output) =
expr_to_expr2(env, &mut scope, &loc_expr.value, loc_expr.region);
let expr_id = env.pool.add(can_guard);
env.set_region(expr_id, loc_expr.region);
branch_output.union(guard_branch_output);
Some(expr_id)
}
};
// Now that we've collected all the references for this branch, check to see if
// any of the new idents it defined were unused. If any were, report it.
for (symbol, region) in scope.symbols() {
let symbol = symbol;
if !output.references.has_lookup(symbol)
&& !branch_output.references.has_lookup(symbol)
&& !original_scope.contains_symbol(symbol)
{
env.problem(Problem::UnusedDef(symbol, region));
}
}
let references = branch_output.references.clone();
output.union(branch_output);
(
WhenBranch {
patterns,
body: value_id,
guard,
},
references,
)
}
/// Canonicalize a variable lookup (`foo` or `Module.foo`) into an `Expr2`.
///
/// An unqualified ident (empty `module_name`) is resolved against the local
/// `scope`; a qualified one is resolved through `env`. A successful lookup is
/// recorded in `output.references.lookups` so later passes can see which
/// symbols this expression depends on. Failed lookups are reported as
/// problems and produce `Expr2::RuntimeError`, so canonicalization continues.
pub(crate) fn canonicalize_lookup(
    env: &mut Env<'_>,
    scope: &mut Scope,
    module_name: &str,
    ident: &str,
    region: Region,
) -> (Expr2, Output) {
    use Expr2::*;

    let mut output = Output::default();
    let can_expr = if module_name.is_empty() {
        // Since module_name was empty, this is an unqualified var.
        // Look it up in scope!
        match scope.lookup(&(*ident).into(), region) {
            Ok(symbol) => {
                // Record the dependency for reference/unused-def analysis.
                output.references.lookups.insert(symbol);

                Var(symbol)
            }
            Err(problem) => {
                env.problem(Problem::RuntimeError(problem));

                RuntimeError()
            }
        }
    } else {
        // Since module_name was nonempty, this is a qualified var.
        // Look it up in the env!
        match env.qualified_lookup(module_name, ident, region) {
            Ok(symbol) => {
                output.references.lookups.insert(symbol);

                Var(symbol)
            }
            Err(problem) => {
                // Either the module wasn't imported, or
                // it was imported but it doesn't expose this ident.
                env.problem(Problem::RuntimeError(problem));

                RuntimeError()
            }
        }
    };

    // If it's valid, this ident should be in scope already.
    (can_expr, output)
}

View File

@ -0,0 +1,2 @@
pub mod canonicalize;
pub mod module;

View File

@ -2,14 +2,6 @@
#![allow(dead_code)]
#![allow(unused_imports)]
#![allow(unused_variables)]
use crate::lang::ast::{Expr2, FunctionDef, ValueDef};
use crate::lang::def::{canonicalize_defs, sort_can_defs, Declaration, Def};
use crate::lang::expr::Env;
use crate::lang::expr::Output;
use crate::lang::pattern::Pattern2;
use crate::lang::pool::{NodeId, Pool, PoolStr, PoolVec, ShallowClone};
use crate::lang::scope::Scope;
use crate::lang::types::Alias;
use bumpalo::Bump;
use roc_can::operator::desugar_def;
use roc_collections::all::{default_hasher, ImMap, ImSet, MutMap, MutSet, SendMap};
@ -22,6 +14,21 @@ use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Located, Region};
use roc_types::subs::{VarStore, Variable};
use crate::lang::core::def::def::canonicalize_defs;
use crate::lang::core::def::def::Def;
use crate::lang::core::def::def::{sort_can_defs, Declaration};
use crate::lang::core::expr::expr2::Expr2;
use crate::lang::core::expr::output::Output;
use crate::lang::core::pattern::Pattern2;
use crate::lang::core::types::Alias;
use crate::lang::core::val_def::ValueDef;
use crate::lang::env::Env;
use crate::lang::scope::Scope;
use crate::mem_pool::pool::NodeId;
use crate::mem_pool::pool::Pool;
use crate::mem_pool::pool_vec::PoolVec;
use crate::mem_pool::shallow_clone::ShallowClone;
pub struct ModuleOutput {
pub aliases: MutMap<Symbol, NodeId<Alias>>,
pub rigid_variables: MutMap<Variable, Lowercase>,

View File

@ -1,13 +1,5 @@
use bumpalo::{collections::Vec as BumpVec, Bump};
use crate::lang::{
ast::{ClosureExtra, Expr2, ExprId, RecordField, ValueDef, WhenBranch},
expr::Env,
pattern::{DestructType, Pattern2, PatternId, PatternState2, RecordDestruct},
pool::{Pool, PoolStr, PoolVec, ShallowClone},
types::{Type2, TypeId},
};
use roc_can::expected::{Expected, PExpected};
use roc_collections::all::{BumpMap, BumpMapDefault, Index, SendMap};
use roc_module::{
@ -21,6 +13,23 @@ use roc_types::{
types::{Category, Reason},
};
use crate::{
lang::{
core::{
expr::{
expr2::{ClosureExtra, Expr2, ExprId, WhenBranch},
record_field::RecordField,
},
fun_def::FunctionDef,
pattern::{DestructType, Pattern2, PatternId, PatternState2, RecordDestruct},
types::{Type2, TypeId},
val_def::ValueDef,
},
env::Env,
},
mem_pool::{pool::Pool, pool_str::PoolStr, pool_vec::PoolVec, shallow_clone::ShallowClone},
};
#[derive(Debug)]
pub enum Constraint<'a> {
Eq(Type2, Expected<Type2>, Category, Region),
@ -265,10 +274,10 @@ pub fn constrain_expr<'a>(
Expr2::Call {
args,
expr_var,
expr: expr_node_id,
expr_id: expr_node_id,
closure_var,
fn_var,
..
called_via,
} => {
// The expression that evaluates to the function being called, e.g. `foo` in
// (foo) bar baz
@ -340,7 +349,7 @@ pub fn constrain_expr<'a>(
region,
);
let category = Category::CallResult(opt_symbol);
let category = Category::CallResult(opt_symbol, *called_via);
let mut and_constraints = BumpVec::with_capacity_in(4, arena);
@ -497,7 +506,7 @@ pub fn constrain_expr<'a>(
flex_vars.push(*expr_var);
match expected {
Expected::FromAnnotation(name, arity, _, tipe) => {
Expected::FromAnnotation(name, arity, ann_source, tipe) => {
let num_branches = branches.len() + 1;
for (index, branch_id) in branches.iter_node_ids().enumerate() {
@ -519,6 +528,7 @@ pub fn constrain_expr<'a>(
AnnotationSource::TypedIfBranch {
index: Index::zero_based(index),
num_branches,
region: ann_source.region(),
},
tipe.shallow_clone(),
),
@ -539,6 +549,7 @@ pub fn constrain_expr<'a>(
AnnotationSource::TypedIfBranch {
index: Index::zero_based(branches.len()),
num_branches,
region: ann_source.region(),
},
tipe.shallow_clone(),
),
@ -645,7 +656,7 @@ pub fn constrain_expr<'a>(
flex_vars.push(*expr_var);
match &expected {
Expected::FromAnnotation(name, arity, _, _typ) => {
Expected::FromAnnotation(name, arity, ann_source, _typ) => {
// NOTE deviation from elm.
//
// in elm, `_typ` is used, but because we have this `expr_var` too
@ -678,6 +689,7 @@ pub fn constrain_expr<'a>(
*arity,
AnnotationSource::TypedWhenBranch {
index: Index::zero_based(index),
region: ann_source.region(),
},
typ.shallow_clone(),
),
@ -810,6 +822,126 @@ pub fn constrain_expr<'a>(
}
}
}
// In an expression like
// id = \x -> x
//
// id 1
// The `def_id` refers to the definition `id = \x -> x`,
// and the body refers to `id 1`.
Expr2::LetFunction {
def_id,
body_id,
body_var: _,
} => {
let body = env.pool.get(*body_id);
let body_con = constrain_expr(arena, env, body, expected.shallow_clone(), region);
let function_def = env.pool.get(*def_id);
let (name, arguments, body_id, rigid_vars, args_constrs) = match function_def {
FunctionDef::WithAnnotation {
name,
arguments,
body_id,
rigids,
return_type: _,
} => {
// The annotation gives us arguments with proper Type2s, but the constraints we
// generate below args bound to type variables. Create fresh ones and bind them
// to the types we already know.
let mut args_constrs = BumpVec::with_capacity_in(arguments.len(), arena);
let args_vars = PoolVec::with_capacity(arguments.len() as u32, env.pool);
for (arg_ty_node_id, arg_var_node_id) in
arguments.iter_node_ids().zip(args_vars.iter_node_ids())
{
let (ty, pattern) = env.pool.get(arg_ty_node_id);
let arg_var = env.var_store.fresh();
let ty = env.pool.get(*ty);
args_constrs.push(Eq(
Type2::Variable(arg_var),
Expected::NoExpectation(ty.shallow_clone()),
Category::Storage(std::file!(), std::line!()),
// TODO: should be the actual region of the argument
region,
));
env.pool[arg_var_node_id] = (arg_var, *pattern);
}
let rigids = env.pool.get(*rigids);
let rigid_vars: BumpVec<Variable> =
BumpVec::from_iter_in(rigids.names.iter(env.pool).map(|&(_, v)| v), arena);
(name, args_vars, body_id, rigid_vars, args_constrs)
}
FunctionDef::NoAnnotation {
name,
arguments,
body_id,
return_var: _,
} => {
(
name,
arguments.shallow_clone(),
body_id,
BumpVec::new_in(arena), // The function is unannotated, so there are no rigid type vars
BumpVec::new_in(arena), // No extra constraints to generate for arguments
)
}
};
// A function definition is equivalent to a named value definition, where the
// value is a closure. So, we create a closure definition in correspondence
// with the function definition, generate type constraints for it, and demand
// that type of the function is just the type of the resolved closure.
let fn_var = env.var_store.fresh();
let fn_ty = Type2::Variable(fn_var);
let extra = ClosureExtra {
return_type: env.var_store.fresh(),
captured_symbols: PoolVec::empty(env.pool),
closure_type: env.var_store.fresh(),
closure_ext_var: env.var_store.fresh(),
};
let clos = Expr2::Closure {
args: arguments.shallow_clone(),
uniq_symbol: *name,
body_id: *body_id,
function_type: env.var_store.fresh(),
extra: env.pool.add(extra),
recursive: roc_can::expr::Recursive::Recursive,
};
let clos_con = constrain_expr(
arena,
env,
&clos,
Expected::NoExpectation(fn_ty.shallow_clone()),
region,
);
// This is the `foo` part in `foo = \...`. We want to bind the name of the
// function with its type, whose constraints we generated above.
let mut def_pattern_state = PatternState2 {
headers: BumpMap::new_in(arena),
vars: BumpVec::new_in(arena),
constraints: args_constrs,
};
def_pattern_state.headers.insert(*name, fn_ty);
def_pattern_state.vars.push(fn_var);
Let(arena.alloc(LetConstraint {
rigid_vars,
flex_vars: def_pattern_state.vars,
def_types: def_pattern_state.headers, // Binding function name -> its type
defs_constraint: Let(arena.alloc(LetConstraint {
rigid_vars: BumpVec::new_in(arena), // always empty
flex_vars: BumpVec::new_in(arena), // empty, because our functions have no arguments
def_types: BumpMap::new_in(arena), // empty, because our functions have no arguments
defs_constraint: And(def_pattern_state.constraints),
ret_constraint: clos_con,
})),
ret_constraint: body_con,
}))
}
Expr2::Update {
symbol,
updates,
@ -936,8 +1068,8 @@ pub fn constrain_expr<'a>(
}
Expr2::Closure {
args,
name,
body: body_id,
uniq_symbol,
body_id,
function_type: fn_var,
extra,
..
@ -984,7 +1116,7 @@ pub fn constrain_expr<'a>(
let closure_constraint = constrain_closure_size(
arena,
env,
*name,
*uniq_symbol,
region,
captured_symbols,
*closure_var,
@ -1023,7 +1155,6 @@ pub fn constrain_expr<'a>(
exists(arena, vars, And(and_constraints))
}
Expr2::LetRec { .. } => todo!(),
Expr2::LetFunction { .. } => todo!(),
}
}
@ -1774,3 +1905,741 @@ fn num_num(pool: &mut Pool, type_id: TypeId) -> Type2 {
pool.add(alias_content),
)
}
#[cfg(test)]
pub mod test_constrain {
use bumpalo::Bump;
use roc_can::expected::Expected;
use roc_collections::all::MutMap;
use roc_module::{
ident::Lowercase,
symbol::{IdentIds, Interns, ModuleIds, Symbol},
};
use roc_parse::parser::SyntaxError;
use roc_region::all::Region;
use roc_types::{
pretty_print::{content_to_string, name_all_type_vars},
solved_types::Solved,
subs::{Subs, VarStore, Variable},
};
use super::Constraint;
use crate::{
constrain::constrain_expr,
lang::{
core::{
expr::{expr2::Expr2, expr_to_expr2::loc_expr_to_expr2, output::Output},
types::Type2,
},
env::Env,
scope::Scope,
},
mem_pool::pool::Pool,
solve_type,
};
use indoc::indoc;
/// Run the type solver over a single constraint.
///
/// Returns the solved substitutions, the resulting solver environment, and
/// any type errors collected during solving.
fn run_solve<'a>(
    arena: &'a Bump,
    mempool: &mut Pool,
    aliases: MutMap<Symbol, roc_types::types::Alias>,
    rigid_variables: MutMap<Variable, Lowercase>,
    constraint: Constraint,
    var_store: VarStore,
) -> (Solved<Subs>, solve_type::Env, Vec<solve_type::TypeError>) {
    let env = solve_type::Env {
        vars_by_symbol: MutMap::default(),
        aliases,
    };

    let mut subs = Subs::new_from_varstore(var_store);

    // Register the user-named (rigid) type variables up front so the solver
    // treats them as fixed rather than unifying them away.
    for (var, name) in rigid_variables {
        subs.rigid_var(var, name);
    }

    // Now that the module is parsed, canonicalized, and constrained,
    // we need to type check it.
    let mut problems = Vec::new();

    // Run the solver to populate Subs.
    let (solved_subs, solved_env) =
        solve_type::run(arena, mempool, &env, &mut problems, subs, &constraint);

    (solved_subs, solved_env, problems)
}
/// Test helper: parse + canonicalize + constrain + solve `actual`, then
/// assert that the pretty-printed inferred type equals `expected_str`.
/// Panics on a syntax error in `actual`.
fn infer_eq(actual: &str, expected_str: &str) {
    let mut env_pool = Pool::with_capacity(1024);
    let env_arena = Bump::new();
    let code_arena = Bump::new();

    let mut var_store = VarStore::default();
    // `var` stands for the type of the whole expression under test.
    let var = var_store.fresh();
    let dep_idents = IdentIds::exposed_builtins(8);
    let exposed_ident_ids = IdentIds::default();

    let mut module_ids = ModuleIds::default();
    let mod_id = module_ids.get_or_insert(&"ModId123".into());

    let mut env = Env::new(
        mod_id,
        &env_arena,
        &mut env_pool,
        &mut var_store,
        dep_idents,
        &module_ids,
        exposed_ident_ids,
    );

    let mut scope = Scope::new(env.home, env.pool, env.var_store);

    let region = Region::zero();

    let expr2_result = str_to_expr2(&code_arena, actual, &mut env, &mut scope, region);

    match expr2_result {
        Ok((expr, output)) => {
            // Constrain against a fresh variable with no expectation; solving
            // will then assign `var` the inferred type of the expression.
            let constraint = constrain_expr(
                &code_arena,
                &mut env,
                &expr,
                Expected::NoExpectation(Type2::Variable(var)),
                Region::zero(),
            );

            let Env {
                pool,
                var_store: ref_var_store,
                mut dep_idents,
                ..
            } = env;

            // extract the var_store out of the env again
            let mut var_store = VarStore::default();
            std::mem::swap(ref_var_store, &mut var_store);

            let rigids = output.introduced_variables.name_by_var;

            let (mut solved, _, _) = run_solve(
                &code_arena,
                pool,
                Default::default(),
                rigids,
                constraint,
                var_store,
            );

            let subs = solved.inner_mut();

            // name type vars
            name_all_type_vars(var, subs);

            let content = subs.get_content_without_compacting(var);

            // Connect the ModuleId to it's IdentIds
            dep_idents.insert(mod_id, env.ident_ids);

            let interns = Interns {
                module_ids: env.module_ids.clone(),
                all_ident_ids: dep_idents,
            };

            // Pretty-print the solved type and compare textually.
            let actual_str = content_to_string(content, subs, mod_id, &interns);

            assert_eq!(actual_str, expected_str);
        }
        Err(e) => panic!("syntax error {:?}", e),
    }
}
/// Parse `input` as a single expression and canonicalize it to an `Expr2`.
/// Returns the parser's error unchanged on malformed input.
pub fn str_to_expr2<'a>(
    arena: &'a Bump,
    input: &'a str,
    env: &mut Env<'a>,
    scope: &mut Scope,
    region: Region,
) -> Result<(Expr2, Output), SyntaxError<'a>> {
    match roc_parse::test_helpers::parse_loc_with(arena, input.trim()) {
        Ok(loc_expr) => Ok(loc_expr_to_expr2(arena, loc_expr, env, scope, region)),
        Err(fail) => Err(fail),
    }
}
#[test]
fn constrain_str() {
infer_eq(
indoc!(
r#"
"type inference!"
"#
),
"Str",
)
}
// This will be more useful once we actually map
// strings less than 15 chars to SmallStr
#[test]
fn constrain_small_str() {
infer_eq(
indoc!(
r#"
"a"
"#
),
"Str",
)
}
#[test]
fn constrain_empty_record() {
infer_eq(
indoc!(
r#"
{}
"#
),
"{}",
)
}
#[test]
fn constrain_small_int() {
infer_eq(
indoc!(
r#"
12
"#
),
"Num *",
)
}
#[test]
fn constrain_float() {
infer_eq(
indoc!(
r#"
3.14
"#
),
"Float *",
)
}
#[test]
fn constrain_record() {
infer_eq(
indoc!(
r#"
{ x : 1, y : "hi" }
"#
),
"{ x : Num *, y : Str }",
)
}
#[test]
fn constrain_empty_list() {
infer_eq(
indoc!(
r#"
[]
"#
),
"List *",
)
}
#[test]
fn constrain_list() {
infer_eq(
indoc!(
r#"
[ 1, 2 ]
"#
),
"List (Num *)",
)
}
#[test]
fn constrain_list_of_records() {
infer_eq(
indoc!(
r#"
[ { x: 1 }, { x: 3 } ]
"#
),
"List { x : Num * }",
)
}
#[test]
fn constrain_global_tag() {
infer_eq(
indoc!(
r#"
Foo
"#
),
"[ Foo ]*",
)
}
#[test]
fn constrain_private_tag() {
infer_eq(
indoc!(
r#"
@Foo
"#
),
"[ @Foo ]*",
)
}
#[test]
fn constrain_call_and_accessor() {
infer_eq(
indoc!(
r#"
.foo { foo: "bar" }
"#
),
"Str",
)
}
#[test]
fn constrain_access() {
infer_eq(
indoc!(
r#"
{ foo: "bar" }.foo
"#
),
"Str",
)
}
#[test]
fn constrain_if() {
infer_eq(
indoc!(
r#"
if True then Green else Red
"#
),
"[ Green, Red ]*",
)
}
#[test]
fn constrain_when() {
infer_eq(
indoc!(
r#"
when if True then Green else Red is
Green -> Blue
Red -> Purple
"#
),
"[ Blue, Purple ]*",
)
}
#[test]
fn constrain_let_value() {
infer_eq(
indoc!(
r#"
person = { name: "roc" }
person
"#
),
"{ name : Str }",
)
}
#[test]
fn constrain_update() {
infer_eq(
indoc!(
r#"
person = { name: "roc" }
{ person & name: "bird" }
"#
),
"{ name : Str }",
)
}
#[ignore = "TODO: implement builtins in the editor"]
#[test]
fn constrain_run_low_level() {
infer_eq(
indoc!(
r#"
List.map [ { name: "roc" }, { name: "bird" } ] .name
"#
),
"List Str",
)
}
#[test]
fn dual_arity_lambda() {
infer_eq(
indoc!(
r#"
\a, b -> Pair a b
"#
),
"a, b -> [ Pair a b ]*",
);
}
#[test]
fn anonymous_identity() {
infer_eq(
indoc!(
r#"
(\a -> a) 3.14
"#
),
"Float *",
);
}
#[test]
fn identity_of_identity() {
infer_eq(
indoc!(
r#"
(\val -> val) (\val -> val)
"#
),
"a -> a",
);
}
#[test]
fn identity_function() {
infer_eq(
indoc!(
r#"
\val -> val
"#
),
"a -> a",
);
}
#[test]
fn apply_function() {
infer_eq(
indoc!(
r#"
\f, x -> f x
"#
),
"(a -> b), a -> b",
);
}
#[test]
fn flip_function() {
infer_eq(
indoc!(
r#"
\f -> (\a, b -> f b a)
"#
),
"(a, b -> c) -> (b, a -> c)",
);
}
#[test]
fn always_function() {
infer_eq(
indoc!(
r#"
\val -> \_ -> val
"#
),
"a -> (* -> a)",
);
}
#[test]
fn pass_a_function() {
infer_eq(
indoc!(
r#"
\f -> f {}
"#
),
"({} -> a) -> a",
);
}
#[test]
fn constrain_closure() {
infer_eq(
indoc!(
r#"
x = 1
\{} -> x
"#
),
"{}* -> Num *",
)
}
#[test]
fn recursive_identity() {
infer_eq(
indoc!(
r#"
identity = \val -> val
identity
"#
),
"a -> a",
);
}
#[test]
fn use_apply() {
infer_eq(
indoc!(
r#"
identity = \a -> a
apply = \f, x -> f x
apply identity 5
"#
),
"Num *",
);
}
#[test]
fn nested_let_function() {
infer_eq(
indoc!(
r#"
curryPair = \a ->
getB = \b -> Pair a b
getB
curryPair
"#
),
"a -> (b -> [ Pair a b ]*)",
);
}
#[test]
fn record_with_bound_var() {
infer_eq(
indoc!(
r#"
fn = \rec ->
x = rec.x
rec
fn
"#
),
"{ x : a }b -> { x : a }b",
);
}
#[test]
fn using_type_signature() {
infer_eq(
indoc!(
r#"
bar : custom -> custom
bar = \x -> x
bar
"#
),
"custom -> custom",
);
}
#[ignore = "Currently panics at 'Invalid Cycle', ast/src/lang/core/def/def.rs:1212:21"]
#[test]
fn using_type_signature2() {
infer_eq(
indoc!(
r#"
id1 : tya -> tya
id1 = \x -> x
id2 : tyb -> tyb
id2 = id1
id2
"#
),
"tyb -> tyb",
);
}
#[ignore = "Implement annotation-only decls"]
#[test]
fn type_signature_without_body() {
infer_eq(
indoc!(
r#"
foo: Str -> {}
foo "hi"
"#
),
"{}",
);
}
#[ignore = "Implement annotation-only decls"]
#[test]
fn type_signature_without_body_rigid() {
infer_eq(
indoc!(
r#"
foo : Num * -> custom
foo 2
"#
),
"custom",
);
}
#[test]
fn inference_var_inside_arrow() {
infer_eq(
indoc!(
r#"
id : _ -> _
id = \x -> x
id
"#
),
"a -> a",
)
}
#[test]
#[ignore = "TODO: Type2::substitute"]
fn inference_var_inside_ctor() {
infer_eq(
indoc!(
r#"
canIGo : _ -> Result _ _
canIGo = \color ->
when color is
"green" -> Ok "go!"
"yellow" -> Err (SlowIt "whoa, let's slow down!")
"red" -> Err (StopIt "absolutely not")
_ -> Err (UnknownColor "this is a weird stoplight")
canIGo
"#
),
"Str -> Result Str [ SlowIt Str, StopIt Str, UnknownColor Str ]*",
)
}
#[test]
#[ignore = "TODO: Gives { x : *, y : * } -> { x : *, y : * }. This is a bug in typechecking defs with annotations."]
fn inference_var_inside_ctor_linked() {
infer_eq(
indoc!(
r#"
swapRcd: {x: _, y: _} -> {x: _, y: _}
swapRcd = \{x, y} -> {x: y, y: x}
swapRcd
"#
),
"{ x : a, y : b } -> { x : b, y : a }",
)
}
#[test]
fn inference_var_link_with_rigid() {
infer_eq(
indoc!(
r#"
swapRcd: {x: tx, y: ty} -> {x: _, y: _}
swapRcd = \{x, y} -> {x: y, y: x}
swapRcd
"#
),
"{ x : tx, y : ty } -> { x : ty, y : tx }",
)
}
#[test]
#[ignore = "TODO: Type2::substitute"]
fn inference_var_inside_tag_ctor() {
infer_eq(
indoc!(
r#"
badComics: Bool -> [ CowTools _, Thagomizer _ ]
badComics = \c ->
when c is
True -> CowTools "The Far Side"
False -> Thagomizer "The Far Side"
badComics
"#
),
"Bool -> [ CowTools Str, Thagomizer Str ]",
)
}
#[test]
fn inference_var_tag_union_ext() {
// TODO: we should really be inferring [ Blue, Orange ]a -> [ Lavender, Peach ]a here.
// See https://github.com/rtfeldman/roc/issues/2053
infer_eq(
indoc!(
r#"
pastelize: _ -> [ Lavender, Peach ]_
pastelize = \color ->
when color is
Blue -> Lavender
Orange -> Peach
col -> col
pastelize
"#
),
"[ Blue, Lavender, Orange, Peach ]a -> [ Blue, Lavender, Orange, Peach ]a",
)
}
#[test]
#[ignore = "TODO: gives { email : a, name : b }c -> { email : a, name : b }c. This is a bug in typechecking defs with annotations."]
fn inference_var_rcd_union_ext() {
infer_eq(
indoc!(
r#"
setRocEmail : _ -> { name: Str, email: Str }_
setRocEmail = \person ->
{ person & email: "\(person.name)@roclang.com" }
setRocEmail
"#
),
"{ email : Str, name : Str }a -> { email : Str, name : Str }a",
)
}
}

45
ast/src/lang/core/ast.rs Normal file
View File

@ -0,0 +1,45 @@
use crate::{
ast_error::{ASTNodeIdWithoutExprId, ASTResult},
mem_pool::pool::Pool,
};
use super::{
def::def2::{def2_to_string, DefId},
expr::{expr2::ExprId, expr2_to_string::expr2_to_string},
header::AppHeader,
};
/// The root of a parsed module: its app header plus the ids of its
/// top-level definitions (stored in the pool).
#[derive(Debug)]
pub struct AST {
    pub header: AppHeader,
    pub def_ids: Vec<DefId>,
}

/// A node in the AST is either a top-level def or an expression.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum ASTNodeId {
    ADefId(DefId),
    AExprId(ExprId),
}

impl ASTNodeId {
    /// Return the contained `ExprId`, or fail if this node holds a def.
    pub fn to_expr_id(&self) -> ASTResult<ExprId> {
        match self {
            ASTNodeId::AExprId(expr_id) => Ok(*expr_id),
            _ => ASTNodeIdWithoutExprId { ast_node_id: *self }.fail()?,
        }
    }

    /// Return the contained `DefId`, or fail if this node holds an expression.
    pub fn to_def_id(&self) -> ASTResult<DefId> {
        match self {
            ASTNodeId::ADefId(def_id) => Ok(*def_id),
            // NOTE(review): this reuses the `ASTNodeIdWithoutExprId` error even
            // though what's missing here is a DefId — confirm whether a
            // dedicated "WithoutDefId" variant exists in ast_error.
            _ => ASTNodeIdWithoutExprId { ast_node_id: *self }.fail()?,
        }
    }
}

/// Render any AST node (def or expr) as a debug string.
pub fn ast_node_to_string(node_id: ASTNodeId, pool: &Pool) -> String {
    match node_id {
        ASTNodeId::ADefId(def_id) => def2_to_string(def_id, pool),
        ASTNodeId::AExprId(expr_id) => expr2_to_string(expr_id, pool),
    }
}

View File

@ -0,0 +1,70 @@
use roc_types::subs::VarStore;
use crate::{
lang::core::{def::def::Def, expr::expr2::Expr2},
mem_pool::{pool::Pool, pool_vec::PoolVec},
};
use super::def::def::Declaration;
/// Convert a canonicalized top-level `Declaration` into a nested let
/// expression whose body is `ret`.
///
/// - `Declare(Def::Value)`    => `Expr2::LetValue`
/// - `Declare(Def::Function)` => `Expr2::LetFunction`
/// - `DeclareRec`             => `Expr2::LetRec` (function defs only)
/// - `InvalidCycle` is not implemented yet; `Builtin` must never reach here.
pub(crate) fn decl_to_let(
    pool: &mut Pool,
    var_store: &mut VarStore,
    decl: Declaration,
    ret: Expr2,
) -> Expr2 {
    match decl {
        Declaration::Declare(def) => match def {
            Def::AnnotationOnly { .. } => todo!(),
            Def::Value(value_def) => {
                let def_id = pool.add(value_def);
                let body_id = pool.add(ret);

                Expr2::LetValue {
                    def_id,
                    body_id,
                    body_var: var_store.fresh(),
                }
            }
            Def::Function(function_def) => {
                let def_id = pool.add(function_def);
                let body_id = pool.add(ret);

                Expr2::LetFunction {
                    def_id,
                    body_id,
                    body_var: var_store.fresh(),
                }
            }
        },
        Declaration::DeclareRec(defs) => {
            let mut function_defs = vec![];

            // Only functions can form a recursive group here; a recursive
            // value def is treated as unreachable.
            for def in defs {
                match def {
                    Def::AnnotationOnly { .. } => todo!(),
                    Def::Function(function_def) => function_defs.push(function_def),
                    Def::Value(_) => unreachable!(),
                }
            }

            let body_id = pool.add(ret);

            Expr2::LetRec {
                defs: PoolVec::new(function_defs.into_iter(), pool),
                body_var: var_store.fresh(),
                body_id,
            }
        }
        Declaration::InvalidCycle(_entries, _) => {
            // TODO: replace with something from Expr2
            // Expr::RuntimeError(RuntimeError::CircularDef(entries))
            todo!()
        }
        Declaration::Builtin(_) => {
            // Builtins should only be added to top-level decls, not to let-exprs!
            unreachable!()
        }
    }
}

View File

@ -12,15 +12,6 @@
// };
// use crate::pattern::{bindings_from_patterns, canonicalize_pattern, Pattern};
// use crate::procedure::References;
use crate::lang::ast::{Expr2, FunctionDef, Rigids, ValueDef};
use crate::lang::expr::Output;
use crate::lang::expr::{to_expr2, to_expr_id, Env};
use crate::lang::pattern::{
symbols_and_variables_from_pattern, symbols_from_pattern, to_pattern_id, Pattern2, PatternId,
};
use crate::lang::pool::{NodeId, Pool, PoolStr, PoolVec, ShallowClone};
use crate::lang::scope::Scope;
use crate::lang::types::{to_annotation2, Alias, Annotation2, Signature, Type2, TypeId};
use roc_collections::all::{default_hasher, ImMap, MutMap, MutSet, SendMap};
use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol;
@ -33,6 +24,26 @@ use std::collections::HashMap;
use std::fmt::Debug;
use ven_graph::{strongly_connected_components, topological_sort_into_groups};
use crate::{
lang::{
core::{
expr::{expr2::Expr2, expr_to_expr2::expr_to_expr2, output::Output},
fun_def::FunctionDef,
pattern::{self, symbols_from_pattern, to_pattern_id, Pattern2, PatternId},
types::{to_annotation2, Alias, Annotation2, Signature, Type2, TypeId},
val_def::ValueDef,
},
env::Env,
rigids::Rigids,
scope::Scope,
},
mem_pool::{
pool::{NodeId, Pool},
pool_vec::PoolVec,
shallow_clone::ShallowClone,
},
};
#[derive(Debug)]
pub enum Def {
AnnotationOnly { rigids: Rigids, annotation: TypeId },
@ -127,7 +138,7 @@ fn to_pending_def<'a>(
match def {
Annotation(loc_pattern, loc_ann) => {
// This takes care of checking for shadowing and adding idents to scope.
let (output, loc_can_pattern) = crate::lang::pattern::to_pattern_id(
let (output, loc_can_pattern) = pattern::to_pattern_id(
env,
scope,
pattern_type,
@ -142,7 +153,7 @@ fn to_pending_def<'a>(
}
Body(loc_pattern, loc_expr) => {
// This takes care of checking for shadowing and adding idents to scope.
let (output, loc_can_pattern) = crate::lang::pattern::to_pattern_id(
let (output, loc_can_pattern) = pattern::to_pattern_id(
env,
scope,
pattern_type,
@ -309,7 +320,7 @@ fn from_pending_alias<'a>(
}
for loc_lowercase in vars {
if !named_rigids.contains_key(loc_lowercase.value.as_str()) {
if !named_rigids.contains_key(&loc_lowercase.value) {
env.problem(Problem::PhantomTypeArgument {
alias: symbol,
variable_region: loc_lowercase.region,
@ -447,6 +458,10 @@ fn canonicalize_pending_def<'a>(
output.references.referenced_aliases.insert(symbol);
}
// Ensure rigid type vars and their names are known in the output.
for (name, &var) in named_rigids.iter() {
output.introduced_variables.insert_named(name.clone(), var);
}
let rigids = Rigids::new(named_rigids, unnamed_rigids, env.pool);
// bookkeeping for tail-call detection. If we're assigning to an
@ -457,7 +472,7 @@ fn canonicalize_pending_def<'a>(
env.tailcallable_symbol = Some(*defined_symbol);
};
// regiser the name of this closure, to make sure the closure won't capture it's own name
// register the name of this closure, to make sure the closure won't capture it's own name
if let (Pattern2::Identifier(ref defined_symbol), &ast::Expr::Closure(_, _)) =
(&env.pool[loc_can_pattern], &loc_expr.value)
{
@ -465,7 +480,7 @@ fn canonicalize_pending_def<'a>(
};
let (loc_can_expr, can_output) =
to_expr2(env, scope, &loc_expr.value, loc_expr.region);
expr_to_expr2(env, scope, &loc_expr.value, loc_expr.region);
output.references.union_mut(can_output.references.clone());
@ -481,9 +496,9 @@ fn canonicalize_pending_def<'a>(
match loc_can_expr {
Expr2::Closure {
args: closure_args,
body,
body_id,
extra,
name: closure_symbol,
uniq_symbol: closure_symbol,
..
} => {
let symbol = match env.pool[loc_can_pattern] {
@ -514,7 +529,7 @@ fn canonicalize_pending_def<'a>(
// parent commit for the bug this fixed!
let refs = References::new();
let arguments: PoolVec<(PatternId, Type2)> =
let arguments: PoolVec<(NodeId<Type2>, PatternId)> =
PoolVec::with_capacity(closure_args.len() as u32, env.pool);
let return_type: TypeId;
@ -551,7 +566,8 @@ fn canonicalize_pending_def<'a>(
for (node_id, ((_, pattern_id), typ)) in
arguments.iter_node_ids().zip(it.into_iter())
{
env.pool[node_id] = (pattern_id, typ);
let typ = env.pool.add(typ);
env.pool[node_id] = (typ, pattern_id);
}
return_type = return_type_id;
@ -563,7 +579,7 @@ fn canonicalize_pending_def<'a>(
arguments,
rigids: env.pool.add(rigids),
return_type,
body,
body_id,
};
let def = Def::Function(function_def);
@ -625,14 +641,15 @@ fn canonicalize_pending_def<'a>(
env.tailcallable_symbol = Some(*defined_symbol);
};
// regiser the name of this closure, to make sure the closure won't capture it's own name
// register the name of this closure, to make sure the closure won't capture it's own name
if let (Pattern2::Identifier(ref defined_symbol), &ast::Expr::Closure(_, _)) =
(&env.pool[loc_can_pattern], &loc_expr.value)
{
env.closure_name_symbol = Some(*defined_symbol);
};
let (loc_can_expr, can_output) = to_expr2(env, scope, &loc_expr.value, loc_expr.region);
let (loc_can_expr, can_output) =
expr_to_expr2(env, scope, &loc_expr.value, loc_expr.region);
output.references.union_mut(can_output.references.clone());
@ -648,9 +665,9 @@ fn canonicalize_pending_def<'a>(
match loc_can_expr {
Expr2::Closure {
args: closure_args,
body,
body_id,
extra,
name: closure_symbol,
uniq_symbol: closure_symbol,
..
} => {
let symbol = match env.pool[loc_can_pattern] {
@ -681,21 +698,21 @@ fn canonicalize_pending_def<'a>(
// parent commit for the bug this fixed!
let refs = References::new();
let arguments: PoolVec<(PatternId, Variable)> =
let arguments: PoolVec<(Variable, PatternId)> =
PoolVec::with_capacity(closure_args.len() as u32, env.pool);
let it: Vec<_> = closure_args.iter(env.pool).map(|(x, y)| (*x, *y)).collect();
for (node_id, (_, pattern_id)) in arguments.iter_node_ids().zip(it.into_iter())
{
env.pool[node_id] = (pattern_id, env.var_store.fresh());
env.pool[node_id] = (env.var_store.fresh(), pattern_id);
}
let function_def = FunctionDef::NoAnnotation {
name: symbol,
arguments,
return_var: env.var_store.fresh(),
body,
body_id,
};
let def = Def::Function(function_def);

View File

@ -0,0 +1,42 @@
use roc_module::symbol::IdentId;
use crate::{
lang::core::expr::{expr2::Expr2, expr2_to_string::expr2_to_string},
mem_pool::pool::{NodeId, Pool},
};
/// A top level definition, not inside a function.
/// For example: `main = "Hello, world!"`
#[derive(Debug)]
pub enum Def2 {
    // ValueDef example: `main = "Hello, world!"`. identifier -> `main`, expr -> "Hello, world!"
    ValueDef {
        identifier_id: IdentId,
        expr_id: NodeId<Expr2>,
    },
    // An empty placeholder slot (rendered as an empty box in the editor).
    Blank,
}

/// Handle to a `Def2` stored in the pool.
pub type DefId = NodeId<Def2>;
/// Render the `Def2` behind `node_id` (looked up in `pool`) as a debug string.
pub fn def2_to_string(node_id: DefId, pool: &Pool) -> String {
    match pool.get(node_id) {
        Def2::ValueDef {
            identifier_id,
            expr_id,
        } => format!(
            "Def2::ValueDef(identifier_id: >>{:?}), expr_id: >>{:?})",
            identifier_id,
            expr2_to_string(*expr_id, pool)
        ),
        Def2::Blank => String::from("Def2::Blank"),
    }
}

View File

@ -0,0 +1,86 @@
use bumpalo::collections::Vec as BumpVec;
use bumpalo::Bump;
use roc_module::ident::{Ident, IdentStr};
use roc_parse::parser::SyntaxError;
use roc_region::all::Region;
use crate::lang::{core::expr::expr_to_expr2::loc_expr_to_expr2, env::Env, scope::Scope};
use super::def2::Def2;
/// Convert a slice of parsed top-level defs into `Def2`s, preserving order.
pub fn defs_to_defs2<'a>(
    arena: &'a Bump,
    env: &mut Env<'a>,
    scope: &mut Scope,
    parsed_defs: &'a BumpVec<roc_region::all::Loc<roc_parse::ast::Def<'a>>>,
    region: Region,
) -> Vec<Def2> {
    parsed_defs
        .iter()
        .map(|loc| def_to_def2(arena, env, scope, &loc.value, region))
        .collect()
}
/// Convert one parsed def into a `Def2`, canonicalizing its body expression
/// into the pool.
///
/// Currently only `Body` defs whose pattern is a plain identifier are
/// supported; spaces around a def are skipped recursively. All other def
/// shapes hit `unimplemented!`.
pub fn def_to_def2<'a>(
    arena: &'a Bump,
    env: &mut Env<'a>,
    scope: &mut Scope,
    parsed_def: &'a roc_parse::ast::Def<'a>,
    region: Region,
) -> Def2 {
    use roc_parse::ast::Def::*;

    match parsed_def {
        // Whitespace/comments around a def carry no meaning here; unwrap them.
        SpaceBefore(inner_def, _) => def_to_def2(arena, env, scope, inner_def, region),
        SpaceAfter(inner_def, _) => def_to_def2(arena, env, scope, inner_def, region),
        Body(&loc_pattern, &loc_expr) => {
            // Canonicalize the right-hand side and store it in the pool.
            let expr2 = loc_expr_to_expr2(arena, loc_expr, env, scope, region).0;
            let expr_id = env.pool.add(expr2);

            use roc_parse::ast::Pattern::*;

            match loc_pattern.value {
                Identifier(id_str) => {
                    let identifier_id = env.ident_ids.get_or_insert(&Ident(IdentStr::from(id_str)));

                    // TODO support with annotation
                    Def2::ValueDef {
                        identifier_id,
                        expr_id,
                    }
                }
                other => {
                    unimplemented!(
                        "I don't yet know how to convert the pattern {:?} into an expr2",
                        other
                    )
                }
            }
        }
        other => {
            unimplemented!(
                "I don't know how to make an expr2 from this def yet: {:?}",
                other
            )
        }
    }
}
/// Parse `input` as a series of defs and convert them to `Def2`s.
/// Returns the parser's error unchanged on malformed input.
pub fn str_to_def2<'a>(
    arena: &'a Bump,
    input: &'a str,
    env: &mut Env<'a>,
    scope: &mut Scope,
    region: Region,
) -> Result<Vec<Def2>, SyntaxError<'a>> {
    match roc_parse::test_helpers::parse_defs_with(arena, input.trim()) {
        // The parsed defs are moved into the arena so they live long enough
        // for the `'a` borrows inside defs_to_defs2.
        Ok(vec_loc_def) => Ok(defs_to_defs2(
            arena,
            env,
            scope,
            arena.alloc(vec_loc_def),
            region,
        )),
        Err(fail) => Err(fail),
    }
}

View File

@ -0,0 +1,3 @@
pub mod def;
pub mod def2;
pub mod def_to_def2;

View File

@ -0,0 +1,233 @@
use arraystring::{typenum::U30, ArrayString};
use roc_types::subs::Variable;
use crate::{
lang::core::{fun_def::FunctionDef, pattern::Pattern2, val_def::ValueDef},
mem_pool::{pool::NodeId, pool_str::PoolStr, pool_vec::PoolVec},
};
use roc_can::expr::Recursive;
use roc_module::called_via::CalledVia;
use roc_module::low_level::LowLevel;
use roc_module::symbol::Symbol;
use super::record_field::RecordField;
pub type ArrString = ArrayString<U30>;
// TODO make the inner types private?
pub type ExprId = NodeId<Expr2>;
/// An Expr that fits in 32B.
/// It has a 1B discriminant and variants which hold payloads of at most 31B.
#[derive(Debug)]
pub enum Expr2 {
/// A negative number literal without a dot
SmallInt {
number: IntVal, // 16B
var: Variable, // 4B
style: IntStyle, // 1B
text: PoolStr, // 8B
},
// TODO(rvcas): rename this eventually
/// A large (over 64-bit) negative number literal without a dot.
/// This variant can't use IntVal because if IntVal stored 128-bit
/// integers, it would be 32B on its own because of alignment.
I128 {
number: i128, // 16B
var: Variable, // 4B
style: IntStyle, // 1B
text: PoolStr, // 8B
},
// TODO(rvcas): rename this eventually
/// A large (over 64-bit) nonnegative number literal without a dot
/// This variant can't use IntVal because if IntVal stored 128-bit
/// integers, it would be 32B on its own because of alignment.
U128 {
number: u128, // 16B
var: Variable, // 4B
style: IntStyle, // 1B
text: PoolStr, // 8B
},
/// A floating-point literal (with a dot)
Float {
number: FloatVal, // 16B
var: Variable, // 4B
text: PoolStr, // 8B
},
/// string literals of length up to 30B
SmallStr(ArrString), // 31B
/// string literals of length 31B or more
Str(PoolStr), // 8B
// Lookups
Var(Symbol), // 8B
InvalidLookup(PoolStr), // 8B
List {
elem_var: Variable, // 4B
elems: PoolVec<ExprId>, // 8B
},
If {
cond_var: Variable, // 4B
expr_var: Variable, // 4B
branches: PoolVec<(ExprId, ExprId)>, // 8B
final_else: ExprId, // 4B
},
When {
cond_var: Variable, // 4B
expr_var: Variable, // 4B
branches: PoolVec<WhenBranch>, // 8B
cond: ExprId, // 4B
},
LetRec {
defs: PoolVec<FunctionDef>, // 8B
body_var: Variable, // 8B
body_id: ExprId, // 4B
},
LetFunction {
def_id: NodeId<FunctionDef>, // 4B
body_var: Variable, // 8B
body_id: ExprId, // 4B
},
LetValue {
def_id: NodeId<ValueDef>, // 4B
body_id: ExprId, // 4B
body_var: Variable, // 4B
},
Call {
args: PoolVec<(Variable, ExprId)>, // 8B
expr_id: ExprId, // 4B
expr_var: Variable, // 4B
fn_var: Variable, // 4B
closure_var: Variable, // 4B
called_via: CalledVia, // 2B
},
RunLowLevel {
op: LowLevel, // 1B
args: PoolVec<(Variable, ExprId)>, // 8B
ret_var: Variable, // 4B
},
Closure {
args: PoolVec<(Variable, NodeId<Pattern2>)>, // 8B
uniq_symbol: Symbol, // 8B This is a globally unique symbol for the closure
body_id: ExprId, // 4B
function_type: Variable, // 4B
recursive: Recursive, // 1B
extra: NodeId<ClosureExtra>, // 4B
},
// Product Types
Record {
record_var: Variable, // 4B
fields: PoolVec<RecordField>, // 8B
},
/// Empty record constant
EmptyRecord,
/// Look up exactly one field on a record, e.g. (expr).foo.
Access {
field: PoolStr, // 4B
expr: ExprId, // 4B
record_var: Variable, // 4B
ext_var: Variable, // 4B
field_var: Variable, // 4B
},
/// field accessor as a function, e.g. (.foo) expr
Accessor {
function_var: Variable, // 4B
closure_var: Variable, // 4B
field: PoolStr, // 4B
record_var: Variable, // 4B
ext_var: Variable, // 4B
field_var: Variable, // 4B
},
Update {
symbol: Symbol, // 8B
updates: PoolVec<RecordField>, // 8B
record_var: Variable, // 4B
ext_var: Variable, // 4B
},
// Sum Types
GlobalTag {
name: PoolStr, // 4B
variant_var: Variable, // 4B
ext_var: Variable, // 4B
arguments: PoolVec<(Variable, ExprId)>, // 8B
},
PrivateTag {
name: Symbol, // 8B
variant_var: Variable, // 4B
ext_var: Variable, // 4B
arguments: PoolVec<(Variable, ExprId)>, // 8B
},
Blank, // Rendered as empty box in editor
// Compiles, but will crash if reached
RuntimeError(/* TODO make a version of RuntimeError that fits in 15B */),
}
/// Problems arising from the AST node pool itself (as opposed to
/// problems in the user's program, which use `roc_problem::can::Problem`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Problem {
    // The pool's node-id space is exhausted; no more nodes can be allocated.
    RanOutOfNodeIds,
}
/// Convenience alias for pool operations that can fail with a `Problem`.
pub type Res<T> = Result<T, Problem>;
/// The notation an integer literal was written in, so it can be
/// re-rendered the way the user typed it.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum IntStyle {
    Decimal,
    Octal,
    Hex,
    Binary,
}
impl IntStyle {
pub fn from_base(base: roc_parse::ast::Base) -> Self {
use roc_parse::ast::Base;
match base {
Base::Decimal => Self::Decimal,
Base::Octal => Self::Octal,
Base::Hex => Self::Hex,
Base::Binary => Self::Binary,
}
}
}
/// An integer literal value of at most 64 bits, tagged with its concrete
/// integer type. (128-bit literals are stored inline on the `I128`/`U128`
/// expression variants instead, for alignment reasons — see `Expr2`.)
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum IntVal {
    I64(i64),
    U64(u64),
    I32(i32),
    U32(u32),
    I16(i16),
    U16(u16),
    I8(i8),
    U8(u8),
}
/// Guards the layout assumption used by the `Expr2` size budget:
/// `IntVal` must stay exactly 16 bytes.
#[test]
fn size_of_intval() {
    let actual = std::mem::size_of::<IntVal>();
    assert_eq!(actual, 16);
}
/// A floating-point literal value, tagged with its concrete float type.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum FloatVal {
    F64(f64),
    F32(f32),
}
/// One branch of a `when` expression: one or more alternative patterns,
/// the body evaluated on a match, and an optional `if` guard.
#[derive(Debug)]
pub struct WhenBranch {
    pub patterns: PoolVec<Pattern2>, // 4B  NOTE(review): PoolVec is annotated 8B elsewhere in this file — confirm
    pub body: ExprId,                // 3B  NOTE(review): ExprId is annotated 4B elsewhere — confirm
    pub guard: Option<ExprId>,       // 4B
}
/// This is overflow data from a Closure variant, which needs to store
/// more than 32B of total data
/// (kept out-of-line behind a `NodeId` so `Expr2::Closure` stays small).
#[derive(Debug)]
pub struct ClosureExtra {
    pub return_type: Variable, // 4B
    // Outer symbols the closure captures, each paired with a fresh variable.
    pub captured_symbols: PoolVec<(Symbol, Variable)>, // 8B
    pub closure_type: Variable, // 4B
    pub closure_ext_var: Variable, // 4B
}

View File

@ -0,0 +1,159 @@
use crate::{
lang::core::{expr::record_field::RecordField, val_def::value_def_to_string},
mem_pool::pool::Pool,
};
use super::expr2::{Expr2, ExprId};
use roc_types::subs::Variable;
/// Renders the expression tree rooted at `node_id` as a human-readable,
/// indented debug string.
pub fn expr2_to_string(node_id: ExprId, pool: &Pool) -> String {
    let mut out = String::new();
    // Start rendering at the root with zero indentation.
    expr2_to_string_helper(pool.get(node_id), 0, pool, &mut out);
    out
}
/// Returns `indent_level` spaces, used to indent one line of the
/// pretty-printed AST output.
fn get_spacing(indent_level: usize) -> String {
    // `str::repeat` is the idiomatic way to build a run of spaces; the
    // original collected `iter::repeat` into an intermediate Vec<&str>
    // and joined it, paying an extra allocation for the same result.
    " ".repeat(indent_level)
}
/// Appends an indented, human-readable rendering of `expr2` to
/// `out_string`, recursing into child expressions one indent level
/// deeper. Variants without a rendering yet fall through to `todo!`.
fn expr2_to_string_helper(
    expr2: &Expr2,
    indent_level: usize,
    pool: &Pool,
    out_string: &mut String,
) {
    out_string.push_str(&get_spacing(indent_level));
    match expr2 {
        // Leaf literals: rendered on a single line.
        Expr2::SmallStr(arr_string) => out_string.push_str(&format!(
            "{}{}{}",
            "SmallStr(\"",
            arr_string.as_str(),
            "\")",
        )),
        Expr2::Str(pool_str) => {
            out_string.push_str(&format!("{}{}{}", "Str(\"", pool_str.as_str(pool), "\")",))
        }
        Expr2::Blank => out_string.push_str("Blank"),
        Expr2::EmptyRecord => out_string.push_str("EmptyRecord"),
        // Record literal: one line per field, comma-separated.
        Expr2::Record { record_var, fields } => {
            out_string.push_str("Record:\n");
            out_string.push_str(&var_to_string(record_var, indent_level + 1));
            out_string.push_str(&format!("{}fields: [\n", get_spacing(indent_level + 1)));
            let mut first_child = true;
            for field in fields.iter(pool) {
                if !first_child {
                    out_string.push_str(", ")
                } else {
                    first_child = false;
                }
                match field {
                    RecordField::InvalidLabelOnly(pool_str, var) => {
                        out_string.push_str(&format!(
                            "{}({}, Var({:?})",
                            get_spacing(indent_level + 2),
                            pool_str.as_str(pool),
                            var,
                        ));
                    }
                    RecordField::LabelOnly(pool_str, var, symbol) => {
                        out_string.push_str(&format!(
                            "{}({}, Var({:?}), Symbol({:?})",
                            get_spacing(indent_level + 2),
                            pool_str.as_str(pool),
                            var,
                            symbol
                        ));
                    }
                    RecordField::LabeledValue(pool_str, var, val_node_id) => {
                        out_string.push_str(&format!(
                            "{}({}, Var({:?}), Expr2(\n",
                            get_spacing(indent_level + 2),
                            pool_str.as_str(pool),
                            var,
                        ));
                        // Recurse into the field's value expression.
                        let val_expr2 = pool.get(*val_node_id);
                        expr2_to_string_helper(val_expr2, indent_level + 3, pool, out_string);
                        out_string.push_str(&format!("{})\n", get_spacing(indent_level + 2)));
                    }
                }
            }
            out_string.push_str(&format!("{}]\n", get_spacing(indent_level + 1)));
        }
        // List literal: recurse into each element.
        Expr2::List { elem_var, elems } => {
            out_string.push_str("List:\n");
            out_string.push_str(&var_to_string(elem_var, indent_level + 1));
            out_string.push_str(&format!("{}elems: [\n", get_spacing(indent_level + 1)));
            let mut first_elt = true;
            for elem_expr2_id in elems.iter(pool) {
                if !first_elt {
                    out_string.push_str(", ")
                } else {
                    first_elt = false;
                }
                let elem_expr2 = pool.get(*elem_expr2_id);
                expr2_to_string_helper(elem_expr2, indent_level + 2, pool, out_string)
            }
            out_string.push_str(&format!("{}]\n", get_spacing(indent_level + 1)));
        }
        Expr2::InvalidLookup(pool_str) => {
            out_string.push_str(&format!("InvalidLookup({})", pool_str.as_str(pool)));
        }
        Expr2::SmallInt { text, .. } => {
            out_string.push_str(&format!("SmallInt({})", text.as_str(pool)));
        }
        // Let binding: render the def and its body via their Debug impls.
        Expr2::LetValue {
            def_id, body_id, ..
        } => {
            out_string.push_str(&format!(
                "LetValue(def_id: >>{:?}), body_id: >>{:?})",
                value_def_to_string(pool.get(*def_id), pool),
                pool.get(*body_id)
            ));
        }
        Expr2::Call { .. } => {
            out_string.push_str(&format!("Call({:?})", expr2,));
        }
        // Closure: only the argument patterns are rendered for now.
        Expr2::Closure { args, .. } => {
            out_string.push_str("Closure:\n");
            out_string.push_str(&format!("{}args: [\n", get_spacing(indent_level + 1)));
            for (_, pattern_id) in args.iter(pool) {
                let arg_pattern2 = pool.get(*pattern_id);
                out_string.push_str(&format!(
                    "{}{:?}\n",
                    get_spacing(indent_level + 2),
                    arg_pattern2
                ));
            }
        }
        &Expr2::Var { .. } => {
            out_string.push_str(&format!("{:?}", expr2,));
        }
        other => todo!("Implement for {:?}", other),
    }
    out_string.push('\n');
}
/// Renders a type variable as an indented `Var(..)` line.
// NOTE(review): this adds its own `+ 1` to `indent_level`, and every call
// site already passes `indent_level + 1`, so the Var line ends up one
// level deeper than its sibling lines — confirm the double indent is
// intentional before changing.
fn var_to_string(some_var: &Variable, indent_level: usize) -> String {
    format!("{}Var({:?})\n", get_spacing(indent_level + 1), some_var)
}

View File

@ -0,0 +1,711 @@
use bumpalo::Bump;
use roc_can::expr::Recursive;
use roc_can::num::{finish_parsing_base, finish_parsing_float, finish_parsing_int};
use roc_can::operator::desugar_expr;
use roc_collections::all::MutSet;
use roc_module::symbol::Symbol;
use roc_parse::{ast::Expr, pattern::PatternType};
use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Located, Region};
use super::{expr2::Expr2, output::Output};
use crate::canonicalization::canonicalize::{
canonicalize_fields, canonicalize_lookup, canonicalize_when_branch, CanonicalizeRecordProblem,
};
use crate::lang::core::declaration::decl_to_let;
use crate::lang::core::def::def::{canonicalize_defs, sort_can_defs};
use crate::lang::core::expr::expr2::ClosureExtra;
use crate::lang::core::pattern::to_pattern2;
use crate::lang::core::str::flatten_str_literal;
use crate::mem_pool::shallow_clone::ShallowClone;
use crate::{
lang::{
core::expr::expr2::{ExprId, FloatVal, IntStyle, IntVal},
env::Env,
scope::Scope,
},
mem_pool::{pool_str::PoolStr, pool_vec::PoolVec},
};
/// Desugars a located parse-AST expression (removing binary/unary
/// operators, etc.) and canonicalizes the result into an `Expr2` plus
/// the `Output` gathered along the way.
pub fn loc_expr_to_expr2<'a>(
    arena: &'a Bump,
    loc_expr: Located<Expr<'a>>,
    env: &mut Env<'a>,
    scope: &mut Scope,
    region: Region,
) -> (Expr2, Output) {
    // Operator desugaring must run before canonicalization; the arena
    // keeps the intermediate allocations alive for the 'a lifetime.
    let desugared = desugar_expr(arena, arena.alloc(loc_expr));
    expr_to_expr2(env, scope, arena.alloc(desugared.value), region)
}
/// Placeholder region used where a precise source region is not tracked yet.
const ZERO: Region = Region::zero();
/// Canonicalizes an already-desugared parse-AST expression into an
/// `Expr2`, resolving names through `scope` and accumulating references,
/// tail-call info, etc. into the returned `Output`.
///
/// Many error paths are still `todo!()`; the commented-out code in each
/// arm sketches the intended `RuntimeError` behavior to be ported later.
pub fn expr_to_expr2<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    parse_expr: &'a roc_parse::ast::Expr<'a>,
    region: Region,
) -> (Expr2, self::Output) {
    use roc_parse::ast::Expr::*;
    match parse_expr {
        // Float literal, e.g. `3.14`.
        Float(string) => {
            match finish_parsing_float(string) {
                Ok(float) => {
                    let expr = Expr2::Float {
                        number: FloatVal::F64(float),
                        var: env.var_store.fresh(),
                        text: PoolStr::new(string, &mut env.pool),
                    };
                    (expr, Output::default())
                }
                Err((raw, error)) => {
                    // emit runtime error
                    let runtime_error = RuntimeError::InvalidFloat(error, ZERO, raw.into());
                    env.problem(Problem::RuntimeError(runtime_error));
                    //
                    // Expr::RuntimeError(runtime_error)
                    todo!()
                }
            }
        }
        // Decimal integer literal, e.g. `42`.
        Num(string) => {
            match finish_parsing_int(string) {
                Ok(int) => {
                    let expr = Expr2::SmallInt {
                        number: IntVal::I64(int),
                        var: env.var_store.fresh(),
                        // TODO non-hardcode
                        style: IntStyle::Decimal,
                        text: PoolStr::new(string, &mut env.pool),
                    };
                    (expr, Output::default())
                }
                Err((raw, error)) => {
                    // emit runtime error
                    let runtime_error = RuntimeError::InvalidInt(
                        error,
                        roc_parse::ast::Base::Decimal,
                        ZERO,
                        raw.into(),
                    );
                    env.problem(Problem::RuntimeError(runtime_error));
                    //
                    // Expr::RuntimeError(runtime_error)
                    todo!()
                }
            }
        }
        // Hex/octal/binary integer literal, e.g. `0x2A`.
        NonBase10Int {
            string,
            base,
            is_negative,
        } => {
            match finish_parsing_base(string, *base, *is_negative) {
                Ok(int) => {
                    let expr = Expr2::SmallInt {
                        number: IntVal::I64(int),
                        var: env.var_store.fresh(),
                        // TODO non-hardcode
                        style: IntStyle::from_base(*base),
                        text: PoolStr::new(string, &mut env.pool),
                    };
                    (expr, Output::default())
                }
                Err((raw, error)) => {
                    // emit runtime error
                    let runtime_error = RuntimeError::InvalidInt(error, *base, ZERO, raw.into());
                    env.problem(Problem::RuntimeError(runtime_error));
                    //
                    // Expr::RuntimeError(runtime_error)
                    todo!()
                }
            }
        }
        // String literal (possibly interpolated) — handled in str.rs.
        Str(literal) => flatten_str_literal(env, scope, literal),
        // List literal: canonicalize each element into the pool.
        List(items) => {
            let mut output = Output::default();
            let output_ref = &mut output;
            let elems: PoolVec<ExprId> = PoolVec::with_capacity(items.len() as u32, env.pool);
            for (node_id, item) in elems.iter_node_ids().zip(items.iter()) {
                let (expr, sub_output) = expr_to_expr2(env, scope, &item.value, item.region);
                output_ref.union(sub_output);
                let expr_id = env.pool.add(expr);
                env.pool[node_id] = expr_id;
            }
            let expr = Expr2::List {
                elem_var: env.var_store.fresh(),
                elems,
            };
            (expr, output)
        }
        GlobalTag(tag) => {
            // a global tag without any arguments
            (
                Expr2::GlobalTag {
                    name: PoolStr::new(tag, env.pool),
                    variant_var: env.var_store.fresh(),
                    ext_var: env.var_store.fresh(),
                    arguments: PoolVec::empty(env.pool),
                },
                Output::default(),
            )
        }
        PrivateTag(name) => {
            // a private tag without any arguments
            let ident_id = env.ident_ids.get_or_insert(&(*name).into());
            let name = Symbol::new(env.home, ident_id);
            (
                Expr2::PrivateTag {
                    name,
                    variant_var: env.var_store.fresh(),
                    ext_var: env.var_store.fresh(),
                    arguments: PoolVec::empty(env.pool),
                },
                Output::default(),
            )
        }
        // Record update, e.g. `{ rec & field: val }` — the target must be
        // a plain variable lookup.
        RecordUpdate {
            fields,
            update: loc_update,
        } => {
            let (can_update, update_out) =
                expr_to_expr2(env, scope, &loc_update.value, loc_update.region);
            if let Expr2::Var(symbol) = &can_update {
                match canonicalize_fields(env, scope, fields.items) {
                    Ok((can_fields, mut output)) => {
                        output.references.union_mut(update_out.references);
                        let answer = Expr2::Update {
                            record_var: env.var_store.fresh(),
                            ext_var: env.var_store.fresh(),
                            symbol: *symbol,
                            updates: can_fields,
                        };
                        (answer, output)
                    }
                    Err(CanonicalizeRecordProblem::InvalidOptionalValue {
                        field_name: _,
                        field_region: _,
                        record_region: _,
                    }) => {
                        // let runtime_error = roc_problem::can::RuntimeError::InvalidOptionalValue {
                        //     field_name,
                        //     field_region,
                        //     record_region,
                        // };
                        //
                        // env.problem(Problem::RuntimeError(runtime_error));
                        todo!()
                    }
                }
            } else {
                // only (optionally qualified) variables can be updated, not arbitrary expressions
                // let error = roc_problem::can::RuntimeError::InvalidRecordUpdate {
                //     region: can_update.region,
                // };
                //
                // let answer = Expr::RuntimeError(error.clone());
                //
                // env.problems.push(Problem::RuntimeError(error));
                //
                // (answer, Output::default())
                todo!("{:?}", &can_update)
            }
        }
        // Record literal, e.g. `{ x: 1, y: 2 }`.
        Record(fields) => {
            if fields.is_empty() {
                (Expr2::EmptyRecord, Output::default())
            } else {
                match canonicalize_fields(env, scope, fields.items) {
                    Ok((can_fields, output)) => (
                        Expr2::Record {
                            record_var: env.var_store.fresh(),
                            fields: can_fields,
                        },
                        output,
                    ),
                    Err(CanonicalizeRecordProblem::InvalidOptionalValue {
                        field_name: _,
                        field_region: _,
                        record_region: _,
                    }) => {
                        // let runtime_error = RuntimeError::InvalidOptionalValue {
                        //     field_name,
                        //     field_region,
                        //     record_region,
                        // };
                        //
                        // env.problem(runtime_error);
                        // (
                        //     Expr::RuntimeError(
                        //     ),
                        //     Output::default(),
                        //
                        // )
                        todo!()
                    }
                }
            }
        }
        // Field access, e.g. `(expr).foo`.
        Access(record_expr, field) => {
            // TODO
            let region = ZERO;
            let (record_expr_id, output) = to_expr_id(env, scope, record_expr, region);
            (
                Expr2::Access {
                    record_var: env.var_store.fresh(),
                    field_var: env.var_store.fresh(),
                    ext_var: env.var_store.fresh(),
                    expr: record_expr_id,
                    field: PoolStr::new(field, env.pool),
                },
                output,
            )
        }
        // Accessor function, e.g. `.foo`.
        AccessorFunction(field) => (
            Expr2::Accessor {
                function_var: env.var_store.fresh(),
                record_var: env.var_store.fresh(),
                ext_var: env.var_store.fresh(),
                closure_var: env.var_store.fresh(),
                field_var: env.var_store.fresh(),
                field: PoolStr::new(field, env.pool),
            },
            Output::default(),
        ),
        // `if`/`then`/`else` chain: canonicalize each condition/branch pair,
        // then the final else.
        If(branches, final_else) => {
            let mut new_branches = Vec::with_capacity(branches.len());
            let mut output = Output::default();
            for (condition, then_branch) in branches.iter() {
                let (cond, cond_output) =
                    expr_to_expr2(env, scope, &condition.value, condition.region);
                let (then_expr, then_output) =
                    expr_to_expr2(env, scope, &then_branch.value, then_branch.region);
                output.references.union_mut(cond_output.references);
                output.references.union_mut(then_output.references);
                new_branches.push((env.pool.add(cond), env.pool.add(then_expr)));
            }
            let (else_expr, else_output) =
                expr_to_expr2(env, scope, &final_else.value, final_else.region);
            output.references.union_mut(else_output.references);
            let expr = Expr2::If {
                cond_var: env.var_store.fresh(),
                expr_var: env.var_store.fresh(),
                branches: PoolVec::new(new_branches.into_iter(), env.pool),
                final_else: env.pool.add(else_expr),
            };
            (expr, output)
        }
        // `when` expression: canonicalize the condition, then each branch.
        When(loc_cond, branches) => {
            // Infer the condition expression's type.
            let cond_var = env.var_store.fresh();
            let (can_cond, mut output) =
                expr_to_expr2(env, scope, &loc_cond.value, loc_cond.region);
            // the condition can never be a tail-call
            output.tail_call = None;
            let can_branches = PoolVec::with_capacity(branches.len() as u32, env.pool);
            for (node_id, branch) in can_branches.iter_node_ids().zip(branches.iter()) {
                let (can_when_branch, branch_references) =
                    canonicalize_when_branch(env, scope, *branch, &mut output);
                output.references.union_mut(branch_references);
                env.pool[node_id] = can_when_branch;
            }
            // A "when" with no branches is a runtime error, but it will mess things up
            // if code gen mistakenly thinks this is a tail call just because its condition
            // happened to be one. (The condition gave us our initial output value.)
            if branches.is_empty() {
                output.tail_call = None;
            }
            // Incorporate all three expressions into a combined Output value.
            let expr = Expr2::When {
                expr_var: env.var_store.fresh(),
                cond_var,
                cond: env.pool.add(can_cond),
                branches: can_branches,
            };
            (expr, output)
        }
        // Lambda, e.g. `\x -> x + 1`: canonicalize args and body, then
        // compute the set of captured symbols.
        Closure(loc_arg_patterns, loc_body_expr) => {
            // The globally unique symbol that will refer to this closure once it gets converted
            // into a top-level procedure for code gen.
            //
            // In the Foo module, this will look something like Foo.$1 or Foo.$2.
            let symbol = env
                .closure_name_symbol
                .unwrap_or_else(|| env.gen_unique_symbol());
            env.closure_name_symbol = None;
            // The body expression gets a new scope for canonicalization.
            // Shadow `scope` to make sure we don't accidentally use the original one for the
            // rest of this block, but keep the original around for later diffing.
            let original_scope = scope;
            let mut scope = original_scope.shallow_clone();
            let can_args = PoolVec::with_capacity(loc_arg_patterns.len() as u32, env.pool);
            let mut output = Output::default();
            let mut bound_by_argument_patterns = MutSet::default();
            for (node_id, loc_pattern) in can_args.iter_node_ids().zip(loc_arg_patterns.iter()) {
                let (new_output, can_arg) = to_pattern2(
                    env,
                    &mut scope,
                    roc_parse::pattern::PatternType::FunctionArg,
                    &loc_pattern.value,
                    loc_pattern.region,
                );
                bound_by_argument_patterns
                    .extend(new_output.references.bound_symbols.iter().copied());
                output.union(new_output);
                let pattern_id = env.add(can_arg, loc_pattern.region);
                env.pool[node_id] = (env.var_store.fresh(), pattern_id);
            }
            let (body_expr, new_output) =
                expr_to_expr2(env, &mut scope, &loc_body_expr.value, loc_body_expr.region);
            let mut captured_symbols: MutSet<Symbol> =
                new_output.references.lookups.iter().copied().collect();
            // filter out the closure's name itself
            captured_symbols.remove(&symbol);
            // symbols bound either in this pattern or deeper down are not captured!
            captured_symbols.retain(|s| !new_output.references.bound_symbols.contains(s));
            captured_symbols.retain(|s| !bound_by_argument_patterns.contains(s));
            // filter out top-level symbols
            // those will be globally available, and don't need to be captured
            captured_symbols.retain(|s| !env.top_level_symbols.contains(s));
            // filter out imported symbols
            // those will be globally available, and don't need to be captured
            captured_symbols.retain(|s| s.module_id() == env.home);
            // TODO any Closure that has an empty `captured_symbols` list could be excluded!
            output.union(new_output);
            // filter out aliases
            captured_symbols.retain(|s| !output.references.referenced_aliases.contains(s));
            // filter out functions that don't close over anything
            captured_symbols.retain(|s| !output.non_closures.contains(s));
            // Now that we've collected all the references, check to see if any of the args we defined
            // went unreferenced. If any did, report them as unused arguments.
            for (sub_symbol, region) in scope.symbols() {
                if !original_scope.contains_symbol(sub_symbol) {
                    if !output.references.has_lookup(sub_symbol) {
                        // The body never referenced this argument we declared. It's an unused argument!
                        env.problem(Problem::UnusedArgument(symbol, sub_symbol, region));
                    }
                    // We shouldn't ultimately count arguments as referenced locals. Otherwise,
                    // we end up with weird conclusions like the expression (\x -> x + 1)
                    // references the (nonexistent) local variable x!
                    output.references.lookups.remove(&sub_symbol);
                }
            }
            env.register_closure(symbol, output.references.clone());
            let mut captured_symbols: Vec<_> = captured_symbols
                .into_iter()
                .map(|s| (s, env.var_store.fresh()))
                .collect();
            // sort symbols, so we know the order in which they're stored in the closure record
            captured_symbols.sort();
            // store that this function doesn't capture anything. It will be promoted to a
            // top-level function, and does not need to be captured by other surrounding functions.
            if captured_symbols.is_empty() {
                output.non_closures.insert(symbol);
            }
            let captured_symbols = PoolVec::new(captured_symbols.into_iter(), env.pool);
            let extra = ClosureExtra {
                return_type: env.var_store.fresh(), // 4B
                captured_symbols,                   // 8B
                closure_type: env.var_store.fresh(), // 4B
                closure_ext_var: env.var_store.fresh(), // 4B
            };
            (
                Expr2::Closure {
                    function_type: env.var_store.fresh(),
                    uniq_symbol: symbol,
                    recursive: Recursive::NotRecursive,
                    args: can_args,
                    body_id: env.add(body_expr, loc_body_expr.region),
                    extra: env.pool.add(extra),
                },
                output,
            )
        }
        // Function application, e.g. `foo bar baz`.
        Apply(loc_fn, loc_args, application_style) => {
            // The expression that evaluates to the function being called, e.g. `foo` in
            // (foo) bar baz
            let fn_region = loc_fn.region;
            // Canonicalize the function expression and its arguments
            let (fn_expr, mut output) = expr_to_expr2(env, scope, &loc_fn.value, fn_region);
            // The function's return type
            let args = PoolVec::with_capacity(loc_args.len() as u32, env.pool);
            for (node_id, loc_arg) in args.iter_node_ids().zip(loc_args.iter()) {
                let (arg_expr_id, arg_out) = to_expr_id(env, scope, &loc_arg.value, loc_arg.region);
                env.pool[node_id] = (env.var_store.fresh(), arg_expr_id);
                output.references.union_mut(arg_out.references);
            }
            // Default: We're not tail-calling a symbol (by name), we're tail-calling a function value.
            output.tail_call = None;
            let expr = match fn_expr {
                Expr2::Var(ref symbol) => {
                    output.references.calls.insert(*symbol);
                    // we're tail-calling a symbol by name, check if it's the tail-callable symbol
                    output.tail_call = match &env.tailcallable_symbol {
                        Some(tc_sym) if *tc_sym == *symbol => Some(*symbol),
                        Some(_) | None => None,
                    };
                    // IDEA: Expr2::CallByName?
                    let fn_expr_id = env.add(fn_expr, fn_region);
                    Expr2::Call {
                        args,
                        expr_id: fn_expr_id,
                        expr_var: env.var_store.fresh(),
                        fn_var: env.var_store.fresh(),
                        closure_var: env.var_store.fresh(),
                        called_via: *application_style,
                    }
                }
                Expr2::RuntimeError() => {
                    // We can't call a runtime error; bail out by propagating it!
                    return (fn_expr, output);
                }
                // Applying a tag "constructor": attach the args to the tag.
                Expr2::GlobalTag {
                    variant_var,
                    ext_var,
                    name,
                    ..
                } => Expr2::GlobalTag {
                    variant_var,
                    ext_var,
                    name,
                    arguments: args,
                },
                Expr2::PrivateTag {
                    variant_var,
                    ext_var,
                    name,
                    ..
                } => Expr2::PrivateTag {
                    variant_var,
                    ext_var,
                    name,
                    arguments: args,
                },
                _ => {
                    // This could be something like ((if True then fn1 else fn2) arg1 arg2).
                    let fn_expr_id = env.add(fn_expr, fn_region);
                    Expr2::Call {
                        args,
                        expr_id: fn_expr_id,
                        expr_var: env.var_store.fresh(),
                        fn_var: env.var_store.fresh(),
                        closure_var: env.var_store.fresh(),
                        called_via: *application_style,
                    }
                }
            };
            (expr, output)
        }
        // A series of defs followed by a return expression.
        Defs(loc_defs, loc_ret) => {
            let (unsorted, mut scope, defs_output, symbols_introduced) = canonicalize_defs(
                env,
                Output::default(),
                scope,
                loc_defs,
                PatternType::DefExpr,
            );
            // The def as a whole is a tail call iff its return expression is a tail call.
            // Use its output as a starting point because its tail_call already has the right answer!
            let (ret_expr, mut output) =
                expr_to_expr2(env, &mut scope, &loc_ret.value, loc_ret.region);
            output
                .introduced_variables
                .union(&defs_output.introduced_variables);
            output.references.union_mut(defs_output.references);
            // Now that we've collected all the references, check to see if any of the new idents
            // we defined went unused by the return expression. If any were unused, report it.
            for (symbol, region) in symbols_introduced {
                if !output.references.has_lookup(symbol) {
                    env.problem(Problem::UnusedDef(symbol, region));
                }
            }
            let (can_defs, output) = sort_can_defs(env, unsorted, output);
            match can_defs {
                Ok(decls) => {
                    let mut expr = ret_expr;
                    // Wrap the return expression in Lets, innermost-first.
                    for declaration in decls.into_iter().rev() {
                        expr = decl_to_let(env.pool, env.var_store, declaration, expr);
                    }
                    (expr, output)
                }
                Err(_err) => {
                    // TODO: fix this to be something from Expr2
                    // (RuntimeError(err), output)
                    todo!()
                }
            }
        }
        PrecedenceConflict { .. } => {
            // use roc_problem::can::RuntimeError::*;
            //
            // let problem = PrecedenceProblem::BothNonAssociative(
            //     *whole_region,
            //     binop1.clone(),
            //     binop2.clone(),
            // );
            //
            // env.problem(Problem::PrecedenceProblem(problem.clone()));
            //
            // (
            //     RuntimeError(InvalidPrecedence(problem, region)),
            //     Output::default(),
            // )
            todo!()
        }
        MalformedClosure => {
            // use roc_problem::can::RuntimeError::*;
            // (RuntimeError(MalformedClosure(region)), Output::default())
            todo!()
        }
        MalformedIdent(_name, _problem) => {
            // use roc_problem::can::RuntimeError::*;
            //
            // let problem = MalformedIdentifier((*name).into(), region);
            // env.problem(Problem::RuntimeError(problem.clone()));
            //
            // (RuntimeError(problem), Output::default())
            todo!()
        }
        Var {
            module_name, // module_name will only be filled if the original Roc code stated something like `5 + SomeModule.myVar`, module_name will be blank if it was `5 + myVar`
            ident,
        } => canonicalize_lookup(env, scope, module_name, ident, region),
        // Below this point, we shouln't see any of these nodes anymore because
        // operator desugaring should have removed them!
        bad_expr @ ParensAround(_) => {
            panic!(
                "A ParensAround did not get removed during operator desugaring somehow: {:#?}",
                bad_expr
            );
        }
        bad_expr @ SpaceBefore(_, _) => {
            panic!(
                "A SpaceBefore did not get removed during operator desugaring somehow: {:#?}",
                bad_expr
            );
        }
        bad_expr @ SpaceAfter(_, _) => {
            panic!(
                "A SpaceAfter did not get removed during operator desugaring somehow: {:#?}",
                bad_expr
            );
        }
        bad_expr @ BinOps { .. } => {
            panic!(
                "A binary operator chain did not get desugared somehow: {:#?}",
                bad_expr
            );
        }
        bad_expr @ UnaryOp(_, _) => {
            panic!(
                "A unary operator did not get desugared somehow: {:#?}",
                bad_expr
            );
        }
        rest => todo!("not yet implemented {:?}", rest),
    }
}
/// Like `expr_to_expr2`, but stores the canonicalized expression in the
/// pool and returns its id instead of the expression itself.
pub fn to_expr_id<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    parse_expr: &'a roc_parse::ast::Expr<'a>,
    region: Region,
) -> (ExprId, Output) {
    let (expr2, out) = expr_to_expr2(env, scope, parse_expr, region);
    let expr_id = env.add(expr2, region);
    (expr_id, out)
}

View File

@ -0,0 +1,51 @@
use roc_collections::all::MutMap;
use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol;
use roc_types::subs::Variable;
/// Type variables introduced while canonicalizing annotations.
#[derive(Clone, Debug, PartialEq, Default)]
pub struct IntroducedVariables {
    // Rigids must be unique within a type annotation.
    // E.g. in `identity : a -> a`, there should only be one
    // variable (a rigid one, with name "a").
    // Hence `rigids : Map<Lowercase, Variable>`
    //
    // But then between annotations, the same name can occur multiple times,
    // but a variable can only have one name. Therefore
    // `ftv : Map<Variable, Lowercase>`.
    // Anonymous `*` variables from annotations.
    pub wildcards: Vec<Variable>,
    // Forward lookup: rigid name -> variable (unique per annotation).
    pub var_by_name: MutMap<Lowercase, Variable>,
    // Reverse lookup: variable -> its rigid name.
    pub name_by_var: MutMap<Variable, Lowercase>,
    // Variables for aliases exposed to the host platform.
    pub host_exposed_aliases: MutMap<Symbol, Variable>,
}
impl IntroducedVariables {
    /// Registers a named (rigid) variable under both lookup directions.
    pub fn insert_named(&mut self, name: Lowercase, var: Variable) {
        self.name_by_var.insert(var, name.clone());
        self.var_by_name.insert(name, var);
    }
    /// Records an anonymous `*` wildcard variable.
    pub fn insert_wildcard(&mut self, var: Variable) {
        self.wildcards.push(var);
    }
    /// Records the variable backing a host-exposed alias.
    pub fn insert_host_exposed_alias(&mut self, symbol: Symbol, var: Variable) {
        self.host_exposed_aliases.insert(symbol, var);
    }
    /// Merges everything introduced in `other` into `self`.
    pub fn union(&mut self, other: &Self) {
        self.wildcards.extend(other.wildcards.iter().cloned());
        for (name, var) in other.var_by_name.iter() {
            self.var_by_name.insert(name.clone(), var.clone());
        }
        for (var, name) in other.name_by_var.iter() {
            self.name_by_var.insert(var.clone(), name.clone());
        }
        for (symbol, var) in other.host_exposed_aliases.iter() {
            self.host_exposed_aliases.insert(symbol.clone(), var.clone());
        }
    }
    /// Looks up the variable introduced under `name`, if any.
    pub fn var_by_name(&self, name: &Lowercase) -> Option<&Variable> {
        self.var_by_name.get(name)
    }
    /// Looks up the name assigned to `var`, if any.
    pub fn name_by_var(&self, var: Variable) -> Option<&Lowercase> {
        self.name_by_var.get(&var)
    }
}

View File

@ -0,0 +1,6 @@
pub mod expr2; // the `Expr2` AST node itself
pub mod expr2_to_string; // debug pretty-printer for `Expr2`
pub mod expr_to_expr2; // canonicalization from the parse AST into `Expr2`
mod introduced_vars; // type variables introduced by annotations
pub(crate) mod output; // per-expression canonicalization output
pub mod record_field; // record-field representation used by `Expr2`

View File

@ -0,0 +1,30 @@
use crate::{
lang::core::{def::def::References, types::Alias},
mem_pool::pool::NodeId,
};
use roc_collections::all::{MutMap, MutSet};
use roc_module::symbol::Symbol;
use super::introduced_vars::IntroducedVariables;
/// Everything gathered while canonicalizing one expression: symbol
/// references, a possible tail call, variables introduced by
/// annotations, aliases, and functions known to capture nothing.
#[derive(Clone, Default, Debug, PartialEq)]
pub struct Output {
    pub references: References,
    // The symbol this expression tail-calls, if any.
    pub tail_call: Option<Symbol>,
    pub introduced_variables: IntroducedVariables,
    pub aliases: MutMap<Symbol, NodeId<Alias>>,
    // Symbols of functions that close over nothing.
    pub non_closures: MutSet<Symbol>,
}
impl Output {
    /// Folds `other` into `self`, consuming it.
    ///
    /// References, aliases and non-closures are merged; the tail call is
    /// only taken from `other` when `self` has not recorded one yet.
    /// (Note: `introduced_variables` is intentionally not merged here;
    /// callers union it separately.)
    pub fn union(&mut self, other: Self) {
        self.references.union_mut(other.references);
        // Keep the first tail call seen; a later one never overrides it.
        if self.tail_call.is_none() {
            self.tail_call = other.tail_call;
        }
        self.aliases.extend(other.aliases);
        self.non_closures.extend(other.non_closures);
    }
}

View File

@ -0,0 +1,49 @@
use roc_types::subs::Variable;
use crate::mem_pool::pool_str::PoolStr;
use roc_module::symbol::Symbol;
use super::expr2::ExprId;
/// One canonicalized field of a record literal or record update.
#[derive(Debug)]
pub enum RecordField {
    // A label whose value was invalid; only the label and variable survive.
    InvalidLabelOnly(PoolStr, Variable),
    // Label plus a symbol — presumably field-punning shorthand; confirm.
    LabelOnly(PoolStr, Variable, Symbol),
    // An ordinary `label: value` field; the value lives in the pool.
    LabeledValue(PoolStr, Variable, ExprId),
}
use RecordField::*;
impl RecordField {
    /// The type variable attached to this field, whichever variant it is.
    pub fn get_record_field_var(&self) -> &Variable {
        match self {
            InvalidLabelOnly(_, var) | LabelOnly(_, var, _) | LabeledValue(_, var, _) => var,
        }
    }
    /// The field's label, whichever variant it is.
    pub fn get_record_field_pool_str(&self) -> &PoolStr {
        match self {
            InvalidLabelOnly(label, ..) | LabelOnly(label, ..) | LabeledValue(label, ..) => label,
        }
    }
    /// Mutable access to the field's label, whichever variant it is.
    pub fn get_record_field_pool_str_mut(&mut self) -> &mut PoolStr {
        match self {
            InvalidLabelOnly(label, ..) | LabelOnly(label, ..) | LabeledValue(label, ..) => label,
        }
    }
    /// The id of the field's value expression; `None` for label-only fields.
    pub fn get_record_field_val_node_id(&self) -> Option<ExprId> {
        match self {
            LabeledValue(_, _, field_val_id) => Some(*field_val_id),
            InvalidLabelOnly(..) | LabelOnly(..) => None,
        }
    }
}

View File

@ -0,0 +1,61 @@
use crate::{
lang::rigids::Rigids,
mem_pool::{pool::NodeId, pool_vec::PoolVec, shallow_clone::ShallowClone},
};
use roc_module::symbol::Symbol;
use roc_types::subs::Variable;
use super::{
expr::expr2::ExprId,
pattern::PatternId,
types::{Type2, TypeId},
};
/// A function definition, with or without a type annotation.
#[derive(Debug)]
pub struct_doc_placeholder
impl ShallowClone for FunctionDef {
    /// Clones the def without deep-copying pool contents: ids and
    /// variables are plain copies, and the argument vec is itself
    /// shallow-cloned.
    fn shallow_clone(&self) -> Self {
        match self {
            Self::WithAnnotation {
                name,
                arguments,
                rigids,
                return_type,
                body_id,
            } => Self::WithAnnotation {
                body_id: *body_id,
                return_type: *return_type,
                rigids: *rigids,
                arguments: arguments.shallow_clone(),
                name: *name,
            },
            Self::NoAnnotation {
                name,
                arguments,
                return_var,
                body_id,
            } => Self::NoAnnotation {
                body_id: *body_id,
                return_var: *return_var,
                arguments: arguments.shallow_clone(),
                name: *name,
            },
        }
    }
}

View File

@ -0,0 +1,10 @@
use super::expr::expr2::ExprId;
/// The parsed header of an `app` module, as held by the editor.
#[derive(Debug)]
pub struct AppHeader {
    pub app_name: String,
    pub packages_base: String,
    pub imports: Vec<String>,
    pub provides: Vec<String>,
    pub ast_node_id: ExprId, // TODO probably want to create and use HeaderId
}

10
ast/src/lang/core/mod.rs Normal file
View File

@ -0,0 +1,10 @@
pub mod ast; // whole-module AST
mod declaration; // decl -> Let conversion
pub mod def; // definitions (canonicalize_defs, sort_can_defs)
pub mod expr; // `Expr2` and friends
pub mod fun_def; // `FunctionDef`
pub mod header; // module headers (e.g. `AppHeader`)
pub mod pattern; // `Pattern2`
pub mod str; // string-literal canonicalization
pub mod types; // `Type2`, `Alias`
pub mod val_def; // `ValueDef`

View File

@ -1,22 +1,30 @@
#![allow(clippy::all)]
#![allow(dead_code)]
#![allow(unused_imports)]
use crate::lang::ast::{ExprId, FloatVal, IntVal};
use crate::lang::expr::{to_expr_id, Env, Output};
use crate::lang::pool::{NodeId, Pool, PoolStr, PoolVec, ShallowClone};
use crate::lang::scope::Scope;
use bumpalo::collections::Vec as BumpVec;
use roc_can::expr::unescape_char;
use roc_can::num::{finish_parsing_base, finish_parsing_float, finish_parsing_int};
use roc_collections::all::BumpMap;
use roc_module::symbol::Symbol;
use roc_module::symbol::{Interns, Symbol};
use roc_parse::ast::{StrLiteral, StrSegment};
use roc_parse::pattern::PatternType;
use roc_problem::can::{MalformedPatternProblem, Problem, RuntimeError};
use roc_region::all::Region;
use roc_types::subs::Variable;
use super::constrain::Constraint;
use crate::ast_error::{ASTResult, UnexpectedPattern2Variant};
use crate::constrain::Constraint;
use crate::lang::core::expr::expr_to_expr2::to_expr_id;
use crate::lang::env::Env;
use crate::lang::scope::Scope;
use crate::mem_pool::pool::{NodeId, Pool};
use crate::mem_pool::pool_str::PoolStr;
use crate::mem_pool::pool_vec::PoolVec;
use crate::mem_pool::shallow_clone::ShallowClone;
use super::expr::expr2::{ExprId, FloatVal, IntVal};
use super::expr::output::Output;
use super::types::Type2;
pub type PatternId = NodeId<Pattern2>;
@ -504,6 +512,17 @@ pub fn symbols_from_pattern(pool: &Pool, initial: &Pattern2) -> Vec<Symbol> {
symbols
}
/// Extracts the identifier text from a pattern, failing with an
/// `UnexpectedPattern2Variant` error unless the pattern is exactly
/// `Pattern2::Identifier`.
pub fn get_identifier_string(pattern: &Pattern2, interns: &Interns) -> ASTResult<String> {
    match pattern {
        Pattern2::Identifier(symbol) => Ok(symbol.ident_str(interns).to_string()),
        // Any other variant is reported as an error carrying both the
        // required and the encountered variant names.
        other => UnexpectedPattern2Variant {
            required_pattern2: "Identifier".to_string(),
            encountered_pattern2: format!("{:?}", other),
        }
        .fail()?,
    }
}
pub fn symbols_and_variables_from_pattern(
pool: &Pool,
initial: &Pattern2,
@ -580,7 +599,7 @@ fn underscore_in_def<'a>(env: &mut Env<'a>, region: Region) -> Pattern2 {
Pattern2::UnsupportedPattern(region)
}
fn flatten_str_literal(pool: &mut Pool, literal: &StrLiteral<'_>) -> Pattern2 {
pub(crate) fn flatten_str_literal(pool: &mut Pool, literal: &StrLiteral<'_>) -> Pattern2 {
use roc_parse::ast::StrLiteral::*;
match literal {
@ -590,7 +609,7 @@ fn flatten_str_literal(pool: &mut Pool, literal: &StrLiteral<'_>) -> Pattern2 {
}
}
fn flatten_str_lines(pool: &mut Pool, lines: &[&[StrSegment<'_>]]) -> Pattern2 {
pub(crate) fn flatten_str_lines(pool: &mut Pool, lines: &[&[StrSegment<'_>]]) -> Pattern2 {
use StrSegment::*;
let mut buf = String::new();

229
ast/src/lang/core/str.rs Normal file
View File

@ -0,0 +1,229 @@
use roc_module::{called_via::CalledVia, symbol::Symbol};
use roc_parse::ast::StrLiteral;
use crate::{
ast_error::{ASTResult, UnexpectedASTNode},
lang::{core::expr::expr_to_expr2::expr_to_expr2, env::Env, scope::Scope},
mem_pool::{pool::Pool, pool_str::PoolStr, pool_vec::PoolVec},
};
use super::expr::{
expr2::{Expr2, ExprId},
output::Output,
};
/// Canonicalizes a parsed string literal into an `Expr2`.
///
/// A plain single line becomes a `Str` node directly; lines containing
/// segments (interpolations, escapes, unicode) go through
/// `flatten_str_lines`.
pub(crate) fn flatten_str_literal<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    literal: &StrLiteral<'a>,
) -> (Expr2, Output) {
    use roc_parse::ast::StrLiteral::*;

    match literal {
        Line(segments) => flatten_str_lines(env, scope, &[segments]),
        Block(lines) => flatten_str_lines(env, scope, lines),
        PlainLine(str_slice) => {
            // TODO use smallstr
            let pool_str = PoolStr::new(str_slice, &mut env.pool);

            (Expr2::Str(pool_str), Output::default())
        }
    }
}
/// One desugared piece of a string literal: either an interpolated
/// sub-expression or a run of literal text already copied into the pool.
enum StrSegment {
    Interpolation(Expr2),
    Plaintext(PoolStr),
}
/// Desugars the segments of a (possibly multi-line) string literal into a
/// single `Expr2`, recording any lookups/calls made by interpolations in the
/// returned `Output`.
///
/// Consecutive plaintext (including decoded unicode escapes and escaped
/// chars) is accumulated in `buf` and flushed into `segments` whenever an
/// interpolation is reached.
fn flatten_str_lines<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    lines: &[&[roc_parse::ast::StrSegment<'a>]],
) -> (Expr2, Output) {
    use roc_parse::ast::StrSegment::*;

    let mut buf = String::new();
    let mut segments = Vec::new();
    let mut output = Output::default();

    for line in lines {
        for segment in line.iter() {
            match segment {
                Plaintext(string) => {
                    buf.push_str(string);
                }
                // \u(...) escape: parse the hex digits and push the scalar value.
                Unicode(loc_hex_digits) => match u32::from_str_radix(loc_hex_digits.value, 16) {
                    Ok(code_pt) => match std::char::from_u32(code_pt) {
                        Some(ch) => {
                            buf.push(ch);
                        }
                        None => {
                            // Valid hex, but not a valid Unicode scalar value.
                            // Proper error reporting is not wired up yet:
                            // env.problem(Problem::InvalidUnicodeCodePt(loc_hex_digits.region));
                            //
                            // return (
                            //     Expr::RuntimeError(RuntimeError::InvalidUnicodeCodePt(
                            //         loc_hex_digits.region,
                            //     )),
                            //     output,
                            // );
                            todo!()
                        }
                    },
                    Err(_) => {
                        // Not parseable as hexadecimal at all; same story.
                        // env.problem(Problem::InvalidHexadecimal(loc_hex_digits.region));
                        //
                        // return (
                        //     Expr::RuntimeError(RuntimeError::InvalidHexadecimal(
                        //         loc_hex_digits.region,
                        //     )),
                        //     output,
                        // );
                        todo!()
                    }
                },
                Interpolated(loc_expr) => {
                    if roc_can::expr::is_valid_interpolation(loc_expr.value) {
                        // Interpolations desugar to Str.concat calls
                        output.references.calls.insert(Symbol::STR_CONCAT);

                        // Flush any buffered literal text before the interpolation.
                        if !buf.is_empty() {
                            segments.push(StrSegment::Plaintext(PoolStr::new(&buf, &mut env.pool)));

                            buf = String::new();
                        }

                        let (loc_expr, new_output) =
                            expr_to_expr2(env, scope, loc_expr.value, loc_expr.region);

                        output.union(new_output);

                        segments.push(StrSegment::Interpolation(loc_expr));
                    } else {
                        // Invalid interpolation; error reporting not wired up yet.
                        // env.problem(Problem::InvalidInterpolation(loc_expr.region));
                        //
                        // return (
                        //     Expr::RuntimeError(RuntimeError::InvalidInterpolation(loc_expr.region)),
                        //     output,
                        // );
                        todo!()
                    }
                }
                EscapedChar(escaped) => buf.push(roc_can::expr::unescape_char(escaped)),
            }
        }
    }

    // Flush any trailing literal text.
    if !buf.is_empty() {
        segments.push(StrSegment::Plaintext(PoolStr::new(&buf, &mut env.pool)));
    }

    (desugar_str_segments(env, segments), output)
}
/// Resolve string interpolations by desugaring a sequence of StrSegments
/// into nested calls to Str.concat.
///
/// Segments are consumed back-to-front: the final segment seeds the
/// accumulator, and each earlier segment wraps it as
/// `Str.concat earlier_segment acc`, so the concats nest left-to-right.
fn desugar_str_segments(env: &mut Env, segments: Vec<StrSegment>) -> Expr2 {
    use StrSegment::*;

    let pool = &mut env.pool;
    let var_store = &mut env.var_store;

    // Walk the segments from last to first.
    let mut iter = segments.into_iter().rev();
    let mut expr = match iter.next() {
        Some(Plaintext(pool_str)) => Expr2::Str(pool_str),
        Some(Interpolation(expr_id)) => expr_id,
        None => {
            // No segments? Empty string!
            let pool_str = PoolStr::new("", pool);
            Expr2::Str(pool_str)
        }
    };

    for seg in iter {
        let new_expr = match seg {
            Plaintext(string) => Expr2::Str(string),
            Interpolation(expr_id) => expr_id,
        };

        // Build `Str.concat new_expr expr`, with fresh type variables for
        // the call and each argument.
        let concat_expr_id = pool.add(Expr2::Var(Symbol::STR_CONCAT));

        let args = vec![
            (var_store.fresh(), pool.add(new_expr)),
            (var_store.fresh(), pool.add(expr)),
        ];
        let args = PoolVec::new(args.into_iter(), pool);

        let new_call = Expr2::Call {
            args,
            expr_id: concat_expr_id,
            expr_var: var_store.fresh(),
            fn_var: var_store.fresh(),
            closure_var: var_store.fresh(),
            called_via: CalledVia::Space,
        };

        expr = new_call
    }

    expr
}
/// Inserts `new_char` at byte index `insert_index` of the string expression
/// stored at `node_id` (used by the editor for incremental text edits).
///
/// A `SmallStr` is updated in place while it has room; otherwise — and for
/// pool-backed `Str` nodes — the contents are copied into a fresh `String`,
/// the char inserted, and the node overwritten with a new `Str`. Fails on
/// any non-string expression node.
pub fn update_str_expr(
    node_id: ExprId,
    new_char: char,
    insert_index: usize,
    pool: &mut Pool,
) -> ASTResult<()> {
    let str_expr = pool.get_mut(node_id);

    // The action to take is computed first, because the mutable borrow of
    // the node must end before `pool.set` can be called below.
    enum Either {
        MyString(String),
        MyPoolStr(PoolStr),
        Done,
    }

    let insert_either = match str_expr {
        Expr2::SmallStr(arr_string) => {
            // TODO make sure this works for unicode "characters"
            // NOTE(review): the index is truncated with `as u8` — this
            // assumes small strings are < 256 bytes; confirm against the
            // SmallStr capacity.
            let insert_res = arr_string.try_insert(insert_index as u8, new_char);

            match insert_res {
                Ok(_) => Either::Done,
                _ => {
                    // The small string is full: spill to a heap String and
                    // re-store it as a pool-backed Str below.
                    let mut new_string = arr_string.as_str().to_string();
                    new_string.insert(insert_index, new_char);

                    Either::MyString(new_string)
                }
            }
        }
        Expr2::Str(old_pool_str) => Either::MyPoolStr(*old_pool_str),
        other => UnexpectedASTNode {
            required_node_type: "SmallStr or Str",
            encountered_node_type: format!("{:?}", other),
        }
        .fail()?,
    };

    match insert_either {
        Either::MyString(new_string) => {
            let new_pool_str = PoolStr::new(&new_string, pool);
            pool.set(node_id, Expr2::Str(new_pool_str))
        }
        Either::MyPoolStr(old_pool_str) => {
            let mut new_string = old_pool_str.as_str(pool).to_owned();

            new_string.insert(insert_index, new_char);

            let new_pool_str = PoolStr::new(&new_string, pool);

            pool.set(node_id, Expr2::Str(new_pool_str))
        }
        Either::Done => (),
    }

    Ok(())
}

View File

@ -1,44 +1,53 @@
#![allow(clippy::all)]
#![allow(dead_code)]
#![allow(unused_imports)]
use crate::lang::expr::Env;
use crate::lang::pool::{NodeId, Pool, PoolStr, PoolVec, ShallowClone};
use crate::lang::scope::Scope;
// use roc_can::expr::Output;
use roc_collections::all::{MutMap, MutSet};
use roc_module::ident::{Ident, TagName};
use roc_module::ident::{Ident, Lowercase, TagName};
use roc_module::symbol::Symbol;
use roc_region::all::{Located, Region};
use roc_types::types::{Problem, RecordField};
use roc_types::{subs::Variable, types::ErrorType};
use crate::lang::env::Env;
use crate::lang::scope::Scope;
use crate::mem_pool::pool::{NodeId, Pool};
use crate::mem_pool::pool_str::PoolStr;
use crate::mem_pool::pool_vec::PoolVec;
use crate::mem_pool::shallow_clone::ShallowClone;
pub type TypeId = NodeId<Type2>;
#[derive(Debug)]
pub enum Type2 {
Variable(Variable),
Variable(Variable), // 4B
Alias(Symbol, PoolVec<(PoolStr, TypeId)>, TypeId), // 24B = 8B + 12B + 4B
AsAlias(Symbol, PoolVec<(PoolStr, TypeId)>, TypeId), // 24B = 8B + 12B + 4B
Alias(Symbol, PoolVec<(PoolStr, TypeId)>, TypeId), // 24B = 8B + 8B + 4B + pad
AsAlias(Symbol, PoolVec<(PoolStr, TypeId)>, TypeId), // 24B = 8B + 8B + 4B + pad
// 32B
// 24B
HostExposedAlias {
name: Symbol, // 8B
arguments: PoolVec<(PoolStr, TypeId)>, // 12B
arguments: PoolVec<(PoolStr, TypeId)>, // 8B
actual_var: Variable, // 4B
actual: TypeId, // 4B
},
EmptyTagUnion,
TagUnion(PoolVec<(TagName, PoolVec<Type2>)>, TypeId), // 16B = 12B + 4B
RecursiveTagUnion(Variable, PoolVec<(TagName, PoolVec<Type2>)>, TypeId), // 20B = 4B + 12B + 4B
TagUnion(PoolVec<(TagName, PoolVec<Type2>)>, TypeId), // 12B = 8B + 4B
RecursiveTagUnion(Variable, PoolVec<(TagName, PoolVec<Type2>)>, TypeId), // 16B = 4B + 8B + 4B
EmptyRec,
Record(PoolVec<(PoolStr, RecordField<TypeId>)>, TypeId), // 16B = 12B + 4B
Record(PoolVec<(PoolStr, RecordField<TypeId>)>, TypeId), // 12B = 8B + 4B
Function(PoolVec<Type2>, TypeId, TypeId), // 20B = 12B + 4B + 4B
Apply(Symbol, PoolVec<Type2>), // 20B = 8B + 12B
Function(PoolVec<Type2>, TypeId, TypeId), // 16B = 8B + 4B + 4B
Apply(Symbol, PoolVec<Type2>), // 16B = 8B + 8B
Erroneous(Problem2),
Erroneous(Problem2), // 24B
}
#[test]
fn type2_size() {
assert_eq!(std::mem::size_of::<Type2>(), 32); // 24B + pad
}
#[derive(Debug)]
@ -167,9 +176,9 @@ pub enum Signature {
},
}
pub enum Annotation2<'a> {
pub enum Annotation2 {
Annotation {
named_rigids: MutMap<&'a str, Variable>,
named_rigids: MutMap<Lowercase, Variable>,
unnamed_rigids: MutSet<Variable>,
symbols: MutSet<Symbol>,
signature: Signature,
@ -182,7 +191,7 @@ pub fn to_annotation2<'a>(
scope: &mut Scope,
annotation: &'a roc_parse::ast::TypeAnnotation<'a>,
region: Region,
) -> Annotation2<'a> {
) -> Annotation2 {
let mut references = References::default();
let annotation = to_type2(env, scope, &mut references, annotation, region);
@ -236,11 +245,7 @@ pub fn to_annotation2<'a>(
}
}
fn shallow_dealias<'a>(
env: &mut Env,
references: References<'a>,
annotation: Type2,
) -> Annotation2<'a> {
fn shallow_dealias<'a>(env: &mut Env, references: References, annotation: Type2) -> Annotation2 {
let References {
named,
unnamed,
@ -284,8 +289,8 @@ fn shallow_dealias<'a>(
}
#[derive(Default)]
pub struct References<'a> {
named: MutMap<&'a str, Variable>,
pub struct References {
named: MutMap<Lowercase, Variable>,
unnamed: MutSet<Variable>,
hidden: MutSet<Variable>,
symbols: MutSet<Symbol>,
@ -294,7 +299,7 @@ pub struct References<'a> {
pub fn to_type_id<'a>(
env: &mut Env,
scope: &mut Scope,
rigids: &mut References<'a>,
rigids: &mut References,
annotation: &roc_parse::ast::TypeAnnotation<'a>,
region: Region,
) -> TypeId {
@ -306,7 +311,7 @@ pub fn to_type_id<'a>(
pub fn as_type_id<'a>(
env: &mut Env,
scope: &mut Scope,
rigids: &mut References<'a>,
rigids: &mut References,
type_id: TypeId,
annotation: &roc_parse::ast::TypeAnnotation<'a>,
region: Region,
@ -320,7 +325,7 @@ pub fn as_type_id<'a>(
pub fn to_type2<'a>(
env: &mut Env,
scope: &mut Scope,
references: &mut References<'a>,
references: &mut References,
annotation: &roc_parse::ast::TypeAnnotation<'a>,
region: Region,
) -> Type2 {
@ -371,8 +376,9 @@ pub fn to_type2<'a>(
Type2::Function(arguments, closure_type_id, return_type_id)
}
BoundVariable(v) => {
// a rigid type variable
match references.named.get(v) {
// A rigid type variable. The parser should have already ensured that the name is indeed a lowercase.
let v = Lowercase::from(*v);
match references.named.get(&v) {
Some(var) => Type2::Variable(*var),
None => {
let var = env.var_store.fresh();
@ -383,6 +389,11 @@ pub fn to_type2<'a>(
}
}
}
Inferred => {
let var = env.var_store.fresh();
Type2::Variable(var)
}
Wildcard | Malformed(_) => {
let var = env.var_store.fresh();
@ -391,12 +402,13 @@ pub fn to_type2<'a>(
Type2::Variable(var)
}
Record { fields, ext, .. } => {
let field_types_map = can_assigned_fields(env, scope, references, fields, region);
let field_types_map =
can_assigned_fields(env, scope, references, &fields.items, region);
let field_types = PoolVec::with_capacity(field_types_map.len() as u32, env.pool);
for (node_id, (label, field)) in field_types.iter_node_ids().zip(field_types_map) {
let poolstr = PoolStr::new(label, env.pool);
let poolstr = PoolStr::new(label.as_str(), env.pool);
let rec_field = match field {
RecordField::Optional(_) => {
@ -423,7 +435,7 @@ pub fn to_type2<'a>(
Type2::Record(field_types, ext_type)
}
TagUnion { tags, ext, .. } => {
let tag_types_vec = can_tags(env, scope, references, tags, region);
let tag_types_vec = can_tags(env, scope, references, tags.items, region);
let tag_types = PoolVec::with_capacity(tag_types_vec.len() as u32, env.pool);
@ -475,10 +487,10 @@ pub fn to_type2<'a>(
{
match loc_var.value {
BoundVariable(ident) => {
let var_name = ident;
let var_name = Lowercase::from(ident);
if let Some(var) = references.named.get(&var_name) {
let poolstr = PoolStr::new(var_name, env.pool);
let poolstr = PoolStr::new(var_name.as_str(), env.pool);
let type_id = env.pool.add(Type2::Variable(*var));
env.pool[var_id] = (poolstr.shallow_clone(), type_id);
@ -489,7 +501,7 @@ pub fn to_type2<'a>(
let var = env.var_store.fresh();
references.named.insert(var_name.clone(), var);
let poolstr = PoolStr::new(var_name, env.pool);
let poolstr = PoolStr::new(var_name.as_str(), env.pool);
let type_id = env.pool.add(Type2::Variable(var));
env.pool[var_id] = (poolstr.shallow_clone(), type_id);
@ -571,10 +583,10 @@ pub fn to_type2<'a>(
fn can_assigned_fields<'a>(
env: &mut Env,
scope: &mut Scope,
rigids: &mut References<'a>,
rigids: &mut References,
fields: &&[Located<roc_parse::ast::AssignedField<'a, roc_parse::ast::TypeAnnotation<'a>>>],
region: Region,
) -> MutMap<&'a str, RecordField<Type2>> {
) -> MutMap<Lowercase, RecordField<Type2>> {
use roc_parse::ast::AssignedField::*;
use roc_types::types::RecordField::*;
@ -597,8 +609,8 @@ fn can_assigned_fields<'a>(
let field_type =
to_type2(env, scope, rigids, &annotation.value, annotation.region);
let label = field_name.value;
field_types.insert(label, Required(field_type));
let label = Lowercase::from(field_name.value);
field_types.insert(label.clone(), Required(field_type));
break 'inner label;
}
@ -606,20 +618,20 @@ fn can_assigned_fields<'a>(
let field_type =
to_type2(env, scope, rigids, &annotation.value, annotation.region);
let label = field_name.value;
let label = Lowercase::from(field_name.value);
field_types.insert(label.clone(), Optional(field_type));
break 'inner label;
}
LabelOnly(loc_field_name) => {
// Interpret { a, b } as { a : a, b : b }
let field_name = loc_field_name.value;
let field_name = Lowercase::from(loc_field_name.value);
let field_type = {
if let Some(var) = rigids.named.get(&field_name) {
Type2::Variable(*var)
} else {
let field_var = env.var_store.fresh();
rigids.named.insert(field_name, field_var);
rigids.named.insert(field_name.clone(), field_var);
Type2::Variable(field_var)
}
};
@ -659,7 +671,7 @@ fn can_assigned_fields<'a>(
fn can_tags<'a>(
env: &mut Env,
scope: &mut Scope,
rigids: &mut References<'a>,
rigids: &mut References,
tags: &'a [Located<roc_parse::ast::Tag<'a>>],
region: Region,
) -> Vec<(TagName, PoolVec<Type2>)> {
@ -743,7 +755,7 @@ enum TypeApply {
fn to_type_apply<'a>(
env: &mut Env,
scope: &mut Scope,
rigids: &mut References<'a>,
rigids: &mut References,
module_name: &str,
ident: &str,
type_arguments: &[Located<roc_parse::ast::TypeAnnotation<'a>>],

View File

@ -0,0 +1,101 @@
use crate::{
lang::{core::expr::expr2_to_string::expr2_to_string, rigids::Rigids},
mem_pool::{
pool::{NodeId, Pool},
shallow_clone::ShallowClone,
},
};
use roc_types::subs::Variable;
use super::{
expr::expr2::ExprId,
pattern::{Pattern2, PatternId},
types::TypeId,
};
/// A canonicalized value definition, with or without a type annotation.
/// All payloads are small ids/handles into the module's `Pool`.
#[derive(Debug)]
pub enum ValueDef {
    WithAnnotation {
        pattern_id: PatternId, // 4B
        expr_id: ExprId,       // 4B
        type_id: TypeId,
        rigids: Rigids,
        expr_var: Variable, // 4B
    },
    NoAnnotation {
        pattern_id: PatternId, // 4B
        expr_id: ExprId,       // 4B
        expr_var: Variable,    // 4B
    },
}
/// A `ValueDef` holds only copyable ids plus a `Rigids`, so a shallow clone
/// copies the handles and shallow-clones the rigids; no pool data is copied.
impl ShallowClone for ValueDef {
    fn shallow_clone(&self) -> Self {
        match self {
            Self::NoAnnotation {
                pattern_id,
                expr_id,
                expr_var,
            } => Self::NoAnnotation {
                pattern_id: *pattern_id,
                expr_id: *expr_id,
                expr_var: *expr_var,
            },
            Self::WithAnnotation {
                pattern_id,
                expr_id,
                type_id,
                rigids,
                expr_var,
            } => Self::WithAnnotation {
                pattern_id: *pattern_id,
                expr_id: *expr_id,
                type_id: *type_id,
                rigids: rigids.shallow_clone(),
                expr_var: *expr_var,
            },
        }
    }
}
impl ValueDef {
pub fn get_expr_id(&self) -> ExprId {
match self {
ValueDef::WithAnnotation { expr_id, .. } => *expr_id,
ValueDef::NoAnnotation { expr_id, .. } => *expr_id,
}
}
pub fn get_pattern_id(&self) -> NodeId<Pattern2> {
match self {
ValueDef::WithAnnotation { pattern_id, .. } => *pattern_id,
ValueDef::NoAnnotation { pattern_id, .. } => *pattern_id,
}
}
}
/// Renders a `ValueDef` for debugging, resolving the pattern/type ids and
/// the expression through `pool` so the output shows contents, not indices.
pub fn value_def_to_string(val_def: &ValueDef, pool: &Pool) -> String {
    match val_def {
        ValueDef::WithAnnotation {
            pattern_id,
            expr_id,
            type_id,
            rigids,
            expr_var,
        } => {
            format!("WithAnnotation {{ pattern_id: {:?}, expr_id: {:?}, type_id: {:?}, rigids: {:?}, expr_var: {:?}}}", pool.get(*pattern_id), expr2_to_string(*expr_id, pool), pool.get(*type_id), rigids, expr_var)
        }
        ValueDef::NoAnnotation {
            pattern_id,
            expr_id,
            expr_var,
        } => {
            format!(
                "NoAnnotation {{ pattern_id: {:?}, expr_id: {:?}, expr_var: {:?}}}",
                pool.get(*pattern_id),
                expr2_to_string(*expr_id, pool),
                expr_var
            )
        }
    }
}

182
ast/src/lang/env.rs Normal file
View File

@ -0,0 +1,182 @@
use crate::mem_pool::pool::{NodeId, Pool};
use bumpalo::{collections::Vec as BumpVec, Bump};
use roc_collections::all::{MutMap, MutSet};
use roc_module::ident::{Ident, Lowercase, ModuleName};
use roc_module::symbol::{IdentIds, ModuleId, ModuleIds, Symbol};
use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Located, Region};
use roc_types::subs::VarStore;
use super::core::def::def::References;
/// Shared canonicalization state for one module.
#[derive(Debug)]
pub struct Env<'a> {
    /// The module currently being canonicalized.
    pub home: ModuleId,
    /// Source of fresh type variables.
    pub var_store: &'a mut VarStore,
    /// Node storage for the module's AST.
    pub pool: &'a mut Pool,
    /// Arena for transient allocations (e.g. the problems list).
    pub arena: &'a Bump,

    /// Problems reported during canonicalization (non-fatal).
    pub problems: BumpVec<'a, Problem>,

    /// Idents exposed by each imported module, for qualified lookups.
    pub dep_idents: MutMap<ModuleId, IdentIds>,
    pub module_ids: &'a ModuleIds,
    /// All idents of this module; starts as a clone of `exposed_ident_ids`
    /// and grows via Scope.introduce.
    pub ident_ids: IdentIds,
    pub exposed_ident_ids: IdentIds,

    /// Maps each closure's symbol to the references its body makes.
    pub closures: MutMap<Symbol, References>,
    /// Symbols which were referenced by qualified lookups.
    pub qualified_lookups: MutSet<Symbol>,

    pub top_level_symbols: MutSet<Symbol>,

    pub closure_name_symbol: Option<Symbol>,
    /// Set when canonicalizing a function body that may tail-call itself.
    pub tailcallable_symbol: Option<Symbol>,
}
impl<'a> Env<'a> {
    /// Builds a fresh canonicalization environment for module `home`.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        home: ModuleId,
        arena: &'a Bump,
        pool: &'a mut Pool,
        var_store: &'a mut VarStore,
        dep_idents: MutMap<ModuleId, IdentIds>,
        module_ids: &'a ModuleIds,
        exposed_ident_ids: IdentIds,
    ) -> Env<'a> {
        Env {
            home,
            arena,
            pool,
            problems: BumpVec::new_in(arena),
            var_store,
            dep_idents,
            module_ids,
            ident_ids: exposed_ident_ids.clone(), // we start with these, but will add more later using Scope.introduce
            exposed_ident_ids,
            closures: MutMap::default(),
            qualified_lookups: MutSet::default(),
            tailcallable_symbol: None,
            closure_name_symbol: None,
            top_level_symbols: MutSet::default(),
        }
    }

    /// Stores `item` in the pool and records its source region, returning
    /// the new node's id.
    pub fn add<T>(&mut self, item: T, region: Region) -> NodeId<T> {
        let id = self.pool.add(item);
        self.set_region(id, region);

        id
    }

    /// Records a (non-fatal) canonicalization problem.
    pub fn problem(&mut self, problem: Problem) {
        self.problems.push(problem);
    }

    /// Placeholder: region tracking is not implemented yet; currently just
    /// prints a reminder via dbg!.
    pub fn set_region<T>(&mut self, _node_id: NodeId<T>, _region: Region) {
        dbg!("Don't Forget to set the region eventually");
    }

    /// Associates a closure's symbol with the references its body makes.
    pub fn register_closure(&mut self, symbol: Symbol, references: References) {
        self.closures.insert(symbol, references);
    }

    /// Generates a unique, new symbol like "$1" or "$5",
    /// using the home module as the module_id.
    ///
    /// This is used, for example, during canonicalization of an Expr::Closure
    /// to generate a unique symbol to refer to that closure.
    pub fn gen_unique_symbol(&mut self) -> Symbol {
        let ident_id = self.ident_ids.gen_unique();

        Symbol::new(self.home, ident_id)
    }

    /// Returns Err if the symbol resolved, but it was not exposed by the given module
    pub fn qualified_lookup(
        &mut self,
        module_name: &str,
        ident: &str,
        region: Region,
    ) -> Result<Symbol, RuntimeError> {
        debug_assert!(
            !module_name.is_empty(),
            "Called env.qualified_lookup with an unqualified ident: {:?}",
            ident
        );

        let module_name: ModuleName = module_name.into();

        match self.module_ids.get_id(&module_name) {
            Some(&module_id) => {
                let ident: Ident = ident.into();

                // You can do qualified lookups on your own module, e.g.
                // if I'm in the Foo module, I can do a `Foo.bar` lookup.
                if module_id == self.home {
                    match self.ident_ids.get_id(&ident) {
                        Some(ident_id) => {
                            let symbol = Symbol::new(module_id, *ident_id);

                            self.qualified_lookups.insert(symbol);

                            Ok(symbol)
                        }
                        // Unknown ident in our own module: report everything
                        // in scope so the error can suggest alternatives.
                        None => Err(RuntimeError::LookupNotInScope(
                            Located {
                                value: ident,
                                region,
                            },
                            self.ident_ids
                                .idents()
                                .map(|(_, string)| string.as_ref().into())
                                .collect(),
                        )),
                    }
                } else {
                    // Looking into another module: it must be imported and
                    // must expose the ident.
                    match self.dep_idents.get(&module_id) {
                        Some(exposed_ids) => match exposed_ids.get_id(&ident) {
                            Some(ident_id) => {
                                let symbol = Symbol::new(module_id, *ident_id);

                                self.qualified_lookups.insert(symbol);

                                Ok(symbol)
                            }
                            None => {
                                // Suggest the module's exposed lowercase
                                // (value-level) idents.
                                let exposed_values = exposed_ids
                                    .idents()
                                    .filter(|(_, ident)| {
                                        ident.as_ref().starts_with(|c: char| c.is_lowercase())
                                    })
                                    .map(|(_, ident)| Lowercase::from(ident.as_ref()))
                                    .collect();
                                Err(RuntimeError::ValueNotExposed {
                                    module_name,
                                    ident,
                                    region,
                                    exposed_values,
                                })
                            }
                        },
                        None => {
                            // Internal invariant violation, not a user error.
                            panic!(
                                "Module {} exists, but is not recorded in dep_idents",
                                module_name
                            )
                        }
                    }
                }
            }
            None => Err(RuntimeError::ModuleNotImported {
                module_name,
                imported_modules: self
                    .module_ids
                    .available_modules()
                    .map(|string| string.as_ref().into())
                    .collect(),
                region,
            }),
        }
    }
}

4
ast/src/lang/mod.rs Normal file
View File

@ -0,0 +1,4 @@
pub mod core;
pub mod env;
mod rigids;
pub mod scope;

82
ast/src/lang/rigids.rs Normal file
View File

@ -0,0 +1,82 @@
use std::{
collections::{HashMap, HashSet},
hash::BuildHasherDefault,
};
use crate::mem_pool::{
pool::Pool, pool_str::PoolStr, pool_vec::PoolVec, shallow_clone::ShallowClone,
};
use roc_collections::all::WyHash;
use roc_module::ident::Lowercase;
use roc_types::subs::Variable;
/// The rigid type variables of a def, stored as one flat pool vector:
/// entries with `Some(name)` are named rigids, `None` entries are unnamed.
#[derive(Debug)]
pub struct Rigids {
    pub names: PoolVec<(Option<PoolStr>, Variable)>, // 8B
    padding: [u8; 1],
}
// The intermediate `collect`s below are not needless: the iterators borrow
// `self.names` via `pool`, so they must be materialized before `pool` can be
// mutably borrowed again by `PoolVec::new`.
#[allow(clippy::needless_collect)]
impl Rigids {
    /// Packs named and unnamed rigids into a single pool vector; named
    /// entries get `Some(PoolStr)`, unnamed entries `None`.
    pub fn new(
        named: HashMap<Lowercase, Variable, BuildHasherDefault<WyHash>>,
        unnamed: HashSet<Variable, BuildHasherDefault<WyHash>>,
        pool: &mut Pool,
    ) -> Self {
        let names = PoolVec::with_capacity((named.len() + unnamed.len()) as u32, pool);

        // Gather both kinds first; `pool` is mutated in the loop below.
        let mut temp_names = Vec::new();

        temp_names.extend(named.iter().map(|(name, var)| (Some(name.as_str()), *var)));

        temp_names.extend(unnamed.iter().map(|var| (None, *var)));

        for (node_id, (opt_name, variable)) in names.iter_node_ids().zip(temp_names) {
            let poolstr = opt_name.map(|name| PoolStr::new(name, pool));

            pool[node_id] = (poolstr, variable);
        }

        Self {
            names,
            padding: Default::default(),
        }
    }

    /// Returns only the named rigids, as (name, var) pairs.
    pub fn named(&self, pool: &mut Pool) -> PoolVec<(PoolStr, Variable)> {
        let named = self
            .names
            .iter(pool)
            .filter_map(|(opt_pool_str, var)| {
                opt_pool_str.as_ref().map(|pool_str| (*pool_str, *var))
            })
            .collect::<Vec<(PoolStr, Variable)>>();

        PoolVec::new(named.into_iter(), pool)
    }

    /// Returns only the unnamed rigid variables.
    pub fn unnamed(&self, pool: &mut Pool) -> PoolVec<Variable> {
        let unnamed = self
            .names
            .iter(pool)
            .filter_map(|(opt_pool_str, var)| {
                if opt_pool_str.is_none() {
                    Some(*var)
                } else {
                    None
                }
            })
            .collect::<Vec<Variable>>();

        PoolVec::new(unnamed.into_iter(), pool)
    }
}
impl ShallowClone for Rigids {
    /// Copies the handle to the pooled names; the pool data itself is shared.
    fn shallow_clone(&self) -> Self {
        Self {
            names: self.names.shallow_clone(),
            padding: self.padding,
        }
    }
}

View File

@ -1,11 +1,19 @@
#![allow(clippy::all)]
#![allow(dead_code)]
#![allow(unused_imports)]
use crate::lang::pool::{Pool, PoolStr, PoolVec, ShallowClone};
use crate::lang::types::{Alias, Type2, TypeId};
use std::fmt;
use crate::ast_error::ASTResult;
use crate::mem_pool::pool::Pool;
use crate::mem_pool::pool_str::PoolStr;
use crate::mem_pool::pool_vec::PoolVec;
use crate::mem_pool::shallow_clone::ShallowClone;
use roc_collections::all::{MutMap, MutSet};
use roc_module::ident::{Ident, Lowercase};
use roc_module::symbol::{IdentIds, ModuleId, Symbol};
use roc_module::symbol::{
get_module_ident_ids, get_module_ident_ids_mut, IdentIds, Interns, ModuleId, Symbol,
};
use roc_problem::can::RuntimeError;
use roc_region::all::{Located, Region};
use roc_types::{
@ -14,6 +22,9 @@ use roc_types::{
subs::{VarId, VarStore, Variable},
};
use super::core::types::{Alias, Type2, TypeId};
use super::env::Env;
fn solved_type_to_type_id(
pool: &mut Pool,
solved_type: &SolvedType,
@ -151,7 +162,7 @@ impl Scope {
let alias = Alias {
actual,
/// We know that builtin aliases have no hiddden variables (e.g. in closures)
/// We know that builtin aliases have no hidden variables (e.g. in closures)
hidden_variables: PoolVec::empty(pool),
targs: variables,
};
@ -307,6 +318,25 @@ impl Scope {
pub fn contains_alias(&mut self, name: Symbol) -> bool {
self.aliases.contains_key(&name)
}
/// Introduces every ident of the home module's `IdentIds` into this scope.
///
/// The ident id map for `env.home` is cloned up front so we can iterate it
/// while `introduce` takes mutable access to the shared `all_ident_ids`.
pub fn fill_scope(
    &mut self,
    env: &Env,
    all_ident_ids: &mut MutMap<ModuleId, IdentIds>,
) -> ASTResult<()> {
    let ident_ids = get_module_ident_ids(all_ident_ids, &env.home)?.clone();

    for (_, ident_ref) in ident_ids.idents() {
        self.introduce(
            ident_ref.as_inline_str().as_str().into(),
            &env.exposed_ident_ids,
            get_module_ident_ids_mut(all_ident_ids, &env.home)?,
            Region::zero(),
        )?;
    }

    Ok(())
}
}
impl ShallowClone for Scope {

8
ast/src/lib.rs Normal file
View File

@ -0,0 +1,8 @@
pub mod ast_error;
mod canonicalization;
pub mod constrain;
pub mod lang;
pub mod mem_pool;
pub mod module;
pub mod parse;
pub mod solve_type;

4
ast/src/mem_pool/mod.rs Normal file
View File

@ -0,0 +1,4 @@
pub mod pool;
pub mod pool_str;
pub mod pool_vec;
pub mod shallow_clone;

239
ast/src/mem_pool/pool.rs Normal file
View File

@ -0,0 +1,239 @@
/// A memory pool of 32-byte nodes. The node value 0 is reserved for the pool's
/// use, and valid nodes may never have that value.
///
/// Internally, the pool is divided into pages of 4096 bytes. It stores nodes
/// into one page at a time, and when it runs out, it uses mmap to reserve an
/// anonymous memory page in which to store nodes.
///
/// Since nodes are 32 bytes, one page can store 128 nodes; you can access a
/// particular node by its NodeId, which is an opaque wrapper around a pointer.
///
/// Pages also use the node value 0 (all 0 bits) to mark nodes as unoccupied.
/// This is important for performance.
use libc::{MAP_ANONYMOUS, MAP_PRIVATE, PROT_READ, PROT_WRITE};
use std::any::type_name;
use std::ffi::c_void;
use std::marker::PhantomData;
use std::mem::{align_of, size_of, MaybeUninit};
use std::ptr::null;
pub const NODE_BYTES: usize = 32;
// Each page has 128 slots. Each slot holds one 32B node
// This means each page is 4096B, which is the size of a memory page
// on typical systems where the compiler will be run.
//
// Nice things about this system include:
// * Allocating a new page is as simple as asking the OS for a memory page.
// * Since each node is 32B, each node's memory address will be a multiple of 16.
// * Thanks to the free lists and our consistent chunk sizes, we should
// end up with very little fragmentation.
// * Finding a slot for a given node should be very fast: see if the relevant
// free list has any openings; if not, try the next size up.
//
// Less nice things include:
// * This system makes it very hard to ever give a page back to the OS.
// We could try doing the Mesh Allocator strategy: whenever we allocate
// something, assign it to a random slot in the page, and then periodically
// try to merge two pages into one (by locking and remapping them in the OS)
// and then returning the redundant physical page back to the OS. This should
// work in theory, but is pretty complicated, and we'd need to schedule it.
// Keep in mind that we can't use the Mesh Allocator itself because it returns
// usize pointers, which would be too big for us to have 16B nodes.
// On the plus side, we could be okay with higher memory usage early on,
// and then later use the Mesh strategy to reduce long-running memory usage.
//
// With this system, we can allocate up to 4B nodes. If we wanted to keep
// a generational index in there, like https://crates.io/crates/sharded-slab
// does, we could use some of the 32 bits for that. For example, if we wanted
// to have a 5-bit generational index (supporting up to 32 generations), then
// we would have 27 bits remaining, meaning we could only support at most
// 134M nodes. Since the editor has a separate Pool for each module, is that
// enough for any single module we'll encounter in practice? Probably, and
// especially if we allocate super large collection literals on the heap instead
// of in the pool.
//
// Another possible design is to try to catch reuse bugs using an "ASan" like
// approach: in development builds, whenever we "free" a particular slot, we
// can add it to a dev-build-only "freed nodes" list and don't hand it back
// out (so, we leak the memory.) Then we can (again, in development builds only)
// check to see if we're about to store something in zeroed-out memory; if so, check
// to see if it was previously freed — i.e. catch use-after-free bugs.
/// An opaque, typed index into a `Pool`.
///
/// `Clone`, `PartialEq` and `Copy` are implemented by hand (rather than
/// derived) so they do not require `T: Clone` / `T: PartialEq` bounds —
/// the `T` is only phantom.
#[derive(Debug, Eq)]
pub struct NodeId<T> {
    pub(super) index: u32,
    pub(super) _phantom: PhantomData<T>,
}

impl<T> Clone for NodeId<T> {
    fn clone(&self) -> Self {
        NodeId {
            index: self.index,
            _phantom: PhantomData::default(),
        }
    }
}

impl<T> PartialEq for NodeId<T> {
    // Two ids are equal iff they refer to the same slot.
    fn eq(&self, other: &Self) -> bool {
        self.index == other.index
    }
}

impl<T> Copy for NodeId<T> {}
/// The pool itself: a raw pointer to mmap'd node storage plus counters.
#[derive(Debug)]
pub struct Pool {
    // Base pointer to the first 32-byte node slot.
    pub(super) nodes: *mut [MaybeUninit<u8>; NODE_BYTES],
    // Number of node slots handed out so far.
    num_nodes: u32,
    // Total node slots available in the mapping.
    capacity: u32,
    // free_1node_slots: Vec<NodeId<T>>,
}
impl Pool {
    /// Creates a pool backed by anonymous mmap'd memory, rounding the
    /// requested node count up to a whole number of OS pages (so the actual
    /// capacity may exceed `nodes`).
    pub fn with_capacity(nodes: u32) -> Self {
        // round up number of nodes requested to nearest page size in bytes
        let bytes_per_page = page_size::get();
        let node_bytes = NODE_BYTES * nodes as usize;

        let leftover = node_bytes % bytes_per_page;
        let bytes_to_mmap = if leftover == 0 {
            node_bytes
        } else {
            node_bytes + bytes_per_page - leftover
        };

        let nodes = unsafe {
            // mmap anonymous memory pages - that is, contiguous virtual memory
            // addresses from the OS which will be lazily translated into
            // physical memory one 4096-byte page at a time, once we actually
            // try to read or write in that page's address range.
            //
            // Bug fix: POSIX requires fd == -1 with MAP_ANONYMOUS (Linux
            // tolerates other values, but -1 is the portable choice).
            let ptr = libc::mmap(
                null::<c_void>() as *mut c_void,
                bytes_to_mmap,
                PROT_READ | PROT_WRITE,
                MAP_PRIVATE | MAP_ANONYMOUS,
                -1,
                0,
            );

            // A failed mmap returns MAP_FAILED (not null); fail loudly here
            // rather than writing nodes through a bogus pointer later.
            assert_ne!(
                ptr,
                libc::MAP_FAILED,
                "mmap of {} bytes for the node pool failed",
                bytes_to_mmap
            );

            ptr
        } as *mut [MaybeUninit<u8>; NODE_BYTES];

        // This is our actual capacity, in nodes.
        // It might be higher than the requested capacity due to rounding up
        // to nearest page size.
        let capacity = (bytes_to_mmap / NODE_BYTES) as u32;

        Pool {
            nodes,
            num_nodes: 0,
            capacity,
        }
    }

    /// Stores `node` in a fresh slot and returns its id.
    /// `T` must fit in one 32-byte node (checked in debug builds).
    pub fn add<T>(&mut self, node: T) -> NodeId<T> {
        // It's only safe to store this if T fits in S.
        debug_assert!(
            size_of::<T>() <= NODE_BYTES,
            "{} has a size of {}, but it needs to be at most {}",
            type_name::<T>(),
            size_of::<T>(),
            NODE_BYTES
        );

        let node_id = self.reserve(1);
        let node_ptr = self.get_ptr(node_id);

        unsafe { node_ptr.write(MaybeUninit::new(node)) };

        node_id
    }

    /// Reserves the given number of contiguous node slots, and returns
    /// the NodeId of the first one. We only allow reserving 2^32 in a row.
    pub(super) fn reserve<T>(&mut self, nodes: u32) -> NodeId<T> {
        // TODO once we have a free list, look in there for an open slot first!
        let index = self.num_nodes;

        // Bug fix: verify that ALL requested slots fit, not just the first
        // one — `index < capacity` alone let a multi-node reservation run
        // past the end of the mapping. (`index <= capacity` is an invariant,
        // so the subtraction cannot underflow.)
        if nodes <= self.capacity - index {
            self.num_nodes = index + nodes;

            NodeId {
                index,
                _phantom: PhantomData::default(),
            }
        } else {
            todo!("pool ran out of capacity. TODO reallocate the nodes pointer to map to a bigger space. Can use mremap on Linux, but must memcpy lots of bytes on macOS and Windows.");
        }
    }

    /// Returns a shared reference to the node at `node_id`.
    /// NOTE(review): the 'b output lifetime is unconstrained by &'a self —
    /// callers can keep references across pool mutations; confirm intended.
    pub fn get<'a, 'b, T>(&'a self, node_id: NodeId<T>) -> &'b T {
        unsafe {
            let node_ptr = self.get_ptr(node_id) as *const T;

            &*node_ptr
        }
    }

    /// Returns a mutable reference to the node at `node_id`.
    pub fn get_mut<T>(&mut self, node_id: NodeId<T>) -> &mut T {
        unsafe {
            let node_ptr = self.get_ptr(node_id) as *mut T;

            &mut *node_ptr
        }
    }

    /// Overwrites the node at `node_id` with `element` (no drop of the old
    /// value — nodes are plain pool storage).
    pub fn set<T>(&mut self, node_id: NodeId<T>, element: T) {
        unsafe {
            let node_ptr = self.get_ptr(node_id);

            node_ptr.write(MaybeUninit::new(element));
        }
    }

    /// Translates a node id into a raw slot pointer, asserting alignment
    /// for `T` (slots are 32-byte aligned, so any T up to that is fine).
    fn get_ptr<T>(&self, node_id: NodeId<T>) -> *mut MaybeUninit<T> {
        let node_offset = unsafe { self.nodes.offset(node_id.index as isize) };

        // This checks if the node_offset is aligned to T
        assert!(0 == (node_offset as usize) & (align_of::<T>() - 1));

        node_offset as *mut MaybeUninit<T>
    }

    // A node is available iff its bytes are all zeroes
    #[allow(dead_code)]
    fn is_available<T>(&self, node_id: NodeId<T>) -> bool {
        debug_assert_eq!(size_of::<T>(), NODE_BYTES);

        unsafe {
            let node_ptr = self.nodes.offset(node_id.index as isize) as *const [u8; NODE_BYTES];

            *node_ptr == [0; NODE_BYTES]
        }
    }
}
impl<T> std::ops::Index<NodeId<T>> for Pool {
    type Output = T;

    // Sugar for `pool.get(node_id)`.
    fn index(&self, node_id: NodeId<T>) -> &Self::Output {
        self.get(node_id)
    }
}

impl<T> std::ops::IndexMut<NodeId<T>> for Pool {
    // Sugar for `pool.get_mut(node_id)`.
    fn index_mut(&mut self, node_id: NodeId<T>) -> &mut Self::Output {
        self.get_mut(node_id)
    }
}

impl Drop for Pool {
    /// Returns the whole mapping to the OS. Deallocation must succeed, so
    /// the munmap result is intentionally ignored.
    fn drop(&mut self) {
        unsafe {
            libc::munmap(
                self.nodes as *mut c_void,
                NODE_BYTES * self.capacity as usize,
            );
        }
    }
}

View File

@ -0,0 +1,86 @@
use super::pool::{NodeId, Pool, NODE_BYTES};
use super::shallow_clone::ShallowClone;
use std::ffi::c_void;
use std::marker::PhantomData;
use std::mem::size_of;
/// A string containing at most 2^32 pool-allocated bytes.
///
/// This is only an (index, length) handle; reading the text requires the
/// owning `Pool`.
#[derive(Debug, Copy, Clone)]
pub struct PoolStr {
    first_node_id: NodeId<()>,
    len: u32,
}

#[test]
fn pool_str_size() {
    // The handle must stay 8 bytes so it packs tightly into 32-byte nodes.
    assert_eq!(size_of::<PoolStr>(), 8);
}
impl PoolStr {
pub fn new(string: &str, pool: &mut Pool) -> Self {
debug_assert!(string.len() <= u32::MAX as usize);
let chars_per_node = NODE_BYTES / size_of::<char>();
let number_of_nodes = f64::ceil(string.len() as f64 / chars_per_node as f64) as u32;
if number_of_nodes > 0 {
let first_node_id = pool.reserve(number_of_nodes);
let index = first_node_id.index as isize;
let next_node_ptr = unsafe { pool.nodes.offset(index) } as *mut c_void;
unsafe {
libc::memcpy(
next_node_ptr,
string.as_ptr() as *const c_void,
string.len(),
);
}
PoolStr {
first_node_id,
len: string.len() as u32,
}
} else {
PoolStr {
first_node_id: NodeId {
index: 0,
_phantom: PhantomData::default(),
},
len: 0,
}
}
}
pub fn as_str(&self, pool: &Pool) -> &str {
unsafe {
let node_ptr = pool.nodes.offset(self.first_node_id.index as isize) as *const u8;
let node_slice: &[u8] = std::slice::from_raw_parts(node_ptr, self.len as usize);
std::str::from_utf8_unchecked(&node_slice[0..self.len as usize])
}
}
#[allow(clippy::len_without_is_empty)]
pub fn len(&self, pool: &Pool) -> usize {
let contents = self.as_str(pool);
contents.len()
}
pub fn is_empty(&self, pool: &Pool) -> bool {
self.len(pool) == 0
}
}
impl ShallowClone for PoolStr {
    /// A `PoolStr` is just a node id plus a length, and it is `Copy`, so a
    /// shallow clone is a bitwise copy. The pool bytes themselves are shared
    /// (aliased) between the original and the clone, not duplicated.
    fn shallow_clone(&self) -> Self {
        *self
    }
}

View File

@ -0,0 +1,323 @@
use super::pool::{NodeId, Pool, NODE_BYTES};
use super::shallow_clone::ShallowClone;
use std::any::type_name;
use std::cmp::Ordering;
use std::ffi::c_void;
use std::marker::PhantomData;
use std::mem::size_of;
/// An array of at most 2^32 pool-allocated nodes.
#[derive(Debug)]
pub struct PoolVec<T> {
    // First element's node; the rest are stored in consecutive nodes.
    first_node_id: NodeId<T>,
    // Number of elements.
    len: u32,
}
// PoolVec must stay small: a 4-byte node index plus a 4-byte length.
#[test]
fn pool_vec_size() {
    assert_eq!(size_of::<PoolVec<()>>(), 8);
}
impl<'a, T: 'a + Sized> PoolVec<T> {
    /// An empty vec; reserves no pool nodes.
    pub fn empty(pool: &mut Pool) -> Self {
        Self::new(std::iter::empty(), pool)
    }

    /// Reserves `len` uninitialized nodes. Callers are expected to fill them
    /// (e.g. via `iter_node_ids` + `Pool::set`) before reading.
    pub fn with_capacity(len: u32, pool: &mut Pool) -> Self {
        debug_assert!(
            size_of::<T>() <= NODE_BYTES,
            "{} has a size of {}",
            type_name::<T>(),
            size_of::<T>()
        );

        if len == 0 {
            Self::empty(pool)
        } else {
            let first_node_id = pool.reserve(len);

            PoolVec { first_node_id, len }
        }
    }

    pub fn len(&self) -> usize {
        self.len as usize
    }

    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Copies every element of `nodes` into freshly reserved, consecutive
    /// pool nodes.
    pub fn new<I: ExactSizeIterator<Item = T>>(nodes: I, pool: &mut Pool) -> Self {
        debug_assert!(nodes.len() <= u32::MAX as usize);
        debug_assert!(size_of::<T>() <= NODE_BYTES);

        let len = nodes.len() as u32;

        if len > 0 {
            let first_node_id = pool.reserve(len);
            let index = first_node_id.index as isize;

            for (node_index, node) in nodes.enumerate() {
                unsafe {
                    let node_ptr = pool.nodes.offset(index + node_index as isize) as *mut T;

                    // Use `write` rather than `*node_ptr = node`: plain
                    // assignment would run T's destructor on the slot's
                    // previous (uninitialized or zeroed) contents, which is
                    // undefined behavior for types with a Drop impl.
                    node_ptr.write(node);
                }
            }

            PoolVec { first_node_id, len }
        } else {
            PoolVec {
                first_node_id: NodeId {
                    index: 0,
                    _phantom: PhantomData::default(),
                },
                len: 0,
            }
        }
    }

    pub fn iter(&self, pool: &'a Pool) -> impl ExactSizeIterator<Item = &'a T> {
        self.pool_list_iter(pool)
    }

    pub fn iter_mut(&self, pool: &'a mut Pool) -> impl ExactSizeIterator<Item = &'a mut T> {
        self.pool_list_iter_mut(pool)
    }

    pub fn iter_node_ids(&self) -> impl ExactSizeIterator<Item = NodeId<T>> {
        self.pool_list_iter_node_ids()
    }

    /// Private version of into_iter which exposes the implementation detail
    /// of PoolVecIter. We don't want that struct to be public, but we
    /// actually do want to have this separate function for code reuse
    /// in the iterator's next() method.
    #[inline(always)]
    fn pool_list_iter(&self, pool: &'a Pool) -> PoolVecIter<'a, T> {
        PoolVecIter {
            pool,
            current_node_id: self.first_node_id,
            len_remaining: self.len,
        }
    }

    // NOTE(review): this takes `&'a Pool` (shared) yet the resulting iterator
    // hands out `&'a mut T`. `iter_mut` funnels a `&mut Pool` through here, so
    // call sites are exclusive, but the shared-reference plumbing looks
    // unsound under Rust's aliasing rules — worth revisiting.
    #[inline(always)]
    fn pool_list_iter_mut(&self, pool: &'a Pool) -> PoolVecIterMut<'a, T> {
        PoolVecIterMut {
            pool,
            current_node_id: self.first_node_id,
            len_remaining: self.len,
        }
    }

    #[inline(always)]
    fn pool_list_iter_node_ids(&self) -> PoolVecIterNodeIds<T> {
        PoolVecIterNodeIds {
            current_node_id: self.first_node_id,
            len_remaining: self.len,
        }
    }

    /// Zeroes out this vec's nodes, marking them available again.
    /// NOTE(review): the type parameter `S` is unused — kept for source
    /// compatibility with existing `free::<X>()` call sites.
    pub fn free<S>(self, pool: &'a mut Pool) {
        // zero out the memory
        unsafe {
            let index = self.first_node_id.index as isize;
            let node_ptr = pool.nodes.offset(index) as *mut c_void;
            let bytes = self.len as usize * NODE_BYTES;

            libc::memset(node_ptr, 0, bytes);
        }

        // TODO insert it into the pool's free list
    }
}
impl<T> ShallowClone for PoolVec<T> {
fn shallow_clone(&self) -> Self {
// Question: should this fully clone, or is a shallow copy
// (and the aliasing it entails) OK?
Self {
first_node_id: self.first_node_id,
len: self.len,
}
}
}
// Shared-reference iterator over a PoolVec's elements.
struct PoolVecIter<'a, T> {
    pool: &'a Pool,
    // Node of the element `next()` will yield.
    current_node_id: NodeId<T>,
    // Elements not yet yielded.
    len_remaining: u32,
}
impl<'a, T> ExactSizeIterator for PoolVecIter<'a, T>
where
    T: 'a,
{
    // The remaining count is tracked directly, so len() is exact and O(1).
    fn len(&self) -> usize {
        self.len_remaining as usize
    }
}
impl<'a, T> Iterator for PoolVecIter<'a, T>
where
    T: 'a,
{
    type Item = &'a T;

    /// Yields a shared reference to the next element, or `None` once all
    /// `len_remaining` elements have been produced.
    fn next(&mut self) -> Option<Self::Item> {
        if self.len_remaining == 0 {
            return None;
        }

        let index = self.current_node_id.index;
        let node_ptr = unsafe { self.pool.nodes.offset(index as isize) } as *const T;

        if self.len_remaining > 1 {
            // Only step forward while more nodes follow; advancing past the
            // final element could point beyond the end of the page.
            self.current_node_id = NodeId {
                index: index + 1,
                _phantom: PhantomData::default(),
            };
        }

        self.len_remaining -= 1;

        Some(unsafe { &*node_ptr })
    }
}
// Mutable-reference iterator over a PoolVec's elements.
// NOTE(review): holds a shared `&Pool` yet yields `&mut T`; exclusivity is
// only guaranteed by `iter_mut` taking `&mut Pool` upstream — confirm.
struct PoolVecIterMut<'a, T> {
    pool: &'a Pool,
    // Node of the element `next()` will yield.
    current_node_id: NodeId<T>,
    // Elements not yet yielded.
    len_remaining: u32,
}
impl<'a, T> ExactSizeIterator for PoolVecIterMut<'a, T>
where
    T: 'a,
{
    // The remaining count is tracked directly, so len() is exact and O(1).
    fn len(&self) -> usize {
        self.len_remaining as usize
    }
}
impl<'a, T> Iterator for PoolVecIterMut<'a, T>
where
    T: 'a,
{
    type Item = &'a mut T;

    /// Yields a mutable reference to the next element, or `None` once all
    /// `len_remaining` elements have been produced.
    fn next(&mut self) -> Option<Self::Item> {
        if self.len_remaining == 0 {
            return None;
        }

        let index = self.current_node_id.index;
        let node_ptr = unsafe { self.pool.nodes.offset(index as isize) } as *mut T;

        if self.len_remaining > 1 {
            // Only step forward while more nodes follow; advancing past the
            // final element could point beyond the end of the page.
            self.current_node_id = NodeId {
                index: index + 1,
                _phantom: PhantomData::default(),
            };
        }

        self.len_remaining -= 1;

        Some(unsafe { &mut *node_ptr })
    }
}
// Iterator over the NodeIds of a PoolVec's elements (no pool access needed).
struct PoolVecIterNodeIds<T> {
    // Node id `next()` will yield.
    current_node_id: NodeId<T>,
    // Ids not yet yielded.
    len_remaining: u32,
}
impl<T> ExactSizeIterator for PoolVecIterNodeIds<T> {
    // The remaining count is tracked directly, so len() is exact and O(1).
    fn len(&self) -> usize {
        self.len_remaining as usize
    }
}
impl<T> Iterator for PoolVecIterNodeIds<T> {
    type Item = NodeId<T>;

    /// Yields consecutive node ids, or `None` once all `len_remaining` ids
    /// have been produced.
    fn next(&mut self) -> Option<Self::Item> {
        if self.len_remaining == 0 {
            return None;
        }

        let current = self.current_node_id;

        if self.len_remaining > 1 {
            // Only step forward while more nodes follow; advancing past the
            // final element could point beyond the end of the page.
            self.current_node_id = NodeId {
                index: current.index + 1,
                _phantom: PhantomData::default(),
            };
        }

        self.len_remaining -= 1;

        Some(current)
    }
}
// Round-trip: values stored via PoolVec::new come back in order via iter().
#[test]
fn pool_vec_iter_test() {
    let expected_vec: Vec<usize> = vec![2, 4, 8, 16];

    let mut test_pool = Pool::with_capacity(1024);
    let pool_vec = PoolVec::new(expected_vec.clone().into_iter(), &mut test_pool);

    let current_vec: Vec<usize> = pool_vec.iter(&test_pool).copied().collect();

    assert_eq!(current_vec, expected_vec);
}

View File

@ -0,0 +1,35 @@
use roc_can::expected::Expected;
use roc_can::expected::PExpected;
/// Clones the outer node, but does not clone any nodeids
pub trait ShallowClone {
    /// Returns a copy whose pool-allocated children are shared (aliased)
    /// with the original rather than duplicated.
    fn shallow_clone(&self) -> Self;
}
impl<T> ShallowClone for Expected<T>
where
    T: ShallowClone,
{
    // Shallow-clones the payload type `T`; the surrounding metadata
    // (reason, region, pattern, annotation source) is deep-cloned or copied.
    fn shallow_clone(&self) -> Self {
        use Expected::*;

        match self {
            NoExpectation(t) => NoExpectation(t.shallow_clone()),
            ForReason(reason, t, region) => ForReason(reason.clone(), t.shallow_clone(), *region),
            FromAnnotation(loc_pat, n, source, t) => {
                FromAnnotation(loc_pat.clone(), *n, *source, t.shallow_clone())
            }
        }
    }
}
impl<T: ShallowClone> ShallowClone for PExpected<T> {
    // Pattern-expectation analogue of the Expected impl above: payload is
    // shallow-cloned, metadata is cloned/copied.
    fn shallow_clone(&self) -> Self {
        use PExpected::*;

        match self {
            NoExpectation(t) => NoExpectation(t.shallow_clone()),
            ForReason(reason, t, region) => ForReason(reason.clone(), t.shallow_clone(), *region),
        }
    }
}

39
ast/src/module.rs Normal file
View File

@ -0,0 +1,39 @@
use std::path::Path;
use bumpalo::Bump;
use roc_collections::all::MutMap;
use roc_load::file::LoadedModule;
/// Loads and typechecks the Roc module at `src_file`, panicking with a
/// human-readable message if loading fails or the path has no parent dir.
pub fn load_module(src_file: &Path) -> LoadedModule {
    let subs_by_module = MutMap::default();

    let arena = Bump::new();
    let loaded = roc_load::file::load_and_typecheck(
        &arena,
        src_file.to_path_buf(),
        arena.alloc(roc_builtins::std::standard_stdlib()),
        // The module's search root is its own directory.
        src_file.parent().unwrap_or_else(|| {
            panic!(
                "src_file {:?} did not have a parent directory but I need to have one.",
                src_file
            )
        }),
        subs_by_module,
        // NOTE(review): presumably the target pointer width in bytes
        // (64-bit) — confirm against load_and_typecheck's signature.
        8,
        roc_can::builtins::builtin_defs_map,
    );

    match loaded {
        Ok(x) => x,
        // Formatted reports get their own arm so the rendered report text is
        // shown instead of a raw debug dump.
        Err(roc_load::file::LoadingProblem::FormattedReport(report)) => {
            panic!(
                "Failed to load module from src_file {:?}. Report: {:?}",
                src_file, report
            );
        }
        Err(e) => panic!(
            "Failed to load module from src_file {:?}: {:?}",
            src_file, e
        ),
    }
}

2
ast/src/parse/mod.rs Normal file
View File

@ -0,0 +1,2 @@
pub mod parse_ast;
pub mod parse_header;

View File

@ -0,0 +1,54 @@
use bumpalo::Bump;
use roc_module::symbol::Interns;
use roc_region::all::Region;
use crate::{
ast_error::ASTResult,
lang::{
core::{
ast::AST,
def::{def2::DefId, def_to_def2::str_to_def2},
expr::expr2::Expr2,
},
env::Env,
scope::Scope,
},
};
use super::parse_header;
/// Parses `code_str` — a module header and its defs separated by the first
/// blank line — into an `AST`, allocating nodes in `env.pool`.
pub fn parse_from_string<'a>(
    code_str: &'a str,
    env: &mut Env<'a>,
    ast_arena: &'a Bump,
    interns: &mut Interns,
) -> ASTResult<AST> {
    // Split at the first blank line: header before it, body after
    // (the body keeps the leading newlines).
    let blank_line_indx = code_str
        .find("\n\n")
        .expect("I was expecting a double newline to split header and rest of code.");

    let header_str = &code_str[0..blank_line_indx];
    let tail_str = &code_str[blank_line_indx..];

    let mut scope = Scope::new(env.home, env.pool, env.var_store);
    scope.fill_scope(env, &mut interns.all_ident_ids)?;

    // A zero region is used for every def: source positions are not tracked
    // through this entry point.
    let region = Region::new(0, 0, 0, 0);

    let mut def_ids = Vec::<DefId>::new();

    let def2_vec = str_to_def2(ast_arena, tail_str, env, &mut scope, region)?;

    for def2 in def2_vec {
        let def_id = env.pool.add(def2);

        def_ids.push(def_id);
    }

    // The header parser needs an expression node id; a Blank node stands in.
    let ast_node_id = env.pool.add(Expr2::Blank);

    Ok(AST {
        header: parse_header::parse_from_string(header_str, ast_node_id),
        def_ids,
    })
}

View File

@ -0,0 +1,12 @@
use crate::lang::core::{expr::expr2::ExprId, header::AppHeader};
// TODO don't use mock struct and actually parse string
/// Returns a hard-coded placeholder `AppHeader`; `_header_str` is ignored
/// until real header parsing is implemented.
pub fn parse_from_string(_header_str: &str, ast_node_id: ExprId) -> AppHeader {
    AppHeader {
        app_name: "\"untitled-app\"".to_owned(),
        packages_base: "\"platform\"".to_owned(),
        imports: vec![],
        provides: vec!["main".to_owned()],
        ast_node_id,
    }
}

View File

@ -1,8 +1,5 @@
#![allow(clippy::all)]
#![allow(dead_code)]
use crate::lang::constrain::Constraint::{self, *};
use crate::lang::pool::{Pool, PoolVec, ShallowClone};
use crate::lang::types::Type2;
use bumpalo::Bump;
use roc_can::expected::{Expected, PExpected};
use roc_collections::all::{BumpMap, BumpMapDefault, MutMap};
@ -20,6 +17,12 @@ use roc_types::types::{
use roc_unify::unify::unify;
use roc_unify::unify::Unified::*;
use crate::constrain::Constraint;
use crate::lang::core::types::Type2;
use crate::mem_pool::pool::Pool;
use crate::mem_pool::pool_vec::PoolVec;
use crate::mem_pool::shallow_clone::ShallowClone;
// Type checking system adapted from Elm by Evan Czaplicki, BSD-3-Clause Licensed
// https://github.com/elm/compiler
// Thank you, Evan!
@ -197,6 +200,8 @@ fn solve<'a>(
subs: &mut Subs,
constraint: &Constraint,
) -> State {
use crate::solve_type::Constraint::*;
match constraint {
True => state,
// SaveTheEnvironment => {
@ -805,17 +810,13 @@ fn type_to_variable<'a>(
*/
let mut arg_vars = Vec::with_capacity(args.len());
let mut new_aliases = BumpMap::new_in(arena);
for (arg, arg_type_id) in args.iter(mempool) {
for (_, arg_type_id) in args.iter(mempool) {
let arg_type = mempool.get(*arg_type_id);
let arg_var = type_to_variable(arena, mempool, subs, rank, pools, cached, arg_type);
let arg_str = arg.as_str(mempool);
arg_vars.push((roc_module::ident::Lowercase::from(arg_str), arg_var));
new_aliases.insert(arg_str, arg_var);
arg_vars.push(arg_var);
}
let arg_vars = AliasVariables::insert_into_subs(subs, arg_vars, []);

236
cli/Cargo.lock generated
View File

@ -1,236 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "ascii"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "autocfg"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byteorder"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cfg-if"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "combine"
version = "3.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ascii 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "either"
version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "fraction"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"num 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "im-rc"
version = "13.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"sized-chunks 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "lazy_static"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "log"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "memchr"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "num"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"num-complex 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
"num-iter 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)",
"num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-bigint"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-complex"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-integer"
version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-iter"
version = "0.1.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-rational"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-traits"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "roc"
version = "0.1.0"
dependencies = [
"combine 3.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"fraction 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"im-rc 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"num 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "roc-cli"
version = "0.1.0"
dependencies = [
"roc 0.1.0",
]
[[package]]
name = "rustc_version"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "semver"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "semver-parser"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "sized-chunks"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "typenum"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unreachable"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "void"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum ascii 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a5fc969a8ce2c9c0c4b0429bb8431544f6658283c8326ba5ff8c762b75369335"
"checksum autocfg 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "0e49efa51329a5fd37e7c79db4621af617cd4e3e5bc224939808d076077077bf"
"checksum byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a019b10a2a7cdeb292db131fc8113e57ea2a908f6e7894b0c3c671893b65dbeb"
"checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33"
"checksum combine 3.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "da3da6baa321ec19e1cc41d31bf599f00c783d0517095cdaf0332e3fe8d20680"
"checksum either 1.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5527cfe0d098f36e3f8839852688e63c8fff1c90b2b405aef730615f9a7bcf7b"
"checksum fraction 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1055159ac82fb210c813303f716b6c8db57ace9d5ec2dbbc2e1d7a864c1dd74e"
"checksum im-rc 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0a0197597d095c0d11107975d3175173f810ee572c2501ff4de64f4f3f119806"
"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"
"checksum log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6"
"checksum memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2efc7bc57c883d4a4d6e3246905283d8dae951bb3bd32f49d6ef297f546e1c39"
"checksum num 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cf4825417e1e1406b3782a8ce92f4d53f26ec055e3622e1881ca8e9f5f9e08db"
"checksum num-bigint 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "57450397855d951f1a41305e54851b1a7b8f5d2e349543a02a2effe25459f718"
"checksum num-complex 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fcb0cf31fb3ff77e6d2a6ebd6800df7fdcd106f2ad89113c9130bcd07f93dffc"
"checksum num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09"
"checksum num-iter 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "76bd5272412d173d6bf9afdf98db8612bbabc9a7a830b7bfc9c188911716132e"
"checksum num-rational 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f2885278d5fe2adc2f75ced642d52d879bffaceb5a2e0b1d4309ffdfb239b454"
"checksum num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "6ba9a427cfca2be13aa6f6403b0b7e7368fe982bfa16fccc450ce74c46cd9b32"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum sized-chunks 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a2a2eb3fe454976eefb479f78f9b394d34d661b647c6326a3a6e66f68bb12c26"
"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169"
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"

View File

@ -14,26 +14,32 @@ path = "src/main.rs"
test = false
bench = false
[features]
default = ["target-x86", "llvm", "editor"]
wasm32-cli-run = []
i386-cli-run = []
[features]
default = ["target-aarch64", "target-x86_64", "target-wasm32", "llvm", "editor"]
wasm32-cli-run = ["target-wasm32", "run-wasm32"]
i386-cli-run = ["target-x86"]
# This is a separate feature because when we generate docs on Netlify,
# it doesn't have LLVM installed. (Also, it doesn't need to do code gen.)
llvm = ["inkwell", "roc_gen_llvm", "roc_build/llvm"]
editor = ["roc_editor"]
target-x86 = []
run-wasm32 = ["wasmer", "wasmer-wasi"]
# arm and wasm give linker errors on some platforms
target-arm = []
target-webassembly = []
# Compiling for a different platform than the host can cause linker errors.
target-arm = ["roc_build/target-arm"]
target-aarch64 = ["roc_build/target-aarch64"]
target-x86 = ["roc_build/target-x86"]
target-x86_64 = ["roc_build/target-x86_64"]
target-wasm32 = ["roc_build/target-wasm32"]
target-all = [
"target-x86",
"target-aarch64",
"target-arm",
"target-webassembly"
"target-x86",
"target-x86_64",
"target-wasm32"
]
@ -55,40 +61,34 @@ roc_load = { path = "../compiler/load" }
roc_gen_llvm = { path = "../compiler/gen_llvm", optional = true }
roc_build = { path = "../compiler/build", default-features = false }
roc_fmt = { path = "../compiler/fmt" }
roc_reporting = { path = "../compiler/reporting" }
roc_reporting = { path = "../reporting" }
roc_editor = { path = "../editor", optional = true }
roc_linker = { path = "../linker" }
# TODO switch to clap 3.0.0 once it's out. Tried adding clap = "~3.0.0-beta.1" and cargo wouldn't accept it
clap = { git = "https://github.com/rtfeldman/clap", branch = "master" }
const_format = "0.2"
clap = { version = "= 3.0.0-beta.5", default-features = false, features = ["std", "color", "suggestions"] }
const_format = "0.2.22"
rustyline = { git = "https://github.com/rtfeldman/rustyline", tag = "prompt-fix" }
rustyline-derive = { git = "https://github.com/rtfeldman/rustyline", tag = "prompt-fix" }
im = "14" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version!
bumpalo = { version = "3.2", features = ["collections"] }
libc = "0.2"
libloading = "0.6"
bumpalo = { version = "3.8.0", features = ["collections"] }
libloading = "0.7.1"
mimalloc = { version = "0.1.26", default-features = false }
inkwell = { path = "../vendor/inkwell", optional = true }
target-lexicon = "0.12.2"
tempfile = "3.1.0"
tempfile = "3.2.0"
wasmer = "2.0.0"
wasmer-wasi = "2.0.0"
wasmer = { version = "2.0.0", optional = true, default-features = false, features = ["default-cranelift", "default-universal"] }
wasmer-wasi = { version = "2.0.0", optional = true }
[dev-dependencies]
pretty_assertions = "0.5.1"
maplit = "1.0.1"
indoc = "0.3.3"
quickcheck = "0.8"
quickcheck_macros = "0.8"
serial_test = "0.5"
tempfile = "3.1.0"
wasmer = { version = "2.0.0", default-features = false, features = ["default-cranelift", "default-universal"] }
wasmer-wasi = "2.0.0"
pretty_assertions = "1.0.0"
indoc = "1.0.3"
serial_test = "0.5.1"
tempfile = "3.2.0"
criterion = { git = "https://github.com/Anton-4/criterion.rs"}
cli_utils = { path = "cli_utils" }
cli_utils = { path = "../cli_utils" }
[[bench]]
name = "time_bench"
harness = false

View File

@ -1,7 +1,7 @@
use std::time::Duration;
use cli_utils::bench_utils::{
bench_cfold, bench_deriv, bench_nqueens, bench_quicksort, bench_rbtree_ck, bench_rbtree_delete,
bench_cfold, bench_deriv, bench_nqueens, bench_quicksort, bench_rbtree_ck,
};
use criterion::{measurement::WallTime, BenchmarkGroup, Criterion, SamplingMode};
@ -25,12 +25,12 @@ fn bench_group_wall_time(c: &mut Criterion) {
group.sample_size(nr_of_runs);
let bench_funcs: Vec<fn(Option<&mut BenchmarkGroup<WallTime>>) -> ()> = vec![
bench_nqueens, // queens 11
bench_cfold, // e = mkExpr 17 1
bench_deriv, // nest deriv 8 f
bench_rbtree_ck, // ms = makeMap 5 80000
bench_rbtree_delete, // m = makeMap 100000
bench_quicksort, // list size 10000
bench_nqueens, // queens 11
bench_cfold, // e = mkExpr 17 1
bench_deriv, // nest deriv 8 f
bench_rbtree_ck, // ms = makeMap 5 80000
// bench_rbtree_delete, // m = makeMap 100000
bench_quicksort, // list size 10000
];
for bench_func in bench_funcs.iter() {

View File

@ -1,24 +0,0 @@
[package]
name = "cli_utils"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/rtfeldman/roc"
edition = "2018"
description = "Shared code for cli tests and benchmarks"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
roc_cli = { path = "../../cli" }
roc_collections = { path = "../../compiler/collections" }
roc_load = { path = "../../compiler/load" }
roc_module = { path = "../../compiler/module" }
bumpalo = { version = "3.6.1", features = ["collections"] }
criterion = { git = "https://github.com/Anton-4/criterion.rs"}
inlinable_string = "0.1"
serde = { version = "1.0", features = ["derive"] }
serde-xml-rs = "0.4"
strip-ansi-escapes = "0.1"
tempfile = "3.1.0"
rlimit = "0.6.2"

View File

@ -3,7 +3,6 @@ use roc_build::{
link::{link, rebuild_host, LinkType},
program,
};
#[cfg(feature = "llvm")]
use roc_builtins::bitcode;
use roc_can::builtins::builtin_defs_map;
use roc_collections::all::MutMap;
@ -12,7 +11,6 @@ use roc_mono::ir::OptLevel;
use std::path::PathBuf;
use std::time::{Duration, SystemTime};
use target_lexicon::Triple;
#[cfg(feature = "llvm")]
use tempfile::Builder;
fn report_timing(buf: &mut String, label: &str, duration: Duration) {
@ -45,7 +43,6 @@ pub struct BuiltFile {
pub total_time: Duration,
}
#[cfg(feature = "llvm")]
#[allow(clippy::too_many_arguments)]
pub fn build_file<'a>(
arena: &'a Bump,
@ -58,6 +55,7 @@ pub fn build_file<'a>(
link_type: LinkType,
surgically_link: bool,
precompiled: bool,
target_valgrind: bool,
) -> Result<BuiltFile, LoadingProblem<'a>> {
let compilation_start = SystemTime::now();
let ptr_bytes = target.pointer_width().unwrap().bytes() as u32;
@ -119,6 +117,7 @@ pub fn build_file<'a>(
.keys()
.map(|x| x.as_str(&loaded.interns).to_string())
.collect(),
target_valgrind,
);
// TODO try to move as much of this linking as possible to the precompiled
@ -176,23 +175,18 @@ pub fn build_file<'a>(
// This only needs to be mutable for report_problems. This can't be done
// inside a nested scope without causing a borrow error!
let mut loaded = loaded;
program::report_problems(&mut loaded);
program::report_problems_monomorphized(&mut loaded);
let loaded = loaded;
let code_gen_timing = match opt_level {
OptLevel::Normal | OptLevel::Optimize => program::gen_from_mono_module_llvm(
arena,
loaded,
&roc_file_path,
target,
app_o_file,
opt_level,
emit_debug_info,
),
OptLevel::Development => {
program::gen_from_mono_module_dev(arena, loaded, target, app_o_file)
}
};
let code_gen_timing = program::gen_from_mono_module(
arena,
loaded,
&roc_file_path,
target,
app_o_file,
opt_level,
emit_debug_info,
);
buf.push('\n');
buf.push_str(" ");
@ -248,14 +242,14 @@ pub fn build_file<'a>(
app_o_file.to_str().unwrap(),
];
if matches!(opt_level, OptLevel::Development) {
inputs.push(bitcode::OBJ_PATH);
inputs.push(bitcode::BUILTINS_HOST_OBJ_PATH);
}
let (mut child, _) = // TODO use lld
link(
target,
binary_path.clone(),
&inputs,
&inputs,
link_type
)
.map_err(|_| {
@ -288,6 +282,7 @@ pub fn build_file<'a>(
})
}
#[allow(clippy::too_many_arguments)]
fn spawn_rebuild_thread(
opt_level: OptLevel,
surgically_link: bool,
@ -296,9 +291,12 @@ fn spawn_rebuild_thread(
binary_path: PathBuf,
target: &Triple,
exported_symbols: Vec<String>,
target_valgrind: bool,
) -> std::thread::JoinHandle<u128> {
let thread_local_target = target.clone();
std::thread::spawn(move || {
print!("🔨 Rebuilding host... ");
let rebuild_host_start = SystemTime::now();
if !precompiled {
if surgically_link {
@ -307,6 +305,7 @@ fn spawn_rebuild_thread(
&thread_local_target,
host_input_path.as_path(),
exported_symbols,
target_valgrind,
)
.unwrap();
} else {
@ -315,6 +314,7 @@ fn spawn_rebuild_thread(
&thread_local_target,
host_input_path.as_path(),
None,
target_valgrind,
);
}
}
@ -324,6 +324,9 @@ fn spawn_rebuild_thread(
std::fs::copy(prehost, binary_path.as_path()).unwrap();
}
let rebuild_host_end = rebuild_host_start.elapsed().unwrap();
println!("Done!");
rebuild_host_end.as_millis()
})
}
@ -347,7 +350,7 @@ pub fn check_file(
// Release builds use uniqueness optimizations
let stdlib = arena.alloc(roc_builtins::std::standard_stdlib());
let mut loaded = roc_load::file::load_and_monomorphize(
let mut loaded = roc_load::file::load_and_typecheck(
arena,
roc_file_path,
stdlib,
@ -410,5 +413,5 @@ pub fn check_file(
println!("Finished checking in {} ms\n", compilation_end.as_millis(),);
}
Ok(program::report_problems(&mut loaded))
Ok(program::report_problems_typechecked(&mut loaded))
}

618
cli/src/format.rs Normal file
View File

@ -0,0 +1,618 @@
use std::path::PathBuf;
use bumpalo::collections::{String, Vec};
use bumpalo::Bump;
use roc_fmt::def::fmt_def;
use roc_fmt::module::fmt_module;
use roc_module::called_via::{BinOp, UnaryOp};
use roc_parse::ast::{
AssignedField, Collection, Expr, Pattern, StrLiteral, StrSegment, Tag, TypeAnnotation,
WhenBranch,
};
use roc_parse::header::{
AppHeader, Effects, ExposesEntry, ImportsEntry, InterfaceHeader, ModuleName, PackageEntry,
PackageName, PackageOrPath, PlatformHeader, PlatformRequires, PlatformRigid, To, TypedIdent,
};
use roc_parse::{
ast::{Def, Module},
module::{self, module_defs},
parser::{Parser, State, SyntaxError},
};
use roc_region::all::Located;
use roc_reporting::{internal_error, user_error};
/// Formats each given .roc file in place.
///
/// For every file this:
/// 1. parses the source,
/// 2. pretty-prints it into a buffer,
/// 3. re-parses the buffer and compares the two ASTs (after normalizing
///    spaces/comments via `RemoveSpaces`) to verify formatting did not
///    change the program,
/// 4. only then overwrites the original file.
///
/// If the formatted output fails to reparse, or reparses to a different tree,
/// the bad output is written to `<file>.roc-format-failed` (plus before/after
/// AST dumps in the tree-mismatch case) and the process aborts via
/// `internal_error!`. Unreadable input files cause a panic (`unwrap`).
pub fn format(files: std::vec::Vec<PathBuf>) {
    for file in files {
        let arena = Bump::new();

        // Panics if the file cannot be read; callers pass paths discovered on disk.
        let src = std::fs::read_to_string(&file).unwrap();

        let ast = arena.alloc(parse_all(&arena, &src).unwrap_or_else(|e| {
            user_error!("Unexpected parse failure when parsing this formatting:\n\n{:?}\n\nParse error was:\n\n{:?}\n\n", src, e)
        }));
        let mut buf = String::new_in(&arena);
        fmt_all(&arena, &mut buf, ast);

        // Sanity check: the formatter's output must itself be valid Roc.
        let reparsed_ast = arena.alloc(parse_all(&arena, &buf).unwrap_or_else(|e| {
            let mut fail_file = file.clone();
            fail_file.set_extension("roc-format-failed");
            std::fs::write(&fail_file, &buf).unwrap();
            internal_error!(
                "Formatting bug; formatted code isn't valid\n\n\
                I wrote the incorrect result to this file for debugging purposes:\n{}\n\n\
                Parse error was: {:?}\n\n",
                fail_file.display(),
                e
            );
        }));

        // Normalize both trees so whitespace/comment differences don't count
        // as a semantic change.
        let ast = ast.remove_spaces(&arena);
        let reparsed_ast = reparsed_ast.remove_spaces(&arena);

        // HACK!
        // We compare the debug format strings of the ASTs, because I'm finding in practice that _somewhere_ deep inside the ast,
        // the PartialEq implementation is returning `false` even when the Debug-formatted impl is exactly the same.
        // I don't have the patience to debug this right now, so let's leave it for another day...
        // TODO: fix PartialEq impl on ast types
        if format!("{:?}", ast) != format!("{:?}", reparsed_ast) {
            let mut fail_file = file.clone();
            fail_file.set_extension("roc-format-failed");
            std::fs::write(&fail_file, &buf).unwrap();

            let mut before_file = file.clone();
            before_file.set_extension("roc-format-failed-ast-before");
            std::fs::write(&before_file, &format!("{:#?}\n", ast)).unwrap();

            let mut after_file = file.clone();
            after_file.set_extension("roc-format-failed-ast-after");
            std::fs::write(&after_file, &format!("{:#?}\n", reparsed_ast)).unwrap();

            internal_error!(
                "Formatting bug; formatting didn't reparse as the same tree\n\n\
                I wrote the incorrect result to this file for debugging purposes:\n{}\n\n\
                I wrote the tree before and after formatting to these files for debugging purposes:\n{}\n{}\n\n",
                fail_file.display(),
                before_file.display(),
                after_file.display());
        }

        // Round-trip verified; safe to overwrite the original source file.
        std::fs::write(&file, &buf).unwrap();
    }
}
/// A fully parsed Roc source file: the module header plus all top-level defs.
#[derive(Debug, PartialEq)]
struct Ast<'a> {
    // Parsed module header (interface / app / platform).
    module: Module<'a>,
    // All top-level definitions, in source order.
    defs: Vec<'a, Located<Def<'a>>>,
}
/// Parses a complete Roc source string (header + defs) into an [`Ast`].
///
/// Header parse failures are wrapped in `SyntaxError::Header`; def parse
/// failures are returned as-is.
fn parse_all<'a>(arena: &'a Bump, src: &'a str) -> Result<Ast<'a>, SyntaxError<'a>> {
    let (module, state) = match module::parse_header(arena, State::new(src.as_bytes())) {
        Ok(parsed_header) => parsed_header,
        Err(fail) => return Err(SyntaxError::Header(fail)),
    };

    match module_defs().parse(arena, state) {
        Ok((_, defs, _)) => Ok(Ast { module, defs }),
        Err((_, fail, _)) => Err(fail),
    }
}
/// Pretty-prints the module header, then every top-level def, into `buf`.
fn fmt_all<'a>(arena: &'a Bump, buf: &mut String<'a>, ast: &'a Ast) {
    fmt_module(buf, &ast.module);

    ast.defs
        .iter()
        .for_each(|def| fmt_def(buf, arena.alloc(def.value), 0));
}
/// RemoveSpaces normalizes the ast to something that we _expect_ to be invariant under formatting.
///
/// Currently this consists of:
/// * Removing newlines
/// * Removing comments
/// * Removing parens in Exprs
///
/// Long term, we actually want this transform to preserve comments (so we can assert they're maintained by formatting)
/// - but there are currently several bugs where they're _not_ preserved.
/// TODO: ensure formatting retains comments
trait RemoveSpaces<'a> {
    /// Returns a copy of `self` with all space/comment wrapper nodes stripped,
    /// allocating any rebuilt children in `arena`.
    fn remove_spaces(&self, arena: &'a Bump) -> Self;
}
/// Normalizes the header and every top-level def of a parsed file.
impl<'a> RemoveSpaces<'a> for Ast<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        // Strip spaces/comments from each def, preserving source order.
        let mut stripped_defs = Vec::with_capacity_in(self.defs.len(), arena);
        stripped_defs.extend(self.defs.iter().map(|def| def.remove_spaces(arena)));

        Ast {
            module: self.module.remove_spaces(arena),
            defs: stripped_defs,
        }
    }
}
/// Rebuilds the module header with every spacing/comment field reset to empty,
/// so two headers that differ only in whitespace or comments compare equal.
impl<'a> RemoveSpaces<'a> for Module<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match self {
            Module::Interface { header } => Module::Interface {
                header: InterfaceHeader {
                    name: header.name.remove_spaces(arena),
                    exposes: header.exposes.remove_spaces(arena),
                    imports: header.imports.remove_spaces(arena),
                    // All surrounding-space fields are dropped.
                    before_header: &[],
                    after_interface_keyword: &[],
                    before_exposes: &[],
                    after_exposes: &[],
                    before_imports: &[],
                    after_imports: &[],
                },
            },
            Module::App { header } => Module::App {
                header: AppHeader {
                    name: header.name.remove_spaces(arena),
                    packages: header.packages.remove_spaces(arena),
                    imports: header.imports.remove_spaces(arena),
                    provides: header.provides.remove_spaces(arena),
                    to: header.to.remove_spaces(arena),
                    // All surrounding-space fields are dropped.
                    before_header: &[],
                    after_app_keyword: &[],
                    before_packages: &[],
                    after_packages: &[],
                    before_imports: &[],
                    after_imports: &[],
                    before_provides: &[],
                    after_provides: &[],
                    before_to: &[],
                    after_to: &[],
                },
            },
            Module::Platform { header } => Module::Platform {
                header: PlatformHeader {
                    name: header.name.remove_spaces(arena),
                    requires: header.requires.remove_spaces(arena),
                    exposes: header.exposes.remove_spaces(arena),
                    packages: header.packages.remove_spaces(arena),
                    imports: header.imports.remove_spaces(arena),
                    provides: header.provides.remove_spaces(arena),
                    effects: Effects {
                        // Effects keep their names but lose spacing info.
                        spaces_before_effects_keyword: &[],
                        spaces_after_effects_keyword: &[],
                        spaces_after_type_name: &[],
                        effect_shortname: header.effects.effect_shortname.remove_spaces(arena),
                        effect_type_name: header.effects.effect_type_name.remove_spaces(arena),
                        entries: header.effects.entries.remove_spaces(arena),
                    },
                    // All surrounding-space fields are dropped.
                    before_header: &[],
                    after_platform_keyword: &[],
                    before_requires: &[],
                    after_requires: &[],
                    before_exposes: &[],
                    after_exposes: &[],
                    before_packages: &[],
                    after_packages: &[],
                    before_imports: &[],
                    after_imports: &[],
                    before_provides: &[],
                    after_provides: &[],
                },
            },
        }
    }
}
impl<'a> RemoveSpaces<'a> for &'a str {
    /// String slices carry no space/comment nodes, so normalization is the identity.
    fn remove_spaces(&self, _arena: &'a Bump) -> Self {
        *self
    }
}
/// Unwraps SpaceBefore/SpaceAfter and normalizes the exposed item itself.
impl<'a, T: RemoveSpaces<'a> + Copy> RemoveSpaces<'a> for ExposesEntry<'a, T> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            ExposesEntry::Exposed(a) => ExposesEntry::Exposed(a.remove_spaces(arena)),
            ExposesEntry::SpaceBefore(a, _) => a.remove_spaces(arena),
            ExposesEntry::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}
/// Module names contain no space/comment nodes; return an unchanged copy.
impl<'a> RemoveSpaces<'a> for ModuleName<'a> {
    fn remove_spaces(&self, _arena: &'a Bump) -> Self {
        *self
    }
}
/// Package names contain no space/comment nodes; return an unchanged copy.
impl<'a> RemoveSpaces<'a> for PackageName<'a> {
    fn remove_spaces(&self, _arena: &'a Bump) -> Self {
        *self
    }
}
/// Normalizes the `to` target of an app header; only new-package targets
/// contain nested nodes to strip.
impl<'a> RemoveSpaces<'a> for To<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            To::ExistingPackage(a) => To::ExistingPackage(a),
            To::NewPackage(a) => To::NewPackage(a.remove_spaces(arena)),
        }
    }
}
/// Normalizes a `name : Type` entry, dropping the spaces before the colon
/// and any surrounding space wrappers.
impl<'a> RemoveSpaces<'a> for TypedIdent<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            TypedIdent::Entry {
                ident,
                spaces_before_colon: _,
                ann,
            } => TypedIdent::Entry {
                ident: ident.remove_spaces(arena),
                spaces_before_colon: &[],
                ann: ann.remove_spaces(arena),
            },
            TypedIdent::SpaceBefore(a, _) => a.remove_spaces(arena),
            TypedIdent::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}
/// Normalizes a platform `requires` clause: its rigid vars and its signature.
impl<'a> RemoveSpaces<'a> for PlatformRequires<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        let rigids = self.rigids.remove_spaces(arena);
        let signature = self.signature.remove_spaces(arena);

        PlatformRequires { rigids, signature }
    }
}
/// Unwraps space wrappers around a platform rigid; the entry itself holds
/// only plain identifiers, so it is kept as-is.
impl<'a> RemoveSpaces<'a> for PlatformRigid<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            PlatformRigid::Entry { rigid, alias } => PlatformRigid::Entry { rigid, alias },
            PlatformRigid::SpaceBefore(a, _) => a.remove_spaces(arena),
            PlatformRigid::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}
/// Normalizes a `shorthand: package` entry, dropping the spaces after the
/// shorthand and any surrounding space wrappers.
impl<'a> RemoveSpaces<'a> for PackageEntry<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            PackageEntry::Entry {
                shorthand,
                spaces_after_shorthand: _,
                package_or_path,
            } => PackageEntry::Entry {
                shorthand,
                spaces_after_shorthand: &[],
                package_or_path: package_or_path.remove_spaces(arena),
            },
            PackageEntry::SpaceBefore(a, _) => a.remove_spaces(arena),
            PackageEntry::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}
/// Normalizes a package reference; only path literals contain nested nodes.
impl<'a> RemoveSpaces<'a> for PackageOrPath<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            PackageOrPath::Package(a, b) => PackageOrPath::Package(a, b),
            PackageOrPath::Path(p) => PackageOrPath::Path(p.remove_spaces(arena)),
        }
    }
}
/// Normalizes an import entry's exposed-name list and unwraps space wrappers.
impl<'a> RemoveSpaces<'a> for ImportsEntry<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            ImportsEntry::Module(a, b) => ImportsEntry::Module(a, b.remove_spaces(arena)),
            ImportsEntry::Package(a, b, c) => ImportsEntry::Package(a, b, c.remove_spaces(arena)),
            ImportsEntry::SpaceBefore(a, _) => a.remove_spaces(arena),
            ImportsEntry::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}
/// Normalizes the wrapped value, if any; `None` stays `None`.
impl<'a, T: RemoveSpaces<'a>> RemoveSpaces<'a> for Option<T> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match self {
            Some(inner) => Some(inner.remove_spaces(arena)),
            None => None,
        }
    }
}
/// Normalizes the payload and collapses the source region to a dummy
/// location, since positions necessarily shift after formatting.
impl<'a, T: RemoveSpaces<'a> + std::fmt::Debug> RemoveSpaces<'a> for Located<T> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        Located::new(0, 0, 0, 0, self.value.remove_spaces(arena))
    }
}
/// Normalizes both elements of a pair independently.
impl<'a, A: RemoveSpaces<'a>, B: RemoveSpaces<'a>> RemoveSpaces<'a> for (A, B) {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        let (first, second) = self;

        (first.remove_spaces(arena), second.remove_spaces(arena))
    }
}
/// Rebuilds the collection with each item normalized, preserving order.
impl<'a, T: RemoveSpaces<'a>> RemoveSpaces<'a> for Collection<'a, T> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        let mut stripped = Vec::with_capacity_in(self.items.len(), arena);
        stripped.extend(self.items.iter().map(|item| item.remove_spaces(arena)));

        Collection::with_items(stripped.into_bump_slice())
    }
}
/// Rebuilds an arena slice with each element normalized, preserving order.
impl<'a, T: RemoveSpaces<'a> + std::fmt::Debug> RemoveSpaces<'a> for &'a [T] {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        let mut stripped = Vec::with_capacity_in(self.len(), arena);
        stripped.extend(self.iter().map(|item| item.remove_spaces(arena)));

        stripped.into_bump_slice()
    }
}
/// Unary operators carry no space/comment nodes; return an unchanged copy.
impl<'a> RemoveSpaces<'a> for UnaryOp {
    fn remove_spaces(&self, _arena: &'a Bump) -> Self {
        *self
    }
}
/// Binary operators carry no space/comment nodes; return an unchanged copy.
impl<'a> RemoveSpaces<'a> for BinOp {
    fn remove_spaces(&self, _arena: &'a Bump) -> Self {
        *self
    }
}
/// Normalizes the referenced value and re-allocates the result in the arena,
/// yielding a fresh `&'a T`.
impl<'a, T: RemoveSpaces<'a>> RemoveSpaces<'a> for &'a T {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        arena.alloc((*self).remove_spaces(arena))
    }
}
/// Recursively normalizes a top-level def. Space wrappers are unwrapped and
/// the trailing comment of an annotated body is discarded (`comment: None`).
impl<'a> RemoveSpaces<'a> for Def<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            Def::Annotation(a, b) => {
                Def::Annotation(a.remove_spaces(arena), b.remove_spaces(arena))
            }
            Def::Alias { name, vars, ann } => Def::Alias {
                name: name.remove_spaces(arena),
                vars: vars.remove_spaces(arena),
                ann: ann.remove_spaces(arena),
            },
            Def::Body(a, b) => Def::Body(
                arena.alloc(a.remove_spaces(arena)),
                arena.alloc(b.remove_spaces(arena)),
            ),
            Def::AnnotatedBody {
                ann_pattern,
                ann_type,
                comment: _,
                body_pattern,
                body_expr,
            } => Def::AnnotatedBody {
                ann_pattern: arena.alloc(ann_pattern.remove_spaces(arena)),
                ann_type: arena.alloc(ann_type.remove_spaces(arena)),
                // Comments are deliberately dropped during normalization.
                comment: None,
                body_pattern: arena.alloc(body_pattern.remove_spaces(arena)),
                body_expr: arena.alloc(body_expr.remove_spaces(arena)),
            },
            Def::Expect(a) => Def::Expect(arena.alloc(a.remove_spaces(arena))),
            Def::NotYetImplemented(a) => Def::NotYetImplemented(a),
            Def::SpaceBefore(a, _) | Def::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}
/// Normalizes a `when` branch: its patterns, its body, and its optional guard.
impl<'a> RemoveSpaces<'a> for WhenBranch<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        let patterns = self.patterns.remove_spaces(arena);
        let value = self.value.remove_spaces(arena);
        let guard = self.guard.remove_spaces(arena);

        WhenBranch {
            patterns,
            value,
            guard,
        }
    }
}
/// Normalizes a record field, clearing the spaces between the label and its
/// value and unwrapping any space wrappers around the field itself.
impl<'a, T: RemoveSpaces<'a> + Copy + std::fmt::Debug> RemoveSpaces<'a> for AssignedField<'a, T> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            AssignedField::RequiredValue(a, _, c) => AssignedField::RequiredValue(
                a.remove_spaces(arena),
                arena.alloc([]),
                arena.alloc(c.remove_spaces(arena)),
            ),
            AssignedField::OptionalValue(a, _, c) => AssignedField::OptionalValue(
                a.remove_spaces(arena),
                arena.alloc([]),
                arena.alloc(c.remove_spaces(arena)),
            ),
            AssignedField::LabelOnly(a) => AssignedField::LabelOnly(a.remove_spaces(arena)),
            AssignedField::Malformed(a) => AssignedField::Malformed(a),
            AssignedField::SpaceBefore(a, _) => a.remove_spaces(arena),
            AssignedField::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}
/// Normalizes the segments of single-line and block string literals;
/// plain lines contain no nested nodes.
impl<'a> RemoveSpaces<'a> for StrLiteral<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            StrLiteral::PlainLine(t) => StrLiteral::PlainLine(t),
            StrLiteral::Line(t) => StrLiteral::Line(t.remove_spaces(arena)),
            StrLiteral::Block(t) => StrLiteral::Block(t.remove_spaces(arena)),
        }
    }
}
/// Normalizes a string segment; only unicode escapes and interpolated
/// expressions contain nested nodes.
impl<'a> RemoveSpaces<'a> for StrSegment<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            StrSegment::Plaintext(t) => StrSegment::Plaintext(t),
            StrSegment::Unicode(t) => StrSegment::Unicode(t.remove_spaces(arena)),
            StrSegment::EscapedChar(c) => StrSegment::EscapedChar(c),
            StrSegment::Interpolated(t) => StrSegment::Interpolated(t.remove_spaces(arena)),
        }
    }
}
/// Recursively normalizes an expression. Space wrappers and redundant
/// parentheses are unwrapped; all other variants recurse into their children.
impl<'a> RemoveSpaces<'a> for Expr<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            Expr::Float(a) => Expr::Float(a),
            Expr::Num(a) => Expr::Num(a),
            Expr::NonBase10Int {
                string,
                base,
                is_negative,
            } => Expr::NonBase10Int {
                string,
                base,
                is_negative,
            },
            Expr::Str(a) => Expr::Str(a.remove_spaces(arena)),
            Expr::Access(a, b) => Expr::Access(arena.alloc(a.remove_spaces(arena)), b),
            Expr::AccessorFunction(a) => Expr::AccessorFunction(a),
            Expr::List(a) => Expr::List(a.remove_spaces(arena)),
            Expr::RecordUpdate { update, fields } => Expr::RecordUpdate {
                update: arena.alloc(update.remove_spaces(arena)),
                fields: fields.remove_spaces(arena),
            },
            Expr::Record(a) => Expr::Record(a.remove_spaces(arena)),
            Expr::Var { module_name, ident } => Expr::Var { module_name, ident },
            Expr::Underscore(a) => Expr::Underscore(a),
            Expr::GlobalTag(a) => Expr::GlobalTag(a),
            Expr::PrivateTag(a) => Expr::PrivateTag(a),
            Expr::Closure(a, b) => Expr::Closure(
                arena.alloc(a.remove_spaces(arena)),
                arena.alloc(b.remove_spaces(arena)),
            ),
            Expr::Defs(a, b) => {
                Expr::Defs(a.remove_spaces(arena), arena.alloc(b.remove_spaces(arena)))
            }
            Expr::Backpassing(a, b, c) => Expr::Backpassing(
                arena.alloc(a.remove_spaces(arena)),
                arena.alloc(b.remove_spaces(arena)),
                arena.alloc(c.remove_spaces(arena)),
            ),
            Expr::Expect(a, b) => Expr::Expect(
                arena.alloc(a.remove_spaces(arena)),
                arena.alloc(b.remove_spaces(arena)),
            ),
            Expr::Apply(a, b, c) => Expr::Apply(
                arena.alloc(a.remove_spaces(arena)),
                b.remove_spaces(arena),
                c,
            ),
            Expr::BinOps(a, b) => {
                Expr::BinOps(a.remove_spaces(arena), arena.alloc(b.remove_spaces(arena)))
            }
            Expr::UnaryOp(a, b) => {
                Expr::UnaryOp(arena.alloc(a.remove_spaces(arena)), b.remove_spaces(arena))
            }
            Expr::If(a, b) => Expr::If(a.remove_spaces(arena), arena.alloc(b.remove_spaces(arena))),
            Expr::When(a, b) => {
                Expr::When(arena.alloc(a.remove_spaces(arena)), b.remove_spaces(arena))
            }
            Expr::ParensAround(a) => {
                // The formatter can remove redundant parentheses, so also remove these when normalizing for comparison.
                a.remove_spaces(arena)
            }
            Expr::MalformedIdent(a, b) => Expr::MalformedIdent(a, b),
            Expr::MalformedClosure => Expr::MalformedClosure,
            Expr::PrecedenceConflict(a) => Expr::PrecedenceConflict(a),
            Expr::SpaceBefore(a, _) => a.remove_spaces(arena),
            Expr::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}
/// Recursively normalizes a pattern, unwrapping space wrappers and recursing
/// into nested patterns; literal variants are kept unchanged.
impl<'a> RemoveSpaces<'a> for Pattern<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            Pattern::Identifier(a) => Pattern::Identifier(a),
            Pattern::GlobalTag(a) => Pattern::GlobalTag(a),
            Pattern::PrivateTag(a) => Pattern::PrivateTag(a),
            Pattern::Apply(a, b) => Pattern::Apply(
                arena.alloc(a.remove_spaces(arena)),
                arena.alloc(b.remove_spaces(arena)),
            ),
            Pattern::RecordDestructure(a) => Pattern::RecordDestructure(a.remove_spaces(arena)),
            Pattern::RequiredField(a, b) => {
                Pattern::RequiredField(a, arena.alloc(b.remove_spaces(arena)))
            }
            Pattern::OptionalField(a, b) => {
                Pattern::OptionalField(a, arena.alloc(b.remove_spaces(arena)))
            }
            Pattern::NumLiteral(a) => Pattern::NumLiteral(a),
            Pattern::NonBase10Literal {
                string,
                base,
                is_negative,
            } => Pattern::NonBase10Literal {
                string,
                base,
                is_negative,
            },
            Pattern::FloatLiteral(a) => Pattern::FloatLiteral(a),
            Pattern::StrLiteral(a) => Pattern::StrLiteral(a),
            Pattern::Underscore(a) => Pattern::Underscore(a),
            Pattern::Malformed(a) => Pattern::Malformed(a),
            Pattern::MalformedIdent(a, b) => Pattern::MalformedIdent(a, b),
            Pattern::QualifiedIdentifier { module_name, ident } => {
                Pattern::QualifiedIdentifier { module_name, ident }
            }
            Pattern::SpaceBefore(a, _) => a.remove_spaces(arena),
            Pattern::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}
/// Recursively normalizes a type annotation, unwrapping space wrappers and
/// clearing the spaces stored inside an `as` alias.
impl<'a> RemoveSpaces<'a> for TypeAnnotation<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            TypeAnnotation::Function(a, b) => TypeAnnotation::Function(
                arena.alloc(a.remove_spaces(arena)),
                arena.alloc(b.remove_spaces(arena)),
            ),
            TypeAnnotation::Apply(a, b, c) => TypeAnnotation::Apply(a, b, c.remove_spaces(arena)),
            TypeAnnotation::BoundVariable(a) => TypeAnnotation::BoundVariable(a),
            TypeAnnotation::As(a, _, c) => TypeAnnotation::As(
                arena.alloc(a.remove_spaces(arena)),
                // Spaces around the `as` keyword are dropped.
                &[],
                arena.alloc(c.remove_spaces(arena)),
            ),
            TypeAnnotation::Record { fields, ext } => TypeAnnotation::Record {
                fields: fields.remove_spaces(arena),
                ext: ext.remove_spaces(arena),
            },
            TypeAnnotation::TagUnion { ext, tags } => TypeAnnotation::TagUnion {
                ext: ext.remove_spaces(arena),
                tags: tags.remove_spaces(arena),
            },
            TypeAnnotation::Inferred => TypeAnnotation::Inferred,
            TypeAnnotation::Wildcard => TypeAnnotation::Wildcard,
            TypeAnnotation::SpaceBefore(a, _) => a.remove_spaces(arena),
            TypeAnnotation::SpaceAfter(a, _) => a.remove_spaces(arena),
            TypeAnnotation::Malformed(a) => TypeAnnotation::Malformed(a),
        }
    }
}
/// Normalizes a tag's name and payload arguments and unwraps space wrappers.
impl<'a> RemoveSpaces<'a> for Tag<'a> {
    fn remove_spaces(&self, arena: &'a Bump) -> Self {
        match *self {
            Tag::Global { name, args } => Tag::Global {
                name: name.remove_spaces(arena),
                args: args.remove_spaces(arena),
            },
            Tag::Private { name, args } => Tag::Private {
                name: name.remove_spaces(arena),
                args: args.remove_spaces(arena),
            },
            Tag::Malformed(a) => Tag::Malformed(a),
            Tag::SpaceBefore(a, _) => a.remove_spaces(arena),
            Tag::SpaceAfter(a, _) => a.remove_spaces(arena),
        }
    }
}

View File

@ -1,6 +1,3 @@
#[macro_use]
extern crate clap;
#[macro_use]
extern crate const_format;
@ -12,21 +9,25 @@ use roc_load::file::LoadingProblem;
use roc_mono::ir::OptLevel;
use std::env;
use std::io;
use std::path::{Path, PathBuf};
use std::path::Path;
use std::path::PathBuf;
use std::process;
use std::process::Command;
use target_lexicon::BinaryFormat;
use target_lexicon::{Architecture, OperatingSystem, Triple, X86_32Architecture};
pub mod build;
mod format;
pub mod repl;
pub use format::format;
pub const CMD_RUN: &str = "run";
pub const CMD_BUILD: &str = "build";
pub const CMD_REPL: &str = "repl";
pub const CMD_EDIT: &str = "edit";
pub const CMD_DOCS: &str = "docs";
pub const CMD_CHECK: &str = "check";
pub const CMD_VERSION: &str = "version";
pub const CMD_FORMAT: &str = "format";
pub const FLAG_DEBUG: &str = "debug";
pub const FLAG_DEV: &str = "dev";
@ -36,203 +37,192 @@ pub const FLAG_BACKEND: &str = "backend";
pub const FLAG_TIME: &str = "time";
pub const FLAG_LINK: &str = "roc-linker";
pub const FLAG_PRECOMPILED: &str = "precompiled-host";
pub const FLAG_VALGRIND: &str = "valgrind";
pub const ROC_FILE: &str = "ROC_FILE";
pub const ROC_DIR: &str = "ROC_DIR";
pub const BACKEND: &str = "BACKEND";
pub const DIRECTORY_OR_FILES: &str = "DIRECTORY_OR_FILES";
pub const ARGS_FOR_APP: &str = "ARGS_FOR_APP";
pub fn build_app<'a>() -> App<'a> {
let app = App::new("roc")
.version(concatcp!(crate_version!(), "\n"))
.version(concatcp!(include_str!("../../version.txt"), "\n"))
.about("Runs the given .roc file. Use one of the SUBCOMMANDS below to do something else!")
.subcommand(App::new(CMD_BUILD)
.about("Build a binary from the given .roc file, but don't run it")
.arg(
Arg::with_name(ROC_FILE)
.help("The .roc file to build")
Arg::new(ROC_FILE)
.about("The .roc file to build")
.required(true),
)
.arg(
Arg::with_name(FLAG_OPTIMIZE)
Arg::new(FLAG_OPTIMIZE)
.long(FLAG_OPTIMIZE)
.help("Optimize your compiled Roc program to run faster. (Optimization takes time to complete.)")
.about("Optimize your compiled Roc program to run faster. (Optimization takes time to complete.)")
.required(false),
)
.arg(
Arg::with_name(FLAG_DEV)
Arg::new(FLAG_DEV)
.long(FLAG_DEV)
.help("Make compilation as fast as possible. (Runtime performance may suffer)")
.about("Make compilation as fast as possible. (Runtime performance may suffer)")
.required(false),
)
.arg(
Arg::with_name(FLAG_BACKEND)
Arg::new(FLAG_BACKEND)
.long(FLAG_BACKEND)
.help("Choose a different backend")
.about("Choose a different backend")
// .requires(BACKEND)
.default_value(Backend::default().as_str())
.possible_values(Backend::OPTIONS)
.required(false),
)
.arg(
Arg::with_name(FLAG_LIB)
Arg::new(FLAG_LIB)
.long(FLAG_LIB)
.help("Build a C library instead of an executable.")
.about("Build a C library instead of an executable.")
.required(false),
)
.arg(
Arg::with_name(FLAG_DEBUG)
Arg::new(FLAG_DEBUG)
.long(FLAG_DEBUG)
.help("Store LLVM debug information in the generated program")
.about("Store LLVM debug information in the generated program")
.required(false),
)
.arg(
Arg::with_name(FLAG_TIME)
Arg::new(FLAG_TIME)
.long(FLAG_TIME)
.help("Prints detailed compilation time information.")
.about("Prints detailed compilation time information.")
.required(false),
)
.arg(
Arg::with_name(FLAG_LINK)
Arg::new(FLAG_LINK)
.long(FLAG_LINK)
.help("Uses the roc linker instead of the system linker.")
.about("Uses the roc linker instead of the system linker.")
.required(false),
)
.arg(
Arg::with_name(FLAG_PRECOMPILED)
Arg::new(FLAG_PRECOMPILED)
.long(FLAG_PRECOMPILED)
.help("Assumes the host has been precompiled and skips recompiling the host.")
.required(false),
)
)
.subcommand(App::new(CMD_RUN)
.about("DEPRECATED - now use `roc [FILE]` instead of `roc run [FILE]`")
.setting(AppSettings::TrailingVarArg)
.arg(
Arg::with_name(FLAG_OPTIMIZE)
.long(FLAG_OPTIMIZE)
.help("Optimize the compiled program to run faster. (Optimization takes time to complete.)")
.about("Assumes the host has been precompiled and skips recompiling the host.")
.required(false),
)
.arg(
Arg::with_name(FLAG_DEV)
.long(FLAG_DEV)
.help("Make compilation as fast as possible. (Runtime performance may suffer)")
Arg::new(FLAG_VALGRIND)
.long(FLAG_VALGRIND)
.about("Some assembly instructions are not supported by valgrind, this flag prevents those from being output when building the host.")
.required(false),
)
.arg(
Arg::with_name(FLAG_DEBUG)
.long(FLAG_DEBUG)
.help("Store LLVM debug information in the generated program")
.required(false),
)
.arg(
Arg::with_name(ROC_FILE)
.help("The .roc file of an app to run")
.required(true),
)
.arg(
Arg::with_name(ARGS_FOR_APP)
.help("Arguments to pass into the app being run")
.multiple(true),
)
)
.subcommand(App::new(CMD_REPL)
.about("Launch the interactive Read Eval Print Loop (REPL)")
)
.subcommand(App::new(CMD_CHECK)
.about("Build a binary from the given .roc file, but don't run it")
.subcommand(App::new(CMD_FORMAT)
.about("Format Roc code")
.arg(
Arg::with_name(FLAG_TIME)
Arg::new(DIRECTORY_OR_FILES)
.index(1)
.multiple_values(true)
.required(false))
)
.subcommand(App::new(CMD_VERSION)
.about("Print version information")
)
.subcommand(App::new(CMD_CHECK)
.about("When developing, it's recommended to run `check` before `build`. It may provide a useful error message in cases where `build` panics")
.arg(
Arg::new(FLAG_TIME)
.long(FLAG_TIME)
.help("Prints detailed compilation time information.")
.about("Prints detailed compilation time information.")
.required(false),
)
.arg(
Arg::with_name(ROC_FILE)
.help("The .roc file of an app to run")
Arg::new(ROC_FILE)
.about("The .roc file of an app to run")
.required(true),
)
)
.subcommand(
App::new(CMD_DOCS)
.about("Generate documentation for Roc modules")
.arg(Arg::with_name(DIRECTORY_OR_FILES)
.about("Generate documentation for Roc modules (Work In Progress)")
.arg(Arg::new(DIRECTORY_OR_FILES)
.index(1)
.multiple(true)
.multiple_values(true)
.required(false)
.help("The directory or files to build documentation for")
.about("The directory or files to build documentation for")
)
)
.setting(AppSettings::TrailingVarArg)
.arg(
Arg::with_name(FLAG_OPTIMIZE)
Arg::new(FLAG_OPTIMIZE)
.long(FLAG_OPTIMIZE)
.help("Optimize the compiled program to run faster. (Optimization takes time to complete.)")
.about("Optimize the compiled program to run faster. (Optimization takes time to complete.)")
.requires(ROC_FILE)
.required(false),
)
.arg(
Arg::with_name(FLAG_DEV)
Arg::new(FLAG_DEV)
.long(FLAG_DEV)
.help("Make compilation as fast as possible. (Runtime performance may suffer)")
.about("Make compilation as fast as possible. (Runtime performance may suffer)")
.required(false),
)
.arg(
Arg::with_name(FLAG_DEBUG)
Arg::new(FLAG_DEBUG)
.long(FLAG_DEBUG)
.help("Store LLVM debug information in the generated program")
.about("Store LLVM debug information in the generated program")
.requires(ROC_FILE)
.required(false),
)
.arg(
Arg::with_name(FLAG_TIME)
Arg::new(FLAG_TIME)
.long(FLAG_TIME)
.help("Prints detailed compilation time information.")
.about("Prints detailed compilation time information.")
.required(false),
)
.arg(
Arg::with_name(FLAG_LINK)
Arg::new(FLAG_LINK)
.long(FLAG_LINK)
.help("Uses the roc linker instead of the system linker.")
.about("Uses the roc linker instead of the system linker.")
.required(false),
)
.arg(
Arg::with_name(FLAG_PRECOMPILED)
Arg::new(FLAG_PRECOMPILED)
.long(FLAG_PRECOMPILED)
.help("Assumes the host has been precompiled and skips recompiling the host.")
.about("Assumes the host has been precompiled and skips recompiling the host.")
.required(false),
)
.arg(
Arg::with_name(FLAG_BACKEND)
Arg::new(FLAG_BACKEND)
.long(FLAG_BACKEND)
.help("Choose a different backend")
.about("Choose a different backend")
// .requires(BACKEND)
.default_value(Backend::default().as_str())
.possible_values(Backend::OPTIONS)
.required(false),
)
.arg(
Arg::with_name(ROC_FILE)
.help("The .roc file of an app to build and run")
Arg::new(ROC_FILE)
.about("The .roc file of an app to build and run")
.required(false),
)
.arg(
Arg::with_name(ARGS_FOR_APP)
.help("Arguments to pass into the app being run")
Arg::new(ARGS_FOR_APP)
.about("Arguments to pass into the app being run")
.requires(ROC_FILE)
.multiple(true),
.multiple_values(true),
);
if cfg!(feature = "editor") {
app.subcommand(
App::new(CMD_EDIT).about("Launch the Roc editor").arg(
Arg::with_name(DIRECTORY_OR_FILES)
.index(1)
.multiple(true)
.required(false)
.help("(optional) The directory or files to open on launch."),
),
App::new(CMD_EDIT)
.about("Launch the Roc editor (Work In Progress)")
.arg(
Arg::new(DIRECTORY_OR_FILES)
.index(1)
.multiple_values(true)
.required(false)
.about("(optional) The directory or files to open on launch."),
),
)
} else {
app
@ -240,7 +230,7 @@ pub fn build_app<'a>() -> App<'a> {
}
pub fn docs(files: Vec<PathBuf>) {
roc_docs::generate(
roc_docs::generate_docs_html(
files,
roc_builtins::std::standard_stdlib(),
Path::new("./generated-docs"),
@ -253,7 +243,6 @@ pub enum BuildConfig {
BuildAndRun { roc_file_arg_index: usize },
}
#[cfg(feature = "llvm")]
pub fn build(matches: &ArgMatches, config: BuildConfig) -> io::Result<i32> {
use build::build_file;
use std::str::FromStr;
@ -289,6 +278,7 @@ pub fn build(matches: &ArgMatches, config: BuildConfig) -> io::Result<i32> {
};
let surgically_link = matches.is_present(FLAG_LINK);
let precompiled = matches.is_present(FLAG_PRECOMPILED);
if surgically_link && !roc_linker::supported(&link_type, &target) {
panic!(
"Link type, {:?}, with target, {}, not supported by roc linker",
@ -318,6 +308,7 @@ pub fn build(matches: &ArgMatches, config: BuildConfig) -> io::Result<i32> {
});
let src_dir = path.parent().unwrap().canonicalize().unwrap();
let target_valgrind = matches.is_present(FLAG_VALGRIND);
let res_binary_path = build_file(
&arena,
&target,
@ -329,6 +320,7 @@ pub fn build(matches: &ArgMatches, config: BuildConfig) -> io::Result<i32> {
link_type,
surgically_link,
precompiled,
target_valgrind,
);
match res_binary_path {
@ -448,6 +440,7 @@ fn roc_run(cmd: &mut Command) -> io::Result<i32> {
}
}
#[cfg(feature = "run-wasm32")]
fn run_with_wasmer(wasm_path: &std::path::Path, args: &[String]) {
use wasmer::{Instance, Module, Store};
@ -478,6 +471,11 @@ fn run_with_wasmer(wasm_path: &std::path::Path, args: &[String]) {
}
}
/// Fallback used when this binary was built without the `run-wasm32` feature:
/// .wasm files cannot be executed, so report that and return.
#[cfg(not(feature = "run-wasm32"))]
fn run_with_wasmer(_wasm_path: &std::path::Path, _args: &[String]) {
    // Fix: the original message read "Running wasm files not support" (ungrammatical).
    println!("Running wasm files is not supported in this build");
}
enum Backend {
Host,
X86_32,

View File

@ -1,26 +1,23 @@
use roc_cli::build::check_file;
use roc_cli::{
build_app, docs, repl, BuildConfig, CMD_BUILD, CMD_CHECK, CMD_DOCS, CMD_EDIT, CMD_REPL,
CMD_RUN, DIRECTORY_OR_FILES, FLAG_TIME, ROC_FILE,
build_app, docs, format, repl, BuildConfig, CMD_BUILD, CMD_CHECK, CMD_DOCS, CMD_EDIT,
CMD_FORMAT, CMD_REPL, CMD_VERSION, DIRECTORY_OR_FILES, FLAG_TIME, ROC_FILE,
};
use roc_load::file::LoadingProblem;
use std::fs::{self, FileType};
use std::io;
use std::path::{Path, PathBuf};
#[macro_use]
extern crate const_format;
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
use std::ffi::{OsStr, OsString};
#[cfg(feature = "llvm")]
use roc_cli::build;
#[cfg(not(feature = "llvm"))]
fn build(_matches: &clap::ArgMatches, _config: BuildConfig) -> io::Result<i32> {
panic!("Building without LLVM is not currently supported.");
}
fn main() -> io::Result<()> {
let matches = build_app().get_matches();
@ -34,7 +31,7 @@ fn main() -> io::Result<()> {
}
None => {
launch_editor(&[])?;
launch_editor(None)?;
Ok(0)
}
@ -44,17 +41,6 @@ fn main() -> io::Result<()> {
matches.subcommand_matches(CMD_BUILD).unwrap(),
BuildConfig::BuildOnly,
)?),
Some(CMD_RUN) => {
// TODO remove CMD_RUN altogether if it is currently September 2021 or later.
println!(
r#"`roc run` is deprecated!
If you're using a prebuilt binary, you no longer need the `run` - just do `roc [FILE]` instead of `roc run [FILE]`.
If you're building the compiler from source you'll want to do `cargo run [FILE]` instead of `cargo run run [FILE]`.
"#
);
Ok(1)
}
Some(CMD_CHECK) => {
let arena = bumpalo::Bump::new();
@ -91,16 +77,13 @@ If you're building the compiler from source you'll want to do `cargo run [FILE]`
.subcommand_matches(CMD_EDIT)
.unwrap()
.values_of_os(DIRECTORY_OR_FILES)
.map(|mut values| values.next())
{
None => {
launch_editor(&[])?;
Some(Some(os_str)) => {
launch_editor(Some(Path::new(os_str)))?;
}
Some(values) => {
let paths = values
.map(|os_str| Path::new(os_str))
.collect::<Vec<&Path>>();
launch_editor(&paths)?;
_ => {
launch_editor(None)?;
}
}
@ -118,7 +101,10 @@ If you're building the compiler from source you'll want to do `cargo run [FILE]`
match maybe_values {
None => {
let mut os_string_values: Vec<OsString> = Vec::new();
read_all_roc_files(&OsStr::new("./").to_os_string(), &mut os_string_values)?;
read_all_roc_files(
&std::env::current_dir()?.as_os_str().to_os_string(),
&mut os_string_values,
)?;
for os_string in os_string_values {
values.push(os_string);
}
@ -142,6 +128,49 @@ If you're building the compiler from source you'll want to do `cargo run [FILE]`
Ok(0)
}
Some(CMD_FORMAT) => {
let maybe_values = matches
.subcommand_matches(CMD_FORMAT)
.unwrap()
.values_of_os(DIRECTORY_OR_FILES);
let mut values: Vec<OsString> = Vec::new();
match maybe_values {
None => {
let mut os_string_values: Vec<OsString> = Vec::new();
read_all_roc_files(
&std::env::current_dir()?.as_os_str().to_os_string(),
&mut os_string_values,
)?;
for os_string in os_string_values {
values.push(os_string);
}
}
Some(os_values) => {
for os_str in os_values {
values.push(os_str.to_os_string());
}
}
}
let mut roc_files = Vec::new();
// Populate roc_files
for os_str in values {
let metadata = fs::metadata(os_str.clone())?;
roc_files_recursive(os_str.as_os_str(), metadata.file_type(), &mut roc_files)?;
}
format(roc_files);
Ok(0)
}
Some(CMD_VERSION) => {
println!("roc {}", concatcp!(include_str!("../../version.txt"), "\n"));
Ok(0)
}
_ => unreachable!(),
}?;
@ -187,11 +216,11 @@ fn roc_files_recursive<P: AsRef<Path>>(
}
#[cfg(feature = "editor")]
fn launch_editor(filepaths: &[&Path]) -> io::Result<()> {
roc_editor::launch(filepaths)
fn launch_editor(project_dir_path: Option<&Path>) -> io::Result<()> {
roc_editor::launch(project_dir_path)
}
#[cfg(not(feature = "editor"))]
fn launch_editor(_filepaths: &[&Path]) -> io::Result<()> {
fn launch_editor(_project_dir_path: Option<&Path>) -> io::Result<()> {
panic!("Cannot launch the editor because this build of roc did not include `feature = \"editor\"`!");
}

View File

@ -1,13 +1,14 @@
use bumpalo::collections::Vec;
use bumpalo::Bump;
use libloading::Library;
use roc_builtins::bitcode::{FloatWidth, IntWidth};
use roc_gen_llvm::{run_jit_function, run_jit_function_dynamic_type};
use roc_module::called_via::CalledVia;
use roc_module::ident::TagName;
use roc_module::operator::CalledVia;
use roc_module::symbol::{Interns, ModuleId, Symbol};
use roc_mono::ir::ProcLayout;
use roc_mono::layout::{union_sorted_tags_help, Builtin, Layout, UnionLayout, UnionVariant};
use roc_parse::ast::{AssignedField, Expr, StrLiteral};
use roc_parse::ast::{AssignedField, Collection, Expr, StrLiteral};
use roc_region::all::{Located, Region};
use roc_types::subs::{Content, FlatType, GetSubsSlice, RecordFields, Subs, UnionTags, Variable};
@ -71,74 +72,66 @@ fn jit_to_ast_help<'a>(
content: &Content,
) -> Result<Expr<'a>, ToAstProblem> {
match layout {
Layout::Builtin(Builtin::Int1) => Ok(run_jit_function!(lib, main_fn_name, bool, |num| {
Layout::Builtin(Builtin::Bool) => Ok(run_jit_function!(lib, main_fn_name, bool, |num| {
bool_to_ast(env, num, content)
})),
Layout::Builtin(Builtin::Int8) => {
Ok(
// NOTE: this is does not handle 8-bit numbers yet
run_jit_function!(lib, main_fn_name, u8, |num| byte_to_ast(env, num, content)),
)
}
Layout::Builtin(Builtin::Usize) => Ok(run_jit_function!(lib, main_fn_name, usize, |num| {
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
})),
Layout::Builtin(Builtin::Int16) => {
Ok(run_jit_function!(lib, main_fn_name, i16, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
)))
}
Layout::Builtin(Builtin::Int32) => {
Ok(run_jit_function!(lib, main_fn_name, i32, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
)))
}
Layout::Builtin(Builtin::Int64) => {
Ok(run_jit_function!(lib, main_fn_name, i64, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
)))
}
Layout::Builtin(Builtin::Int128) => {
Ok(run_jit_function!(
lib,
main_fn_name,
i128,
|num| num_to_ast(env, number_literal_to_ast(env.arena, num), content)
))
}
Layout::Builtin(Builtin::Float32) => {
Ok(run_jit_function!(lib, main_fn_name, f32, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
)))
}
Layout::Builtin(Builtin::Float64) => {
Ok(run_jit_function!(lib, main_fn_name, f64, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
)))
}
Layout::Builtin(Builtin::Str) | Layout::Builtin(Builtin::EmptyStr) => Ok(
run_jit_function!(lib, main_fn_name, &'static str, |string: &'static str| {
str_to_ast(env.arena, env.arena.alloc(string))
}),
),
Layout::Builtin(Builtin::EmptyList) => {
Ok(run_jit_function!(lib, main_fn_name, &'static str, |_| {
Expr::List {
items: &[],
final_comments: &[],
Layout::Builtin(Builtin::Int(int_width)) => {
use IntWidth::*;
macro_rules! helper {
($ty:ty) => {
run_jit_function!(lib, main_fn_name, $ty, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
))
};
}
let result = match int_width {
U8 | I8 => {
// NOTE: this is does not handle 8-bit numbers yet
run_jit_function!(lib, main_fn_name, u8, |num| byte_to_ast(env, num, content))
}
}))
U16 => helper!(u16),
U32 => helper!(u32),
U64 => helper!(u64),
U128 => helper!(u128),
I16 => helper!(i16),
I32 => helper!(i32),
I64 => helper!(i64),
I128 => helper!(i128),
};
Ok(result)
}
Layout::Builtin(Builtin::Float(float_width)) => {
use FloatWidth::*;
macro_rules! helper {
($ty:ty) => {
run_jit_function!(lib, main_fn_name, $ty, |num| num_to_ast(
env,
number_literal_to_ast(env.arena, num),
content
))
};
}
let result = match float_width {
F32 => helper!(f32),
F64 => helper!(f64),
F128 => todo!("F128 not implemented"),
};
Ok(result)
}
Layout::Builtin(Builtin::Str) => Ok(run_jit_function!(
lib,
main_fn_name,
&'static str,
|string: &'static str| { str_to_ast(env.arena, env.arena.alloc(string)) }
)),
Layout::Builtin(Builtin::List(elem_layout)) => Ok(run_jit_function!(
lib,
main_fn_name,
@ -195,7 +188,7 @@ fn jit_to_ast_help<'a>(
}
};
let fields = [Layout::Builtin(Builtin::Int64), *layout];
let fields = [Layout::u64(), *layout];
let layout = Layout::Struct(&fields);
let result_stack_size = layout.stack_size(env.ptr_bytes);
@ -251,16 +244,16 @@ fn jit_to_ast_help<'a>(
.unwrap_or(0);
let tag_id = match union_layout.tag_id_builtin() {
Builtin::Int1 => {
Builtin::Bool => {
*(ptr.add(offset as usize) as *const i8) as i64
}
Builtin::Int8 => {
Builtin::Int(IntWidth::U8) => {
*(ptr.add(offset as usize) as *const i8) as i64
}
Builtin::Int16 => {
Builtin::Int(IntWidth::U16) => {
*(ptr.add(offset as usize) as *const i16) as i64
}
Builtin::Int64 => {
Builtin::Int(IntWidth::U64) => {
// used by non-recursive unions at the
// moment, remove if that is no longer the case
*(ptr.add(offset as usize) as *const i64) as i64
@ -383,58 +376,47 @@ fn ptr_to_ast<'a>(
layout: &Layout<'a>,
content: &Content,
) -> Expr<'a> {
macro_rules! helper {
($ty:ty) => {{
let num = unsafe { *(ptr as *const $ty) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}};
}
match layout {
Layout::Builtin(Builtin::Int128) => {
let num = unsafe { *(ptr as *const i128) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Int64) => {
let num = unsafe { *(ptr as *const i64) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Int32) => {
let num = unsafe { *(ptr as *const i32) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Int16) => {
let num = unsafe { *(ptr as *const i16) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Int8) => {
let num = unsafe { *(ptr as *const i8) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::Int1) => {
Layout::Builtin(Builtin::Bool) => {
// TODO: bits are not as expected here.
// num is always false at the moment.
let num = unsafe { *(ptr as *const bool) };
bool_to_ast(env, num, content)
}
Layout::Builtin(Builtin::Usize) => {
let num = unsafe { *(ptr as *const usize) };
Layout::Builtin(Builtin::Int(int_width)) => {
use IntWidth::*;
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
match int_width {
U8 => helper!(u8),
U16 => helper!(u16),
U32 => helper!(u32),
U64 => helper!(u64),
U128 => helper!(u128),
I8 => helper!(i8),
I16 => helper!(i16),
I32 => helper!(i32),
I64 => helper!(i64),
I128 => helper!(i128),
}
}
Layout::Builtin(Builtin::Float64) => {
let num = unsafe { *(ptr as *const f64) };
Layout::Builtin(Builtin::Float(float_width)) => {
use FloatWidth::*;
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
match float_width {
F32 => helper!(f32),
F64 => helper!(f64),
F128 => todo!("F128 not implemented"),
}
}
Layout::Builtin(Builtin::Float32) => {
let num = unsafe { *(ptr as *const f32) };
num_to_ast(env, number_literal_to_ast(env.arena, num), content)
}
Layout::Builtin(Builtin::EmptyList) => Expr::List {
items: &[],
final_comments: &[],
},
Layout::Builtin(Builtin::List(elem_layout)) => {
// Turn the (ptr, len) wrapper struct into actual ptr and len values.
let len = unsafe { *(ptr.offset(env.ptr_bytes as isize) as *const usize) };
@ -442,7 +424,6 @@ fn ptr_to_ast<'a>(
list_to_ast(env, ptr, len, elem_layout, content)
}
Layout::Builtin(Builtin::EmptyStr) => Expr::Str(StrLiteral::PlainLine("")),
Layout::Builtin(Builtin::Str) => {
let arena_str = unsafe { *(ptr as *const &'static str) };
@ -522,10 +503,7 @@ fn list_to_ast<'a>(
let output = output.into_bump_slice();
Expr::List {
items: output,
final_comments: &[],
}
Expr::List(Collection::with_items(output))
}
fn single_tag_union_to_ast<'a>(
@ -535,16 +513,21 @@ fn single_tag_union_to_ast<'a>(
tag_name: &TagName,
payload_vars: &[Variable],
) -> Expr<'a> {
debug_assert_eq!(field_layouts.len(), payload_vars.len());
let arena = env.arena;
let tag_expr = tag_name_to_expr(env, tag_name);
let loc_tag_expr = &*arena.alloc(Located::at_zero(tag_expr));
let it = payload_vars.iter().copied().zip(field_layouts);
let output = sequence_of_expr(env, ptr as *const u8, it).into_bump_slice();
let output = if field_layouts.len() == payload_vars.len() {
let it = payload_vars.iter().copied().zip(field_layouts);
sequence_of_expr(env, ptr as *const u8, it).into_bump_slice()
} else if field_layouts.is_empty() && !payload_vars.is_empty() {
// happens for e.g. `Foo Bar` where unit structures are nested and the inner one is dropped
let it = payload_vars.iter().copied().zip([&Layout::Struct(&[])]);
sequence_of_expr(env, ptr as *const u8, it).into_bump_slice()
} else {
unreachable!()
};
Expr::Apply(loc_tag_expr, output, CalledVia::Space)
}
@ -616,10 +599,7 @@ fn struct_to_ast<'a>(
let output = env.arena.alloc([loc_field]);
Expr::Record {
fields: output,
final_comments: &[],
}
Expr::Record(Collection::with_items(output))
} else {
debug_assert_eq!(sorted_fields.len(), field_layouts.len());
@ -653,10 +633,7 @@ fn struct_to_ast<'a>(
let output = output.into_bump_slice();
Expr::Record {
fields: output,
final_comments: &[],
}
Expr::Record(Collection::with_items(output))
}
}
@ -664,8 +641,8 @@ fn unpack_single_element_tag_union(subs: &Subs, tags: UnionTags) -> (&TagName, &
let (tag_name_index, payload_vars_index) = tags.iter_all().next().unwrap();
let tag_name = &subs[tag_name_index];
let subs_slice = subs[payload_vars_index].as_subs_slice();
let payload_vars = subs.get_subs_slice(*subs_slice);
let subs_slice = subs[payload_vars_index];
let payload_vars = subs.get_subs_slice(subs_slice);
(tag_name, payload_vars)
}
@ -678,14 +655,14 @@ fn unpack_two_element_tag_union(
let (tag_name_index, payload_vars_index) = it.next().unwrap();
let tag_name1 = &subs[tag_name_index];
let subs_slice = subs[payload_vars_index].as_subs_slice();
let payload_vars1 = subs.get_subs_slice(*subs_slice);
let subs_slice = subs[payload_vars_index];
let payload_vars1 = subs.get_subs_slice(subs_slice);
let (tag_name_index, payload_vars_index) = it.next().unwrap();
let tag_name2 = &subs[tag_name_index];
let subs_slice = subs[payload_vars_index].as_subs_slice();
let payload_vars2 = subs.get_subs_slice(*subs_slice);
let subs_slice = subs[payload_vars_index];
let payload_vars2 = subs.get_subs_slice(subs_slice);
(tag_name1, payload_vars1, tag_name2, payload_vars2)
}
@ -730,10 +707,7 @@ fn bool_to_ast<'a>(env: &Env<'a, '_>, value: bool, content: &Content) -> Expr<'a
region: Region::zero(),
};
Expr::Record {
fields: arena.alloc([loc_assigned_field]),
final_comments: arena.alloc([]),
}
Expr::Record(Collection::with_items(arena.alloc([loc_assigned_field])))
}
FlatType::TagUnion(tags, _) if tags.len() == 1 => {
let (tag_name, payload_vars) = unpack_single_element_tag_union(env.subs, *tags);
@ -845,10 +819,7 @@ fn byte_to_ast<'a>(env: &Env<'a, '_>, value: u8, content: &Content) -> Expr<'a>
region: Region::zero(),
};
Expr::Record {
fields: arena.alloc([loc_assigned_field]),
final_comments: &[],
}
Expr::Record(Collection::with_items(arena.alloc([loc_assigned_field])))
}
FlatType::TagUnion(tags, _) if tags.len() == 1 => {
let (tag_name, payload_vars) = unpack_single_element_tag_union(env.subs, *tags);
@ -967,10 +938,7 @@ fn num_to_ast<'a>(env: &Env<'a, '_>, num_expr: Expr<'a>, content: &Content) -> E
region: Region::zero(),
};
Expr::Record {
fields: arena.alloc([loc_assigned_field]),
final_comments: arena.alloc([]),
}
Expr::Record(Collection::with_items(arena.alloc([loc_assigned_field])))
}
FlatType::TagUnion(tags, _) => {
// This was a single-tag union that got unwrapped at runtime.

View File

@ -218,8 +218,8 @@ pub fn gen_and_eval<'a>(
// Verify the module
if let Err(errors) = env.module.verify() {
panic!(
"Errors defining module: {}\n\nUncomment things nearby to see more details.",
errors
"Errors defining module:\n{}\n\nUncomment things nearby to see more details.",
errors.to_string()
);
}

View File

@ -13,11 +13,18 @@ mod cli_run {
run_with_valgrind, ValgrindError, ValgrindErrorXWhat,
};
use serial_test::serial;
use std::path::Path;
use std::path::{Path, PathBuf};
#[cfg(not(debug_assertions))]
use roc_collections::all::MutMap;
#[cfg(target_os = "linux")]
const TEST_SURGICAL_LINKER: bool = true;
// Surgical linker currently only supports linux.
#[cfg(not(target_os = "linux"))]
const TEST_SURGICAL_LINKER: bool = false;
#[cfg(not(target_os = "macos"))]
const ALLOW_VALGRIND: bool = true;
@ -32,6 +39,7 @@ mod cli_run {
filename: &'a str,
executable_filename: &'a str,
stdin: &'a [&'a str],
input_file: Option<&'a str>,
expected_ending: &'a str,
use_valgrind: bool,
}
@ -41,10 +49,18 @@ mod cli_run {
stdin: &[&str],
executable_filename: &str,
flags: &[&str],
input_file: Option<PathBuf>,
expected_ending: &str,
use_valgrind: bool,
) {
let compile_out = run_roc(&[&["build", file.to_str().unwrap()], flags].concat());
let mut all_flags = vec![];
all_flags.extend_from_slice(flags);
if use_valgrind {
all_flags.extend_from_slice(&["--valgrind"]);
}
let compile_out = run_roc(&[&["build", file.to_str().unwrap()], &all_flags[..]].concat());
if !compile_out.stderr.is_empty() {
panic!("{}", compile_out.stderr);
}
@ -52,10 +68,20 @@ mod cli_run {
assert!(compile_out.status.success(), "bad status {:?}", compile_out);
let out = if use_valgrind && ALLOW_VALGRIND {
let (valgrind_out, raw_xml) = run_with_valgrind(
stdin,
&[file.with_file_name(executable_filename).to_str().unwrap()],
);
let (valgrind_out, raw_xml) = if let Some(input_file) = input_file {
run_with_valgrind(
stdin,
&[
file.with_file_name(executable_filename).to_str().unwrap(),
input_file.to_str().unwrap(),
],
)
} else {
run_with_valgrind(
stdin,
&[file.with_file_name(executable_filename).to_str().unwrap()],
)
};
if valgrind_out.status.success() {
let memory_errors = extract_valgrind_errors(&raw_xml).unwrap_or_else(|err| {
@ -92,6 +118,12 @@ mod cli_run {
}
valgrind_out
} else if let Some(input_file) = input_file {
run_cmd(
file.with_file_name(executable_filename).to_str().unwrap(),
stdin,
&[input_file.to_str().unwrap()],
)
} else {
run_cmd(
file.with_file_name(executable_filename).to_str().unwrap(),
@ -102,7 +134,7 @@ mod cli_run {
if !&out.stdout.ends_with(expected_ending) {
panic!(
"expected output to end with {:?} but instead got {:#?}",
expected_ending, out
expected_ending, out.stdout
);
}
assert!(out.status.success());
@ -114,8 +146,10 @@ mod cli_run {
stdin: &[&str],
executable_filename: &str,
flags: &[&str],
input_file: Option<PathBuf>,
expected_ending: &str,
) {
assert_eq!(input_file, None, "Wasm does not support input files");
let mut flags = flags.to_vec();
flags.push("--backend=wasm32");
@ -136,7 +170,6 @@ mod cli_run {
);
}
}
/// This macro does two things.
///
/// First, it generates and runs a separate test for each of the given
@ -163,6 +196,12 @@ mod cli_run {
eprintln!("WARNING: skipping testing example {} because the test is broken right now!", example.filename);
return;
}
"hello-swift" => {
if cfg!(not(target_os = "macos")) {
eprintln!("WARNING: skipping testing example {} because it only works on MacOS.", example.filename);
return;
}
}
_ => {}
}
@ -172,18 +211,37 @@ mod cli_run {
example.stdin,
example.executable_filename,
&[],
example.input_file.and_then(|file| Some(example_file(dir_name, file))),
example.expected_ending,
example.use_valgrind,
);
// This is mostly because the false interpreter is still very slow -
// 25s for the cli tests is just not acceptable during development!
#[cfg(not(debug_assertions))]
check_output_with_stdin(
&file_name,
example.stdin,
example.executable_filename,
&["--optimize"],
example.input_file.and_then(|file| Some(example_file(dir_name, file))),
example.expected_ending,
example.use_valgrind,
);
// Also check with the surgical linker.
if TEST_SURGICAL_LINKER {
check_output_with_stdin(
&file_name,
example.stdin,
example.executable_filename,
&["--roc-linker"],
example.input_file.and_then(|file| Some(example_file(dir_name, file))),
example.expected_ending,
example.use_valgrind,
);
}
}
)*
@ -216,6 +274,7 @@ mod cli_run {
filename: "Hello.roc",
executable_filename: "hello-world",
stdin: &[],
input_file: None,
expected_ending:"Hello, World!\n",
use_valgrind: true,
},
@ -223,20 +282,31 @@ mod cli_run {
filename: "Hello.roc",
executable_filename: "hello-world",
stdin: &[],
input_file: None,
expected_ending:"Hello, World!\n",
use_valgrind: true,
},
hello_rust:"hello-rust" => Example {
filename: "Hello.roc",
executable_filename: "hello-world",
executable_filename: "hello-rust",
stdin: &[],
input_file: None,
expected_ending:"Hello, World!\n",
use_valgrind: true,
},
hello_swift:"hello-swift" => Example {
filename: "Hello.roc",
executable_filename: "hello-swift",
stdin: &[],
input_file: None,
expected_ending:"Hello Swift, meet Roc\n",
use_valgrind: true,
},
hello_web:"hello-web" => Example {
filename: "Hello.roc",
executable_filename: "hello-web",
stdin: &[],
input_file: None,
expected_ending:"Hello, World!\n",
use_valgrind: true,
},
@ -244,6 +314,7 @@ mod cli_run {
filename: "Fib.roc",
executable_filename: "fib",
stdin: &[],
input_file: None,
expected_ending:"55\n",
use_valgrind: true,
},
@ -251,6 +322,7 @@ mod cli_run {
filename: "Quicksort.roc",
executable_filename: "quicksort",
stdin: &[],
input_file: None,
expected_ending: "[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2]\n",
use_valgrind: true,
},
@ -258,6 +330,7 @@ mod cli_run {
// filename: "Quicksort.roc",
// executable_filename: "quicksort",
// stdin: &[],
// input_file: None,
// expected_ending: "[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2]\n",
// use_valgrind: true,
// },
@ -265,6 +338,7 @@ mod cli_run {
filename: "Main.roc",
executable_filename: "effect-example",
stdin: &["hi there!"],
input_file: None,
expected_ending: "hi there!\nIt is known\n",
use_valgrind: true,
},
@ -272,6 +346,7 @@ mod cli_run {
// filename: "Main.roc",
// executable_filename: "tea-example",
// stdin: &[],
// input_file: None,
// expected_ending: "",
// use_valgrind: true,
// },
@ -279,6 +354,7 @@ mod cli_run {
filename: "Echo.roc",
executable_filename: "echo",
stdin: &["Giovanni\n", "Giorgio\n"],
input_file: None,
expected_ending: "Hi, Giovanni Giorgio!\n",
use_valgrind: true,
},
@ -286,6 +362,7 @@ mod cli_run {
// filename: "Main.roc",
// executable_filename: "custom-malloc-example",
// stdin: &[],
// input_file: None,
// expected_ending: "ms!\nThe list was small!\n",
// use_valgrind: true,
// },
@ -293,9 +370,20 @@ mod cli_run {
// filename: "Main.roc",
// executable_filename: "task-example",
// stdin: &[],
// input_file: None,
// expected_ending: "successfully wrote to file\n",
// use_valgrind: true,
// },
false_interpreter:"false-interpreter" => {
Example {
filename: "False.roc",
executable_filename: "false",
stdin: &[],
input_file: Some("examples/hello.false"),
expected_ending:"Hello, World!\n",
use_valgrind: true,
}
},
}
macro_rules! benchmarks {
@ -322,6 +410,7 @@ mod cli_run {
benchmark.stdin,
benchmark.executable_filename,
&[],
benchmark.input_file.and_then(|file| Some(examples_dir("benchmarks").join(file))),
benchmark.expected_ending,
benchmark.use_valgrind,
);
@ -331,6 +420,7 @@ mod cli_run {
benchmark.stdin,
benchmark.executable_filename,
&["--optimize"],
benchmark.input_file.and_then(|file| Some(examples_dir("benchmarks").join(file))),
benchmark.expected_ending,
benchmark.use_valgrind,
);
@ -363,6 +453,7 @@ mod cli_run {
benchmark.stdin,
benchmark.executable_filename,
&[],
benchmark.input_file.and_then(|file| Some(examples_dir("benchmarks").join(file))),
benchmark.expected_ending,
);
@ -371,6 +462,7 @@ mod cli_run {
benchmark.stdin,
benchmark.executable_filename,
&["--optimize"],
benchmark.input_file.and_then(|file| Some(examples_dir("benchmarks").join(file))),
benchmark.expected_ending,
);
}
@ -402,6 +494,7 @@ mod cli_run {
benchmark.stdin,
benchmark.executable_filename,
&["--backend=x86_32"],
benchmark.input_file.and_then(|file| Some(examples_dir("benchmarks").join(file))),
benchmark.expected_ending,
benchmark.use_valgrind,
);
@ -411,6 +504,7 @@ mod cli_run {
benchmark.stdin,
benchmark.executable_filename,
&["--backend=x86_32", "--optimize"],
benchmark.input_file.and_then(|file| Some(examples_dir("benchmarks").join(file))),
benchmark.expected_ending,
benchmark.use_valgrind,
);
@ -435,77 +529,87 @@ mod cli_run {
}
benchmarks! {
nqueens => Example {
filename: "NQueens.roc",
executable_filename: "nqueens",
stdin: &["6"],
expected_ending: "4\n",
use_valgrind: true,
},
cfold => Example {
filename: "CFold.roc",
executable_filename: "cfold",
stdin: &["3"],
expected_ending: "11 & 11\n",
use_valgrind: true,
},
deriv => Example {
filename: "Deriv.roc",
executable_filename: "deriv",
stdin: &["2"],
expected_ending: "1 count: 6\n2 count: 22\n",
use_valgrind: true,
},
rbtree_ck => Example {
filename: "RBTreeCk.roc",
executable_filename: "rbtree-ck",
stdin: &["100"],
expected_ending: "10\n",
use_valgrind: true,
},
rbtree_insert => Example {
filename: "RBTreeInsert.roc",
executable_filename: "rbtree-insert",
stdin: &[],
expected_ending: "Node Black 0 {} Empty Empty\n",
use_valgrind: true,
},
rbtree_del => Example {
filename: "RBTreeDel.roc",
executable_filename: "rbtree-del",
stdin: &["420"],
expected_ending: "30\n",
use_valgrind: true,
},
astar => Example {
filename: "TestAStar.roc",
executable_filename: "test-astar",
stdin: &[],
expected_ending: "True\n",
use_valgrind: false,
},
base64 => Example {
filename: "TestBase64.roc",
executable_filename: "test-base64",
stdin: &[],
expected_ending: "encoded: SGVsbG8gV29ybGQ=\ndecoded: Hello World\n",
use_valgrind: true,
},
closure => Example {
filename: "Closure.roc",
executable_filename: "closure",
stdin: &[],
expected_ending: "",
use_valgrind: true,
},
quicksort_app => Example {
filename: "QuicksortApp.roc",
executable_filename: "quicksortapp",
stdin: &[],
expected_ending: "todo put the correct quicksort answer here",
use_valgrind: true,
},
}
nqueens => Example {
filename: "NQueens.roc",
executable_filename: "nqueens",
stdin: &["6"],
input_file: None,
expected_ending: "4\n",
use_valgrind: true,
},
cfold => Example {
filename: "CFold.roc",
executable_filename: "cfold",
stdin: &["3"],
input_file: None,
expected_ending: "11 & 11\n",
use_valgrind: true,
},
deriv => Example {
filename: "Deriv.roc",
executable_filename: "deriv",
stdin: &["2"],
input_file: None,
expected_ending: "1 count: 6\n2 count: 22\n",
use_valgrind: true,
},
rbtree_ck => Example {
filename: "RBTreeCk.roc",
executable_filename: "rbtree-ck",
stdin: &["100"],
input_file: None,
expected_ending: "10\n",
use_valgrind: true,
},
rbtree_insert => Example {
filename: "RBTreeInsert.roc",
executable_filename: "rbtree-insert",
stdin: &[],
input_file: None,
expected_ending: "Node Black 0 {} Empty Empty\n",
use_valgrind: true,
},
// rbtree_del => Example {
// filename: "RBTreeDel.roc",
// executable_filename: "rbtree-del",
// stdin: &["420"],
// input_file: None,
// expected_ending: "30\n",
// use_valgrind: true,
// },
astar => Example {
filename: "TestAStar.roc",
executable_filename: "test-astar",
stdin: &[],
input_file: None,
expected_ending: "True\n",
use_valgrind: false,
},
base64 => Example {
filename: "TestBase64.roc",
executable_filename: "test-base64",
stdin: &[],
input_file: None,
expected_ending: "encoded: SGVsbG8gV29ybGQ=\ndecoded: Hello World\n",
use_valgrind: true,
},
closure => Example {
filename: "Closure.roc",
executable_filename: "closure",
stdin: &[],
input_file: None,
expected_ending: "",
use_valgrind: true,
},
quicksort_app => Example {
filename: "QuicksortApp.roc",
executable_filename: "quicksortapp",
stdin: &[],
input_file: None,
expected_ending: "todo put the correct quicksort answer here",
use_valgrind: true,
},
}
#[cfg(not(debug_assertions))]
fn check_for_tests(examples_dir: &str, all_examples: &mut MutMap<&str, Example<'_>>) {
@ -562,10 +666,10 @@ mod cli_run {
file.read_exact(buf).unwrap();
// Only app modules in this directory are considered benchmarks.
if "app".as_bytes() == buf {
if "app".as_bytes() == buf && !benchmark_file_name.contains("RBTreeDel") {
all_benchmarks.remove(benchmark_file_name.as_str()).unwrap_or_else(|| {
panic!("The benchmark {}/{} does not have any corresponding tests in cli_run. Please add one, so if it ever stops working, we'll know about it right away!", benchmarks_dir, benchmark_file_name);
});
panic!("The benchmark {}/{} does not have any corresponding tests in cli_run. Please add one, so if it ever stops working, we'll know about it right away!", benchmarks_dir, benchmark_file_name);
});
}
}
}
@ -581,6 +685,7 @@ mod cli_run {
&[],
"multi-dep-str",
&[],
None,
"I am Dep2.str2\n",
true,
);
@ -594,6 +699,7 @@ mod cli_run {
&[],
"multi-dep-str",
&["--optimize"],
None,
"I am Dep2.str2\n",
true,
);
@ -607,6 +713,7 @@ mod cli_run {
&[],
"multi-dep-thunk",
&[],
None,
"I am Dep2.value2\n",
true,
);
@ -620,6 +727,7 @@ mod cli_run {
&[],
"multi-dep-thunk",
&["--optimize"],
None,
"I am Dep2.value2\n",
true,
);
@ -693,7 +801,7 @@ fn read_wasi_stdout(wasi_env: wasmer_wasi::WasiEnv) -> String {
let mut buf = String::new();
stdout.read_to_string(&mut buf).unwrap();
return buf;
buf
}
_ => todo!(),
}

View File

@ -7,19 +7,6 @@ extern crate indoc;
#[cfg(test)]
mod repl_eval {
use cli_utils::helpers;
use roc_gen_llvm::run_roc::RocCallResult;
#[test]
fn check_discriminant_size() {
// tells us if the size of the discriminant has changed. Lots of other code
// relies on this size
let value: i64 = 1234;
assert_eq!(
std::mem::size_of_val(&RocCallResult::Success(value)),
roc_gen_llvm::run_roc::ROC_CALL_RESULT_DISCRIMINANT_SIZE
+ std::mem::size_of_val(&value)
)
}
const ERROR_MESSAGE_START: char = '─';
@ -103,6 +90,24 @@ mod repl_eval {
expect_success("299 % 10", "Ok 9 : Result (Int *) [ DivByZero ]*");
}
#[test]
fn num_floor_division_success() {
expect_success("Num.divFloor 4 3", "Ok 1 : Result (Int *) [ DivByZero ]*");
}
#[test]
fn num_floor_division_divby_zero() {
expect_success(
"Num.divFloor 4 0",
"Err DivByZero : Result (Int *) [ DivByZero ]*",
);
}
#[test]
fn num_ceil_division_success() {
expect_success("Num.divCeil 4 3", "Ok 2 : Result (Int *) [ DivByZero ]*")
}
#[test]
fn bool_in_record() {
expect_success("{ x: 1 == 1 }", "{ x: True } : { x : Bool }");
@ -167,6 +172,11 @@ mod repl_eval {
expect_success("Foo 1 3.14", "Foo 1 3.14 : [ Foo (Num *) (Float *) ]*");
}
#[test]
fn newtype_of_unit() {
expect_success("Foo Bar", "Foo Bar : [ Foo [ Bar ]* ]*");
}
#[test]
fn tag_with_arguments() {
expect_success("True 1", "True 1 : [ True (Num *) ]*");
@ -307,7 +317,7 @@ mod repl_eval {
expect_success("Num.addChecked 1 1", "Ok 2 : Result (Num *) [ Overflow ]*");
expect_success(
"Num.addChecked Num.maxInt 1",
"Err (Overflow) : Result I64 [ Overflow ]*",
"Err Overflow : Result I64 [ Overflow ]*",
);
}
@ -316,7 +326,7 @@ mod repl_eval {
expect_success("Num.subChecked 1 1", "Ok 0 : Result (Num *) [ Overflow ]*");
expect_success(
"Num.subChecked Num.minInt 1",
"Err (Overflow) : Result I64 [ Overflow ]*",
"Err Overflow : Result I64 [ Overflow ]*",
);
}
@ -328,7 +338,7 @@ mod repl_eval {
);
expect_success(
"Num.mulChecked Num.maxInt 2",
"Err (Overflow) : Result I64 [ Overflow ]*",
"Err Overflow : Result I64 [ Overflow ]*",
);
}
@ -362,7 +372,7 @@ mod repl_eval {
);
expect_success(
"List.first []",
"Err (ListWasEmpty) : Result * [ ListWasEmpty ]*",
"Err ListWasEmpty : Result * [ ListWasEmpty ]*",
);
}
@ -375,7 +385,7 @@ mod repl_eval {
expect_success(
"List.last []",
"Err (ListWasEmpty) : Result * [ ListWasEmpty ]*",
"Err ListWasEmpty : Result * [ ListWasEmpty ]*",
);
}
@ -502,6 +512,11 @@ mod repl_eval {
expect_success("\\x -> x", "<function> : a -> a");
}
#[test]
fn sum_lambda() {
expect_success("\\x, y -> x + y", "<function> : Num a, Num a -> Num a");
}
#[test]
fn stdlib_function() {
expect_success("Num.abs", "<function> : Num a -> Num a");

3942
cli_utils/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

23
cli_utils/Cargo.toml Normal file
View File

@ -0,0 +1,23 @@
[package]
name = "cli_utils"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/rtfeldman/roc"
edition = "2018"
description = "Shared code for cli tests and benchmarks"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
roc_cli = { path = "../cli" }
roc_collections = { path = "../compiler/collections" }
roc_load = { path = "../compiler/load" }
roc_module = { path = "../compiler/module" }
bumpalo = { version = "3.8.0", features = ["collections"] }
criterion = { git = "https://github.com/Anton-4/criterion.rs"}
serde = { version = "1.0.130", features = ["derive"] }
serde-xml-rs = "0.5.1"
strip-ansi-escapes = "0.1.1"
tempfile = "3.2.0"
rlimit = "0.6.2"

View File

@ -131,6 +131,7 @@ pub fn bench_rbtree_ck<T: Measurement>(bench_group_opt: Option<&mut BenchmarkGro
);
}
#[allow(dead_code)]
pub fn bench_rbtree_delete<T: Measurement>(bench_group_opt: Option<&mut BenchmarkGroup<T>>) {
exec_bench_w_input(
&example_file("benchmarks", "RBTreeDel.roc"),

17
code_markup/Cargo.toml Normal file
View File

@ -0,0 +1,17 @@
[package]
name = "roc_code_markup"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2018"
description = "Our own markup language for Roc code. Used by the editor and (soon) the docs."
[dependencies]
roc_ast = { path = "../ast" }
roc_module = { path = "../compiler/module" }
roc_utils = { path = "../utils" }
serde = { version = "1.0.130", features = ["derive"] }
palette = "0.6.0"
snafu = { version = "0.6.10", features = ["backtraces"] }
bumpalo = { version = "3.8.0", features = ["collections"] }
itertools = "0.10.1"

22
code_markup/src/colors.rs Normal file
View File

@ -0,0 +1,22 @@
use palette::{FromColor, Hsv, Srgb};
pub type RgbaTup = (f32, f32, f32, f32);
pub const WHITE: RgbaTup = (1.0, 1.0, 1.0, 1.0);
pub fn to_slice((r, g, b, a): RgbaTup) -> [f32; 4] {
[r, g, b, a]
}
pub fn from_hsb(hue: usize, saturation: usize, brightness: usize) -> RgbaTup {
from_hsba(hue, saturation, brightness, 1.0)
}
pub fn from_hsba(hue: usize, saturation: usize, brightness: usize, alpha: f32) -> RgbaTup {
let rgb = Srgb::from_color(Hsv::new(
hue as f32,
(saturation as f32) / 100.0,
(brightness as f32) / 100.0,
));
(rgb.red, rgb.green, rgb.blue, alpha)
}

5
code_markup/src/lib.rs Normal file
View File

@ -0,0 +1,5 @@
pub mod colors;
pub mod markup;
pub mod markup_error;
pub mod slow_pool;
pub mod syntax_highlight;

View File

@ -1,8 +1,8 @@
#![allow(dead_code)]
use crate::editor::ed_error::{CaretNotFound, EdResult};
use snafu::ensure;
use crate::markup_error::{CaretNotFound, MarkResult};
#[derive(Debug, Copy, Clone)]
pub struct Caret {
pub offset_col: usize,
@ -65,10 +65,6 @@ pub struct Attributes {
}
impl Attributes {
pub fn new() -> Attributes {
Attributes { all: Vec::new() }
}
pub fn add(&mut self, attr: Attribute) {
self.all.push(attr);
}
@ -103,7 +99,7 @@ impl Attributes {
carets
}
pub fn delete_caret(&mut self, offset_col: usize, node_id: usize) -> EdResult<()> {
pub fn delete_caret(&mut self, offset_col: usize, node_id: usize) -> MarkResult<()> {
let old_len = self.all.len();
self.all.retain(|attr| {
@ -121,3 +117,9 @@ impl Attributes {
Ok(())
}
}
impl Default for Attributes {
fn default() -> Self {
Attributes { all: Vec::new() }
}
}

View File

@ -0,0 +1,149 @@
use roc_ast::lang::core::{ast::ASTNodeId, expr::expr2::ExprId};
use crate::{slow_pool::MarkNodeId, syntax_highlight::HighlightStyle};
use super::{attribute::Attributes, nodes, nodes::MarkupNode};
/// Text node for the ` = ` sign, highlighted as an operator.
pub fn new_equals_mn(ast_node_id: ASTNodeId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
    MarkupNode::Text {
        ast_node_id,
        parent_id_opt,
        content: nodes::EQUALS.to_owned(),
        syn_high_style: HighlightStyle::Operator,
        attributes: Attributes::default(),
        newlines_at_end: 0,
    }
}
/// Text node for `, `; convenience wrapper over `new_comma_mn_ast` for expression ids.
pub fn new_comma_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
    new_comma_mn_ast(ASTNodeId::AExprId(expr_id), parent_id_opt)
}
/// Text node for `, `, highlighted with the dedicated comma style.
pub fn new_comma_mn_ast(ast_node_id: ASTNodeId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
    MarkupNode::Text {
        ast_node_id,
        parent_id_opt,
        content: nodes::COMMA.to_owned(),
        syn_high_style: HighlightStyle::Comma,
        attributes: Attributes::default(),
        newlines_at_end: 0,
    }
}
pub fn new_blank_mn(ast_node_id: ASTNodeId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
MarkupNode::Blank {
ast_node_id,
attributes: Attributes::default(),
parent_id_opt,
newlines_at_end: 0,
}
}
/// Blank (placeholder) markup node followed by `nr_of_newlines` newlines.
pub fn new_blank_mn_w_nls(
    ast_node_id: ASTNodeId,
    parent_id_opt: Option<MarkNodeId>,
    nr_of_newlines: usize,
) -> MarkupNode {
    MarkupNode::Blank {
        ast_node_id,
        attributes: Attributes::default(),
        parent_id_opt,
        newlines_at_end: nr_of_newlines,
    }
}
/// Text node for `: `, rendered as an operator.
pub fn new_colon_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
    new_operator_mn(nodes::COLON.to_owned(), expr_id, parent_id_opt)
}
/// Text node for an arbitrary operator string, owned by the expression `expr_id`.
pub fn new_operator_mn(
    content: String,
    expr_id: ExprId,
    parent_id_opt: Option<MarkNodeId>,
) -> MarkupNode {
    MarkupNode::Text {
        ast_node_id: ASTNodeId::AExprId(expr_id),
        parent_id_opt,
        content,
        syn_high_style: HighlightStyle::Operator,
        attributes: Attributes::default(),
        newlines_at_end: 0,
    }
}
/// Text node for the record-opening `{ `, highlighted as a bracket.
pub fn new_left_accolade_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
    MarkupNode::Text {
        ast_node_id: ASTNodeId::AExprId(expr_id),
        parent_id_opt,
        content: nodes::LEFT_ACCOLADE.to_owned(),
        syn_high_style: HighlightStyle::Bracket,
        attributes: Attributes::default(),
        newlines_at_end: 0,
    }
}
/// Text node for the record-closing ` }`, highlighted as a bracket.
pub fn new_right_accolade_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
    MarkupNode::Text {
        ast_node_id: ASTNodeId::AExprId(expr_id),
        parent_id_opt,
        content: nodes::RIGHT_ACCOLADE.to_owned(),
        syn_high_style: HighlightStyle::Bracket,
        attributes: Attributes::default(),
        newlines_at_end: 0,
    }
}
/// Text node for the list-opening `[ `, highlighted as a bracket.
pub fn new_left_square_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
    MarkupNode::Text {
        content: nodes::LEFT_SQUARE_BR.to_owned(),
        ast_node_id: ASTNodeId::AExprId(expr_id),
        syn_high_style: HighlightStyle::Bracket,
        attributes: Attributes::default(),
        parent_id_opt,
        newlines_at_end: 0,
    }
}
/// Text node for the list-closing ` ]`, highlighted as a bracket.
pub fn new_right_square_mn(expr_id: ExprId, parent_id_opt: Option<MarkNodeId>) -> MarkupNode {
    MarkupNode::Text {
        content: nodes::RIGHT_SQUARE_BR.to_owned(),
        ast_node_id: ASTNodeId::AExprId(expr_id),
        syn_high_style: HighlightStyle::Bracket,
        attributes: Attributes::default(),
        parent_id_opt,
        newlines_at_end: 0,
    }
}
/// Text node for a function name (no parent assigned yet).
pub fn new_func_name_mn(content: String, expr_id: ExprId) -> MarkupNode {
    MarkupNode::Text {
        content,
        ast_node_id: ASTNodeId::AExprId(expr_id),
        syn_high_style: HighlightStyle::FunctionName,
        attributes: Attributes::default(),
        parent_id_opt: None,
        newlines_at_end: 0,
    }
}
/// Text node for a function argument name (no parent assigned yet).
pub fn new_arg_name_mn(content: String, expr_id: ExprId) -> MarkupNode {
    MarkupNode::Text {
        content,
        ast_node_id: ASTNodeId::AExprId(expr_id),
        syn_high_style: HighlightStyle::FunctionArgName,
        attributes: Attributes::default(),
        parent_id_opt: None,
        newlines_at_end: 0,
    }
}
/// Text node for ` -> `, highlighted as an operator, with the given
/// number of trailing newlines.
pub fn new_arrow_mn(ast_node_id: ASTNodeId, newlines_at_end: usize) -> MarkupNode {
    MarkupNode::Text {
        ast_node_id,
        newlines_at_end,
        content: nodes::ARROW.to_owned(),
        syn_high_style: HighlightStyle::Operator,
        attributes: Attributes::default(),
        parent_id_opt: None,
    }
}

View File

@ -0,0 +1,34 @@
use roc_ast::{
ast_error::ASTResult,
lang::{core::ast::AST, env::Env},
};
use roc_module::symbol::Interns;
use crate::{
markup::{
convert::{from_def2::def2_to_markup, from_header::header_to_markup},
nodes::set_parent_for_all,
},
slow_pool::{MarkNodeId, SlowPool},
};
/// Converts a whole `AST` (header plus top-level defs) into markup trees.
///
/// Returns one root `MarkNodeId` per top-level item, header first, with
/// parent links set for every def subtree.
pub fn ast_to_mark_nodes<'a>(
    env: &mut Env<'a>,
    ast: &AST,
    mark_node_pool: &mut SlowPool,
    interns: &Interns,
) -> ASTResult<Vec<MarkNodeId>> {
    let mut root_mark_node_ids = Vec::with_capacity(ast.def_ids.len() + 1);
    root_mark_node_ids.push(header_to_markup(&ast.header, mark_node_pool));

    for &def_id in ast.def_ids.iter() {
        // Order matters: convert each def, then wire up its parent links.
        let def2 = env.pool.get(def_id);

        let def_markup_id = def2_to_markup(env, def2, def_id, mark_node_pool, interns)?;

        set_parent_for_all(def_markup_id, mark_node_pool);

        root_mark_node_ids.push(def_markup_id);
    }

    Ok(root_mark_node_ids)
}

View File

@ -0,0 +1,52 @@
use crate::{
markup::{common_nodes::new_blank_mn_w_nls, top_level_def::tld_mark_node},
slow_pool::{MarkNodeId, SlowPool},
};
use super::from_expr2::expr2_to_markup;
use roc_ast::{
ast_error::ASTResult,
lang::{
core::{
ast::ASTNodeId,
def::def2::{Def2, DefId},
},
env::Env,
},
};
use roc_module::symbol::Interns;
/// Converts a single top-level definition (`Def2`) into a markup tree,
/// returning the id of the definition's root markup node.
pub fn def2_to_markup<'a>(
    env: &mut Env<'a>,
    def2: &Def2,
    def2_node_id: DefId,
    mark_node_pool: &mut SlowPool,
    interns: &Interns,
) -> ASTResult<MarkNodeId> {
    let ast_node_id = ASTNodeId::ADefId(def2_node_id);

    let mark_node_id = match def2 {
        Def2::ValueDef {
            identifier_id,
            expr_id,
        } => {
            // Render the body expression first, then wrap it in a
            // top-level-def node (`name = <expr>`).
            let expr_mn_id = expr2_to_markup(
                env,
                env.pool.get(*expr_id),
                *expr_id,
                mark_node_pool,
                interns,
                0,
            )?;

            let tld_mn =
                tld_mark_node(*identifier_id, expr_mn_id, ast_node_id, mark_node_pool, env)?;

            mark_node_pool.add(tld_mn)
        }
        // An empty def renders as a Blank placeholder with two trailing newlines.
        Def2::Blank => mark_node_pool.add(new_blank_mn_w_nls(ast_node_id, None, 2)),
    };

    Ok(mark_node_id)
}

View File

@ -0,0 +1,389 @@
use crate::{
markup::{
attribute::Attributes,
common_nodes::{
new_arg_name_mn, new_arrow_mn, new_blank_mn, new_colon_mn, new_comma_mn, new_equals_mn,
new_left_accolade_mn, new_left_square_mn, new_operator_mn, new_right_accolade_mn,
new_right_square_mn,
},
nodes::{
get_string, join_mark_nodes_commas, join_mark_nodes_spaces, new_markup_node, MarkupNode,
},
},
slow_pool::{MarkNodeId, SlowPool},
syntax_highlight::HighlightStyle,
};
use itertools::Itertools;
use roc_ast::{
ast_error::ASTResult,
lang::{
core::{
ast::ASTNodeId,
expr::{
expr2::{Expr2, ExprId},
record_field::RecordField,
},
pattern::{get_identifier_string, Pattern2},
val_def::ValueDef,
},
env::Env,
},
};
use roc_module::{module_err::ModuleResult, symbol::Interns};
// make Markup Nodes: generate String representation, assign Highlighting Style
/// Converts an `Expr2` into a markup tree, returning the id of its root node.
///
/// `indent_level` is the current indentation depth (4 spaces per level); it is
/// applied to leaf literals via `with_indent`/`new_markup_node`. Parent links
/// are NOT set here — callers use `set_parent_for_all` afterwards.
pub fn expr2_to_markup<'a>(
    env: &Env<'a>,
    expr2: &Expr2,
    expr2_node_id: ExprId,
    mark_node_pool: &mut SlowPool,
    interns: &Interns,
    indent_level: usize,
) -> ASTResult<MarkNodeId> {
    let ast_node_id = ASTNodeId::AExprId(expr2_node_id);

    // for debugging
    //println!("EXPR2 {:?}", expr2);

    let mark_node_id = match expr2 {
        // numeric literals render their source text with Number highlighting
        Expr2::SmallInt { text, .. }
        | Expr2::I128 { text, .. }
        | Expr2::U128 { text, .. }
        | Expr2::Float { text, .. } => {
            let num_str = get_string(env, text);

            new_markup_node(
                with_indent(indent_level, &num_str),
                ast_node_id,
                HighlightStyle::Number,
                mark_node_pool,
                indent_level,
            )
        }
        // string literal, re-wrapped in quotes
        Expr2::Str(text) => {
            let content = format!("\"{}\"", text.as_str(env.pool));

            new_markup_node(
                with_indent(indent_level, &content),
                ast_node_id,
                HighlightStyle::String,
                mark_node_pool,
                indent_level,
            )
        }
        Expr2::GlobalTag { name, .. } => new_markup_node(
            with_indent(indent_level, &get_string(env, name)),
            ast_node_id,
            HighlightStyle::Type,
            mark_node_pool,
            indent_level,
        ),
        // function call: `fun arg1 arg2 ...`, args separated by spaces
        Expr2::Call { args, expr_id, .. } => {
            let expr = env.pool.get(*expr_id);

            let fun_call_mark_id =
                expr2_to_markup(env, expr, *expr_id, mark_node_pool, interns, indent_level)?;

            let arg_expr_ids: Vec<ExprId> =
                args.iter(env.pool).map(|(_, arg_id)| *arg_id).collect();

            let arg_call_mark_ids: Vec<MarkNodeId> = arg_expr_ids
                .iter()
                .map(|arg_id| {
                    let arg_expr = env.pool.get(*arg_id);

                    expr2_to_markup(env, arg_expr, *arg_id, mark_node_pool, interns, 0)
                })
                .collect::<ASTResult<Vec<MarkNodeId>>>()?;

            // with_prepend = true: a space also goes between fun and first arg
            let mut args_with_sapces =
                join_mark_nodes_spaces(arg_call_mark_ids, true, ast_node_id, mark_node_pool);

            let mut children_ids = vec![fun_call_mark_id];
            children_ids.append(&mut args_with_sapces);

            let call_node = MarkupNode::Nested {
                ast_node_id,
                children_ids,
                parent_id_opt: None,
                newlines_at_end: 0,
            };

            mark_node_pool.add(call_node)
        }
        // variable reference, printed fully qualified relative to the home module
        Expr2::Var(symbol) => {
            let text = symbol.fully_qualified(interns, env.home);

            new_markup_node(
                text.to_string(),
                ast_node_id,
                HighlightStyle::Value,
                mark_node_pool,
                indent_level,
            )
        }
        // list literal: `[ elem, elem, ... ]`
        Expr2::List { elems, .. } => {
            let mut children_ids =
                vec![mark_node_pool.add(new_left_square_mn(expr2_node_id, None))];

            let indexed_node_ids: Vec<(usize, ExprId)> =
                elems.iter(env.pool).copied().enumerate().collect();

            for (idx, node_id) in indexed_node_ids.iter() {
                let sub_expr2 = env.pool.get(*node_id);

                children_ids.push(expr2_to_markup(
                    env,
                    sub_expr2,
                    *node_id,
                    mark_node_pool,
                    interns,
                    indent_level,
                )?);

                // comma after every element except the last
                if idx + 1 < elems.len() {
                    children_ids.push(mark_node_pool.add(new_comma_mn(expr2_node_id, None)));
                }
            }
            children_ids.push(mark_node_pool.add(new_right_square_mn(expr2_node_id, None)));

            let list_node = MarkupNode::Nested {
                ast_node_id,
                children_ids,
                parent_id_opt: None,
                newlines_at_end: 0,
            };

            mark_node_pool.add(list_node)
        }
        // `{ }` — just the two accolades
        Expr2::EmptyRecord => {
            let children_ids = vec![
                mark_node_pool.add(new_left_accolade_mn(expr2_node_id, None)),
                mark_node_pool.add(new_right_accolade_mn(expr2_node_id, None)),
            ];

            let record_node = MarkupNode::Nested {
                ast_node_id,
                children_ids,
                parent_id_opt: None,
                newlines_at_end: 0,
            };

            mark_node_pool.add(record_node)
        }
        // record literal: `{ field: value, ... }`
        Expr2::Record { fields, .. } => {
            let mut children_ids =
                vec![mark_node_pool.add(new_left_accolade_mn(expr2_node_id, None))];

            for (idx, field_node_id) in fields.iter_node_ids().enumerate() {
                let record_field = env.pool.get(field_node_id);

                let field_name = record_field.get_record_field_pool_str();

                children_ids.push(new_markup_node(
                    field_name.as_str(env.pool).to_owned(),
                    ast_node_id,
                    HighlightStyle::RecordField,
                    mark_node_pool,
                    indent_level,
                ));

                match record_field {
                    // label-only fields render just the name, no `: value`
                    RecordField::InvalidLabelOnly(_, _) => (),
                    RecordField::LabelOnly(_, _, _) => (),
                    RecordField::LabeledValue(_, _, sub_expr2_node_id) => {
                        children_ids.push(mark_node_pool.add(new_colon_mn(expr2_node_id, None)));

                        let sub_expr2 = env.pool.get(*sub_expr2_node_id);

                        children_ids.push(expr2_to_markup(
                            env,
                            sub_expr2,
                            *sub_expr2_node_id,
                            mark_node_pool,
                            interns,
                            indent_level,
                        )?);
                    }
                }

                if idx + 1 < fields.len() {
                    children_ids.push(mark_node_pool.add(new_comma_mn(expr2_node_id, None)));
                }
            }

            children_ids.push(mark_node_pool.add(new_right_accolade_mn(expr2_node_id, None)));

            let record_node = MarkupNode::Nested {
                ast_node_id,
                children_ids,
                parent_id_opt: None,
                newlines_at_end: 0,
            };

            mark_node_pool.add(record_node)
        }
        Expr2::Blank => mark_node_pool.add(new_blank_mn(ast_node_id, None)),
        // `name = <body>` binding
        Expr2::LetValue {
            def_id,
            body_id: _,
            body_var: _,
        } => {
            let pattern_id = env.pool.get(*def_id).get_pattern_id();

            let pattern2 = env.pool.get(pattern_id);

            let val_name = get_identifier_string(pattern2, interns)?;

            let val_name_mn = MarkupNode::Text {
                content: val_name,
                ast_node_id,
                syn_high_style: HighlightStyle::Value,
                attributes: Attributes::default(),
                parent_id_opt: None,
                newlines_at_end: 0,
            };

            let val_name_mn_id = mark_node_pool.add(val_name_mn);

            let equals_mn_id = mark_node_pool.add(new_equals_mn(ast_node_id, None));

            let value_def = env.pool.get(*def_id);

            match value_def {
                ValueDef::NoAnnotation {
                    pattern_id: _,
                    expr_id,
                    expr_var: _,
                } => {
                    let body_mn_id = expr2_to_markup(
                        env,
                        env.pool.get(*expr_id),
                        *expr_id,
                        mark_node_pool,
                        interns,
                        indent_level,
                    )?;

                    let body_mn = mark_node_pool.get_mut(body_mn_id);
                    body_mn.add_newline_at_end();

                    let full_let_node = MarkupNode::Nested {
                        ast_node_id,
                        children_ids: vec![val_name_mn_id, equals_mn_id, body_mn_id],
                        parent_id_opt: None,
                        newlines_at_end: 1,
                    };

                    mark_node_pool.add(full_let_node)
                }
                // annotated defs are not handled yet
                other => {
                    unimplemented!(
                        "I don't know how to convert {:?} into a MarkupNode yet.",
                        other
                    )
                }
            }
        }
        // lambda: `\arg1, arg2 -> body` (body indented one level deeper)
        Expr2::Closure {
            function_type: _,
            uniq_symbol: _,
            recursive: _,
            args,
            body_id,
            extra: _,
        } => {
            let backslash_mn = new_operator_mn("\\".to_string(), expr2_node_id, None);
            let backslash_mn_id = mark_node_pool.add(backslash_mn);

            let arg_names: Vec<&str> = args
                .iter(env.pool)
                .map(|(_, arg_node_id)| {
                    let arg_pattern2 = env.pool.get(*arg_node_id);

                    match arg_pattern2 {
                        Pattern2::Identifier(id_symbol) => {
                            let ident_id = id_symbol.ident_id();

                            env.ident_ids.get_name_str_res(ident_id)
                        }
                        Pattern2::Shadowed { shadowed_ident } => {
                            Ok(shadowed_ident.as_str(env.pool))
                        }
                        other => {
                            todo!(
                                "TODO: support the following pattern2 as function arg: {:?}",
                                other
                            );
                        }
                    }
                })
                .collect::<ModuleResult<Vec<&str>>>()?;

            let arg_mark_nodes = arg_names
                .iter()
                .map(|arg_name| new_arg_name_mn(arg_name.to_string(), expr2_node_id))
                .collect_vec();

            let args_with_commas: Vec<MarkupNode> =
                join_mark_nodes_commas(arg_mark_nodes, ASTNodeId::AExprId(expr2_node_id));

            let mut args_with_commas_ids: Vec<MarkNodeId> = args_with_commas
                .into_iter()
                .map(|mark_node| mark_node_pool.add(mark_node))
                .collect();

            // arrow carries one trailing newline so the body starts on its own line
            let arrow_mn = new_arrow_mn(ASTNodeId::AExprId(expr2_node_id), 1);
            let arrow_mn_id = mark_node_pool.add(arrow_mn);

            let mut children_ids = vec![backslash_mn_id];
            children_ids.append(&mut args_with_commas_ids);
            children_ids.push(arrow_mn_id);

            let args_mn = MarkupNode::Nested {
                ast_node_id: ASTNodeId::AExprId(expr2_node_id),
                children_ids,
                parent_id_opt: None,
                newlines_at_end: 0,
            };
            let args_mn_id = mark_node_pool.add(args_mn);

            let body_expr = env.pool.get(*body_id);
            let body_mn_id = expr2_to_markup(
                env,
                body_expr,
                *body_id,
                mark_node_pool,
                interns,
                indent_level + 1,
            )?;

            let function_node = MarkupNode::Nested {
                ast_node_id,
                children_ids: vec![args_mn_id, body_mn_id],
                parent_id_opt: None,
                newlines_at_end: 0,
            };

            mark_node_pool.add(function_node)
        }
        Expr2::RuntimeError() => new_markup_node(
            "RunTimeError".to_string(),
            ast_node_id,
            HighlightStyle::Blank,
            mark_node_pool,
            indent_level,
        ),
        rest => todo!("implement expr2_to_markup for {:?}", rest),
    };

    Ok(mark_node_id)
}
/// Prefixes `some_str` with `indent_level` levels of indentation (4 spaces per level).
fn with_indent(indent_level: usize, some_str: &str) -> String {
    let mut indented = " ".repeat(indent_level * 4);
    indented.push_str(some_str);
    indented
}

View File

@ -0,0 +1,205 @@
use roc_ast::lang::core::{ast::ASTNodeId, expr::expr2::ExprId, header::AppHeader};
use crate::{
markup::{
attribute::Attributes,
common_nodes::{
new_comma_mn, new_left_accolade_mn, new_left_square_mn, new_right_accolade_mn,
new_right_square_mn,
},
nodes::{set_parent_for_all, MarkupNode},
},
slow_pool::{MarkNodeId, SlowPool},
syntax_highlight::HighlightStyle,
};
/// Builds the markup tree for an app module header:
/// `app "name"` / `packages { base: ... }` / `imports [ ... ]` /
/// `provides [ ... ] to base`, one line per section.
/// Returns the root header node id with all parent links already set.
pub fn header_to_markup(app_header: &AppHeader, mark_node_pool: &mut SlowPool) -> MarkNodeId {
    let expr_id = app_header.ast_node_id;
    let ast_node_id = ASTNodeId::AExprId(expr_id);

    // --- `app "name"` line ---
    let app_node_id = header_mn("app ".to_owned(), expr_id, mark_node_pool);

    let app_name_node_id = header_val_mn(
        app_header.app_name.clone(),
        expr_id,
        HighlightStyle::String,
        mark_node_pool,
    );

    let full_app_node = MarkupNode::Nested {
        ast_node_id,
        children_ids: vec![app_node_id, app_name_node_id],
        parent_id_opt: None,
        newlines_at_end: 1,
    };

    // --- `packages { base: "..." }` line ---
    let packages_node_id = header_mn(" packages ".to_owned(), expr_id, mark_node_pool);

    let pack_left_acc_node_id = mark_node_pool.add(new_left_accolade_mn(expr_id, None));

    let pack_base_node_id = header_val_mn(
        "base: ".to_owned(),
        expr_id,
        HighlightStyle::RecordField,
        mark_node_pool,
    );

    let pack_val_node_id = header_val_mn(
        app_header.packages_base.clone(),
        expr_id,
        HighlightStyle::String,
        mark_node_pool,
    );

    let pack_right_acc_node_id = mark_node_pool.add(new_right_accolade_mn(expr_id, None));

    let full_packages_node = MarkupNode::Nested {
        ast_node_id,
        children_ids: vec![
            packages_node_id,
            pack_left_acc_node_id,
            pack_base_node_id,
            pack_val_node_id,
            pack_right_acc_node_id,
        ],
        parent_id_opt: None,
        newlines_at_end: 1,
    };

    // --- `imports [ ... ]` line ---
    let imports_node_id = header_mn(" imports ".to_owned(), expr_id, mark_node_pool);

    let imports_left_square_node_id = mark_node_pool.add(new_left_square_mn(expr_id, None));

    let mut import_child_ids: Vec<MarkNodeId> = add_header_mn_list(
        &app_header.imports,
        expr_id,
        HighlightStyle::Import,
        mark_node_pool,
    );

    let imports_right_square_node_id = mark_node_pool.add(new_right_square_mn(expr_id, None));

    let mut full_import_children = vec![imports_node_id, imports_left_square_node_id];

    full_import_children.append(&mut import_child_ids);
    full_import_children.push(imports_right_square_node_id);

    let full_import_node = MarkupNode::Nested {
        ast_node_id,
        children_ids: full_import_children,
        parent_id_opt: None,
        newlines_at_end: 1,
    };

    // --- `provides [ ... ] to base` line ---
    let provides_node_id = header_mn(" provides ".to_owned(), expr_id, mark_node_pool);

    let provides_left_square_node_id = mark_node_pool.add(new_left_square_mn(expr_id, None));

    let mut provides_val_node_ids: Vec<MarkNodeId> = add_header_mn_list(
        &app_header.provides,
        expr_id,
        HighlightStyle::Provides,
        mark_node_pool,
    );

    let provides_right_square_node_id = mark_node_pool.add(new_right_square_mn(expr_id, None));

    let provides_end_node_id = header_mn(" to base".to_owned(), expr_id, mark_node_pool);

    let mut full_provides_children = vec![provides_node_id, provides_left_square_node_id];

    full_provides_children.append(&mut provides_val_node_ids);
    full_provides_children.push(provides_right_square_node_id);
    full_provides_children.push(provides_end_node_id);

    let full_provides_node = MarkupNode::Nested {
        ast_node_id,
        children_ids: full_provides_children,
        parent_id_opt: None,
        newlines_at_end: 1,
    };

    // --- combine the four lines under a single root node ---
    let full_app_node_id = mark_node_pool.add(full_app_node);
    let full_packages_node = mark_node_pool.add(full_packages_node);
    let full_import_node_id = mark_node_pool.add(full_import_node);
    let full_provides_node_id = mark_node_pool.add(full_provides_node);

    let header_mark_node = MarkupNode::Nested {
        ast_node_id,
        children_ids: vec![
            full_app_node_id,
            full_packages_node,
            full_import_node_id,
            full_provides_node_id,
        ],
        parent_id_opt: None,
        newlines_at_end: 1,
    };

    let header_mn_id = mark_node_pool.add(header_mark_node);

    // wire up parent_id_opt for the whole subtree
    set_parent_for_all(header_mn_id, mark_node_pool);

    header_mn_id
}
/// Builds a comma-separated list of header value nodes.
/// Used for the `provides` and `imports` sections: every element gets a value
/// node, and a `, ` node is inserted after each element except the last.
fn add_header_mn_list(
    str_vec: &[String],
    expr_id: ExprId,
    highlight_style: HighlightStyle,
    mark_node_pool: &mut SlowPool,
) -> Vec<MarkNodeId> {
    let nr_of_elts = str_vec.len();

    str_vec
        .iter()
        .enumerate()
        // flat_map instead of map(..).flatten() (clippy::map_flatten)
        .flat_map(|(indx, provide_str)| {
            let provide_str = header_val_mn(
                provide_str.to_owned(),
                expr_id,
                highlight_style,
                mark_node_pool,
            );

            if indx != nr_of_elts - 1 {
                vec![provide_str, mark_node_pool.add(new_comma_mn(expr_id, None))]
            } else {
                vec![provide_str]
            }
        })
        .collect()
}
/// Adds a header keyword node (e.g. `app `, ` imports `) to the pool,
/// styled as package-related text, and returns its id.
fn header_mn(content: String, expr_id: ExprId, mark_node_pool: &mut SlowPool) -> MarkNodeId {
    let mark_node = MarkupNode::Text {
        content,
        ast_node_id: ASTNodeId::AExprId(expr_id),
        syn_high_style: HighlightStyle::PackageRelated,
        attributes: Attributes::default(),
        parent_id_opt: None,
        newlines_at_end: 0,
    };

    mark_node_pool.add(mark_node)
}
/// Adds a header value node (app name, package path, import, provide)
/// with the given highlight style to the pool and returns its id.
fn header_val_mn(
    content: String,
    expr_id: ExprId,
    highlight_style: HighlightStyle,
    mark_node_pool: &mut SlowPool,
) -> MarkNodeId {
    mark_node_pool.add(MarkupNode::Text {
        ast_node_id: ASTNodeId::AExprId(expr_id),
        parent_id_opt: None,
        content,
        syn_high_style: highlight_style,
        attributes: Attributes::default(),
        newlines_at_end: 0,
    })
}

View File

@ -0,0 +1,4 @@
//! Conversion from AST nodes (header, defs, expressions) to markup nodes.
pub mod from_ast;
pub mod from_def2;
pub mod from_expr2;
pub mod from_header;

View File

@ -0,0 +1,5 @@
//! The markup node tree: node types, constructors, and AST conversion.
pub mod attribute;
pub mod common_nodes;
pub mod convert;
pub mod nodes;
pub mod top_level_def;

View File

@ -0,0 +1,470 @@
use crate::{
markup_error::MarkResult,
slow_pool::{MarkNodeId, SlowPool},
syntax_highlight::HighlightStyle,
};
use super::{attribute::Attributes, common_nodes::new_comma_mn_ast};
use crate::markup_error::{ExpectedTextNode, NestedNodeMissingChild, NestedNodeRequired};
use itertools::Itertools;
use roc_ast::{
lang::{core::ast::ASTNodeId, env::Env},
mem_pool::pool_str::PoolStr,
};
use roc_utils::{index_of, slice_get};
use std::fmt;
/// A node in the markup tree used to render code.
#[derive(Debug)]
pub enum MarkupNode {
    /// Inner node grouping child nodes (e.g. a list, record, or function).
    Nested {
        ast_node_id: ASTNodeId,
        children_ids: Vec<MarkNodeId>,
        parent_id_opt: Option<MarkNodeId>,
        newlines_at_end: usize,
    },
    /// Leaf holding a piece of text with a syntax-highlight style.
    Text {
        content: String,
        ast_node_id: ASTNodeId,
        syn_high_style: HighlightStyle,
        attributes: Attributes,
        parent_id_opt: Option<MarkNodeId>,
        newlines_at_end: usize,
    },
    /// Placeholder leaf, rendered as `BLANK_PLACEHOLDER`.
    Blank {
        ast_node_id: ASTNodeId,
        attributes: Attributes,
        parent_id_opt: Option<MarkNodeId>,
        newlines_at_end: usize,
    },
    /// Indentation leaf, rendered as `indent_level` copies of `SINGLE_INDENT`.
    Indent {
        ast_node_id: ASTNodeId,
        indent_level: usize,
        parent_id_opt: Option<MarkNodeId>,
    },
}
impl MarkupNode {
    /// The AST node this markup node was generated from.
    pub fn get_ast_node_id(&self) -> ASTNodeId {
        match self {
            MarkupNode::Nested { ast_node_id, .. } => *ast_node_id,
            MarkupNode::Text { ast_node_id, .. } => *ast_node_id,
            MarkupNode::Blank { ast_node_id, .. } => *ast_node_id,
            MarkupNode::Indent { ast_node_id, .. } => *ast_node_id,
        }
    }

    /// Id of the parent markup node, or `None` for a root node.
    pub fn get_parent_id_opt(&self) -> Option<MarkNodeId> {
        match self {
            MarkupNode::Nested { parent_id_opt, .. } => *parent_id_opt,
            MarkupNode::Text { parent_id_opt, .. } => *parent_id_opt,
            MarkupNode::Blank { parent_id_opt, .. } => *parent_id_opt,
            MarkupNode::Indent { parent_id_opt, .. } => *parent_id_opt,
        }
    }

    /// Children of a `Nested` node; leaf variants return an empty vec.
    pub fn get_children_ids(&self) -> Vec<MarkNodeId> {
        match self {
            MarkupNode::Nested { children_ids, .. } => children_ids.to_vec(),
            MarkupNode::Text { .. } => vec![],
            MarkupNode::Blank { .. } => vec![],
            MarkupNode::Indent { .. } => vec![],
        }
    }

    /// All children of this node's parent (this node included), or an empty
    /// vec if the node has no parent.
    pub fn get_sibling_ids(&self, mark_node_pool: &SlowPool) -> Vec<MarkNodeId> {
        if let Some(parent_id) = self.get_parent_id_opt() {
            let parent_node = mark_node_pool.get(parent_id);

            parent_node.get_children_ids()
        } else {
            vec![]
        }
    }

    // return (index of child in list of children, closest ast index of child corresponding to ast node)
    pub fn get_child_indices(
        &self,
        child_id: MarkNodeId,
        mark_node_pool: &SlowPool,
    ) -> MarkResult<(usize, usize)> {
        match self {
            MarkupNode::Nested { children_ids, .. } => {
                let mut mark_child_index_opt: Option<usize> = None;
                let mut child_ids_with_ast: Vec<MarkNodeId> = Vec::new();
                let self_ast_id = self.get_ast_node_id();

                for (indx, &mark_child_id) in children_ids.iter().enumerate() {
                    if mark_child_id == child_id {
                        mark_child_index_opt = Some(indx);
                    }

                    let child_mark_node = mark_node_pool.get(mark_child_id);
                    // a node that points to the same ast_node as the parent is a ',', '[', ']'
                    // those are not "real" ast children
                    if child_mark_node.get_ast_node_id() != self_ast_id {
                        child_ids_with_ast.push(mark_child_id)
                    }
                }

                if let Some(child_index) = mark_child_index_opt {
                    if child_index == (children_ids.len() - 1) {
                        let ast_child_index = child_ids_with_ast.len();

                        Ok((child_index, ast_child_index))
                    } else {
                        // we want to find the index of the closest ast mark node to child_index
                        let mut indices_in_mark = vec![];

                        for &c_id in child_ids_with_ast.iter() {
                            indices_in_mark.push(index_of(c_id, children_ids)?);
                        }

                        let mut last_diff = usize::MAX;
                        let mut best_index = 0;

                        for index in indices_in_mark.iter() {
                            let curr_diff =
                                isize::abs((*index as isize) - (child_index as isize)) as usize;

                            if curr_diff >= last_diff {
                                break;
                            } else {
                                last_diff = curr_diff;
                                best_index = *index;
                            }
                        }

                        let closest_ast_child = slice_get(best_index, children_ids)?;

                        let closest_ast_child_index =
                            index_of(*closest_ast_child, &child_ids_with_ast)?;

                        // +1 because we want to insert after ast_child
                        Ok((child_index, closest_ast_child_index + 1))
                    }
                } else {
                    NestedNodeMissingChild {
                        node_id: child_id,
                        children_ids: children_ids.clone(),
                    }
                    .fail()
                }
            }
            _ => NestedNodeRequired {
                node_type: self.node_type_as_string(),
            }
            .fail(),
        }
    }

    /// The text this node renders as, without trailing newlines.
    pub fn get_content(&self) -> String {
        match self {
            MarkupNode::Nested { .. } => "".to_owned(),
            MarkupNode::Text { content, .. } => content.clone(),
            MarkupNode::Blank { .. } => BLANK_PLACEHOLDER.to_owned(),
            MarkupNode::Indent { indent_level, .. } => SINGLE_INDENT.repeat(*indent_level),
        }
    }

    // gets content and adds newline from newline_at_end
    pub fn get_full_content(&self) -> String {
        let mut full_content = self.get_content();

        for _ in 0..self.get_newlines_at_end() {
            full_content.push('\n')
        }

        full_content
    }

    /// Mutable access to a `Text` node's content; errors on other variants.
    pub fn get_content_mut(&mut self) -> MarkResult<&mut String> {
        match self {
            MarkupNode::Text { content, .. } => Ok(content),
            _ => ExpectedTextNode {
                // Fixed: previously reported the stale name "set_content",
                // which no longer matches this function.
                function_name: "get_content_mut".to_owned(),
                node_type: self.node_type_as_string(),
            }
            .fail(),
        }
    }

    /// True when every char of the rendered content is ASCII alphanumeric.
    pub fn is_all_alphanumeric(&self) -> bool {
        self.get_content()
            .chars()
            .all(|chr| chr.is_ascii_alphanumeric())
    }

    /// Inserts `child_id` at `index` in a `Nested` node's children;
    /// errors on leaf variants.
    pub fn add_child_at_index(&mut self, index: usize, child_id: MarkNodeId) -> MarkResult<()> {
        if let MarkupNode::Nested { children_ids, .. } = self {
            children_ids.splice(index..index, vec![child_id]);
        } else {
            NestedNodeRequired {
                node_type: self.node_type_as_string(),
            }
            .fail()?;
        }

        Ok(())
    }

    /// The variant name as a `String`, for error messages and debug output.
    pub fn node_type_as_string(&self) -> String {
        let type_str = match self {
            MarkupNode::Nested { .. } => "Nested",
            MarkupNode::Text { .. } => "Text",
            MarkupNode::Blank { .. } => "Blank",
            MarkupNode::Indent { .. } => "Indent",
        };

        type_str.to_owned()
    }

    pub fn is_blank(&self) -> bool {
        matches!(self, MarkupNode::Blank { .. })
    }

    pub fn is_nested(&self) -> bool {
        matches!(self, MarkupNode::Nested { .. })
    }

    /// Number of newlines rendered after this node; `Indent` nodes have none.
    pub fn get_newlines_at_end(&self) -> usize {
        match self {
            MarkupNode::Nested {
                newlines_at_end, ..
            } => *newlines_at_end,
            MarkupNode::Text {
                newlines_at_end, ..
            } => *newlines_at_end,
            MarkupNode::Blank {
                newlines_at_end, ..
            } => *newlines_at_end,
            MarkupNode::Indent { .. } => 0,
        }
    }

    /// Appends one newline after this node; no-op for `Indent` nodes.
    pub fn add_newline_at_end(&mut self) {
        match self {
            MarkupNode::Nested {
                newlines_at_end, ..
            } => *newlines_at_end += 1,
            MarkupNode::Text {
                newlines_at_end, ..
            } => *newlines_at_end += 1,
            MarkupNode::Blank {
                newlines_at_end, ..
            } => *newlines_at_end += 1,
            _ => {}
        }
    }
}
/// Copies `pool_str` out of the env's pool as an owned `String`.
pub fn get_string<'a>(env: &Env<'a>, pool_str: &PoolStr) -> String {
    pool_str.as_str(env.pool).to_owned()
}
// Fixed text snippets used when markup nodes are rendered to code.
pub const BLANK_PLACEHOLDER: &str = " ";
pub const LEFT_ACCOLADE: &str = "{ ";
pub const RIGHT_ACCOLADE: &str = " }";
pub const LEFT_SQUARE_BR: &str = "[ ";
pub const RIGHT_SQUARE_BR: &str = " ]";
pub const COLON: &str = ": ";
pub const COMMA: &str = ", ";
pub const STRING_QUOTES: &str = "\"\"";
pub const EQUALS: &str = " = ";
pub const ARROW: &str = " -> ";
pub const SINGLE_INDENT: &str = "    "; // 4 spaces
/// Adds a `Text` node with the given content and highlight style to the pool.
///
/// When `indent_level > 0`, the text node is wrapped in a `Nested` node with an
/// `Indent` node before it; the id of the outermost node is returned.
pub fn new_markup_node(
    text: String,
    node_id: ASTNodeId,
    highlight_style: HighlightStyle,
    mark_node_pool: &mut SlowPool,
    indent_level: usize,
) -> MarkNodeId {
    let content_node = MarkupNode::Text {
        content: text,
        ast_node_id: node_id,
        syn_high_style: highlight_style,
        attributes: Attributes::default(),
        parent_id_opt: None,
        newlines_at_end: 0,
    };

    let content_node_id = mark_node_pool.add(content_node);

    if indent_level > 0 {
        let indent_node = MarkupNode::Indent {
            ast_node_id: node_id,
            indent_level,
            parent_id_opt: None,
        };

        let indent_node_id = mark_node_pool.add(indent_node);

        let nested_node = MarkupNode::Nested {
            ast_node_id: node_id,
            children_ids: vec![indent_node_id, content_node_id],
            parent_id_opt: None,
            newlines_at_end: 0,
        };

        mark_node_pool.add(nested_node)
    } else {
        content_node_id
    }
}
/// Sets `parent_id_opt` on every descendant of `markup_node_id`.
/// The root node itself keeps its current parent value.
pub fn set_parent_for_all(markup_node_id: MarkNodeId, mark_node_pool: &mut SlowPool) {
    let node = mark_node_pool.get(markup_node_id);

    if let MarkupNode::Nested {
        ast_node_id: _,
        children_ids,
        parent_id_opt: _,
        newlines_at_end: _,
    } = node
    {
        // need to clone because of borrowing issues
        let children_ids_clone = children_ids.clone();

        for child_id in children_ids_clone {
            set_parent_for_all_helper(child_id, markup_node_id, mark_node_pool);
        }
    }
}
/// Recursive helper for `set_parent_for_all`: sets this node's parent to
/// `parent_node_id`, then recurses into any children.
pub fn set_parent_for_all_helper(
    markup_node_id: MarkNodeId,
    parent_node_id: MarkNodeId,
    mark_node_pool: &mut SlowPool,
) {
    let node = mark_node_pool.get_mut(markup_node_id);

    match node {
        MarkupNode::Nested {
            children_ids,
            parent_id_opt,
            ..
        } => {
            *parent_id_opt = Some(parent_node_id);

            // need to clone because of borrowing issues
            let children_ids_clone = children_ids.clone();

            for child_id in children_ids_clone {
                set_parent_for_all_helper(child_id, markup_node_id, mark_node_pool);
            }
        }
        MarkupNode::Text { parent_id_opt, .. } => *parent_id_opt = Some(parent_node_id),
        MarkupNode::Blank { parent_id_opt, .. } => *parent_id_opt = Some(parent_node_id),
        MarkupNode::Indent { parent_id_opt, .. } => *parent_id_opt = Some(parent_node_id),
    }
}
impl fmt::Display for MarkupNode {
    /// Formats as `<NodeType> (<content>, <newlines_at_end>)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "{} ({}, {})",
            self.node_type_as_string(),
            self.get_content(),
            self.get_newlines_at_end()
        )
    }
}
/// Renders the subtree rooted at `root_node_id` as an indented debug string.
pub fn tree_as_string(root_node_id: MarkNodeId, mark_node_pool: &SlowPool) -> String {
    let mut full_string = "\n(mark_node_tree)\n".to_owned();

    let node = mark_node_pool.get(root_node_id);

    full_string.push_str(&format!("{} mn_id {}\n", node, root_node_id));

    tree_as_string_helper(node, 1, &mut full_string, mark_node_pool);

    full_string
}
/// Recursive helper for `tree_as_string`: appends one `|--- `-indented line
/// per child (newlines escaped), then recurses one level deeper.
fn tree_as_string_helper(
    node: &MarkupNode,
    level: usize,
    tree_string: &mut String,
    mark_node_pool: &SlowPool,
) {
    for child_id in node.get_children_ids() {
        // str::repeat instead of iter::repeat + collect + join
        let mut full_str = "|--- ".repeat(level);

        // look the child up once and reuse it for both display and recursion
        let child = mark_node_pool.get(child_id);
        let child_str = format!("{}", child).replace('\n', "\\n");

        full_str.push_str(&format!("{} mn_id {}\n", child_str, child_id));

        tree_string.push_str(&full_str);

        tree_as_string_helper(child, level + 1, tree_string, mark_node_pool);
    }
}
/// Walks parent links upward from `mark_node_id` and returns the root node's id.
pub fn get_root_mark_node_id(mark_node_id: MarkNodeId, mark_node_pool: &SlowPool) -> MarkNodeId {
    let mut root_id = mark_node_id;

    while let Some(parent_id) = mark_node_pool.get(root_id).get_parent_id_opt() {
        root_id = parent_id;
    }

    root_id
}
/// Interleaves single-space `Text` nodes between the given markup nodes.
///
/// When `with_prepend` is true a space is also inserted before the first node.
/// Returns an empty `Vec` for empty input (the previous version would have
/// underflowed on `len() - 1`).
pub fn join_mark_nodes_spaces(
    mark_nodes_ids: Vec<MarkNodeId>,
    with_prepend: bool,
    ast_node_id: ASTNodeId,
    mark_node_pool: &mut SlowPool,
) -> Vec<MarkNodeId> {
    if mark_nodes_ids.is_empty() {
        // guard: `len() - 1` below would panic on usize underflow
        return vec![];
    }

    let space_range_max = if with_prepend {
        mark_nodes_ids.len()
    } else {
        mark_nodes_ids.len() - 1
    };

    let join_nodes: Vec<MarkNodeId> = (0..space_range_max)
        .map(|_| {
            let space_node = MarkupNode::Text {
                content: " ".to_string(),
                ast_node_id,
                syn_high_style: HighlightStyle::Blank,
                attributes: Attributes::default(),
                parent_id_opt: None,
                newlines_at_end: 0,
            };

            mark_node_pool.add(space_node)
        })
        .collect();

    if with_prepend {
        join_nodes.into_iter().interleave(mark_nodes_ids).collect()
    } else {
        mark_nodes_ids.into_iter().interleave(join_nodes).collect()
    }
}
/// Interleaves `, ` nodes between the given markup nodes.
///
/// Returns an empty `Vec` for empty input (the previous version would have
/// underflowed on `len() - 1`).
pub fn join_mark_nodes_commas(
    mark_nodes: Vec<MarkupNode>,
    ast_node_id: ASTNodeId,
) -> Vec<MarkupNode> {
    if mark_nodes.is_empty() {
        // guard: `len() - 1` below would panic on usize underflow
        return vec![];
    }

    let join_nodes: Vec<MarkupNode> = (0..(mark_nodes.len() - 1))
        .map(|_| new_comma_mn_ast(ast_node_id, None))
        .collect();

    mark_nodes.into_iter().interleave(join_nodes).collect()
}

View File

@ -0,0 +1,43 @@
use roc_ast::{
ast_error::ASTResult,
lang::{core::ast::ASTNodeId, env::Env},
};
use roc_module::symbol::IdentId;
use crate::{
markup::{attribute::Attributes, common_nodes::new_equals_mn, nodes::MarkupNode},
slow_pool::{MarkNodeId, SlowPool},
syntax_highlight::HighlightStyle,
};
/// Builds the markup node for a top-level value definition: `name = <expr>`,
/// followed by two newlines. `expr_mark_node_id` is the already-converted body.
///
/// Errors if `identifier_id` cannot be resolved to a name.
pub fn tld_mark_node<'a>(
    identifier_id: IdentId,
    expr_mark_node_id: MarkNodeId,
    ast_node_id: ASTNodeId,
    mark_node_pool: &mut SlowPool,
    env: &Env<'a>,
) -> ASTResult<MarkupNode> {
    let val_name = env.ident_ids.get_name_str_res(identifier_id)?;

    let val_name_mn = MarkupNode::Text {
        content: val_name.to_owned(),
        ast_node_id,
        syn_high_style: HighlightStyle::Value,
        attributes: Attributes::default(),
        parent_id_opt: None,
        newlines_at_end: 0,
    };

    let val_name_mn_id = mark_node_pool.add(val_name_mn);

    let equals_mn_id = mark_node_pool.add(new_equals_mn(ast_node_id, None));

    let full_let_node = MarkupNode::Nested {
        ast_node_id,
        children_ids: vec![val_name_mn_id, equals_mn_id, expr_mark_node_id],
        parent_id_opt: None,
        newlines_at_end: 2,
    };

    Ok(full_let_node)
}

View File

@ -0,0 +1,55 @@
use roc_utils::util_error::UtilError;
use snafu::{Backtrace, NoneError, ResultExt, Snafu};
use crate::slow_pool::MarkNodeId;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum MarkError {
#[snafu(display(
"CaretNotFound: No carets were found in the expected node with id {}",
node_id
))]
CaretNotFound {
node_id: MarkNodeId,
backtrace: Backtrace,
},
#[snafu(display(
"ExpectedTextNode: the function {} expected a Text node, got {} instead.",
function_name,
node_type
))]
ExpectedTextNode {
function_name: String,
node_type: String,
backtrace: Backtrace,
},
#[snafu(display("NestedNodeMissingChild: expected to find child with id {} in Nested MarkupNode, but it was missing. Id's of the children are {:?}.", node_id, children_ids))]
NestedNodeMissingChild {
node_id: MarkNodeId,
children_ids: Vec<MarkNodeId>,
backtrace: Backtrace,
},
#[snafu(display(
"NestedNodeRequired: required a Nested node at this position, node was a {}.",
node_type
))]
NestedNodeRequired {
node_type: String,
backtrace: Backtrace,
},
#[snafu(display("UtilError: {}", msg))]
UtilErrorBacktrace { msg: String, backtrace: Backtrace },
}
pub type MarkResult<T, E = MarkError> = std::result::Result<T, E>;
impl From<UtilError> for MarkError {
fn from(util_err: UtilError) -> Self {
let msg = format!("{}", util_err);
// hack to handle MarkError derive
let dummy_res: Result<(), NoneError> = Err(NoneError {});
dummy_res.context(UtilErrorBacktrace { msg }).unwrap_err()
}
}

View File

@ -1,6 +1,7 @@
use crate::editor::markup::nodes::MarkupNode;
use std::fmt;
use crate::markup::nodes::MarkupNode;
pub type MarkNodeId = usize;
#[derive(Debug)]
@ -9,10 +10,6 @@ pub struct SlowPool {
}
impl SlowPool {
pub fn new() -> SlowPool {
SlowPool { nodes: Vec::new() }
}
pub fn add(&mut self, node: MarkupNode) -> MarkNodeId {
let id = self.nodes.len();
@ -63,7 +60,7 @@ impl fmt::Display for SlowPool {
"{}: {} ({}) ast_id {:?} {}",
index,
node.node_type_as_string(),
node.get_content().unwrap_or_else(|_| "".to_string()),
node.get_content(),
ast_node_id.parse::<usize>().unwrap(),
child_str
)?;
@ -72,3 +69,9 @@ impl fmt::Display for SlowPool {
Ok(())
}
}
impl Default for SlowPool {
fn default() -> Self {
SlowPool { nodes: Vec::new() }
}
}

View File

@ -1,36 +1,46 @@
use crate::graphics::colors as gr_colors;
use gr_colors::{from_hsb, RgbaTup};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use crate::colors::{from_hsb, RgbaTup};
#[derive(Hash, Eq, PartialEq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum HighlightStyle {
Operator, // =+-<>...
Comma,
String,
FunctionName,
FunctionArgName,
Type,
Bracket,
Number,
PackageRelated, // app, packages, imports, exposes, provides...
Variable,
Value,
RecordField,
Import,
Provides,
Blank,
}
pub fn default_highlight_map() -> HashMap<HighlightStyle, RgbaTup> {
use HighlightStyle::*;
let almost_white = from_hsb(258, 5, 95);
let mut highlight_map = HashMap::new();
[
(Operator, gr_colors::WHITE),
(Operator, from_hsb(185, 50, 75)),
(Comma, from_hsb(258, 50, 90)),
(String, from_hsb(346, 65, 97)),
(FunctionName, gr_colors::WHITE),
(Type, gr_colors::WHITE),
(FunctionName, almost_white),
(FunctionArgName, from_hsb(225, 50, 100)),
(Type, almost_white),
(Bracket, from_hsb(347, 80, 100)),
(Number, from_hsb(185, 50, 75)),
(PackageRelated, gr_colors::WHITE),
(Variable, gr_colors::WHITE),
(Number, from_hsb(225, 50, 100)),
(PackageRelated, almost_white),
(Value, almost_white),
(RecordField, from_hsb(258, 50, 90)),
(Import, from_hsb(225, 50, 100)),
(Provides, from_hsb(225, 50, 100)),
(Blank, from_hsb(258, 50, 90)),
// comment from_hsb(285, 6, 47) or 186, 35, 40
]

View File

@ -18,9 +18,8 @@ For example, parsing would translate this string...
This `Expr` representation of the expression is useful for things like:
* Checking that all variables are declared before they're used
* Type checking
* Running Roc code in Interpreted Mode (that is, without having to compile it to Rust first - useful for development, since it's a faster feedback loop, but there's a runtime performance penalty compared to doing a full compile to Rust).
- Checking that all variables are declared before they're used
- Type checking
> As of this writing, the compiler doesn't do any of those things yet. They'll be added later!
@ -28,7 +27,7 @@ Since the parser is only concerned with translating String values into Expr valu
For example, parsing will translate this string:
not "foo", "bar"
not "foo", "bar"
...into this `Expr`:
@ -68,7 +67,7 @@ The `eval` function will take this `Expr` and translate it into this much simple
Int(6)
At this point it's become so simple that we can display it to the end user as the number `6`. So running `parse` and then `eval` on the original Roc string of `1 + 8 - 3` will result in displaying `6` as the final output.
At this point it's become so simple that we can display it to the end user as the number `6`. So running `parse` and then `eval` on the original Roc string of `1 + 8 - 3` will result in displaying `6` as the final output.
> The `expr` module includes an `impl fmt::Display for Expr` that takes care of translating `Int(6)` into `6`, `Char('x')` as `'x'`, and so on.
@ -105,7 +104,6 @@ That concludes our original recursive call to `eval`, after which point we'll be
This will work the same way as `Minus` did, and will reduce down to `Int(6)`.
## Optimization philosophy
Focus on optimizations which are only safe in the absence of side effects, and leave the rest to LLVM.
@ -142,3 +140,27 @@ Express operations like map and filter in terms of toStream and fromStream, to u
More info on here:
https://wiki.haskell.org/GHC_optimisations#Fusion
# Getting started with the code
The compiler contains a lot of code! If you're new to the project it can be hard to know where to start. It's useful to have some sort of "main entry point", or at least a "good place to start" for each of the main phases.
After you get into the details, you'll discover that some parts of the compiler have more than one entry point. And things can be interwoven together in subtle and complex ways, for reasons to do with performance, edge case handling, etc. But if this is "day one" for you, and you're just trying to get familiar with things, this should be "good enough".
The compiler is invoked from the CLI via `build_file` in cli/src/build.rs
| Phase | Entry point / main functions |
| ------------------------------------- | ------------------------------------------------ |
| Compiler entry point | load/src/file.rs: load, load_and_monomorphize |
| Parse header | parse/src/module.rs: parse_header |
| Parse definitions | parse/src/module.rs: module_defs |
| Canonicalize | can/src/def.rs: canonicalize_defs |
| Type check | solve/src/module.rs: run_solve |
| Gather types to specialize | mono/src/ir.rs: PartialProc::from_named_function |
| Solve specialized types | mono/src/ir.rs: from_can, with_hole |
| Insert reference counting | mono/src/ir.rs: Proc::insert_refcount_operations |
| Code gen (optimized but slow) | gen_llvm/src/llvm/build.rs: build_procedures |
| Code gen (unoptimized but fast, CPU) | gen_dev/src/object_builder.rs: build_module |
| Code gen (unoptimized but fast, Wasm) | gen_wasm/src/lib.rs: build_module |
For a more detailed understanding of the compilation phases, see the `Phase`, `BuildTask`, and `Msg` enums in `load/src/file.rs`.

View File

@ -6,6 +6,3 @@ license = "UPL-1.0"
repository = "https://github.com/rtfeldman/roc"
edition = "2018"
description = "A CLI for Roc"
[dev-dependencies]
pretty_assertions = "0.5.1"

View File

@ -20,32 +20,27 @@ roc_solve = { path = "../solve" }
roc_mono = { path = "../mono" }
roc_load = { path = "../load" }
roc_gen_llvm = { path = "../gen_llvm", optional = true }
roc_gen_wasm = { path = "../gen_wasm" }
roc_gen_dev = { path = "../gen_dev" }
roc_reporting = { path = "../reporting" }
roc_gen_wasm = { path = "../gen_wasm", optional = true }
roc_gen_dev = { path = "../gen_dev", default-features = false }
roc_reporting = { path = "../../reporting" }
roc_std = { path = "../../roc_std" }
im = "14" # im and im-rc should always have the same version!
im-rc = "14" # im and im-rc should always have the same version!
bumpalo = { version = "3.6.1", features = ["collections"] }
inlinable_string = "0.1.0"
libloading = "0.6"
tempfile = "3.1.0"
serde_json = "1.0"
bumpalo = { version = "3.8.0", features = ["collections"] }
libloading = "0.7.1"
tempfile = "3.2.0"
inkwell = { path = "../../vendor/inkwell", optional = true }
target-lexicon = "0.12.2"
[dev-dependencies]
pretty_assertions = "0.5.1"
maplit = "1.0.1"
indoc = "0.3.3"
quickcheck = "0.8"
quickcheck_macros = "0.8"
[target.'cfg(target_os = "macos")'.dependencies]
serde_json = "1.0.69"
[features]
default = ["llvm", "target-webassembly"]
default = ["llvm", "target-aarch64", "target-x86_64", "target-wasm32"]
target-arm = []
target-aarch64 = []
target-webassembly = []
target-aarch64 = ["roc_gen_dev/target-aarch64"]
target-x86 = []
target-x86_64 = ["roc_gen_dev/target-x86_64"]
target-wasm32 = ["roc_gen_wasm"]
# This is a separate feature because when we generate docs on Netlify,
# it doesn't have LLVM installed. (Also, it doesn't need to do code gen.)
llvm = ["inkwell", "roc_gen_llvm"]

View File

@ -1,4 +1,4 @@
use crate::target::arch_str;
use crate::target::{arch_str, target_triple_str};
#[cfg(feature = "llvm")]
use libloading::{Error, Library};
use roc_builtins::bitcode;
@ -36,7 +36,6 @@ pub fn link(
..
} => link_linux(target, output_path, input_paths, link_type),
Triple {
architecture: Architecture::X86_64,
operating_system: OperatingSystem::Darwin,
..
} => link_macos(target, output_path, input_paths, link_type),
@ -87,6 +86,7 @@ pub fn build_zig_host_native(
target: &str,
opt_level: OptLevel,
shared_lib_path: Option<&Path>,
_target_valgrind: bool,
) -> Output {
let mut command = Command::new("zig");
command
@ -98,7 +98,7 @@ pub fn build_zig_host_native(
"build-exe",
"-fPIE",
shared_lib_path.to_str().unwrap(),
bitcode::OBJ_PATH,
bitcode::BUILTINS_HOST_OBJ_PATH,
]);
} else {
command.args(&["build-obj", "-fPIC"]);
@ -119,6 +119,15 @@ pub fn build_zig_host_native(
"-target",
target,
]);
// use single threaded testing for cli_run and enable this code if valgrind fails with unhandled instruction bytes, see #1963.
/*if target_valgrind {
command.args(&[
"-mcpu",
"x86_64"
]);
}*/
if matches!(opt_level, OptLevel::Optimize) {
command.args(&["-O", "ReleaseSafe"]);
}
@ -136,6 +145,8 @@ pub fn build_zig_host_native(
_target: &str,
opt_level: OptLevel,
shared_lib_path: Option<&Path>,
// For compatibility with the non-macOS def above. Keep these in sync.
_target_valgrind: bool,
) -> Output {
use serde_json::Value;
@ -187,7 +198,7 @@ pub fn build_zig_host_native(
"build-exe",
"-fPIE",
shared_lib_path.to_str().unwrap(),
bitcode::OBJ_PATH,
bitcode::BUILTINS_HOST_OBJ_PATH,
]);
} else {
command.args(&["build-obj", "-fPIC"]);
@ -283,7 +294,7 @@ pub fn build_c_host_native(
if let Some(shared_lib_path) = shared_lib_path {
command.args(&[
shared_lib_path.to_str().unwrap(),
bitcode::OBJ_PATH,
bitcode::BUILTINS_HOST_OBJ_PATH,
"-fPIE",
"-pie",
"-lm",
@ -301,11 +312,46 @@ pub fn build_c_host_native(
command.output().unwrap()
}
pub fn build_swift_host_native(
env_path: &str,
env_home: &str,
dest: &str,
sources: &[&str],
opt_level: OptLevel,
shared_lib_path: Option<&Path>,
objc_header_path: Option<&str>,
) -> Output {
if shared_lib_path.is_some() {
unimplemented!("Linking a shared library to Swift not yet implemented");
}
let mut command = Command::new("swiftc");
command
.env_clear()
.env("PATH", &env_path)
.env("HOME", &env_home)
.args(sources)
.arg("-emit-object")
.arg("-parse-as-library")
.args(&["-o", dest]);
if let Some(objc_header) = objc_header_path {
command.args(&["-import-objc-header", objc_header]);
}
if matches!(opt_level, OptLevel::Optimize) {
command.arg("-O");
}
command.output().unwrap()
}
pub fn rebuild_host(
opt_level: OptLevel,
target: &Triple,
host_input_path: &Path,
shared_lib_path: Option<&Path>,
target_valgrind: bool,
) {
let c_host_src = host_input_path.with_file_name("host.c");
let c_host_dest = host_input_path.with_file_name("c_host.o");
@ -313,6 +359,9 @@ pub fn rebuild_host(
let rust_host_src = host_input_path.with_file_name("host.rs");
let rust_host_dest = host_input_path.with_file_name("rust_host.o");
let cargo_host_src = host_input_path.with_file_name("Cargo.toml");
let swift_host_src = host_input_path.with_file_name("host.swift");
let swift_host_header_src = host_input_path.with_file_name("host.h");
let host_dest_native = host_input_path.with_file_name(if shared_lib_path.is_some() {
"dynhost"
} else {
@ -358,6 +407,7 @@ pub fn rebuild_host(
"native",
opt_level,
shared_lib_path,
target_valgrind,
)
}
Architecture::X86_32(_) => {
@ -371,6 +421,22 @@ pub fn rebuild_host(
"i386-linux-musl",
opt_level,
shared_lib_path,
target_valgrind,
)
}
Architecture::Aarch64(_) => {
let emit_bin = format!("-femit-bin={}", host_dest_native.to_str().unwrap());
build_zig_host_native(
&env_path,
&env_home,
&emit_bin,
zig_host_src.to_str().unwrap(),
zig_str_path.to_str().unwrap(),
target_triple_str(target),
opt_level,
shared_lib_path,
target_valgrind,
)
}
_ => panic!("Unsupported architecture {:?}", target.architecture),
@ -380,7 +446,7 @@ pub fn rebuild_host(
} else if cargo_host_src.exists() {
// Compile and link Cargo.toml, if it exists
let cargo_dir = host_input_path.parent().unwrap();
let libhost_dir =
let cargo_out_dir =
cargo_dir
.join("target")
.join(if matches!(opt_level, OptLevel::Optimize) {
@ -388,30 +454,30 @@ pub fn rebuild_host(
} else {
"debug"
});
let libhost = libhost_dir.join("libhost.a");
let mut command = Command::new("cargo");
command.arg("build").current_dir(cargo_dir);
if matches!(opt_level, OptLevel::Optimize) {
command.arg("--release");
}
let source_file = if shared_lib_path.is_some() {
command.env("RUSTFLAGS", "-C link-dead-code");
command.args(&["--bin", "host"]);
"src/main.rs"
} else {
command.arg("--lib");
"src/lib.rs"
};
let output = command.output().unwrap();
validate_output("src/lib.rs", "cargo build", output);
validate_output(source_file, "cargo build", output);
// Cargo hosts depend on a c wrapper for the api. Compile host.c as well.
if shared_lib_path.is_some() {
// If compiling to executable, let c deal with linking as well.
let output = build_c_host_native(
&env_path,
&env_home,
host_dest_native.to_str().unwrap(),
&[c_host_src.to_str().unwrap(), libhost.to_str().unwrap()],
opt_level,
shared_lib_path,
);
validate_output("host.c", "clang", output);
// For surgical linking, just copy the dynamically linked rust app.
std::fs::copy(cargo_out_dir.join("host"), host_dest_native).unwrap();
} else {
// Cargo hosts depend on a c wrapper for the api. Compile host.c as well.
let output = build_c_host_native(
&env_path,
&env_home,
@ -428,7 +494,7 @@ pub fn rebuild_host(
.args(&[
"-r",
"-L",
libhost_dir.to_str().unwrap(),
cargo_out_dir.to_str().unwrap(),
c_host_dest.to_str().unwrap(),
"-lhost",
"-o",
@ -527,18 +593,25 @@ pub fn rebuild_host(
shared_lib_path,
);
validate_output("host.c", "clang", output);
} else if swift_host_src.exists() {
// Compile host.swift, if it exists
let output = build_swift_host_native(
&env_path,
&env_home,
host_dest_native.to_str().unwrap(),
&[swift_host_src.to_str().unwrap()],
opt_level,
shared_lib_path,
swift_host_header_src
.exists()
.then(|| swift_host_header_src.to_str().unwrap()),
);
validate_output("host.swift", "swiftc", output);
}
}
fn nixos_path() -> String {
env::var("NIXOS_GLIBC_PATH").unwrap_or_else(|_| {
panic!(
"We couldn't find glibc! We tried looking for NIXOS_GLIBC_PATH
to find it via Nix, but that didn't work either. Please file a bug report.
This will only be an issue until we implement surgical linking.",
)
})
fn nix_path_opt() -> Option<String> {
env::var_os("NIX_GLIBC_PATH").map(|path| path.into_string().unwrap())
}
fn library_path<const N: usize>(segments: [&str; N]) -> Option<PathBuf> {
@ -587,21 +660,39 @@ fn link_linux(
));
}
let libcrt_path = library_path(["/usr", "lib", &architecture])
.or_else(|| library_path(["/usr", "lib"]))
.or_else(|| library_path([&nixos_path()]))
.unwrap();
let libcrt_path =
// give preference to nix_path if it's defined, this prevents bugs
if let Some(nix_path) = nix_path_opt() {
library_path([&nix_path])
.unwrap()
} else {
library_path(["/usr", "lib", &architecture])
.or_else(|| library_path(["/usr", "lib"]))
.unwrap()
};
let libgcc_name = "libgcc_s.so.1";
let libgcc_path = library_path(["/lib", &architecture, libgcc_name])
.or_else(|| library_path(["/usr", "lib", &architecture, libgcc_name]))
.or_else(|| library_path(["/usr", "lib", libgcc_name]))
.or_else(|| library_path([&nixos_path(), libgcc_name]))
.unwrap();
let libgcc_path =
// give preference to nix_path if it's defined, this prevents bugs
if let Some(nix_path) = nix_path_opt() {
library_path([&nix_path, libgcc_name])
.unwrap()
} else {
library_path(["/lib", &architecture, libgcc_name])
.or_else(|| library_path(["/usr", "lib", &architecture, libgcc_name]))
.or_else(|| library_path(["/usr", "lib", libgcc_name]))
.unwrap()
};
let ld_linux = match target.architecture {
Architecture::X86_64 => library_path(["/lib64", "ld-linux-x86-64.so.2"])
.or_else(|| library_path([&nixos_path(), "ld-linux-x86-64.so.2"])),
Architecture::X86_64 => {
// give preference to nix_path if it's defined, this prevents bugs
if let Some(nix_path) = nix_path_opt() {
library_path([&nix_path, "ld-linux-x86-64.so.2"])
} else {
library_path(["/lib64", "ld-linux-x86-64.so.2"])
}
}
Architecture::Aarch64(_) => library_path(["/lib", "ld-linux-aarch64.so.1"]),
_ => panic!(
"TODO gracefully handle unsupported linux architecture: {:?}",
@ -666,7 +757,7 @@ fn link_linux(
.args(&[
"--gc-sections",
"--eh-frame-hdr",
"-arch",
"-A",
arch_str(target),
"-pie",
libcrt_path.join("crti.o").to_str().unwrap(),
@ -675,7 +766,7 @@ fn link_linux(
.args(&base_args)
.args(&["-dynamic-linker", ld_linux])
.args(input_paths)
// ld.lld requires this argument, and does not accept -arch
// ld.lld requires this argument, and does not accept --arch
// .args(&["-L/usr/lib/x86_64-linux-gnu"])
.args(&[
// Libraries - see https://github.com/rtfeldman/roc/pull/554#discussion_r496365925
@ -714,52 +805,85 @@ fn link_macos(
}
};
// This path only exists on macOS Big Sur, and it causes ld errors
// on Catalina if it's specified with -L, so we replace it with a
// redundant -lSystem if the directory isn't there.
let big_sur_path = "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib";
let big_sur_fix = if Path::new(big_sur_path).exists() {
format!("-L{}", big_sur_path)
} else {
String::from("-lSystem")
let arch = match target.architecture {
Architecture::Aarch64(_) => "arm64".to_string(),
_ => target.architecture.to_string(),
};
Ok((
let mut ld_command = Command::new("ld");
ld_command
// NOTE: order of arguments to `ld` matters here!
// The `-l` flags should go after the `.o` arguments
Command::new("ld")
// Don't allow LD_ env vars to affect this
.env_clear()
.args(&[
// NOTE: we don't do --gc-sections on macOS because the default
// macOS linker doesn't support it, but it's a performance
// optimization, so if we ever switch to a different linker,
// we'd like to re-enable it on macOS!
// "--gc-sections",
link_type_arg,
"-arch",
target.architecture.to_string().as_str(),
])
.args(input_paths)
.args(&[
// Libraries - see https://github.com/rtfeldman/roc/pull/554#discussion_r496392274
// for discussion and further references
&big_sur_fix,
"-lSystem",
"-lresolv",
"-lpthread",
// "-lrt", // TODO shouldn't we need this?
// "-lc_nonshared", // TODO shouldn't we need this?
// "-lgcc", // TODO will eventually need compiler_rt from gcc or something - see https://github.com/rtfeldman/roc/pull/554#discussion_r496370840
// "-framework", // Uncomment this line & the following ro run the `rand` crate in examples/cli
// "Security",
// Output
"-o",
output_path.to_str().unwrap(), // app
])
.spawn()?,
output_path,
))
// Don't allow LD_ env vars to affect this
.env_clear()
.args(&[
// NOTE: we don't do --gc-sections on macOS because the default
// macOS linker doesn't support it, but it's a performance
// optimization, so if we ever switch to a different linker,
// we'd like to re-enable it on macOS!
// "--gc-sections",
link_type_arg,
"-arch",
&arch,
"-macos_version_min",
&get_macos_version(),
])
.args(input_paths);
let sdk_path = "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib";
if Path::new(sdk_path).exists() {
ld_command.arg(format!("-L{}", sdk_path));
ld_command.arg(format!("-L{}/swift", sdk_path));
};
ld_command.args(&[
// Libraries - see https://github.com/rtfeldman/roc/pull/554#discussion_r496392274
// for discussion and further references
"-lSystem",
"-lresolv",
"-lpthread",
// "-lrt", // TODO shouldn't we need this?
// "-lc_nonshared", // TODO shouldn't we need this?
// "-lgcc", // TODO will eventually need compiler_rt from gcc or something - see https://github.com/rtfeldman/roc/pull/554#discussion_r496370840
// "-framework", // Uncomment this line & the following ro run the `rand` crate in examples/cli
// "Security",
// Output
"-o",
output_path.to_str().unwrap(), // app
]);
let mut ld_child = ld_command.spawn()?;
match target.architecture {
Architecture::Aarch64(_) => {
ld_child.wait()?;
let codesign_child = Command::new("codesign")
.args(&["-s", "-", output_path.to_str().unwrap()])
.spawn()?;
Ok((codesign_child, output_path))
}
_ => Ok((ld_child, output_path)),
}
}
fn get_macos_version() -> String {
let cmd_stdout = Command::new("sw_vers")
.arg("-productVersion")
.output()
.expect("Failed to execute command 'sw_vers -productVersion'")
.stdout;
let full_version_string = String::from_utf8(cmd_stdout)
.expect("Failed to convert output of command 'sw_vers -productVersion' into a utf8 string");
full_version_string
.trim_end()
.split('.')
.take(2)
.collect::<Vec<&str>>()
.join(".")
}
fn link_wasm32(
@ -771,7 +895,7 @@ fn link_wasm32(
let zig_str_path = find_zig_str_path();
let wasi_libc_path = find_wasi_libc_path();
let child = Command::new("zig9")
let child = Command::new("zig")
// .env_clear()
// .env("PATH", &env_path)
.args(&["build-exe"])
@ -838,7 +962,7 @@ pub fn module_to_dylib(
// Load the dylib
let path = dylib_path.as_path().to_str().unwrap();
Library::new(path)
unsafe { Library::new(path) }
}
fn validate_output(file_name: &str, cmd_name: &str, output: Output) {

View File

@ -2,13 +2,15 @@
use roc_gen_llvm::llvm::build::module_from_builtins;
#[cfg(feature = "llvm")]
pub use roc_gen_llvm::llvm::build::FunctionIterator;
use roc_load::file::MonomorphizedModule;
#[cfg(feature = "llvm")]
use roc_load::file::{LoadedModule, MonomorphizedModule};
use roc_module::symbol::{Interns, ModuleId};
use roc_mono::ir::OptLevel;
use std::path::{Path, PathBuf};
use std::time::Duration;
use roc_collections::all::{MutMap, MutSet};
use roc_collections::all::MutMap;
#[cfg(feature = "target-wasm32")]
use roc_collections::all::MutSet;
#[derive(Debug, Clone, Copy, Default)]
pub struct CodeGenTiming {
@ -18,13 +20,46 @@ pub struct CodeGenTiming {
// TODO: If modules besides this one start needing to know which version of
// llvm we're using, consider moving me somewhere else.
#[cfg(feature = "llvm")]
const LLVM_VERSION: &str = "12";
// TODO instead of finding exhaustiveness problems in monomorphization, find
// them after type checking (like Elm does) so we can complete the entire
// `roc check` process without needing to monomorphize.
/// Returns the number of problems reported.
pub fn report_problems(loaded: &mut MonomorphizedModule) -> usize {
pub fn report_problems_monomorphized(loaded: &mut MonomorphizedModule) -> usize {
report_problems_help(
loaded.total_problems(),
&loaded.header_sources,
&loaded.sources,
&loaded.interns,
&mut loaded.can_problems,
&mut loaded.type_problems,
&mut loaded.mono_problems,
)
}
pub fn report_problems_typechecked(loaded: &mut LoadedModule) -> usize {
report_problems_help(
loaded.total_problems(),
&loaded.header_sources,
&loaded.sources,
&loaded.interns,
&mut loaded.can_problems,
&mut loaded.type_problems,
&mut Default::default(),
)
}
fn report_problems_help(
total_problems: usize,
header_sources: &MutMap<ModuleId, (PathBuf, Box<str>)>,
sources: &MutMap<ModuleId, (PathBuf, Box<str>)>,
interns: &Interns,
can_problems: &mut MutMap<ModuleId, Vec<roc_problem::can::Problem>>,
type_problems: &mut MutMap<ModuleId, Vec<roc_solve::solve::TypeError>>,
mono_problems: &mut MutMap<ModuleId, Vec<roc_mono::ir::MonoProblem>>,
) -> usize {
use roc_reporting::report::{
can_problem, mono_problem, type_problem, Report, RocDocAllocator, Severity::*,
DEFAULT_PALETTE,
@ -33,14 +68,13 @@ pub fn report_problems(loaded: &mut MonomorphizedModule) -> usize {
// This will often over-allocate total memory, but it means we definitely
// never need to re-allocate either the warnings or the errors vec!
let total_problems = loaded.total_problems();
let mut warnings = Vec::with_capacity(total_problems);
let mut errors = Vec::with_capacity(total_problems);
for (home, (module_path, src)) in loaded.sources.iter() {
for (home, (module_path, src)) in sources.iter() {
let mut src_lines: Vec<&str> = Vec::new();
if let Some((_, header_src)) = loaded.header_sources.get(home) {
if let Some((_, header_src)) = header_sources.get(home) {
src_lines.extend(header_src.split('\n'));
src_lines.extend(src.split('\n').skip(1));
} else {
@ -48,9 +82,9 @@ pub fn report_problems(loaded: &mut MonomorphizedModule) -> usize {
}
// Report parsing and canonicalization problems
let alloc = RocDocAllocator::new(&src_lines, *home, &loaded.interns);
let alloc = RocDocAllocator::new(&src_lines, *home, interns);
let problems = loaded.can_problems.remove(home).unwrap_or_default();
let problems = can_problems.remove(home).unwrap_or_default();
for problem in problems.into_iter() {
let report = can_problem(&alloc, module_path.clone(), problem);
@ -69,7 +103,7 @@ pub fn report_problems(loaded: &mut MonomorphizedModule) -> usize {
}
}
let problems = loaded.type_problems.remove(home).unwrap_or_default();
let problems = type_problems.remove(home).unwrap_or_default();
for problem in problems {
if let Some(report) = type_problem(&alloc, module_path.clone(), problem) {
@ -89,7 +123,7 @@ pub fn report_problems(loaded: &mut MonomorphizedModule) -> usize {
}
}
let problems = loaded.mono_problems.remove(home).unwrap_or_default();
let problems = mono_problems.remove(home).unwrap_or_default();
for problem in problems {
let report = mono_problem(&alloc, module_path.clone(), problem);
@ -139,6 +173,50 @@ pub fn report_problems(loaded: &mut MonomorphizedModule) -> usize {
problems_reported
}
#[cfg(not(feature = "llvm"))]
pub fn gen_from_mono_module(
arena: &bumpalo::Bump,
loaded: MonomorphizedModule,
_roc_file_path: &Path,
target: &target_lexicon::Triple,
app_o_file: &Path,
opt_level: OptLevel,
_emit_debug_info: bool,
) -> CodeGenTiming {
match opt_level {
OptLevel::Optimize => {
todo!("Return this error message in a better way: optimized builds not supported without llvm backend");
}
OptLevel::Normal | OptLevel::Development => {
gen_from_mono_module_dev(arena, loaded, target, app_o_file)
}
}
}
#[cfg(feature = "llvm")]
pub fn gen_from_mono_module(
arena: &bumpalo::Bump,
loaded: MonomorphizedModule,
roc_file_path: &Path,
target: &target_lexicon::Triple,
app_o_file: &Path,
opt_level: OptLevel,
emit_debug_info: bool,
) -> CodeGenTiming {
match opt_level {
OptLevel::Normal | OptLevel::Optimize => gen_from_mono_module_llvm(
arena,
loaded,
roc_file_path,
target,
app_o_file,
opt_level,
emit_debug_info,
),
OptLevel::Development => gen_from_mono_module_dev(arena, loaded, target, app_o_file),
}
}
// TODO how should imported modules factor into this? What if those use builtins too?
// TODO this should probably use more helper functions
// TODO make this polymorphic in the llvm functions so it can be reused for another backend.
@ -372,7 +450,7 @@ pub fn gen_from_mono_module_llvm(
emit_o_file,
}
}
#[cfg(feature = "target-wasm32")]
pub fn gen_from_mono_module_dev(
arena: &bumpalo::Bump,
loaded: MonomorphizedModule,
@ -383,13 +461,31 @@ pub fn gen_from_mono_module_dev(
match target.architecture {
Architecture::Wasm32 => gen_from_mono_module_dev_wasm32(arena, loaded, app_o_file),
Architecture::X86_64 => {
Architecture::X86_64 | Architecture::Aarch64(_) => {
gen_from_mono_module_dev_assembly(arena, loaded, target, app_o_file)
}
_ => todo!(),
}
}
#[cfg(not(feature = "target-wasm32"))]
pub fn gen_from_mono_module_dev(
arena: &bumpalo::Bump,
loaded: MonomorphizedModule,
target: &target_lexicon::Triple,
app_o_file: &Path,
) -> CodeGenTiming {
use target_lexicon::Architecture;
match target.architecture {
Architecture::X86_64 | Architecture::Aarch64(_) => {
gen_from_mono_module_dev_assembly(arena, loaded, target, app_o_file)
}
_ => todo!(),
}
}
#[cfg(feature = "target-wasm32")]
fn gen_from_mono_module_dev_wasm32(
arena: &bumpalo::Bump,
loaded: MonomorphizedModule,
@ -437,8 +533,7 @@ fn gen_from_mono_module_dev_assembly(
generate_allocators,
};
let module_object = roc_gen_dev::build_module(&env, target, loaded.procedures)
.expect("failed to compile module");
let module_object = roc_gen_dev::build_module(&env, target, loaded.procedures);
let module_out = module_object
.write()

View File

@ -31,6 +31,11 @@ pub fn target_triple_str(target: &Triple) -> &'static str {
operating_system: OperatingSystem::Linux,
..
} => "aarch64-unknown-linux-gnu",
Triple {
architecture: Architecture::Aarch64(_),
operating_system: OperatingSystem::Darwin,
..
} => "aarch64-apple-darwin",
Triple {
architecture: Architecture::X86_64,
operating_system: OperatingSystem::Darwin,
@ -43,7 +48,9 @@ pub fn target_triple_str(target: &Triple) -> &'static str {
#[cfg(feature = "llvm")]
pub fn init_arch(target: &Triple) {
match target.architecture {
Architecture::X86_64 | Architecture::X86_32(_) => {
Architecture::X86_64 | Architecture::X86_32(_)
if cfg!(any(feature = "target-x86", feature = "target-x86_64")) =>
{
Target::initialize_x86(&InitializationConfig::default());
}
Architecture::Aarch64(_) if cfg!(feature = "target-aarch64") => {
@ -52,7 +59,7 @@ pub fn init_arch(target: &Triple) {
Architecture::Arm(_) if cfg!(feature = "target-arm") => {
Target::initialize_arm(&InitializationConfig::default());
}
Architecture::Wasm32 if cfg!(feature = "target-webassembly") => {
Architecture::Wasm32 if cfg!(feature = "target-wasm32") => {
Target::initialize_webassembly(&InitializationConfig::default());
}
_ => panic!(
@ -70,8 +77,8 @@ pub fn arch_str(target: &Triple) -> &'static str {
//
// https://stackoverflow.com/questions/15036909/clang-how-to-list-supported-target-architectures
match target.architecture {
Architecture::X86_64 => "x86-64",
Architecture::X86_32(_) => "x86",
Architecture::X86_64 if cfg!(feature = "target-x86_64") => "x86-64",
Architecture::X86_32(_) if cfg!(feature = "target-x86") => "x86",
Architecture::Aarch64(_) if cfg!(feature = "target-aarch64") => "aarch64",
Architecture::Arm(_) if cfg!(feature = "target-arm") => "arm",
Architecture::Wasm32 if cfg!(feature = "target-webassembly") => "wasm32",

View File

@ -10,9 +10,3 @@ roc_collections = { path = "../collections" }
roc_region = { path = "../region" }
roc_module = { path = "../module" }
roc_types = { path = "../types" }
[dev-dependencies]
pretty_assertions = "0.5.1"
maplit = "1.0.1"
indoc = "0.3.3"
quickcheck = "0.8"
quickcheck_macros = "0.8"

View File

@ -42,7 +42,7 @@ fn list_repeat(symbol: Symbol, var_store: &mut VarStore) -> Def {
)
}
```
In these builtin definitions you will need to allocate for and list the arguments. For `List.repeat`, the arguments are the `elem_var` and the `len_var`. So in both the `body` and `defn` we list these arguments in a vector, with the `Symobl::ARG_1` and` Symvol::ARG_2` designating which argument is which.
In these builtin definitions you will need to allocate for and list the arguments. For `List.repeat`, the arguments are the `elem_var` and the `len_var`. So in both the `body` and `defn` we list these arguments in a vector, with the `Symbol::ARG_1` and` Symvol::ARG_2` designating which argument is which.
Since `List.repeat` is implemented entirely as low level functions, its `body` is a `RunLowLevel`, and the `op` is `LowLevel::ListRepeat`. Lets talk about `LowLevel` in the next section.
@ -60,17 +60,11 @@ Its one thing to actually write these functions, its _another_ thing to let the
## Specifying how we pass args to the function
### builtins/mono/src/borrow.rs
After we have all of this, we need to specify if the arguments we're passing are owned, borrowed or irrelvant. Towards the bottom of this file, add a new case for you builtin and specify each arg. Be sure to read the comment, as it explains this in more detail.
## Specifying the uniqueness of a function
### builtins/src/unique.rs
One of the cool things about Roc is that it evaluates if a value in memory is shared between scopes or if it is used in just one place. If the value is used in one place then it is 'unique', and it therefore can be mutated in place. For a value created by a function, the uniqueness of the output is determined in part by the uniqueness of the input arguments. For example `List.single : elem -> List elem` can return a unique list if the `elem` is also unique.
We have to define the uniqueness constraints of a function just like we have to define a type signature. That is what happens in `unique.rs`. This can be tricky so it would be a good step to ask for help on if it is confusing.
After we have all of this, we need to specify if the arguments we're passing are owned, borrowed or irrelevant. Towards the bottom of this file, add a new case for your builtin and specify each arg. Be sure to read the comment, as it explains this in more detail.
## Testing it
### solve/tests/solve_expr.rs
To make sure that Roc is properly inferring the type of the new builtin, add a test to this file simlar to:
To make sure that Roc is properly inferring the type of the new builtin, add a test to this file similar to:
```
#[test]
fn atan() {
@ -101,4 +95,4 @@ But replace `Num.atan`, the return value, and the return type with your new buil
When implementing a new builtin, it is often easy to copy and paste the implementation for an existing builtin. This can take you quite far since many builtins are very similar, but it also risks forgetting to change one small part of what you copy and pasted and losing a lot of time later on when you can't figure out why things don't work. So, speaking from experience, even if you are copying an existing builtin, try and implement it manually without copying and pasting. Two recent instances of this (as of September 7th, 2020):
- `List.keepIf` did not work for a long time because in builtins its `LowLevel` was `ListMap`. This was because I copy and pasted the `List.map` implementation in `builtins.rs`
- `List.walkRight` had mysterious memory bugs for a little while because in `unique.rs` its return type was `list_type(flex(b))` instead of `flex(b)` since it was copy and pasted from `List.keepIf`.
- `List.walkBackwards` had mysterious memory bugs for a little while because in `unique.rs` its return type was `list_type(flex(b))` instead of `flex(b)` since it was copy and pasted from `List.keepIf`.

View File

@ -20,74 +20,92 @@ pub fn build(b: *Builder) void {
const test_step = b.step("test", "Run tests");
test_step.dependOn(&main_tests.step);
// Targets
const host_target = b.standardTargetOptions(.{});
const i386_target = makeI386Target();
const wasm32_target = makeWasm32Target();
// LLVM IR
const obj_name = "builtins-host";
const llvm_obj = b.addObject(obj_name, main_path);
llvm_obj.setBuildMode(mode);
llvm_obj.linkSystemLibrary("c");
llvm_obj.strip = true;
llvm_obj.emit_llvm_ir = true;
llvm_obj.emit_bin = false;
const ir = b.step("ir", "Build LLVM ir");
ir.dependOn(&llvm_obj.step);
generateLlvmIrFile(b, mode, host_target, main_path, "ir", "builtins-host");
generateLlvmIrFile(b, mode, i386_target, main_path, "ir-i386", "builtins-i386");
generateLlvmIrFile(b, mode, wasm32_target, main_path, "ir-wasm32", "builtins-wasm32");
// 32-bit x86, useful for debugging
var i386_target = CrossTarget.parse(.{}) catch unreachable;
// Generate Object Files
generateObjectFile(b, mode, host_target, main_path, "object", "builtins-host");
generateObjectFile(b, mode, wasm32_target, main_path, "wasm32-object", "builtins-wasm32");
i386_target.cpu_arch = std.Target.Cpu.Arch.i386;
i386_target.os_tag = std.Target.Os.Tag.linux;
i386_target.abi = std.Target.Abi.musl;
removeInstallSteps(b);
}
const obj_name_i386 = "builtins-i386";
const llvm_obj_i386 = b.addObject(obj_name_i386, main_path);
llvm_obj_i386.setBuildMode(mode);
llvm_obj_i386.strip = true;
llvm_obj_i386.emit_llvm_ir = true;
llvm_obj_i386.emit_bin = false;
llvm_obj_i386.target = i386_target;
// TODO zig 0.9 can generate .bc directly, switch to that when it is released!
fn generateLlvmIrFile(
b: *Builder,
mode: std.builtin.Mode,
target: CrossTarget,
main_path: []const u8,
step_name: []const u8,
object_name: []const u8,
) void {
const obj = b.addObject(object_name, main_path);
obj.setBuildMode(mode);
obj.strip = true;
obj.emit_llvm_ir = true;
obj.emit_bin = false;
obj.target = target;
const ir_i386 = b.step("ir-i386", "Build LLVM ir for 32-bit targets (x86)");
ir_i386.dependOn(&llvm_obj_i386.step);
const ir = b.step(step_name, "Build LLVM ir");
ir.dependOn(&obj.step);
}
// LLVM IR 32-bit (wasm)
var wasm32_target = CrossTarget.parse(.{}) catch unreachable;
// 32-bit wasm
wasm32_target.cpu_arch = std.Target.Cpu.Arch.wasm32;
wasm32_target.os_tag = std.Target.Os.Tag.freestanding;
wasm32_target.abi = std.Target.Abi.none;
const obj_name_wasm32 = "builtins-wasm32";
const llvm_obj_wasm32 = b.addObject(obj_name_wasm32, main_path);
llvm_obj_wasm32.setBuildMode(mode);
llvm_obj_wasm32.strip = true;
llvm_obj_wasm32.emit_llvm_ir = true;
llvm_obj_wasm32.emit_bin = false;
llvm_obj_wasm32.target = wasm32_target;
const ir_wasm32 = b.step("ir-wasm32", "Build LLVM ir for 32-bit targets (wasm)");
ir_wasm32.dependOn(&llvm_obj_wasm32.step);
// Object File
// TODO: figure out how to get this to emit symbols that are only scoped to linkage (global but hidden).
// Also, zig has -ffunction-sections, but I am not sure how to add it here.
// With both of those changes, unused zig functions will be cleaned up by the linker saving around 100k.
const obj = b.addObject("builtins-host", main_path);
// Generate Object File
// TODO: figure out how to get this to emit symbols that are only scoped to linkage (global but hidden).
// @bhansconnect: I believe anything with global scope will still be preserved by the linker even if it
// is never called. I think it could theoretically be called by a dynamic lib that links to the executable
// or something similar.
fn generateObjectFile(
b: *Builder,
mode: std.builtin.Mode,
target: CrossTarget,
main_path: []const u8,
step_name: []const u8,
object_name: []const u8,
) void {
const obj = b.addObject(object_name, main_path);
obj.setBuildMode(mode);
obj.linkSystemLibrary("c");
obj.setOutputDir(".");
obj.strip = true;
const obj_step = b.step("object", "Build object file for linking");
obj.target = target;
obj.link_function_sections = true;
const obj_step = b.step(step_name, "Build object file for linking");
obj_step.dependOn(&obj.step);
}
b.default_step = ir;
removeInstallSteps(b);
fn makeI386Target() CrossTarget {
var target = CrossTarget.parse(.{}) catch unreachable;
target.cpu_arch = std.Target.Cpu.Arch.i386;
target.os_tag = std.Target.Os.Tag.linux;
target.abi = std.Target.Abi.musl;
return target;
}
fn makeWasm32Target() CrossTarget {
var target = CrossTarget.parse(.{}) catch unreachable;
// 32-bit wasm
target.cpu_arch = std.Target.Cpu.Arch.wasm32;
target.os_tag = std.Target.Os.Tag.freestanding;
target.abi = std.Target.Abi.none;
return target;
}
fn removeInstallSteps(b: *Builder) void {
for (b.top_level_steps.items) |top_level_step, i| {
if (mem.eql(u8, top_level_step.step.name, "install") or mem.eql(u8, top_level_step.step.name, "uninstall")) {
const name = top_level_step.step.name;
const name = top_level_step.step.name;
if (mem.eql(u8, name, "install") or mem.eql(u8, name, "uninstall")) {
_ = b.top_level_steps.swapRemove(i);
}
}

View File

@ -6,4 +6,4 @@ set -euxo pipefail
zig build test
# fmt every zig
find src/*.zig -type f -print0 | xargs -n 1 -0 zig fmt --check || (echo "zig fmt --check FAILED! Check the previuous lines to see which files were improperly formatted." && exit 1)
find src/*.zig -type f -print0 | xargs -n 1 -0 zig fmt --check || (echo "zig fmt --check FAILED! Check the previous lines to see which files were improperly formatted." && exit 1)

View File

@ -768,7 +768,7 @@ pub fn dictWalk(
const key = dict.getKey(i, alignment, key_width, value_width);
const value = dict.getValue(i, alignment, key_width, value_width);
caller(data, key, value, b2, b1);
caller(data, b2, key, value, b1);
std.mem.swap([*]u8, &b1, &b2);
},

Some files were not shown because too many files have changed in this diff Show More