Merge branch 'main' into jaredly-patch-1

Signed-off-by: Ayaz <20735482+ayazhafiz@users.noreply.github.com>
This commit is contained in:
Ayaz 2023-12-02 20:16:49 -06:00 committed by GitHub
commit 9e6933a16f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3003 changed files with 411682 additions and 263136 deletions

View File

@ -1,29 +0,0 @@
[alias]
test-gen-llvm = "test -p test_gen"
test-gen-dev = "test -p roc_gen_dev -p test_gen --no-default-features --features gen-dev"
test-gen-wasm = "test -p roc_gen_wasm -p test_gen --no-default-features --features gen-wasm"
[target.wasm32-unknown-unknown]
# Rust compiler flags for minimum-sized .wasm binary in the web REPL
# opt-level=s Optimizations should focus more on size than speed
# lto=fat Spend extra effort on link-time optimization across crates
rustflags = ["-Copt-level=s", "-Clto=fat"]
[env]
# Gives us the path of the workspace root for use in cargo tests without having
# to compute it per-package.
# https://github.com/rust-lang/cargo/issues/3946#issuecomment-973132993
ROC_WORKSPACE_DIR = { value = "", relative = true }
# Debug flags. Keep this up-to-date with compiler/debug_flags/src/lib.rs.
# Set = "1" to turn a debug flag on.
ROC_PRETTY_PRINT_ALIAS_CONTENTS = "0"
ROC_PRINT_UNIFICATIONS = "0"
ROC_PRINT_MISMATCHES = "0"
ROC_VERIFY_RIGID_LET_GENERALIZED = "0"
ROC_PRINT_IR_AFTER_SPECIALIZATION = "0"
ROC_PRINT_IR_AFTER_RESET_REUSE = "0"
ROC_PRINT_IR_AFTER_REFCOUNT = "0"
ROC_DEBUG_ALIAS_ANALYSIS = "0"
ROC_PRINT_LLVM_FN_VERIFICATION = "0"
ROC_PRINT_LOAD_LOG = "0"

50
.cargo/config.toml Normal file
View File

@ -0,0 +1,50 @@
[alias]
test-gen-llvm = "test -p test_gen"
test-gen-dev = "test -p roc_gen_dev -p test_gen --no-default-features --features gen-dev"
test-gen-wasm = "test -p roc_gen_wasm -p test_gen --no-default-features --features gen-wasm"
test-gen-llvm-wasm = "test -p roc_gen_wasm -p test_gen --no-default-features --features gen-llvm-wasm"
nextest-gen-llvm = "nextest run -p test_gen"
nextest-gen-dev = "nextest run -p roc_gen_dev -p test_gen --no-default-features --features gen-dev"
nextest-gen-wasm = "nextest run -p roc_gen_wasm -p test_gen --no-default-features --features gen-wasm"
nextest-gen-llvm-wasm = "nextest run -p roc_gen_wasm -p test_gen --no-default-features --features gen-llvm-wasm"
uitest = "test -p uitest"
[target.wasm32-unknown-unknown]
# Rust compiler flags for minimum-sized .wasm binary in the web REPL
# opt-level=s Optimizations should focus more on size than speed
# lto=fat Spend extra effort on link-time optimization across crates
# embed-bitcode=yes Turn back on lto since it is no longer default
rustflags = ["-Copt-level=s", "-Clto=fat", "-Cembed-bitcode=yes"]
# TODO: there is probably a more proper solution to this.
# We are pulling in roc_alloc and friends due to using roc_std.
# They are defined in roc_glue, but windows linking breaks before we get there.
[target.'cfg(target_os = "windows")']
rustflags = ["-Clink-args=/FORCE:UNRESOLVED"]
[env]
# Gives us the path of the workspace root for use in cargo tests without having
# to compute it per-package.
# https://github.com/rust-lang/cargo/issues/3946#issuecomment-973132993
ROC_WORKSPACE_DIR = { value = "", relative = true }
# Debug flags. Keep this up-to-date with compiler/debug_flags/src/lib.rs.
# Set = "1" to turn a debug flag on.
ROC_PRETTY_PRINT_ALIAS_CONTENTS = "0"
ROC_PRINT_UNIFICATIONS = "0"
ROC_PRINT_UNDERIVABLE = "0"
ROC_TRACE_COMPACTION = "0"
ROC_PRINT_UNIFICATIONS_DERIVED = "0"
ROC_PRINT_MISMATCHES = "0"
ROC_VERIFY_RIGID_LET_GENERALIZED = "0"
ROC_CHECK_MONO_IR = "0"
ROC_PRINT_IR_AFTER_SPECIALIZATION = "0"
ROC_PRINT_IR_AFTER_RESET_REUSE = "0"
ROC_PRINT_IR_AFTER_DROP_SPECIALIZATION = "0"
ROC_PRINT_IR_AFTER_REFCOUNT = "0"
ROC_PRINT_RUNTIME_ERROR_GEN = "0"
ROC_DEBUG_ALIAS_ANALYSIS = "0"
ROC_PRINT_LLVM_FN_VERIFICATION = "0"
ROC_PRINT_LOAD_LOG = "0"

View File

@ -1,4 +0,0 @@
AUTHORS
nix
.envrc
.gitignore

2
.gitattributes vendored Normal file
View File

@ -0,0 +1,2 @@
# Require roc files to be checked out with Unix line endings, even on Windows
*.roc text eol=lf

3
.github/FUNDING.yml vendored Normal file
View File

@ -0,0 +1,3 @@
# These are supported funding model platforms
github: roc-lang

11
.github/dependabot.yml vendored Normal file
View File

@ -0,0 +1,11 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
time: "07:00"
timezone: "Europe/Brussels"
# Disable all version updates, only critical security updates will be submitted
open-pull-requests-limit: 0

View File

@ -0,0 +1,218 @@
on:
# pull_request:
workflow_dispatch:
# this cancels workflows currently in progress if you start a new one
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
# use .tar.gz for quick testing
ARCHIVE_FORMAT: .tar.gz
BASIC_CLI_BRANCH: main
jobs:
fetch-releases:
runs-on: [ubuntu-20.04]
steps:
- uses: actions/checkout@v3
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
- name: Save roc_nightly archives
uses: actions/upload-artifact@v3
with:
path: roc_nightly-*
build-linux-x86_64-files:
runs-on: [ubuntu-20.04]
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
- name: build basic-cli with surgical linker and also with legacy linker
env:
CARGO_BUILD_TARGET: x86_64-unknown-linux-musl
run: ./ci/build_basic_cli.sh linux_x86_64 "--linker legacy"
- name: Save .rh, .rm and .o file
uses: actions/upload-artifact@v3
with:
name: linux-x86_64-files
path: |
basic-cli/src/metadata_linux-x64.rm
basic-cli/src/linux-x64.rh
basic-cli/src/linux-x64.o
build-linux-arm64-files:
runs-on: [self-hosted, Linux, ARM64]
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
- name: build basic-cli
env:
CARGO_BUILD_TARGET: aarch64-unknown-linux-musl
CC_aarch64_unknown_linux_musl: clang-16
AR_aarch64_unknown_linux_musl: llvm-ar-16
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS: "-Clink-self-contained=yes -Clinker=rust-lld"
run: ./ci/build_basic_cli.sh linux_arm64
- name: Save .o file
uses: actions/upload-artifact@v3
with:
name: linux-arm64-files
path: |
basic-cli/src/linux-arm64.o
build-macos-x86_64-files:
runs-on: [macos-11] # I expect the generated files to work on macOS 12 and up
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
- run: ./ci/build_basic_cli.sh macos_x86_64
- name: Save .o files
uses: actions/upload-artifact@v3
with:
name: macos-x86_64-files
path: |
basic-cli/src/macos-x64.o
build-macos-apple-silicon-files:
name: build apple silicon .o file
runs-on: [self-hosted, macOS, ARM64]
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
- run: ./ci/build_basic_cli.sh macos_apple_silicon
- name: Save macos-arm64.o file
uses: actions/upload-artifact@v3
with:
name: macos-apple-silicon-files
path: |
basic-cli/src/macos-arm64.o
create-release-archive:
needs: [build-linux-x86_64-files, build-linux-arm64-files, build-macos-x86_64-files, build-macos-apple-silicon-files]
name: create release archive
runs-on: [ubuntu-20.04]
steps:
- uses: actions/checkout@v3
- name: remove all folders except the ci folder
run: ls | grep -v ci | xargs rm -rf
- name: Download the previously uploaded files
uses: actions/download-artifact@v3
- name: mv roc nightly and simplify name
run: mv $(ls -d artifact/* | grep "roc_nightly.*tar\.gz" | grep "linux_x86_64") ./roc_nightly.tar.gz
- name: decompress the tar
run: tar -xzvf roc_nightly.tar.gz
- name: delete tar
run: rm roc_nightly.tar.gz
- name: rename nightly folder
run: mv roc_nightly* roc_nightly
- run: git clone https://github.com/roc-lang/basic-cli.git
- run: cp macos-apple-silicon-files/* ./basic-cli/src
- run: cp linux-x86_64-files/* ./basic-cli/src
- run: cp linux-arm64-files/* ./basic-cli/src
- run: cp macos-x86_64-files/* ./basic-cli/src
- run: ./roc_nightly/roc build --bundle=${{ env.ARCHIVE_FORMAT }} ./basic-cli/src/main.roc
- run: echo "TAR_FILENAME=$(ls -d basic-cli/src/* | grep ${{ env.ARCHIVE_FORMAT }})" >> $GITHUB_ENV
- name: Upload platform archive
uses: actions/upload-artifact@v3
with:
name: basic-cli-platform
path: |
${{ env.TAR_FILENAME }}
test-release-ubuntu:
needs: [create-release-archive]
runs-on: [ubuntu-20.04]
steps:
- name: Download the previously uploaded files
uses: actions/download-artifact@v3
- name: mv roc nightly and simplify name
run: mv $(ls -d artifact/* | grep "roc_nightly.*tar\.gz" | grep "linux_x86_64") ./roc_nightly.tar.gz
- name: decompress the tar
run: tar -xzvf roc_nightly.tar.gz
- name: delete tar
run: rm roc_nightly.tar.gz
- name: rename nightly folder
run: mv roc_nightly* roc_nightly
- if: contains(env.ARCHIVE_FORMAT, 'gz')
run: |
cd basic-cli-platform && ls | grep "tar" | xargs tar -xzf
- if: contains(env.ARCHIVE_FORMAT, 'br')
run: |
cd basic-cli-platform && ls | grep "tar" | xargs brotli -d
ls | grep "tar$" | xargs tar -xf
- name: Install expect for tests if we don't have it yet
run: if ! dpkg -l | grep -qw expect; then sudo apt install -y expect; fi
- name: Install ncat for tests if we don't have it yet
run: if ! dpkg -l | grep -qw ncat; then sudo apt install -y ncat; fi
- name: prep testing
run: |
mv roc_nightly basic-cli-platform/.
cd basic-cli-platform
mkdir src
find . -maxdepth 1 -type f -exec mv {} src/ \;
mkdir temp-basic-cli
cd temp-basic-cli
git clone https://github.com/roc-lang/basic-cli.git
cd basic-cli
git checkout ${{ env.BASIC_CLI_BRANCH }}
cp -r examples ../..
cp -r ci ../..
cp -r LICENSE ../..
# LICENSE is necessary for command test
- name: run tests
run: |
cd basic-cli-platform
ROC=./roc_nightly/roc EXAMPLES_DIR=./examples/ ROC_BUILD_FLAGS=--prebuilt-platform ./ci/all_tests.sh

View File

@ -0,0 +1,59 @@
on:
workflow_dispatch:
# this cancels workflows currently in progress if you start a new one
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test-basic-cli-release-arm64:
runs-on: [self-hosted, Linux, ARM64]
steps:
- name: clone basic-cli repo
uses: actions/checkout@v3
with:
repository: roc-lang/basic-cli
ref: main
- name: get latest roc nightly
run: |
curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
- name: rename nightly tar
run: mv $(ls | grep "roc_nightly.*tar\.gz") roc_nightly.tar.gz
- name: decompress the tar
run: tar -xzf roc_nightly.tar.gz
- run: rm roc_nightly.tar.gz
- name: simplify nightly folder name
run: mv roc_nightly* roc_nightly
- run: ./roc_nightly/roc version
- run: expect -v
# Run all tests
- run: ROC=./roc_nightly/roc EXAMPLES_DIR=./examples/ ./ci/all_tests.sh
######
# Now test the latest basic-cli release, not the main branch
######
- name: Remove roc_nightly folder to keep things simple (we'll download it again later)
run: rm -rf roc_nightly
- name: Get the repo of the latest basic-cli release
run: |
git clone --depth 1 https://github.com/roc-lang/basic-cli
cd basic-cli
git fetch --tags
latestTag=$(git describe --tags $(git rev-list --tags --max-count=1))
git checkout $latestTag
- name: Run all tests with latest roc nightly and latest basic-cli release
run: |
sed -i 's/x86_64/arm64/g' ./ci/test_latest_release.sh
ROC=./roc_nightly/roc EXAMPLES_DIR=./basic-cli/examples/ ./ci/test_latest_release.sh

View File

@ -0,0 +1,164 @@
on:
# pull_request:
workflow_dispatch:
# this cancels workflows currently in progress if you start a new one
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
# use .tar.gz for quick testing
ARCHIVE_FORMAT: .tar.br
BASIC_WEBSERVER_BRANCH: main
jobs:
fetch-releases:
runs-on: [ubuntu-20.04]
steps:
- uses: actions/checkout@v3
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_arm64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
- run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
- name: Save roc_nightly archives
uses: actions/upload-artifact@v3
with:
path: roc_nightly-*
build-linux-x86_64-files:
runs-on: [ubuntu-20.04]
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
- name: build basic-webserver with legacy linker
env:
CARGO_BUILD_TARGET: x86_64-unknown-linux-musl
run: ./ci/build_basic_webserver.sh linux_x86_64 "--linker legacy"
- name: Save .rh, .rm and .o file
uses: actions/upload-artifact@v3
with:
name: linux-x86_64-files
path: |
basic-webserver/platform/metadata_linux-x64.rm
basic-webserver/platform/linux-x64.rh
basic-webserver/platform/linux-x64.o
build-linux-arm64-files:
runs-on: [self-hosted, Linux, ARM64]
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
- name: build basic-webserver
env:
CARGO_BUILD_TARGET: aarch64-unknown-linux-musl
CC_aarch64_unknown_linux_musl: clang-16
AR_aarch64_unknown_linux_musl: llvm-ar-16
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS: "-Clink-self-contained=yes -Clinker=rust-lld"
run: ./ci/build_basic_webserver.sh linux_arm64
- name: Save .o file
uses: actions/upload-artifact@v3
with:
name: linux-arm64-files
path: |
basic-webserver/platform/linux-arm64.o
build-macos-x86_64-files:
runs-on: [macos-11] # I expect the generated files to work on macOS 12 and 13
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
- run: ./ci/build_basic_webserver.sh macos_x86_64
- name: Save .o files
uses: actions/upload-artifact@v3
with:
name: macos-x86_64-files
path: |
basic-webserver/platform/macos-x64.o
build-macos-apple-silicon-files:
name: build apple silicon .o file
runs-on: [self-hosted, macOS, ARM64]
needs: [fetch-releases]
steps:
- uses: actions/checkout@v3
- name: Download the previously uploaded roc_nightly archives
uses: actions/download-artifact@v3
- run: ./ci/build_basic_webserver.sh macos_apple_silicon
- name: Save macos-arm64.o file
uses: actions/upload-artifact@v3
with:
name: macos-apple-silicon-files
path: |
basic-webserver/platform/macos-arm64.o
create-release-archive:
needs: [build-linux-x86_64-files, build-linux-arm64-files, build-macos-x86_64-files, build-macos-apple-silicon-files]
name: create release archive
runs-on: [ubuntu-20.04]
steps:
- uses: actions/checkout@v3
- name: remove all folders except the ci folder
run: ls | grep -v ci | xargs rm -rf
- name: Download the previously uploaded files
uses: actions/download-artifact@v3
- name: mv roc nightly and simplify name
run: mv $(ls -d artifact/* | grep "roc_nightly.*tar\.gz" | grep "linux_x86_64") ./roc_nightly.tar.gz
- name: decompress the tar
run: tar -xzvf roc_nightly.tar.gz
- name: delete tar
run: rm roc_nightly.tar.gz
- name: rename nightly folder
run: mv roc_nightly* roc_nightly
- run: |
git clone https://github.com/roc-lang/basic-webserver.git
cd basic-webserver
git checkout ${{ env.BASIC_WEBSERVER_BRANCH }}
cd ..
- run: cp macos-apple-silicon-files/* ./basic-webserver/platform
- run: cp linux-x86_64-files/* ./basic-webserver/platform
- run: cp linux-arm64-files/* ./basic-webserver/platform
- run: cp macos-x86_64-files/* ./basic-webserver/platform
- run: ./roc_nightly/roc build --bundle=${{ env.ARCHIVE_FORMAT }} ./basic-webserver/platform/main.roc
- run: echo "TAR_FILENAME=$(ls -d basic-webserver/platform/* | grep ${{ env.ARCHIVE_FORMAT }})" >> $GITHUB_ENV
- name: Upload platform archive
uses: actions/upload-artifact@v3
with:
name: basic-webserver-platform
path: |
${{ env.TAR_FILENAME }}

View File

@ -1,11 +1,8 @@
on: [pull_request]
on:
workflow_call:
name: Benchmarks
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
RUST_BACKTRACE: 1
ROC_NUM_WORKERS: 1
@ -18,30 +15,23 @@ jobs:
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
with:
ref: "trunk"
ref: "main"
clean: "true"
- name: Earthly version
run: earthly --version
- name: on main; prepare a self-contained benchmark folder
run: nix develop -c ./ci/benchmarks/prep_folder.sh main
- name: on trunk; prepare a self-contained benchmark folder
run: ./ci/safe-earthly.sh --build-arg BENCH_SUFFIX=trunk +prep-bench-folder
- uses: actions/checkout@v2
- uses: actions/checkout@v3
with:
clean: "false" # we want to keep the benchmark folder
- name: on current branch; prepare a self-contained benchmark folder
run: ./ci/safe-earthly.sh +prep-bench-folder
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
run: nix develop -c ./ci/benchmarks/prep_folder.sh branch
- name: build benchmark runner
run: cd ci/bench-runner && cargo build --release && cd ../..
run: nix develop -c bash -c "cd ci/benchmarks/bench-runner && cargo build --release && cd ../../.."
- name: run benchmarks with regression check
run: ./ci/bench-runner/target/release/bench-runner --check-executables-changed
run: nix develop -c ./ci/benchmarks/bench-runner/target/release/bench-runner --check-executables-changed

View File

@ -1,28 +0,0 @@
on: [pull_request]
name: CI
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
RUST_BACKTRACE: 1
jobs:
build-fmt-clippy-test:
name: fmt, clippy, test --release
runs-on: [self-hosted, i5-4690K]
timeout-minutes: 90
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v2
with:
clean: "true"
- name: Earthly version
run: earthly --version
- name: install dependencies, build, run zig tests, rustfmt, clippy, cargo test --release
run: ./ci/safe-earthly.sh +test-all

56
.github/workflows/ci_cleanup.yml vendored Normal file
View File

@ -0,0 +1,56 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * 1'
name: Garbage collect nix store
jobs:
clean-big-ci:
runs-on: [self-hosted, i7-6700K]
timeout-minutes: 120
steps:
- name: Clean up nix store
run: nix-store --gc
clean-small-ci:
runs-on: [self-hosted, i5-4690K]
timeout-minutes: 120
steps:
- name: Clean up nix store
run: nix-store --gc
clean-mac-mini-arm64:
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 120
steps:
- name: Clean up nix store
run: nix-store --gc
- name: Clean up nix shells
run: rm -rf /private/tmp/nix-shell.*
clean-rpi-1:
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 120
steps:
- name: Clean up nix store
run: nix-store --gc
clean-rpi-2:
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 120
steps:
- name: Clean up nix store
run: nix-store --gc
clean-mac-mini-x86-64:
runs-on: [self-hosted, macOS, X64]
timeout-minutes: 120
steps:
- name: Clean up nix store
run: nix-store --gc
- name: Clean up temp roc binaries
run: find /private/var/folders/hq -type f -name "roc_app_binary" -exec rm {} \; || true

View File

@ -0,0 +1,17 @@
on:
workflow_dispatch:
schedule:
- cron: '0 5 * * *'
name: Clean up nix on mac mini m1
jobs:
clean-mac-mini-arm64:
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 120
steps:
- name: Clean up nix store
run: nix-store --gc
- name: Clean up old nix shells
run: rm -rf /private/tmp/nix-shell.*

114
.github/workflows/ci_manager.yml vendored Normal file
View File

@ -0,0 +1,114 @@
on:
pull_request:
name: CI manager
# cancel current runs when a new commit is pushed
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
check-changes:
runs-on: ubuntu-22.04
outputs:
run_tests: ${{ steps.filecheck.outputs.run_tests }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Check if only css, html or md files changed
id: filecheck
run: |
git fetch origin ${{ github.base_ref }}
if git diff --name-only origin/${{ github.base_ref }} HEAD | grep -qvE '(\.md$|\.css$|\.html$)'; then
echo "run_tests=full" >> $GITHUB_OUTPUT
else
echo "run_tests=none" >> $GITHUB_OUTPUT
fi
- run: echo "debug output ${{ steps.filecheck.outputs.run_tests }}"
start-nix-linux-x86-64-tests:
needs: check-changes
if: needs.check-changes.outputs.run_tests == 'full'
uses: ./.github/workflows/nix_linux_x86_64.yml
start-nix-linux-aarch64-build-default-test:
needs: check-changes
if: needs.check-changes.outputs.run_tests == 'full'
uses: ./.github/workflows/nix_linux_arm64_default.yml
start-nix-linux-aarch64-cargo-build-test:
needs: check-changes
if: needs.check-changes.outputs.run_tests == 'full'
uses: ./.github/workflows/nix_linux_arm64_cargo.yml
start-nix-macos-apple-silicon-tests:
needs: check-changes
if: needs.check-changes.outputs.run_tests == 'full'
uses: ./.github/workflows/nix_macos_apple_silicon.yml
start-macos-x86-64-tests:
needs: check-changes
if: needs.check-changes.outputs.run_tests == 'full'
uses: ./.github/workflows/ubuntu_x86_64.yml
start-ubuntu-x86-64-tests:
needs: check-changes
if: needs.check-changes.outputs.run_tests == 'full'
uses: ./.github/workflows/ubuntu_x86_64.yml
start-windows-release-build-test:
needs: check-changes
if: needs.check-changes.outputs.run_tests == 'full'
uses: ./.github/workflows/windows_release_build.yml
start-windows-tests:
needs: check-changes
if: needs.check-changes.outputs.run_tests == 'full'
uses: ./.github/workflows/windows_tests.yml
start-roc-benchmarks:
needs: check-changes
if: needs.check-changes.outputs.run_tests == 'full'
uses: ./.github/workflows/benchmarks.yml
ran-full:
runs-on: ubuntu-22.04
needs: [
start-nix-linux-x86-64-tests,
start-nix-linux-aarch64-build-default-test,
start-nix-linux-aarch64-cargo-build-test,
start-nix-macos-apple-silicon-tests,
start-macos-x86-64-tests,
start-ubuntu-x86-64-tests,
start-windows-release-build-test,
start-windows-tests,
start-roc-benchmarks
]
steps:
- run: echo "all workflows succeeded!"
ran-none:
runs-on: ubuntu-22.04
needs: [check-changes]
if: needs.check-changes.outputs.run_tests == 'none'
steps:
- run: echo "Only non-code files changed. CI manager did not run any workflows."
# we need a single end job for the required checks under branch protection rules
finish:
runs-on: ubuntu-22.04
needs: [ran-full, ran-none]
if: |
always()
&& contains(needs.*.result, 'success')
&& !contains(needs.*.result, 'failure')
&& !contains(needs.*.result, 'cancelled')
&& !(needs.ran-full.result == 'skipped' && needs.ran-none.result == 'skipped')
steps:
- run: echo "Workflow succeeded :)"

View File

@ -0,0 +1,58 @@
on:
pull_request:
name: devtools nix files test - linux
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
devtools-test-linux:
name: devtools-test-linux
runs-on: [ubuntu-20.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Only run all steps if a nix file changed
run: |
git fetch origin ${{ github.base_ref }}
if git diff --name-only origin/${{ github.base_ref }} HEAD | grep 'nix'; then
echo "A nix file was changed. Testing devtools nix files..."
echo "nix_changed=true" >> $GITHUB_ENV
else
echo "No nix file was changed. No need to run tests."
echo "nix_changed=false" >> $GITHUB_ENV
fi
- uses: cachix/install-nix-action@v23
if: env.nix_changed == 'true'
with:
nix_path: nixpkgs=channel:nixos-unstable
- name: test devtools/flake.nix
if: env.nix_changed == 'true'
id: devtools_test_step
run: |
sed -i "s|/home/username/gitrepos/roc|$(realpath .)|g" devtools/flake.nix
cat devtools/flake.nix
mkdir -p ../temp
cp devtools/flake.nix ../temp
cp devtools/flake.lock ../temp
cd ../temp
git init
git add flake.nix flake.lock
nix develop
- name: Print tip on fail
if: steps.devtools_test_step.outcome == 'failure'
run: |
echo "The devtools test failed, this can likely be fixed by"
echo "locally deleting devtools/flake.lock and following the"
echo "instructions in devtools/README.md. This will create a"
echo "new flake.lock you should use to replace the old devtools/flake.lock"

View File

@ -0,0 +1,52 @@
on:
pull_request:
name: devtools nix files test - macos
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
devtools-test-macos:
name: devtools-test-mac
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Only run all steps if a nix file changed
run: |
git fetch origin ${{ github.base_ref }}
if git diff --name-only origin/${{ github.base_ref }} HEAD | grep 'nix'; then
echo "A nix file was changed. Testing devtools nix files..."
echo "nix_changed=true" >> $GITHUB_ENV
else
echo "No nix file was changed. No need to run tests."
echo "nix_changed=false" >> $GITHUB_ENV
fi
- name: test devtools/flake.nix
if: env.nix_changed == 'true'
id: devtools_test_step
run: |
sed -i '' "s|/home/username/gitrepos/roc|$(realpath .)|g" devtools/flake.nix
cat devtools/flake.nix
mkdir -p ../temp
cp devtools/flake.nix ../temp
cp devtools/flake.lock ../temp
cd ../temp
git init
git add flake.nix flake.lock
nix develop --show-trace
- name: Print tip on fail
if: steps.devtools_test_step.outcome == 'failure'
run: |
echo "The devtools test failed, this can likely be fixed by"
echo "locally deleting devtools/flake.lock and following the"
echo "instructions in devtools/README.md. This will create a"
echo "new flake.lock you should use to replace the old devtools/flake.lock"

103
.github/workflows/docker.yml vendored Normal file
View File

@ -0,0 +1,103 @@
on:
workflow_dispatch:
name: Docker images tests
jobs:
nightly-ubuntu-latest:
name: nightly-ubuntu-latest
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Copy example docker file
run: cp docker/nightly-ubuntu-latest/docker-compose.example.yml docker/nightly-ubuntu-latest/docker-compose.yml
- name: Build image
run: docker-compose -f docker/nightly-ubuntu-latest/docker-compose.yml build
- name: Run hello world test
run: docker-compose -f docker/nightly-ubuntu-latest/docker-compose.yml run roc examples/helloWorld.roc
nightly-ubuntu-2204:
name: nightly-ubuntu-2204
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Copy example docker file
run: cp docker/nightly-ubuntu-2204/docker-compose.example.yml docker/nightly-ubuntu-2204/docker-compose.yml
- name: Build image
run: docker-compose -f docker/nightly-ubuntu-2204/docker-compose.yml build
- name: Run hello world test
run: docker-compose -f docker/nightly-ubuntu-2204/docker-compose.yml run roc examples/helloWorld.roc
nightly-ubuntu-2004:
name: nightly-ubuntu-2004
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Copy example docker file
run: cp docker/nightly-ubuntu-2004/docker-compose.example.yml docker/nightly-ubuntu-2004/docker-compose.yml
- name: Build image
run: docker-compose -f docker/nightly-ubuntu-2004/docker-compose.yml build
- name: Run hello world test
run: docker-compose -f docker/nightly-ubuntu-2004/docker-compose.yml run roc examples/helloWorld.roc
nightly-debian-latest:
name: nightly-debian-latest
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Copy example docker file
run: cp docker/nightly-debian-latest/docker-compose.example.yml docker/nightly-debian-latest/docker-compose.yml
- name: Build image
run: docker-compose -f docker/nightly-debian-latest/docker-compose.yml build
- name: Run hello world test
run: docker-compose -f docker/nightly-debian-latest/docker-compose.yml run roc examples/helloWorld.roc
nightly-debian-bookworm:
name: nightly-debian-bookworm
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Copy example docker file
run: cp docker/nightly-debian-bookworm/docker-compose.example.yml docker/nightly-debian-bookworm/docker-compose.yml
- name: Build image
run: docker-compose -f docker/nightly-debian-bookworm/docker-compose.yml build
- name: Run hello world test
run: docker-compose -f docker/nightly-debian-bookworm/docker-compose.yml run roc examples/helloWorld.roc
nightly-debian-buster:
name: nightly-debian-buster
runs-on: [ubuntu-22.04]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Copy example docker file
run: cp docker/nightly-debian-buster/docker-compose.example.yml docker/nightly-debian-buster/docker-compose.yml
- name: Build image
run: docker-compose -f docker/nightly-debian-buster/docker-compose.yml build
- name: Run hello world test
run: docker-compose -f docker/nightly-debian-buster/docker-compose.yml run roc examples/helloWorld.roc

33
.github/workflows/macos_x86_64.yml vendored Normal file
View File

@ -0,0 +1,33 @@
on:
workflow_call:
name: Macos x86-64 rust tests
env:
RUST_BACKTRACE: 1
jobs:
test-rust-macos-x86-64:
runs-on: [self-hosted, macOS, X64]
timeout-minutes: 90
env:
RUSTC_WRAPPER: /Users/username1/.cargo/bin/sccache
steps:
- uses: actions/checkout@v3
- name: set LLVM_SYS_160_PREFIX
run: echo "LLVM_SYS_160_PREFIX=$(brew --prefix llvm@16)" >> $GITHUB_ENV
- name: Update PATH to use zig 11
run: |
echo "PATH=/Users/username1/Downloads/zig-macos-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: test_gen llvm tests
run: cargo nextest-gen-llvm --release --no-fail-fast --locked -E "package(test_gen) - test(gen_str::str_append_scalar)"
- name: regular rust tests
run: cargo test --locked --release -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_str::str_append_scalar --skip gen_tags::phantom_polymorphic_record && sccache --show-stats
# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
# this issue may be caused by using older versions of XCode

View File

@ -0,0 +1,30 @@
on:
pull_request:
schedule:
- cron: '0 9 * * *' # 9=9am utc+0
name: Check Markdown links
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
markdown-link-check:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- uses: gaurav-nelson/github-action-markdown-link-check@v1
with:
use-quiet-mode: 'yes'
use-verbose-mode: 'yes'
base-branch: 'main'
check-modified-files-only: 'yes'
if: ${{ github.event_name == 'pull_request' }}
- uses: gaurav-nelson/github-action-markdown-link-check@v1
with:
use-quiet-mode: 'yes'
use-verbose-mode: 'yes'
base-branch: 'main'
check-modified-files-only: 'no'
if: ${{ github.event_name == 'schedule' }}

View File

@ -1,31 +0,0 @@
on:
schedule:
- cron: '0 9 * * *'
name: Nightly Release Build
jobs:
build:
name: Test and Build
runs-on: [self-hosted, i5-4690K]
timeout-minutes: 90
env:
FORCE_COLOR: 1 # for earthly logging
steps:
- uses: actions/checkout@v2
- name: Earthly print version
run: earthly --version
- name: install dependencies, build, run tests, build release
run: ./ci/safe-earthly.sh +build-nightly-release
- name: Create pre-release with test_archive.tar.gz
uses: WebFreak001/deploy-nightly@v1.1.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # automatically provided by github actions
with:
upload_url: https://uploads.github.com/repos/rtfeldman/roc/releases/51880579/assets{?name,label}
release_id: 51880579
asset_path: ./roc_linux_x86_64.tar.gz
asset_name: roc_nightly-linux_x86_64-$$.tar.gz # $$ to inserts date (YYYYMMDD) and 6 letter commit hash
asset_content_type: application/gzip
max_releases: 3

View File

@ -0,0 +1,54 @@
on:
# pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
name: Nightly Release Linux arm64/aarch64
jobs:
build:
name: build and package nightly release
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- name: Update PATH to use zig 11
run: |
echo "PATH=/home/username/Downloads/zig-linux-aarch64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: create version.txt
run: ./ci/write_version.sh
- name: build release with lto
run: cargo build --profile=release-with-lto --locked --bin roc
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-linux_arm64-$DATE-$SHA" >> $GITHUB_ENV
# this makes the roc binary a lot smaller
- name: strip debug info
run: strip ./target/release-with-lto/roc
- name: Make nightly release tar archive
run: ./ci/package_release.sh ${{ env.RELEASE_FOLDER_NAME }}
- name: Upload roc nightly tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
retention-days: 4

View File

@ -0,0 +1,65 @@
on:
# pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
name: Nightly Release Linux x86_64
jobs:
build:
name: build and package nightly release
runs-on: [self-hosted, i7-6700K]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- name: Update PATH to use zig 11
run: |
echo "PATH=/home/big-ci-user/Downloads/zig-linux-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: create version.txt
run: ./ci/write_version.sh
- name: build release with lto
run: RUSTFLAGS="-C target-cpu=x86-64" cargo build --profile=release-with-lto --locked --bin roc
# target-cpu=x86-64 -> For maximal compatibility for all CPU's. This was also faster in our tests: https://roc.zulipchat.com/#narrow/stream/231635-compiler-development/topic/.2Ecargo.2Fconfig.2Etoml/near/325726299
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build wasm repl
run: ./ci/www-repl.sh
- name: Upload wasm repl tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
with:
name: roc_repl_wasm.tar.gz
path: roc_repl_wasm.tar.gz
retention-days: 4
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-linux_x86_64-$DATE-$SHA" >> $GITHUB_ENV
# this makes the roc binary a lot smaller
- name: strip debug info
run: strip ./target/release-with-lto/roc
- name: Make nightly release tar archive
run: ./ci/package_release.sh ${{ env.RELEASE_FOLDER_NAME }}
- name: Upload roc nightly tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
retention-days: 4

View File

@ -0,0 +1,72 @@
on:
# pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
name: Nightly Release macOS Apple Silicon
env:
RUST_BACKTRACE: 1
LLVM_SYS_160_PREFIX: /opt/homebrew/opt/llvm@16
jobs:
test-and-build:
name: Rust tests, build and package nightly release
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- run: zig version
- name: llvm version
run: llc --version | grep LLVM
- name: run tests
run: cargo test --locked --release
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-macos_apple_silicon-$DATE-$SHA" >> $GITHUB_ENV
- name: write version to file
run: ./ci/write_version.sh
- name: build nightly release
run: cargo build --locked --profile=release-with-lto --bin roc
# this makes the roc binary a lot smaller
- name: strip debug info
run: strip ./target/release-with-lto/roc
- name: package release
run: ./ci/package_release.sh ${{ env.RELEASE_FOLDER_NAME }}
- name: delete everything except the tar
run: ls | grep -v "roc_nightly.*tar\.gz" | xargs rm -rf
- name: extract tar for a quick test
run: ls | grep tar | xargs tar -xf
- name: test with rust platform
run: cd ${{ env.RELEASE_FOLDER_NAME }} && ./roc examples/platform-switching/rocLovesRust.roc
- name: print short commit SHA
run: git rev-parse --short "$GITHUB_SHA"
- name: print date
run: date
- name: Upload artifact Actually uploading to github releases has to be done manually
uses: actions/upload-artifact@v3
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
retention-days: 4

View File

@ -0,0 +1,63 @@
on:
# pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *' # 9=9am utc+0
name: Nightly Release macOS x86_64
env:
LLVM_SYS_160_PREFIX: /usr/local/opt/llvm@16
jobs:
test-build-upload:
name: build, test, package and upload nightly release
runs-on: [self-hosted, macOS, X64]
timeout-minutes: 120
steps:
- uses: actions/checkout@v3
- name: Update PATH to use zig 11
run: |
echo "PATH=/Users/username1/Downloads/zig-macos-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: write version to file
run: ./ci/write_version.sh
- name: execute rust tests
run: cargo test --release --locked -- --skip opaque_wrap_function --skip gen_list::bool_list_literal --skip platform_switching_swift --skip swift_ui --skip gen_str::str_append_scalar --skip gen_tags::phantom_polymorphic_record
# swift tests are skipped because of "Could not find or use auto-linked library 'swiftCompatibilityConcurrency'" on macos-11 x86_64 CI machine
# this issue may be caused by using older versions of XCode
- name: build release
run: RUSTFLAGS="-C target-cpu=x86-64" cargo build --profile=release-with-lto --locked --bin roc
# target-cpu=x86-64 -> For maximal compatibility for all CPU's.
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-macos_x86_64-$DATE-$SHA" >> $GITHUB_ENV
# this makes the roc binary a lot smaller
- name: strip debug info
run: strip ./target/release-with-lto/roc
- name: package release
run: ./ci/package_release.sh ${{ env.RELEASE_FOLDER_NAME }}
- name: Upload artifact. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
retention-days: 4

View File

@ -0,0 +1,16 @@
on:
schedule:
- cron: '0 9 * * *'
name: Nightly netlify build and deploy
jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: trigger netlify build and deploy
env:
HOOK: ${{ secrets.NETLIFY_BUILD_HOOK }}
run: |
curl -X POST -d {} https://api.netlify.com/build_hooks/${HOOK}

View File

@ -0,0 +1,39 @@
on:
# pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
name: Nightly Release Old Linux arm64 using Earthly
jobs:
build:
name: build and package nightly release
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 180
steps:
- uses: actions/checkout@v3
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-old_linux_arm64-$DATE-$SHA" >> $GITHUB_ENV
- run: earthly --version
- name: build release with earthly
run: earthly +build-nightly-release --RELEASE_FOLDER_NAME=${{ env.RELEASE_FOLDER_NAME }} --ZIG_ARCH=aarch64
- name: Upload roc nightly tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
retention-days: 4

View File

@ -0,0 +1,42 @@
on:
# pull_request:
workflow_dispatch:
schedule:
- cron: '0 9 * * *'
name: Nightly Release Old Linux x86_64 using Earthly
jobs:
build:
name: build and package nightly release
runs-on: [ubuntu-20.04]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- name: get commit SHA
run: echo "SHA=$(git rev-parse --short "$GITHUB_SHA")" >> $GITHUB_ENV
- name: get date
run: echo "DATE=$(date "+%Y-%m-%d")" >> $GITHUB_ENV
- name: build file name
env:
DATE: ${{ env.DATE }}
SHA: ${{ env.SHA }}
run: echo "RELEASE_FOLDER_NAME=roc_nightly-old_linux_x86_64-$DATE-$SHA" >> $GITHUB_ENV
- name: install earthly
run: sudo /bin/sh -c 'wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-amd64 -O /usr/local/bin/earthly && chmod +x /usr/local/bin/earthly && /usr/local/bin/earthly bootstrap --with-autocomplete'
- run: earthly --version
- name: build release with earthly
run: earthly +build-nightly-release --RELEASE_FOLDER_NAME=${{ env.RELEASE_FOLDER_NAME }} --RUSTFLAGS="-C target-cpu=x86-64"
- name: Upload roc nightly tar. Actually uploading to github releases has to be done manually.
uses: actions/upload-artifact@v3
with:
name: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
path: ${{ env.RELEASE_FOLDER_NAME }}.tar.gz
retention-days: 4

View File

@ -1,28 +0,0 @@
on: [pull_request]
name: Nix M1 cargo test
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
RUST_BACKTRACE: 1
jobs:
nix-m1:
name: nix-m1
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 90
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v2
with:
clean: "true"
- name: setup dependencies with nix and build the tests
run: nix develop -c cargo test --locked --release --no-run
- name: execute tests with guaranteed success
run: nix develop -c cargo test --locked --release --no-fail-fast || true # || true to return a successful exit code so that test failures can be observed

View File

@ -0,0 +1,22 @@
on:
workflow_call:
name: test cargo build on linux arm64 inside nix
env:
RUST_BACKTRACE: 1
jobs:
nix-linux-arm64-cargo:
name: nix-linux-arm64
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 150
steps:
- uses: actions/checkout@v3
- name: test release build
run: nix develop -c cargo build --release --locked
# TODO
#- name: build tests without running
# run: cargo test --no-run --release

View File

@ -0,0 +1,18 @@
on:
workflow_call:
name: test default.nix on linux arm64
env:
RUST_BACKTRACE: 1
jobs:
nix-linux-arm64-default:
name: nix-linux-arm64
runs-on: [self-hosted, Linux, ARM64]
timeout-minutes: 150
steps:
- uses: actions/checkout@v3
- name: test building default.nix
run: nix-build

33
.github/workflows/nix_linux_x86_64.yml vendored Normal file
View File

@ -0,0 +1,33 @@
on:
workflow_call:
name: Nix linux x86_64 cargo test
env:
RUST_BACKTRACE: 1
jobs:
nix-linux-x86:
name: nix-linux-x86
runs-on: [self-hosted, i5-4690K]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- name: test building default.nix
run: nix-build
- name: execute tests with --release
run: nix develop -c cargo test --locked --release
- name: test wasm32 cli_run
run: nix develop -c cargo test --locked --release --features="wasm32-cli-run"
- name: test the dev backend # these tests require an explicit feature flag
run: nix develop -c cargo nextest run --locked --release --package test_gen --no-default-features --features gen-dev --no-fail-fast
- name: wasm repl tests
run: nix develop -c crates/repl_test/test_wasm.sh
- name: test building wasm repl
run: nix develop -c ./ci/www-repl.sh

View File

@ -0,0 +1,59 @@
on:
workflow_call:
name: Nix apple silicon cargo test
env:
RUST_BACKTRACE: 1
jobs:
nix-apple-silicon:
name: nix-apple-silicon
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
# These started to accumulate quickly since #5990, not sure why
- name: Clean up old nix shells
run: rm -rf /private/tmp/nix-shell.*
- run: zig version
- name: check formatting with rustfmt
run: nix develop -c cargo fmt --all -- --check
- name: check code style with clippy
run: nix develop -c cargo clippy --workspace --tests -- --deny warnings
- name: check code style with clippy --release
run: nix develop -c cargo clippy --workspace --tests --release -- --deny warnings
- name: test building default.nix
run: nix-build
- name: execute tests with --release
run: nix develop -c cargo test --locked --release
- name: make a libapp.so for the next step
run: nix develop -c cargo run -- gen-stub-lib examples/platform-switching/rocLovesRust.roc
- name: check that the platform's produced dylib is loadable
run: cd examples/platform-switching/rust-platform && nix develop -c cargo test --release --locked
- name: test aarch64 dev backend
run: nix develop -c cargo nextest-gen-dev --locked --release --no-fail-fast
# we run the llvm wasm tests only on this machine because it is fast and wasm should be cross-target
- name: execute llvm wasm tests with --release
run: nix develop -c cargo test-gen-llvm-wasm --locked --release
- name: set env var and test website build script
run: |
nix develop -c bash www/build.sh
- name: wasm repl tests
run: nix develop -c crates/repl_test/test_wasm.sh
- name: test building wasm repl
run: nix develop -c ./ci/www-repl.sh

27
.github/workflows/nix_macos_x86_64.yml vendored Normal file
View File

@ -0,0 +1,27 @@
on:
workflow_call:
name: Nix macOS x86_64 cargo test
env:
RUST_BACKTRACE: 1
jobs:
nix-macos-x86-64:
name: nix-macos-x86-64
runs-on: [macos-12]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: cachix/install-nix-action@v22
- name: execute cli_run tests only, the full tests take too long but are run nightly
run: nix develop -c cargo test --locked --release -p roc_cli -- --skip hello_gui
# see 5932 for hello_gui
- name: make a libapp.so for the next step
run: nix develop -c cargo run -- gen-stub-lib examples/platform-switching/rocLovesRust.roc
- name: check that the platform's produced dylib is loadable
run: cd examples/platform-switching/rust-platform && nix develop -c cargo test --release --locked

View File

@ -1,4 +1,5 @@
on: [pull_request]
on:
pull_request:
name: SpellCheck
@ -12,17 +13,14 @@ env:
jobs:
spell-check:
name: spell check
runs-on: [self-hosted, linux]
runs-on: [ubuntu-20.04]
timeout-minutes: 10
env:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v2
with:
clean: "true"
- uses: actions/checkout@v3
- name: Earthly version
run: earthly --version
- run: cargo install typos-cli --version 1.0.11
- name: install spell checker, do spell check
run: ./ci/safe-earthly.sh +check-typos
- name: do spell check with typos-cli 1.0.11
run: typos

21
.github/workflows/stale.yml vendored Normal file
View File

@ -0,0 +1,21 @@
name: 'Close stale PRs'
on:
schedule:
- cron: '30 1 * * *'
jobs:
stale:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: actions/stale@v5
with:
delete-branch: true
exempt-pr-labels: 'blocked'
days-before-issue-close: -1
days-before-pr-stale: 30
days-before-pr-close: 30
stale-pr-message: 'Thank you for your contribution! Sometimes PRs end up staying open for a long time without activity, which can make the list of open PRs get long and time-consuming to review. To keep things manageable for reviewers, this bot automatically closes PRs that haven''t had activity in 60 days. This PR hasn''t had activity in 30 days, so it will be automatically closed if there is no more activity in the next 30 days. Keep in mind that PRs marked `Closed` are not deleted, so no matter what, the PR will still be right here in the repo. You can always access it and reopen it anytime you like!'
stale-pr-label: 'inactive for 30 days'
close-pr-label: 'auto-closed'

View File

@ -0,0 +1,50 @@
on:
workflow_dispatch:
name: Test latest nightly release for macOS Apple Silicon
jobs:
test-nightly:
name: test nightly macos aarch64
runs-on: [self-hosted, macOS, ARM64]
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- name: get the latest release archive
run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_apple_silicon-latest.tar.gz
- name: remove everything in this dir except the tar and ci folder # we want to test like a user who would have downloaded the release, so we clean up all files from the repo checkout
run: ls | grep -v "roc_nightly.*tar\.gz" | grep -v "^ci$" | xargs rm -rf
- name: decompress the tar
run: ls | grep "roc_nightly.*tar\.gz" | xargs tar -xzvf
- name: delete tar
run: ls | grep "roc_nightly.*tar\.gz" | xargs rm -rf
- name: rename nightly folder
run: mv roc_nightly* roc_nightly
- name: test roc hello world
run: cd roc_nightly && ./roc examples/helloWorld.roc
- name: test platform switching rust
run: cd roc_nightly && ./roc examples/platform-switching/rocLovesRust.roc
- name: test platform switching zig
run: cd roc_nightly && ./roc examples/platform-switching/rocLovesZig.roc
- name: test platform switching c
run: cd roc_nightly && ./roc examples/platform-switching/rocLovesC.roc
- name: test repl
run: |
cd ci/repl_basic_test
cargo build --release
cp target/release/repl_basic_test ../../roc_nightly
cd ../../roc_nightly
./repl_basic_test

View File

@ -0,0 +1,46 @@
on:
workflow_dispatch:
name: Test latest nightly releases for macOS and Linux x86_64
jobs:
test-nightly:
name: test nightly macos 11/12/13, ubuntu 20.04/22.04
strategy:
fail-fast: false
matrix:
os: [ macos-11, macos-12, macos-13, ubuntu-20.04, ubuntu-22.04 ]
runs-on: ${{ matrix.os }}
timeout-minutes: 90
steps:
- uses: actions/checkout@v3
- uses: goto-bus-stop/setup-zig@v2
with:
version: 0.11.0
- name: get the latest release archive for linux (x86_64)
if: startsWith(matrix.os, 'ubuntu')
run: |
curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-linux_x86_64-latest.tar.gz
- name: get the latest release archive for macos (x86_64)
if: startsWith(matrix.os, 'macos')
run: curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-macos_x86_64-latest.tar.gz
- run: zig version
- name: prep and run basic tests
run: |
./ci/basic_nightly_test.sh
- name: clean up, get old linux release (x86_64), run tests
if: startsWith(matrix.os, 'ubuntu')
run: |
rm -rf roc_nightly
curl -fOL https://github.com/roc-lang/roc/releases/download/nightly/roc_nightly-old_linux_x86_64-latest.tar.gz
./ci/basic_nightly_test.sh

66
.github/workflows/ubuntu_x86_64.yml vendored Normal file
View File

@ -0,0 +1,66 @@
on:
workflow_call:
name: CI
env:
RUST_BACKTRACE: 1
jobs:
test-zig-rust-wasm:
name: test zig, rust, wasm...
runs-on: [self-hosted, i7-6700K]
timeout-minutes: 90
env:
RUSTC_WRAPPER: /home/big-ci-user/.cargo/bin/sccache
steps:
- uses: actions/checkout@v3
- name: Check for duplicate AUTHORS
run: diff <(sort AUTHORS) <(sort AUTHORS | uniq) # The < operator treats a string as a file. diff 'succeeds' if no difference.
- name: Update PATH to use zig 11
run: |
echo "PATH=/home/big-ci-user/Downloads/zig-linux-x86_64-0.11.0:$PATH" >> $GITHUB_ENV
- run: zig version
- name: zig fmt check, zig tests
run: cd crates/compiler/builtins/bitcode && ./run-tests.sh
- name: roc format check on builtins
run: cargo run --locked --release format --check crates/compiler/builtins/roc
- name: zig wasm tests
run: cd crates/compiler/builtins/bitcode && ./run-wasm-tests.sh
- name: regular rust tests
# see #5904 for skipped test
run: cargo test --locked --release -- --skip cli_run::expects_dev_and_test && sccache --show-stats
- name: check that the platform's produced dylib is loadable
run: cd examples/platform-switching/rust-platform && LD_LIBRARY_PATH=. cargo test --release --locked
- name: test the dev backend # these tests require an explicit feature flag
run: cargo test --locked --release --package test_gen --no-default-features --features gen-dev && sccache --show-stats
- name: test gen-wasm single threaded # gen-wasm has some multithreading problems to do with the wasmer runtime
run: cargo test --locked --release --package test_gen --no-default-features --features gen-wasm -- --test-threads=1 && sccache --show-stats
- name: run `roc test` on Str builtins
run: cargo run --locked --release -- test crates/compiler/builtins/roc/Str.roc && sccache --show-stats
- name: run `roc test` on Dict builtins
run: cargo run --locked --release -- test crates/compiler/builtins/roc/Dict.roc && sccache --show-stats
- name: wasm repl test
run: crates/repl_test/test_wasm.sh && sccache --show-stats
- name: test building wasm repl
run: ./ci/www-repl.sh && sccache --show-stats
#TODO i386 (32-bit linux) cli tests
#TODO verify-no-git-changes
- name: test website build script
run: bash www/build.sh

View File

@ -0,0 +1,41 @@
on:
workflow_call:
name: windows - release build
env:
RUST_BACKTRACE: 1
jobs:
windows-release-build:
name: windows-release-build
runs-on: windows-2022
env:
LLVM_SYS_160_PREFIX: C:\LLVM-16.0.6-win64
timeout-minutes: 150
steps:
- uses: actions/checkout@v3
- run: Add-Content -Path "$env:GITHUB_ENV" -Value "GITHUB_RUNNER_CPU=$((Get-CimInstance Win32_Processor).Name)"
- name: download and install zig
run: |
curl.exe -f --output "C:\zig-windows-x86_64-0.11.0.zip" --url https://ziglang.org/download/0.11.0/zig-windows-x86_64-0.11.0.zip
cd C:\
7z x zig-windows-x86_64-0.11.0.zip
Add-Content $env:GITHUB_PATH "C:\zig-windows-x86_64-0.11.0\"
- name: zig version
run: zig version
- name: install rust nightly 1.71.0
run: rustup install nightly-2023-05-28
- name: set up llvm 16
run: |
curl.exe -f -L -O -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" https://github.com/roc-lang/llvm-package-windows/releases/download/v16.0.6/LLVM-16.0.6-win64.7z
7z x LLVM-16.0.6-win64.7z -oC:\LLVM-16.0.6-win64
- name: cargo build release.
run: cargo build --locked --release

65
.github/workflows/windows_tests.yml vendored Normal file
View File

@ -0,0 +1,65 @@
on:
workflow_call:
name: windows - subset of tests
env:
RUST_BACKTRACE: 1
jobs:
windows-test-subset:
name: windows-test-subset
runs-on: windows-2022
env:
LLVM_SYS_160_PREFIX: C:\LLVM-16.0.6-win64
timeout-minutes: 150
steps:
- uses: actions/checkout@v3
- run: Add-Content -Path "$env:GITHUB_ENV" -Value "GITHUB_RUNNER_CPU=$((Get-CimInstance Win32_Processor).Name)"
- uses: Swatinem/rust-cache@v2
with:
shared-key: "rust-cache-windows-${{env.GITHUB_RUNNER_CPU}}"
- name: download and install zig
run: |
curl.exe -f --output "C:\zig-windows-x86_64-0.11.0.zip" --url https://ziglang.org/download/0.11.0/zig-windows-x86_64-0.11.0.zip
cd C:\
7z x zig-windows-x86_64-0.11.0.zip
Add-Content $env:GITHUB_PATH "C:\zig-windows-x86_64-0.11.0\"
- run: zig version
- name: zig tests
run: |
cd crates\compiler\builtins\bitcode\
zig build test
- name: install rust nightly 1.71.0
run: rustup install nightly-2023-05-28
- name: set up llvm 16
run: |
curl.exe -f -L -O -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" https://github.com/roc-lang/llvm-package-windows/releases/download/v16.0.6/LLVM-16.0.6-win64.7z
7z x LLVM-16.0.6-win64.7z -oC:\LLVM-16.0.6-win64
- name: Build tests --release without running.
run: cargo test --locked --release --no-run
# Why are these tests not build with previous command? => fingerprint error. Use `CARGO_LOG=cargo::core::compiler::fingerprint=info` to investigate
- name: Build specific tests without running.
run: cargo test --locked --release --no-run -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_linker -p roc_cli -p test_gen
- name: Test setjmp/longjmp logic
run: cargo test-gen-dev --locked --release nat_alias && cargo test-gen-dev --locked --release a_crash
- name: Run gen tests
run: cargo test-gen-llvm --locked --release gen_str
- name: Actually run the tests.
run: cargo test --locked --release -p roc_ident -p roc_region -p roc_collections -p roc_can -p roc_types -p roc_solve -p roc_mono -p roc_gen_dev -p roc_gen_wasm -p roc_serialize -p roc_linker -p roc_cli

View File

@ -1,10 +1,10 @@
name: deploy www.roc-lang.org
# Whenever a commit lands on trunk, deploy the site
# Whenever a commit lands on `main`, deploy the site
on:
push:
branches:
- deploy-www # TODO change to trunk
- deploy-www
jobs:
deploy:

63
.gitignore vendored
View File

@ -5,31 +5,38 @@ zig-cache
.envrc
*.rs.bk
*.o
*.so
*.so.*
*.obj
*.dll
*.dylib
*.lib
*.def
*.tmp
*.wasm
*.exe
*.pdb
# llvm human-readable output
*.ll
*.bc
#valgrind
# valgrind
vgcore.*
# roc cache files
*.rh*
*.rm*
preprocessedhost
metadata
#editors
.idea/
.vscode/
.ignore
#files too big to track in git
editor/benches/resources/100000_lines.roc
editor/benches/resources/10000_lines.roc
editor/benches/resources/1000_lines.roc
editor/benches/resources/100_lines.roc
editor/benches/resources/25000000_lines.roc
editor/benches/resources/50000_lines.roc
editor/benches/resources/500_lines.roc
# file editor creates when no arg is passed
new-roc-project
.exrc
.vimrc
.nvimrc
# rust cache (sccache folder)
sccache_dir
@ -45,3 +52,33 @@ roc_linux_x86_64.tar.gz
# macOS .DS_Store files
.DS_Store
# files generated when formatting fails
*.roc-format-failed
*.roc-format-failed-ast-after
*.roc-format-failed-ast-before
# nix
result
# Only keep Cargo.lock dependencies for the main compiler.
# Examples and test only crates should be fine to be unlocked.
# This removes unnecessary lock file versioning.
# It also ensures the compiler can always pull in 1 version of things and doesn't get restricted by sub lockfiles.
/**/Cargo.lock
!/Cargo.lock
# snapshot tests temp file
*.pending-snap
# checkmate
checkmate_*.json
www/build/
www/main
www/dist
# ignore the examples folder in the WIP website; this is copied in from roc-lang/examples when building the site
www/content/examples
www/examples-main.zip
www/examples-main

View File

@ -1 +1 @@
13.0.0
16.0.0

View File

@ -1,7 +1,7 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: Roc
Upstream-Contact: Richard Feldman <oss@rtfeldman.com>
Source: https://github.com/rtfeldman/roc
Source: https://github.com/roc-lang/roc
Files: *
Copyright: © The Roc Contributors

128
AUTHORS
View File

@ -3,28 +3,27 @@ Folkert de Vries <folkert@folkertdev.nl>
Chad Stearns <chadtech0@gmail.com>
Christoph Hermann <schtoeffel@gmail.com>
Dan Bruder <danb@structionsite.com>
Ian Mackenzie <ian.e.mackenzie@gmail.com>
Mario Rogic <hello@mario.net.au>
Anton-4 <17049058+Anton-4@users.noreply.github.com>
Ian Mackenzie <ian.e.mackenzie@gmail.com>
Anton <17049058+Anton-4@users.noreply.github.com>
Sébastien Besnier <sebastien.fabrice.besnier@gmail.com>
Wolfgang Schuster <wolfadex@gmail.com>
Harrison Bachrach <harrison.bachrach@gmail.com>
Wolfgag Schuster <wolfadex@gmail.com>
Nathan Bleigh <nathan.bleigh@gmail.com>
Jared Ramirez <jaredramirez@me.com>
Aksel Wester <aksel@wester.co>
Jared Ramirez <jaredramirez@hey.com>
Dimitar Apostolov <d.apostolov@gmail.com>
Brendan Hansknecht <brendan.hansknecht@gmail.com>
Nathan Bleigh <nathan.bleigh@gmail.com>
Pablo Hirafuji <pablohirafuji@gmail.com>
Brendan Hansknecht <brendan.hansknecht@gmail.com>
Bob Shelline <bobshelline@gmail.com>
Guilherme Belmonte <contact@belmonte.dev>
Stefan Ladwig <koenig.s@gmail.com>
Wojciech Piekutowski <wojciech@piekutowski.net>
Zeljko Nesic <popara@gmail.com>
Lucas Rosa <x@rvcas.dev>
Pit Capitain <pit@capitain.de>
Lucas Rosa <x@rvcas.dev>
Celso Bonutti Filho <celso.bonuttif@gmail.com>
Eric Henry <correia.eh@gmail.com>
Ju Liu <ju@noredink.com>
Peter Fields <pcfields@gmail.com>
Brian J. Cardiff <bcardiff@gmail.com>
Basile Henry <bjm.henry@gmail.com>
@ -34,7 +33,7 @@ Dan Gieschen Knutson <dan.knutson@gmail.com>
Joshua Hoeflich <joshuaharry411@icloud.com>
Brian Carroll <brian.carroll.ireland@gmail.com>
Kofi Gumbs <h.kofigumbs@gmail.com>
Luiz de Oliveira <luizcarlos1405@gmail.com>
Luiz Carlos L. G. de Oliveira <luizcarlos1405@gmail.com>
Chelsea Troy <chelsea.dommert@gmail.com>
Shritesh Bhattarai <shr@ite.sh>
Kevin Sjöberg <mail@kevinsjoberg.com>
@ -44,27 +43,34 @@ Matthias Beyer <mail@beyermatthias.de>
Tim Whiting <tim@whitings.org>
Logan Lowder <logan.lowder@logikcull.com>
Joshua Warner <joshuawarner32@gmail.com>
Luiz Carlos L. G. de Oliveira <luizcarlos1405@gmail.com>
Oleksii Skidan <al.skidan@gmail.com>
Martin Janiczek <martin@janiczek.cz>
Eric Newbury <enewbury@users.noreply.github.com>
Ayaz Hafiz <ayaz.hafiz.1@gmail.com>
Johannes Maas <github@j-maas.de>
Takeshi Sato <doublequotation@gmail.com>
Oleksii Skidan <al.skidan@gmail.com>
Eric Newbury <eric@newbury.cloud>
Martin Janiczek <martin@janiczek.cz>
Johannes Maas <github@j-maas.de>
Ayaz Hafiz <ayaz.hafiz.1@gmail.com>
Michael Downey <mdow814@gmail.com>
Theo Felippe <public@theocodes.com>
Joost Baas <joost@joostbaas.eu>
Cristiano Piemontese <cristiano.piemontese@vidiemme.it>
Callum Dunster <cdunster@users.noreply.github.com>
Martin Stewart <MartinSStewart@gmail.com>
James Hegedus <jthegedus@hey.com>
Cristiano Piemontese <cristiano.piemontese@vidiemme.it>
Yann Simon <yann.simon.fr@gmail.com>
Shahn Hogan <shahnhogan@hotmail.com>
Tankor Smash <tankorsmash+github@gmail.com>
Ivo-Balbaert <ivo.balbaert@telenet.be>
Matthias Devlamynck <matthias.devlamynck@mailoo.org>
Jan Van Bruggen <JanCVanB@users.noreply.github.com>
Mats Sigge <<mats.sigge@gmail.com>>
Jan Van Bruggen <JanCVanB@pm.me>
Nick Mazuk <Nick-Mazuk@users.noreply.github.com>
Mats Sigge <mats.sigge@gmail.com>
Drew Lazzeri <dlazzeri1@gmail.com>
Tom Dohrmann <erbse.13@gmx.de>
Ryan Olson <ryanolsonx@gmail.com>
Elijah Schow <elijah.schow@gmail.com>
Emi Simpson <emi@alchemi.dev>
Celso Bonutti <i.am@cel.so>
Jose Quesada <jquesada2016@fau.edu>
Derek Gustafson <degustaf@gmail.com>
Philippe Vinchon <p.vinchon@gmail.com>
Pierre-Henri Trivier <phtrivier@yahoo.fr>
@ -72,13 +78,95 @@ Elliot Waite <1767836+elliotwaite@users.noreply.github.com>
zimt28 <1764689+zimt28@users.noreply.github.com>
Ananda Umamil <zweimach@zweimach.org>
SylvanSign <jake.d.bray@gmail.com>
Nikita Mounier <36044205+nikitamounier@users.noreply.github.com>
Cai Bingjun <62678643+C-BJ@users.noreply.github.com>
Nikita Mounier <nikita.mounier@gmail.com>
Kevin Gillette <kgillette628@gmail.com>
Jared Cone <jared.cone@gmail.com>
Sean Hagstrom <sean@seanhagstrom.com>
Kas Buunk <kasbuunk@icloud.com>
Kas Buunk <kasbuunk@icloud.com>
Tommy Graves <tommy@rwx.com>
Oskar Hahn <mail@oshahn.de>
Nuno Ferreira <nunogcferreira@gmail.com>
Jonas Schell <jonasschell@ocupe.org>
Mfon Eti-mfon <mfonetimfon@gmail.com>
Drake Bennion <drake.bennion@gmail.com>
Hashi364 <49736221+Kiyoshi364@users.noreply.github.com>
Jared Forsyth <jared@jaredforsyth.com>
Patrick Kilgore <git@pck.email>
Marten/Qqwy <w-m@wmcode.nl>
Tobias Steckenborn <tobias.steckenborn@consolvis.de>
Christoph Rüßler <christoph.ruessler@mailbox.org>
Ralf Engbers <raleng@users.noreply.github.com>
Mostly Void <7rat13@gmail.com>
Luis F. Gutierrez <luis@gutierrezhiller.com>
Ross Smyth <crs2017@gmail.com>
David A. Kunz <david.kunz@sap.com>
Paul Young <84700+paulyoung@users.noreply.github.com>
Rod <randomer@users.noreply.github.com>
Marko Vujanic <crashxx@gmail.com>
Jelle Besseling <jelle@pingiun.com>
isaacthefallenapple <isaacthefallenapple@gmail.com>
Bryce Miller <sandprickle@users.noreply.github.com>
Bjørn Madsen <bm@aeons.dk>
David Dunn <26876072+doubledup@users.noreply.github.com>
Vilem <17603372+buggymcbugfix@users.noreply.github.com>
KilianVounckx <kilianvounckx@hotmail.be>
J Teeuwissen <jelleteeuwissen@hotmail.nl>
Matthieu Pizenberg <matthieu.pizenberg@gmail.com>
rezzaghi <lbrezzaghi@gmail.com>
João Mota <jackthemotorcycle@gmail.com>
Marcos Prieto <marcospri@gmail.com>
Prajwal S N <prajwalnadig21@gmail.com>
Christopher Duncan <chris.duncan.arauz+git@protonmail.com>
Luke Boswell <lukewilliamboswell@gmail.com>
Luca Cervello <luca.cervello@gmail.com>
Josh Mak <joshmak@berkeley.edu>
Jakub Kozłowski <kubukoz@gmail.com>
Travis Staloch <twostepted@gmail.com>
Nick Gravgaard <nick@nickgravgaard.com>
Keerthana Kasthuril <76804118+keerthanak-tw@users.noreply.github.com>
Salman Shaik <salmansiddiq.shaik@gmail.com>
Austin Clements <austinclementsbass@gmail.com>
Georges Boris <georgesboris@gmail.com>
Marc Walter <walter.marc@outlook.com>
Nathan Freestone <17188138+nfreesto@users.noreply.github.com>
Lunarmagpie <Bambolambo0@gmail.com>
Ahmad Sattar <thehabbos007@gmail.com>
Jack Kellenberger <107156696+jmkellenberger@users.noreply.github.com>
Christopher Bertels <bakkdoor@flasht.de>
Henrikh Kantuni <henrikh.kantuni@gmail.com>
dankeyy <dankeyy@protonmail.com>
human154 <46430360+human154@users.noreply.github.com>
Ju Liu <liuju86@gmail.com>
Giacomo Cavalieri <giacomo.cavalieri@icloud.com>
Ajai Nelson <22969541+Aurelius333@users.noreply.github.com>
Agus Zubiaga <hi@aguz.me>
itmuckel <itmuckel@gmail.com>
Seth Workman <Saworkman1@gmail.com>
Jacob Zimmerman <sad2project@users.noreply.github.com>
Yuki Omoto <yukeomoto@gmail.com>
Leonardo Taglialegne <cmt.miniBill@gmail.com>
Jonas Schell <jonas@livekit.io>
Kiryl Dziamura <kiryl.dziamura@gmail.com>
Isaac Van Doren <69181572+isaacvando@users.noreply.github.com>
Jarl André Hübenthal <jarlah@protonmail.com>
Gabriel Dertoni <gab.dertoni@gmail.com>
Mattia Maldini <mattia512maldini@gmail.com>
David Smith <david.lawrence.smith@gmail.com>
Abdullah Umer <abdullahumer575@gmail.com>
Fábio Beirão <fdbeirao@gmail.com>
Tero Laxström <tlax@neonpeons.com>
HajagosNorbert <hajagosnorbi@gmail.com>
Hannes <h@nnes.dev>
K. Eisuke <ekawano114@gmail.com>
KekmaTime <136650032+KekmaTime@users.noreply.github.com>
NoaVidovic <noavidovic0@gmail.com>
Artsiom Shamsutdzinau <shamsartem@gmail.com>
Kadin Sayani <kadin.sayani@proton.me>
Nachiket Kanore <nachiket.kanore@gmail.com>
pinage404 <pinage404@gmail.com>
Fabian Schmalzried <fabhof@posteo.de>
Isak Jones <isak.jones.980@gmail.com>
Ch1n3du <danielonyesoh@gmail.com>
Elias Mulhall <eli.mulhall@gmail.com>
ABuffSeagull <reecevanatta@hey.com>

View File

@ -1,81 +1,62 @@
# Building the Roc compiler from source
If you run into any problems getting Roc built from source, please ask for help in the `#beginners` channel on [Roc Zulip](https://roc.zulipchat.com) (the fastest way), or create an issue in this repo!
## Using Nix
### On Linux/MacOS/NixOS x86_64/aarch64/arm64
On MacOS and Linux, we highly recommend Using [nix](https://nixos.org/download.html) to quickly install all dependencies necessary to build roc.
:warning: If you tried to run `cargo` in the repo folder before installing nix, make sure to execute `cargo clean` first. To prevent you from executing `cargo` outside of nix, tools like [direnv](https://github.com/nix-community/nix-direnv) and [lorri](https://github.com/nix-community/lorri) can put you in a nix shell automatically when you `cd` into the directory.
### On Linux x86_64 or MacOS aarch64/arm64/x86_64
#### Install
We highly recommend Using [nix](https://nixos.org/download.html) to automatically install all dependencies necessary to build roc.
If you are running ArchLinux or a derivative like Manjaro, you'll need to run `sudo sysctl -w kernel.unprivileged_userns_clone=1` before installing nix.
Install nix (not necessary on NixOS):
```
curl -L https://nixos.org/nix/install | sh
- If you are using WSL (Windows subsystem for Linux):
```sh
sh <(curl -L https://nixos.org/nix/install) --no-daemon
```
Start a fresh terminal session (= open a new terminal).
- For everything else:
install nixFlakes in your environment:
```
nix-env -iA nixpkgs.nixFlakes
```sh
sh <(curl -L https://nixos.org/nix/install) --daemon
```
Edit either `~/.config/nix/nix.conf` or `/etc/nix/nix.conf` and add:
```
Open a new terminal and edit either `~/.config/nix/nix.conf` or `/etc/nix/nix.conf` and add:
```text
experimental-features = nix-command flakes
```
If Nix was installed in multi-user mode, make sure to restart the nix-daemon.
If you don't know how to do this, restarting your computer will also do the job.
If Nix was installed in multi-user mode, make sure to restart the nix-daemon.
If you don't know how to do this, restarting your computer will also do the job.
#### Usage
Now with nix set up, you just need to run one command from the roc project root directory:
```
```sh
nix develop
```
You should be in a shell with everything needed to build already installed.
Use `cargo run help` to see all subcommands.
To use the `repl` subcommand, execute `cargo run repl`.
Use `cargo build` to build the whole project.
Read the instructions [here](devtools/README.md) to make nix work well with your development tools (vscode, vim, rust-analyzer...)
#### Extra tips
If you want to load all dependencies automatically whenever you `cd` into `roc`, check out [direnv](https://direnv.net/) and [lorri](https://github.com/nix-community/lorri).
If you want to load all dependencies automatically whenever you `cd` into `roc`, check out [direnv](https://direnv.net/).
Then you will no longer need to execute `nix develop` first.
### Editor
The editor is a :construction:WIP:construction: and not ready yet to replace your favorite editor, although if you want to try it out on nix, read on.
`cargo run edit` should work from NixOS, if you use another OS, follow the instructions below.
#### Nvidia GPU
```
nix run --override-input nixpkgs nixpkgs/nixos-21.11 --impure github:guibou/nixGL#nixVulkanNvidia -- cargo run edit
```
If you get an error like:
```
error: unable to execute '/nix/store/qk6...wjla-nixVulkanNvidia-470.103.01/bin/nixVulkanNvidia': No such file or directory
```
The intel command should work:
```
nix run --override-input nixpkgs nixpkgs/nixos-21.11 --impure github:guibou/nixGL#nixVulkanIntel -- cargo run edit
```
##### Integrated Intel Graphics
```
nix run --override-input nixpkgs nixpkgs/nixos-21.11 --impure github:guibou/nixGL#nixVulkanIntel -- cargo run edit
```
##### Other configs
Check the [nixGL repo](https://github.com/guibou/nixGL) for other graphics configurations. Feel free to ask us for help if you get stuck.
## Troubleshooting
Create an issue if you run into problems not listed here.
@ -85,14 +66,14 @@ That will help us improve this document for everyone who reads it in the future!
To build the compiler, you need these installed:
* [Zig](https://ziglang.org/), see below for version
* `libxkbcommon` - macOS seems to have it already; on Ubuntu or Debian you can get it with `apt-get install libxkbcommon-dev`
* On Debian/Ubuntu `sudo apt-get install pkg-config`
* LLVM, see below for version
- [Zig](https://ziglang.org/), see below for version
- On Debian/Ubuntu `sudo apt-get install pkg-config`
- LLVM, see below for version
- [rust](https://rustup.rs/)
To run the test suite (via `cargo test`), you additionally need to install:
* [`valgrind`](https://www.valgrind.org/) (needs special treatment to [install on macOS](https://stackoverflow.com/a/61359781))
- [`valgrind`](https://www.valgrind.org/) (needs special treatment to [install on macOS](https://stackoverflow.com/a/61359781))
Alternatively, you can use `cargo test --no-fail-fast` or `cargo test -p specific_tests` to skip over the valgrind failures & tests.
For debugging LLVM IR, we use [DebugIR](https://github.com/vaivaswatha/debugir). This dependency is only required to build with the `--debug` flag, and for normal development you should be fine without it.
@ -101,7 +82,7 @@ For debugging LLVM IR, we use [DebugIR](https://github.com/vaivaswatha/debugir).
You may see an error like this during builds:
```
```text
/usr/bin/ld: cannot find -lxcb-render
/usr/bin/ld: cannot find -lxcb-shape
/usr/bin/ld: cannot find -lxcb-xfixes
@ -109,51 +90,32 @@ You may see an error like this during builds:
If so, you can fix it like so:
```
```sh
sudo apt-get install libxcb-render0-dev libxcb-shape0-dev libxcb-xfixes0-dev
```
### Zig
**version: 0.9.1**
**version: 0.11.0**
For any OS, you can use [`zigup`](https://github.com/marler8997/zigup) to manage zig installations.
If you prefer a package manager, you can try the following:
- For MacOS, you can install with `brew install zig`
- For Ubuntu, you can use Snap: install with `snap install zig --classic --beta`
- For other systems, checkout this [page](https://github.com/ziglang/zig/wiki/Install-Zig-from-a-Package-Manager)
If you want to install it manually, you can also download Zig directly [here](https://ziglang.org/download/). Just make sure you download the right version, the bleeding edge master build is the first download link on this page.
- MacOS: `brew install zig@0.11.0`
- Systems with snap (such as Ubuntu): `snap install zig --classic --beta`
- Other systems: refer to the [zig documentation](https://github.com/ziglang/zig/wiki/Install-Zig-from-a-Package-Manager)
If you want to install it manually, you can [download the binary](https://ziglang.org/download/#release-0.11.0) and place it on your PATH.
Apart from the binary, the archive contains a `lib` folder, which needs to be copied next to the binary.
> WINDOWS NOTE: when you unpack the Zig archive on windows, the result is nested in an extra directory. The instructions on the zig website will seem to not work. So, double-check that the path to zig executable does not include the same directory name twice.
### LLVM
**version: 13.0.x**
For macOS, you can install LLVM 13 using `brew install llvm@13` and then adding
`$(brew --prefix llvm@13)/bin` to your `PATH`. You can confirm this worked by
running `llc --version` - it should mention "LLVM version 13.0.0" at the top.
You may also need to manually specify a prefix env var like so:
```
export LLVM_SYS_130_PREFIX=/usr/local/opt/llvm@13
```
**version: 16.0.x**
For Ubuntu and Debian:
```
sudo apt -y install lsb-release software-properties-common gnupg
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
./llvm.sh 13
```
If you use this script, you'll need to add `clang` to your `PATH`.
By default, the script installs it as `clang-13`. You can address this with symlinks like so:
```
sudo ln -s /usr/bin/clang-13 /usr/bin/clang
```
There are also alternative installation options at http://releases.llvm.org/download.html
[Troubleshooting](#troubleshooting)
See below for operating system specific installation instructions.
### Building
@ -161,9 +123,41 @@ Use `cargo build` to build the whole project.
Use `cargo run help` to see all subcommands.
To use the `repl` subcommand, execute `cargo run repl`.
The default is a developer build. For an optimized build, use:
```
cargo build --release --bin roc
```
### LLVM installation on Linux
For a current list of all dependency versions and their names in apt, see the Earthfile.
For Ubuntu and Debian:
```sh
sudo apt -y install lsb-release software-properties-common gnupg
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
./llvm.sh 16
```
If you use this script, you'll need to add `clang` to your `PATH`.
By default, the script installs it as `clang-16`. You can address this with symlinks like so:
```sh
sudo ln -s /usr/bin/clang-16 /usr/bin/clang
```
There are also alternative installation options at <http://releases.llvm.org/download.html>
[Troubleshooting](#troubleshooting)
For Fedora:
```sh
sudo dnf install llvm16 llvm16-devel
```
#### LLVM Linux troubleshooting
On some Linux systems we've seen the error "failed to run custom build command for x11".
On Ubuntu, running `sudo apt install pkg-config cmake libx11-dev` fixed this.
@ -171,44 +165,57 @@ On Ubuntu, running `sudo apt install pkg-config cmake libx11-dev` fixed this.
If you encounter `cannot find -lz` run `sudo apt install zlib1g-dev`.
If you encounter:
```
error: No suitable version of LLVM was found system-wide or pointed
to by LLVM_SYS_130_PREFIX.
```
Add `export LLVM_SYS_130_PREFIX=/usr/lib/llvm-13` to your `~/.bashrc` or equivalent file for your shell.
### LLVM installation on macOS
```text
error: No suitable version of LLVM was found system-wide or pointed
to by LLVM_SYS_160_PREFIX.
```
Add `export LLVM_SYS_160_PREFIX=/usr/lib/llvm-16` to your `~/.bashrc` or equivalent file for your shell.
### LLVM installation on MacOS
For macOS, you can install LLVM 16 using `brew install llvm@16` and then adding
`$(brew --prefix llvm@16)/bin` to your `PATH`. You can confirm this worked by
running `llc --version` - it should mention "LLVM version 16.0.x" at the top.
You may also need to manually specify a prefix env var like so:
```sh
export LLVM_SYS_160_PREFIX=$(brew --prefix llvm@16)
```
#### LLVM MacOS troubleshooting
If installing LLVM fails, it might help to run `sudo xcode-select -r` before installing again.
It might also be useful to add these exports to your shell:
```
```sh
export LDFLAGS="-L/usr/local/opt/llvm/lib -Wl,-rpath,/usr/local/opt/llvm/lib"
export CPPFLAGS="-I/usr/local/opt/llvm/include"
```
### LLVM installation on Windows
**Warning** While `cargo build` works on windows, linking roc programs does not yet, see issue #2608. This also means the repl, the editor and many tests will not work on windows.
Installing LLVM's prebuilt binaries doesn't seem to be enough for the `llvm-sys` crate that Roc depends on, so I had to follow the steps below:
**Warning** While `cargo build` works on windows, linking roc programs does not yet, see issue #2608. This also means the repl, and many tests will not work on windows.
The official LLVM pre-built binaries for Windows lack features that roc needs. Instead:
1. I downloaded and installed [Build Tools for Visual Studio 2019](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools&rel=16) (a full Visual Studio install should work too; the Build Tools are just the CLI tools, which is all I wanted)
1. Download the custom LLVM 7z archive [here](https://github.com/PLC-lang/llvm-package-windows/releases/tag/v13.0.1).
1. Download the custom LLVM 7z archive [here](https://github.com/roc-lang/llvm-package-windows/releases/download/v16.0.6/LLVM-16.0.6-win64.7z).
1. [Download 7-zip](https://www.7-zip.org/) to be able to extract this archive.
1. Extract the 7z file to where you want to permanently keep the folder.
1. In powershell, set the `LLVM_SYS_130_PREFIX` environment variable (check [here](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_environment_variables?view=powershell-7.2#saving-changes-to-environment-variables) to make this a permanent environment variable):
```
[Environment]::SetEnvironmentVariable(
"Path",
[Environment]::GetEnvironmentVariable("Path", "User") + ";C:\Users\anton\Downloads\LLVM-13.0.1-win64\bin",
"User"
)
```
1. Extract the 7z file to where you want to permanently keep the folder. We recommend you pick a path without any spaces in it.
1. In powershell, set the `LLVM_SYS_160_PREFIX` environment variable (check [here](https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_environment_variables?view=powershell-7.2#saving-environment-variables-with-the-system-control-panel) to make this a permanent environment variable):
```text
<# ! Replace YOUR_USERNAME ! #>
$env:LLVM_SYS_160_PREFIX = 'C:\Users\YOUR_USERNAME\Downloads\LLVM-16.0.6-win64'
```
Once all that was done, `cargo build` ran successfully for Roc!
#### LLVM Windows troubleshooting
If you see the build failing because some internal file is not available, it might be your anti-virus program. Cargo's behavior is kind of similar to a virus (downloading files from the internet, creating many files), and this has been known to cause problems.
### Build speed on WSL/WSL2
If your Roc project folder is in the Windows filesystem but you're compiling from Linux, rebuilds may be as much as 20x slower than they should be!
@ -221,15 +228,15 @@ makes build times a lot faster, and I highly recommend it.
Create `~/.cargo/config.toml` if it does not exist and add this to it:
```
```toml
[build]
# Link with lld, per https://github.com/rust-lang/rust/issues/39915#issuecomment-538049306
# Use target-cpu=native, per https://deterministic.space/high-performance-rust.html
rustflags = ["-C", "link-arg=-fuse-ld=lld", "-C", "target-cpu=native"]
```
Then install `lld` version 13 (e.g. with `$ sudo apt-get install lld-13`)
Then install `lld` version 16 (e.g. with `$ sudo apt-get install lld-16`)
and make sure there's a `ld.lld` executable on your `PATH` which
is symlinked to `lld-13`.
is symlinked to `lld-16`.
That's it! Enjoy the faster builds.

View File

@ -8,20 +8,20 @@ In the interest of fostering an open and welcoming environment, we as participan
Examples of behavior that contributes to creating a positive environment include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Kindly giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Kindly giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
- Focusing on what is best not just for us as individuals, but for the overall
community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address, without their explicit permission
* Telling others to be less sensitive, or that they should not feel hurt or offended by something
- The use of sexualized language or imagery, and sexual attention or advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email address, without their explicit permission
- Telling others to be less sensitive, or that they should not feel hurt or offended by something
## Enforcement Responsibilities
@ -41,4 +41,4 @@ Moderators who do not follow or enforce the Code of Conduct in good faith may fa
## Attribution
This Code of Conduct is adapted from the Contributor Covenant, version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
This Code of Conduct is adapted from the Contributor Covenant, version 1.4, available at <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>

View File

@ -2,45 +2,96 @@
## Code of Conduct
We are committed to providing a friendly, safe and welcoming environment for all. Make sure to take a look at the [Code of Conduct](CodeOfConduct.md)!
We are committed to providing a friendly, safe and welcoming environment for all. Make sure to take a look at the [Code of Conduct](CODE_OF_CONDUCT.md)!
## How to contribute
All contributions are appreciated! Typo fixes, bug fixes, feature requests,
bug reports are all helpful for the project.
If you are looking for a good place to start, consider reaching out on the `#contributing` channel on [Roc Zulip][roc-zulip].
Before making your first pull request, definitely talk to an existing contributor on [Roc Zulip][roc-zulip] first about what you plan to do! This can not only avoid duplicated effort, it can also avoid making a whole PR only to discover it won't be accepted because the change doesn't fit with the goals of the language's design or implementation.
If you are interested in larger, implementation- or research-heavy projects
related to Roc, check out [Roc Project Ideas][project-ideas] and reach out to us
on Zulip! These projects may be suitable for academic theses, independent
research, or even just valuable projects to learn from and improve Roc with.
## Building from Source
Check [Build from source](BUILDING_FROM_SOURCE.md) for instructions.
Check [Building from source](BUILDING_FROM_SOURCE.md) for instructions.
## Running Tests
Most contributors execute the following commands before pushing their code:
```
Most contributors execute the following commands before pushing their code:
```sh
cargo test
cargo fmt --all -- --check
cargo clippy --workspace --tests -- --deny warnings
```
Execute `cargo fmt --all` to fix the formatting.
If you want to run all tests and checks as they are run on CI, [install earthly](https://earthly.dev/get-earthly) and run:
```
earthly +test-all
## Generating Docs
If you make changes to [Roc's Standard Library](https://www.roc-lang.org/builtins/Str), you can add comments to the code following [the CommonMark Spec](https://spec.commonmark.org/current/) to further explain your intentions. You can view these changes locally with:
```sh
cargo run docs crates/compiler/builtins/roc/main.roc
```
Earthly may temporarily use a lot of disk space, up to 90 GB. This disk space is available again after rebooting.
This command will generate the documentation in the [`generated-docs`](generated-docs) directory.
## Contribution Tips
- If you've never made a pull request on github before, [this](https://www.freecodecamp.org/news/how-to-make-your-first-pull-request-on-github-3/) will be a good place to start.
- Create an issue if the purpose of a struct/field/type/function/... is not immediately clear from its name or nearby comments.
- You find good first issues [here](https://github.com/rtfeldman/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
- Before making your first pull request, definitely talk to an existing contributor on [Roc Zulip](https://roc.zulipchat.com) first about what you plan to do! This can not only avoid duplicated effort, it can also avoid making a whole PR only to discover it won't be accepted because the change doesn't fit with the goals of the language's design or implementation.
- It's a good idea to open a work-in-progress pull request as you begin working on something. This way, others can see that you're working on it, which avoids duplicate effort, and others can give feedback sooner rather than later if they notice a problem in the direction things are going. Be sure to include "WIP" in the title of the PR as long as it's not ready for review!
- Make sure to create a branch on the roc repository for your changes. We do not allow CI to be run on forks for security.
- All your commits need to be signed to prevent impersonation:
1. If you have a Yubikey, follow [guide 1](https://dev.to/paulmicheli/using-your-yubikey-to-get-started-with-gpg-3h4k), [guide 2](https://dev.to/paulmicheli/using-your-yubikey-for-signed-git-commits-4l73) and skip the steps below.
2. [Make a key to sign your commits.](https://docs.github.com/en/authentication/managing-commit-signature-verification/generating-a-new-gpg-key).
3. [Configure git to use your key.](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key)
4. Make git sign your commits automatically:
```
git config --global commit.gpgsign true
```
- You can find good first issues [here][good-first-issues]. Once you have gained some experience you can take a look at the [intermediate issues](https://github.com/roc-lang/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22intermediate+issue%22).
- [Fork](https://github.com/roc-lang/roc/fork) the repo so that you can apply your changes first on your own copy of the roc repo.
- It's a good idea to open a draft pull request as you begin working on something. This way, others can see that you're working on it, which avoids duplicate effort, and others can give feedback sooner rather than later if they notice a problem in the direction things are going. Click the button "ready for review" when it's ready.
### Commit signing
All your commits need to be signed [to prevent impersonation](https://dev.to/martiliones/how-i-got-linus-torvalds-in-my-contributors-on-github-3k4g). Check out [our guide for commit signing](devtools/signing.md).
#### Commit signing on NixOS
On NixOS pinentry can cause problems, the following setup works well for those with a KDE desktop. From `/etc/nixos/configuration.nix`:
```
programs.gnupg.agent = {
enable = true;
pinentryFlavor = "qt";
enableSSHSupport = true;
};
```
### Forgot to sign commits?
You can view your commits on github, those without the "Verified" badge still need to be signed.
If any of those is a merge commit, follow [these steps](https://stackoverflow.com/a/9958215/4200103) instead of the ones below.
If you have only one commit, running `git commit --amend --no-edit -S` would sign the latest commit 🚀.
In case you have multiple commits, you can sign them in two ways:
1. Switching to interactive rebase mode and editing the file:
- Enter into interactive mode, by running `git rebase -i HEAD~n` where `n` is the number of commits up to the most current commit you would like to see.
- This would display a set of commits in a text file like below:
```
pick hash2 commit message 2
pick hash1 commit message 1
```
- On a new line below a commit you want to sign, add `exec git commit --amend --no-edit -S`. Do this for all your unsigned commits.
2. Or run git rebase recursively:
- Find the oldest commit you want to sign, using the `git log --show-signature` command.
- Run the command `git rebase --exec 'git commit --amend --no-edit -n -S' -i HASH` which would sign all commits up to commit `HASH`.
If you already pushed unsigned commits, you may have to do a force push with `git push origin -f <branch_name>`.
## Can we do better?
Feel free to open an issue if you think this document can be improved or is unclear in any way.
[roc-zulip]: https://roc.zulipchat.com
[good-first-issues]: https://github.com/roc-lang/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22
[project-ideas]: https://docs.google.com/document/d/1mMaxIi7vxyUyNAUCs98d68jYj6C9Fpq4JIZRU735Kwg/edit?usp=sharing

5672
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,65 +1,40 @@
[workspace]
members = [
"compiler/ident",
"compiler/region",
"compiler/collections",
"compiler/exhaustive",
"compiler/module",
"compiler/parse",
"compiler/can",
"compiler/problem",
"compiler/types",
"compiler/builtins",
"compiler/constrain",
"compiler/unify",
"compiler/solve",
"compiler/late_solve",
"compiler/fmt",
"compiler/mono",
"compiler/alias_analysis",
"compiler/test_mono",
"compiler/load",
"compiler/load_internal",
"compiler/gen_llvm",
"compiler/gen_dev",
"compiler/gen_wasm",
"compiler/build",
"compiler/arena_pool",
"compiler/test_gen",
"compiler/roc_target",
"compiler/debug_flags",
"vendor/inkwell",
"vendor/pathfinding",
"vendor/pretty",
"bindgen",
"editor",
"ast",
"cli",
"code_markup",
"highlight",
"error_macros",
"reporting",
"repl_cli",
"repl_eval",
"repl_test",
"repl_wasm",
"test_utils",
"utils",
"docs",
"docs_cli",
"linker",
"wasi-libc-sys",
"crates/compiler/*",
"crates/vendor/*",
"crates/glue",
"crates/cli",
"crates/cli_utils",
"crates/highlight",
"crates/error_macros",
"crates/reporting",
"crates/packaging",
"crates/repl_cli",
"crates/repl_eval",
"crates/repl_test",
"crates/repl_ui",
"crates/repl_wasm",
"crates/repl_expect",
"crates/roc_std",
"crates/test_utils",
"crates/valgrind",
"crates/tracing",
"crates/utils/*",
"crates/docs",
"crates/docs_cli",
"crates/linker",
"crates/wasi-libc-sys",
"crates/wasm_module",
"crates/wasm_interp",
"crates/lang_srv",
]
exclude = [
# Examples sometimes have Rust hosts in their platforms. The compiler should ignore those.
"examples",
"ci/bench-runner",
# Ignore building these normally. They are only imported by tests.
# The tests will still correctly build them.
"cli_utils",
"compiler/test_mono_macros",
# `cargo build` would cause roc_std to be built with default features which errors on windows
"roc_std",
"ci/benchmarks/bench-runner",
"ci/repl_basic_test",
# Examples sometimes have Rust hosts in their platforms. The compiler should ignore those.
"crates/cli_testing_examples",
"examples",
]
# Needed to be able to run `cargo run -p roc_cli --no-default-features` -
# see www/build.sh for more.
@ -68,12 +43,157 @@ exclude = [
# workspace, and without `resolver = "2"` here, you can't use `-p` like this.
resolver = "2"
[workspace.package]
authors = ["The Roc Contributors"]
edition = "2021"
license = "UPL-1.0"
repository = "https://github.com/roc-lang/roc"
version = "0.0.1"
[workspace.dependencies]
# NOTE: roc-lang/inkwell is a fork of TheDan64/inkwell which does not change anything.
#
# The reason for this fork is that the way Inkwell is designed, you have to use
# a particular branch (e.g. "llvm8-0") in Cargo.toml. That would be fine, except that
# breaking changes get pushed directly to that branch, which breaks our build
# without warning.
#
# We tried referencing a specific rev on TheDan64/inkwell directly (instead of branch),
# but although that worked locally, it did not work on GitHub Actions. (After a few
# hours of investigation, gave up trying to figure out why.) So this is the workaround:
# having an immutable tag on the roc-lang/inkwell fork which points to
# a particular "release" of Inkwell.
#
# When we want to update Inkwell, we can sync up roc-lang/inkwell to the latest
# commit of TheDan64/inkwell, push a new tag which points to the latest commit,
# change the tag value in this Cargo.toml to point to that tag, and `cargo update`.
# This way, GitHub Actions works and nobody's builds get broken.
# TODO: Switch this back to roc-lang/inkwell once it is updated
inkwell = { git = "https://github.com/roc-lang/inkwell", branch = "inkwell-llvm-16", features = ["llvm16-0"] }
arrayvec = "0.7.2" # update roc_std/Cargo.toml on change
backtrace = "0.3.67"
base64-url = "1.4.13"
bincode = "1.3.3"
bitflags = "1.3.2"
bitvec = "1.0.1"
blake3 = "1.3.3"
brotli = "3.3.4" # used for decompressing tarballs over HTTPS, if the server supports brotli
bumpalo = { version = "3.12.0", features = ["collections"] }
bytemuck = { version = "1.13.1", features = ["derive"] }
capstone = { version = "0.11.0", default-features = false }
cgmath = "0.18.0"
chrono = "0.4.26"
clap = { version = "4.2.7", default-features = false, features = ["std", "color", "suggestions", "help", "usage", "error-context"] }
colored = "2.0.0"
console_error_panic_hook = "0.1.7"
const_format = { version = "0.2.30", features = ["const_generics"] }
copypasta = "0.8.2"
criterion = { git = "https://github.com/Anton-4/criterion.rs", features = ["html_reports"], rev = "30ea0c5" }
criterion-perf-events = { git = "https://github.com/Anton-4/criterion-perf-events", rev = "0f38c3e" }
crossbeam = "0.8.2"
dircpy = "0.3.14"
distance = "0.4.0"
encode_unicode = "1.0.0"
errno = "0.3.0"
flate2 = "1.0.25"
fnv = "1.0.7"
fs_extra = "1.3.0"
futures = "0.3.26"
glyph_brush = "0.7.7"
hashbrown = { version = "0.13.2", features = ["bumpalo"] }
iced-x86 = { version = "1.18.0", default-features = false, features = ["std", "decoder", "op_code_info", "instr_info"] }
im = "15.1.0"
im-rc = "15.1.0"
indexmap = "1.9.2"
indoc = "1.0.9"
insta = "1.28.0"
js-sys = "0.3.61"
lazy_static = "1.4.0"
libc = "0.2.139" # update roc_std/Cargo.toml on change
libfuzzer-sys = "0.4"
libloading = "0.7.4"
libtest-mimic = "0.6.0"
log = "0.4.17"
mach_object = "0.1"
maplit = "1.0.2"
memmap2 = "0.5.10"
mimalloc = { version = "0.1.34", default-features = false }
nonempty = "0.8.1"
object = { version = "0.30.3", features = ["read", "write"] }
packed_struct = "0.10.1"
page_size = "0.5.0"
palette = "0.6.1"
parking_lot = "0.12"
peg = "0.8.1"
perfcnt = "0.8.0"
pest = "2.5.6"
pest_derive = "2.5.6"
pretty_assertions = "1.3.0" # update roc_std/Cargo.toml on change
proc-macro2 = "1.0.63"
proptest = "1.1.0"
pulldown-cmark = { version = "0.9.2", default-features = false }
quickcheck = "1.0.3" # update roc_std/Cargo.toml on change
quickcheck_macros = "1.0.0" # update roc_std/Cargo.toml on change
quote = "1.0.23"
rand = "0.8.5"
regex = "1.7.1"
remove_dir_all = "0.8.1"
reqwest = { version = "0.11.20", default-features = false, features = ["blocking", "rustls-tls"] } # default-features=false removes libopenssl as a dependency on Linux, which might not be available!
rlimit = "0.9.1"
rustyline = { git = "https://github.com/roc-lang/rustyline", rev = "e74333c" }
rustyline-derive = { git = "https://github.com/roc-lang/rustyline", rev = "e74333c" }
schemars = "0.8.12"
serde = { version = "1.0.153", features = ["derive"] } # update roc_std/Cargo.toml on change
serde-xml-rs = "0.6.0"
serde_json = "1.0.94" # update roc_std/Cargo.toml on change
serial_test = "1.0.0"
signal-hook = "0.3.15"
smallvec = { version = "1.10.0", features = ["const_generics", "const_new"] }
snafu = { version = "0.7.4", features = ["backtraces"] }
static_assertions = "1.1.0" # update roc_std/Cargo.toml on change
strip-ansi-escapes = "0.1.1"
strum = { version = "0.24.1", features = ["derive"] }
strum_macros = "0.24.3"
syn = { version = "1.0.109", features = ["full", "extra-traits"] }
tar = "0.4.38"
target-lexicon = "0.12.6"
tempfile = "=3.2.0"
threadpool = "1.8.1"
tracing = { version = "0.1.37", features = ["release_max_level_off"] }
tracing-appender = "0.2.2"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
unicode-segmentation = "1.10.1"
uuid = { version = "1.3.0", features = ["v4"] }
walkdir = "2.3.2"
wasm-bindgen = "0.2.84"
wasm-bindgen-futures = "0.4.34"
wgpu = "0.12.0"
wgpu_glyph = "0.16.0"
winapi = { version = "0.3.9", features = ["memoryapi"] }
winit = "0.26.1"
wyhash = "0.5.0"
# Optimizations based on https://deterministic.space/high-performance-rust.html
[profile.release]
lto = "thin"
codegen-units = 1
# debug = true # enable when profiling
[profile.dev]
debug = "line-tables-only"
[profile.bench]
lto = "thin"
codegen-units = 1
lto = "thin"
[profile.release-with-debug]
inherits = "release"
debug = true
[profile.release-with-lto]
inherits = "release"
lto = "thin" # TODO: We could consider full here since this is only used for packaged release on github.
[profile.debug-full]
inherits = "dev"
debug = true

158
Earthfile
View File

@ -1,4 +1,6 @@
FROM rust:1.60.0-slim-bullseye # make sure to update rust-toolchain.toml too so that everything uses the same rust version
VERSION 0.6
FROM rust:1.71.1-slim-buster # make sure to update rust-toolchain.toml too so that everything uses the same rust version
WORKDIR /earthbuild
prep-debian:
@ -7,155 +9,53 @@ prep-debian:
install-other-libs:
FROM +prep-debian
RUN apt -y install wget git
RUN apt -y install libxcb-shape0-dev libxcb-xfixes0-dev # for editor clipboard
RUN apt -y install libasound2-dev # for editor sounds
RUN apt -y install libunwind-dev pkg-config libx11-dev zlib1g-dev
RUN apt -y install libunwind-dev pkg-config zlib1g-dev
RUN apt -y install unzip # for www/build.sh
install-zig-llvm-valgrind-clippy-rustfmt:
install-zig-llvm:
ARG ZIG_ARCH
FROM +install-other-libs
# editor
RUN apt -y install libxkbcommon-dev
# zig
RUN wget -c https://ziglang.org/download/0.9.1/zig-linux-x86_64-0.9.1.tar.xz --no-check-certificate
RUN tar -xf zig-linux-x86_64-0.9.1.tar.xz
RUN ln -s /earthbuild/zig-linux-x86_64-0.9.1/zig /bin/zig
RUN wget -c https://ziglang.org/download/0.11.0/zig-linux-$ZIG_ARCH-0.11.0.tar.xz --no-check-certificate
RUN tar -xf zig-linux-$ZIG_ARCH-0.11.0.tar.xz
RUN ln -s /earthbuild/zig-linux-$ZIG_ARCH-0.11.0/zig /bin/zig
# zig builtins wasm tests
RUN apt -y install build-essential
RUN cargo install wasmer-cli --features "singlepass"
# llvm
RUN apt -y install lsb-release software-properties-common gnupg
RUN wget https://apt.llvm.org/llvm.sh
RUN chmod +x llvm.sh
RUN ./llvm.sh 13
RUN ln -s /usr/bin/clang-13 /usr/bin/clang
RUN ./llvm.sh 16
RUN ln -s /usr/bin/clang-16 /usr/bin/clang
# use lld as linker
RUN ln -s /usr/bin/lld-13 /usr/bin/ld.lld
RUN ln -s /usr/bin/lld-16 /usr/bin/ld.lld
RUN apt -y install libpolly-16-dev # required by llvm-sys crate
ENV RUSTFLAGS="-C link-arg=-fuse-ld=lld -C target-cpu=native"
# valgrind
RUN apt -y install valgrind
# clippy
RUN rustup component add clippy
# rustfmt
RUN rustup component add rustfmt
# wasm repl & tests
RUN rustup target add wasm32-unknown-unknown wasm32-wasi
RUN apt -y install libssl-dev
RUN OPENSSL_NO_VENDOR=1 cargo install wasm-pack
# criterion
RUN cargo install cargo-criterion
# sccache
RUN cargo install sccache
RUN cargo install sccache --locked
RUN sccache -V
ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache
ENV SCCACHE_DIR=/earthbuild/sccache_dir
ENV CARGO_INCREMENTAL=0 # no need to recompile package when using new function
copy-dirs:
FROM +install-zig-llvm-valgrind-clippy-rustfmt
COPY --dir bindgen cli cli_utils compiler docs docs_cli editor ast code_markup error_macros highlight utils test_utils reporting repl_cli repl_eval repl_test repl_wasm repl_www roc_std vendor examples linker Cargo.toml Cargo.lock version.txt www wasi-libc-sys ./
test-zig:
FROM +install-zig-llvm-valgrind-clippy-rustfmt
COPY --dir compiler/builtins/bitcode ./
RUN cd bitcode && ./run-tests.sh && ./run-wasm-tests.sh
build-rust-test:
FROM +copy-dirs
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo test --locked --release --features with_sound --workspace --no-run && sccache --show-stats
check-clippy:
FROM +build-rust-test
RUN cargo clippy -V
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo clippy --workspace --tests -- --deny warnings
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo clippy --workspace --tests --release -- --deny warnings
check-rustfmt:
FROM +build-rust-test
RUN cargo fmt --version
RUN cargo fmt --all -- --check
check-typos:
RUN cargo install typos-cli --version 1.0.11 # version set to prevent confusion if the version is updated automatically
COPY --dir .github ci cli cli_utils compiler docs editor examples ast code_markup highlight utils linker nightly_benches packages roc_std www *.md LEGAL_DETAILS shell.nix version.txt ./
RUN typos
test-rust:
FROM +build-rust-test
ENV ROC_WORKSPACE_DIR=/earthbuild
ENV RUST_BACKTRACE=1
# for race condition problem with cli test
ENV ROC_NUM_WORKERS=1
# run one of the benchmarks to make sure the host is compiled
# not pre-compiling the host can cause race conditions
RUN echo "4" | cargo run --release examples/benchmarks/NQueens.roc
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo test --locked --release --features with_sound --workspace && sccache --show-stats
# test the dev and wasm backend: they require an explicit feature flag.
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo test --locked --release --package test_gen --no-default-features --features gen-dev && sccache --show-stats
# gen-wasm has some multithreading problems to do with the wasmer runtime. Run it single-threaded as a separate job
RUN --mount=type=cache,target=$SCCACHE_DIR \
cargo test --locked --release --package test_gen --no-default-features --features gen-wasm -- --test-threads=1 && sccache --show-stats
# repl_test: build the compiler for wasm target, then run the tests on native target
RUN --mount=type=cache,target=$SCCACHE_DIR \
repl_test/test_wasm.sh && sccache --show-stats
# run i386 (32-bit linux) cli tests
# NOTE: disabled until zig 0.9
# RUN echo "4" | cargo run --locked --release --features="target-x86" -- --target=x86_32 examples/benchmarks/NQueens.roc
# RUN --mount=type=cache,target=$SCCACHE_DIR \
# cargo test --locked --release --features with_sound --test cli_run i386 --features="i386-cli-run" && sccache --show-stats
# make sure website deployment works (that is, make sure build.sh returns status code 0)
ENV REPL_DEBUG=1
RUN bash www/build.sh
verify-no-git-changes:
FROM +test-rust
# If running tests caused anything to be changed or added (without being
# included in a .gitignore somewhere), fail the build!
#
# How it works: the `git ls-files` command lists all the modified or
# uncommitted files in the working tree, the `| grep -E .` command returns a
# zero exit code if it listed any files and nonzero otherwise (which is the
# opposite of what we want), and the `!` at the start inverts the exit code.
RUN ! git ls-files --deleted --modified --others --exclude-standard | grep -E .
test-all:
BUILD +test-zig
BUILD +check-rustfmt
BUILD +check-clippy
BUILD +test-rust
BUILD +verify-no-git-changes
ARG ZIG_ARCH
FROM +install-zig-llvm --ZIG_ARCH=$ZIG_ARCH
COPY --dir crates examples Cargo.toml Cargo.lock version.txt .cargo www rust-toolchain.toml ./
build-nightly-release:
FROM +test-rust
COPY --dir .git LICENSE LEGAL_DETAILS ./
ARG RELEASE_FOLDER_NAME
ARG RUSTFLAGS
ARG ZIG_ARCH=x86_64
FROM +copy-dirs --ZIG_ARCH=$ZIG_ARCH
COPY --dir .git LICENSE LEGAL_DETAILS ci ./
# version.txt is used by the CLI: roc --version
RUN printf "nightly pre-release, built from commit " > version.txt
RUN git log --pretty=format:'%h' -n 1 >> version.txt
RUN printf " on: " >> version.txt
RUN date >> version.txt
RUN RUSTFLAGS="-C target-cpu=x86-64" cargo build --features with_sound --release
RUN cd ./target/release && tar -czvf roc_linux_x86_64.tar.gz ./roc ../../LICENSE ../../LEGAL_DETAILS ../../examples/hello-world ../../compiler/builtins/bitcode/src/ ../../roc_std
SAVE ARTIFACT ./target/release/roc_linux_x86_64.tar.gz AS LOCAL roc_linux_x86_64.tar.gz
# compile everything needed for benchmarks and output a self-contained dir from which benchmarks can be run.
prep-bench-folder:
FROM +copy-dirs
ARG BENCH_SUFFIX=branch
RUN cargo criterion -V
RUN --mount=type=cache,target=$SCCACHE_DIR cd cli && cargo criterion --no-run
RUN mkdir -p bench-folder/compiler/builtins/bitcode/src
RUN mkdir -p bench-folder/target/release/deps
RUN mkdir -p bench-folder/examples/benchmarks
RUN cp examples/benchmarks/*.roc bench-folder/examples/benchmarks/
RUN cp -r examples/benchmarks/platform bench-folder/examples/benchmarks/
RUN cp compiler/builtins/bitcode/src/str.zig bench-folder/compiler/builtins/bitcode/src
RUN cp target/release/roc bench-folder/target/release
# copy the most recent time bench to bench-folder
RUN cp target/release/deps/`ls -t target/release/deps/ | grep time_bench | head -n 1` bench-folder/target/release/deps/time_bench
SAVE ARTIFACT bench-folder AS LOCAL bench-folder-$BENCH_SUFFIX
RUN ./ci/write_version.sh
RUN RUSTFLAGS=$RUSTFLAGS cargo build --profile=release-with-lto --locked --bin roc
# strip debug info
RUN strip ./target/release-with-lto/roc
RUN ./ci/package_release.sh $RELEASE_FOLDER_NAME
RUN ls
SAVE ARTIFACT ./$RELEASE_FOLDER_NAME.tar.gz AS LOCAL $RELEASE_FOLDER_NAME.tar.gz

492
FAQ.md
View File

@ -1,19 +1,78 @@
Click the ☰ button in the top left to see and search the table of contents.
# Frequently Asked Questions
# Why make a new editor instead of making an LSP plugin for VSCode, Vim or Emacs?
The Roc editor is one of the key areas where we want to innovate. Constraining ourselves to a plugin for existing editors would severely limit our possibilities for innovation.
## Where did the name Roc come from?
A key part of our editor will be the use of plugins that are shipped with libraries. Think of a regex visualizer, parser debugger, or color picker. For library authors, it would be most convenient to write these plugins in Roc. Trying to dynamically load library plugins (written in Roc) in, for example, VSCode seems very difficult.
<img width="128" alt="The Roc logo, an origami bird" src="https://user-images.githubusercontent.com/1094080/92188927-e61ebd00-ee2b-11ea-97ef-2fc88e0094b0.png">
## Is there syntax highlighting for Vim/Emacs/VS Code or a LSP?
The Roc programming language is named after [a mythical bird](<https://en.wikipedia.org/wiki/Roc_(mythology)>).
Not currently. Although they will presumably exist someday, while Roc is in the early days there's actually a conscious
effort to focus on the Roc Editor *instead of* adding Roc support to other editors - specifically in order to give the Roc
Editor the best possible chance at kickstarting a virtuous cycle of plugin authorship.
That's why the logo is a bird. It's specifically an [_origami_ bird](https://youtu.be/9gni1t1k1uY) as an homage
to [Elm](https://elm-lang.org/)'s tangram logo.
This is an unusual approach, but there are more details in [this 2021 interview](https://youtu.be/ITrDd6-PbvY?t=212).
Roc is a direct descendant of Elm. The languages are similar, but not the same.
[Origami](https://en.wikipedia.org/wiki/Origami) likewise has similarities to [tangrams](https://en.wikipedia.org/wiki/Tangram), although they are not the same.
Both involve making a surprising variety of things
from simple primitives. [_Folds_](<https://en.wikipedia.org/wiki/Fold_(higher-order_function)>)
are also common in functional programming.
In the meantime, using CoffeeScript syntax highlighting for .roc files turns out to work surprisingly well!
The logo was made by tracing triangles onto a photo of a physical origami bird.
Its made of triangles because triangles are a foundational primitive in
computer graphics.
The name was chosen because it makes for a three-letter file extension, it means something
fantastical, and it has incredible potential for puns. Here are some different ways to spell it:
- **Roc** - traditional
- **roc** - low-key
- **ROC** - [YELLING](https://package.elm-lang.org/packages/elm/core/latest/String#toUpper)
- **Röc** - [metal 🤘](https://en.wikipedia.org/wiki/Metal_umlaut)
Fun fact: "roc" translates to 鹏 in Chinese, [which means](https://www.mdbg.net/chinese/dictionary?page=worddict&wdrst=0&wdqb=%E9%B9%8F) "a large fabulous bird."
## Why can't functions be compared for equality using the `==` operator?
Function equality has been proven to be undecidable in the general case because of the [halting problem](https://en.wikipedia.org/wiki/Halting_problem).
So while we as humans might be able to look at `\x -> x + 1` and `\x -> 1 + x` and know that they're equivalent,
in the general case it's not possible for a computer to do this reliably.
There are some other potential ways to define function equality, but they all have problems.
One way would be to have two functions be considered equal if their source code is equivalent. (Perhaps disregarding
comments and spaces.) This sounds reasonable, but it means that now revising a function to do
exactly the same thing as before (say, changing `\x -> x + 1` to `\x -> 1 + x`) can cause a bug in a
distant part of the code base. Defining function equality this way means that revising a function's internals
is no longer a safe, local operation - even if it gives all the same outputs for all the same inputs.
Another option would be to define it using "reference equality." This is what JavaScript does, for example.
However, Roc does not use reference equality anywhere else in the language, and it would mean that (for example)
passing `\x -> x + 1` to a function compared to defining `fn = \x -> x + 1` elsewhere and then passing `fn` into
the function might give different answers.
Both of these would make revising code riskier across the entire language, which is very undesirable.
Another option would be to define that function equality always returns `false`. So both of these would evaluate
to `false`:
- `(\x -> x + 1) == (\x -> 1 + x)`
- `(\x -> x + 1) == (\x -> x + 1)`
This makes function equality effectively useless, while still technically allowing it. It has some other downsides:
- Now if you put a function inside a record, using `==` on that record will still type-check, but it will then return `false`. This could lead to bugs if you didn't realize you had accidentally put a function in there - for example, because you were actually storing a different type (e.g. an opaque type) and didn't realize it had a function inside it.
- If you put a function (or a value containing a function) into a `Dict` or `Set`, you'll never be able to get it out again. This is a common problem with [NaN](https://en.wikipedia.org/wiki/NaN), which is also defined not to be equal to itself.
The first of these problems could be addressed by having function equality always return true instead of false (since that way it would not affect other fields' equality checks in a record), but that design has its own problems:
- Although function equality is still useless, `(\x -> x + 1) == (\x -> x)` returns `Bool.true`. Even if it didn't lead to bugs in practice, this would certainly be surprising and confusing to beginners.
- Now if you put several different functions into a `Dict` or `Set`, only one of them will be kept; the others will be discarded or overwritten. This could cause bugs if a value stored a function internally, and then other functions relied on that internal function for correctness.
Each of these designs makes Roc a language that's some combination of more error-prone, more confusing, and more
brittle to change. Disallowing function equality at compile time eliminates all of these drawbacks.
Note that you can provide a custom implementation of the `Eq` ability for an opaque type that contains a function,
in any way you like (including ignoring the function for equality).
## Why is there no way to specify "import everything this module exposes" in `imports`?
@ -40,50 +99,10 @@ circulate about how to unlock those speed boosts. If Roc had this feature, it's
piece of advice would eventually circulate: "don't use this feature because it slows down your builds."
If a feature exists in a language, but the common recommendation is never to use it, that's cause for reconsidering
whether the feature should be in the language at all. In the case of this feature, I think it's simpler if the
whether the feature should be in the language at all. In the case of this feature, it's simpler if the
language doesn't have it; that way nobody has to learn (or spend time spreading the word) about the
performance-boosting advice not to use it.
## Why can't functions be compared for equality using the `==` operator?
Function equality has been proven to be undecidable in the general case because of the [halting problem](https://en.wikipedia.org/wiki/Halting_problem).
So while we as humans might be able to look at `\x -> x + 1` and `\x -> 1 + x` and know that they're equivalent,
in the general case it's not possible for a computer to do this reliably.
There are some other potential ways to define function equality, but they all have problems.
One way would be to have two functions be considered equal if their source code is equivalent. (Perhaps disregarding
comments and spaces.) This sounds reasonable, but it means that now revising a function to do
exactly the same thing as before (say, changing `\x -> x + 1` to `\x -> 1 + x`) can cause a bug in a
distant part of the code base. Defining function equality this way means that revising a function's internals
is no longer a safe, local operation - even if it gives all the same outputs for all the same inputs.
Another option would be to define it using "reference equality." This is what JavaScript does, for example.
However, Roc does not use reference equality anywhere else in the language, and it would mean that (for example)
passing `\x -> x + 1` to a function compared to defining `fn = \x -> x + 1` elsewhere and then passing `fn` into
the function might give different answers.
Both of these would make revising code riskier across the entire language, which is very undesirable.
Another option would be to define that function equality always returns `False`. So both of these would evaluate
to `False`:
* `(\x -> x + 1) == (\x -> 1 + x)`
* `(\x -> x + 1) == (\x -> x + 1)`
This makes function equality effectively useless, while still technically allowing it. It has some other downsides:
* Now if you put a function inside a record, using `==` on that record will still type-check, but it will then return `False`. This could lead to bugs if you didn't realize you had accidentally put a function in there - for example, because you were actually storing a different type (e.g. an opaque type) and didn't realize it had a function inside it.
* If you put a function (or a value containing a function) into a `Dict` or `Set`, you'll never be able to get it out again. This is a common problem with [NaN](https://en.wikipedia.org/wiki/NaN), which is also defined not to be equal to itself.
The first of these problems could be addressed by having function equality always return `True` instead of `False` (since that way it would not affect other fields' equality checks in a record), but that design has its own problems:
* Although function equality is still useless, `(\x -> x + 1) == (\x -> x)` returns `True`. Even if it didn't lead to bugs in practice, this would certainly be surprising and confusing to beginners.
* Now if you put several different functions into a `Dict` or `Set`, only one of them will be kept; the others will be discarded or overwritten. This could cause bugs if a value stored a function internally, and then other functions relied on that internal function for correctness.
Each of these designs makes Roc a language that's some combination of more error-prone, more confusing, and more
brittle to change. Disallowing function equality at compile time eliminates all of these drawbacks.
Note that you can implement the `Eq` ability for a record that contains a function any way you want (for example, ignoring the function).
## Why doesn't Roc have a `Maybe` or `Option` or `Optional` type, or `null` or `nil` or `undefined`?
It's common for programming languages to have a [null reference](https://en.wikipedia.org/wiki/Null_pointer)
@ -98,7 +117,7 @@ and `Optional` (like in Java).
By design, Roc does not have one of these. There are several reasons for this.
First, if a function returns a potential error, Roc has the convention to use `Result` with an error type that
has a single tag describing what went wrong. (For example, `List.first : List a -> Result a [ListWasEmpty]*`
has a single tag describing what went wrong. (For example, `List.first : List a -> Result a [ListWasEmpty]`
instead of `List.first : List a -> Maybe a`.) This is not only more self-descriptive, it also composes better with
other operations that can fail; there's no need to have functions like `Result.toMaybe` or `Maybe.toResult`,
because in Roc, the convention is that operations that can fail always use `Result`.
@ -109,12 +128,12 @@ To describe something that's neither an optional field nor an operation that can
more descriptive than something like `Maybe`. For example, if a record type has an `artist` field, but the artist
information may not be available, compare these three alternative ways to represent that:
* `artist : Maybe Artist`
* `artist : [Loading, Loaded Artist]`
* `artist : [Unspecified, Specified Artist]`
- `artist : Maybe Artist`
- `artist : [Loading, Loaded Artist]`
- `artist : [Unspecified, Specified Artist]`
All three versions tell us that we might not have access to an `Artist`. However, the `Maybe` version doesn't
tell us why that might be. The `Loading`/`Loaded` version tells us we don't have one *yet*, because we're
tell us why that might be. The `Loading`/`Loaded` version tells us we don't have one _yet_, because we're
still loading it, whereas the `Unspecified`/`Specified` version tells us we don't have one and shouldn't expect
to have one later if we wait, because it wasn't specified.
@ -128,17 +147,17 @@ for using `Maybe` even when it's less self-descriptive), we'd have to rewrite al
helper functions. As such, a subtle downside of these helper functions is that they discourage any change to
the data model that would break their call sites, even if that change would improve the data model overall.
On a historical note, `Maybe` may have been thought of as a substitute for null references—as opposed to something that emerged organically based on specific motivating use cases after `Result` already existed. That said, in languages that do not have an equivalent of Roc's tag unions, it's much less ergonomic to write something like `Result a [ListWasEmpty]*`, so that design would not fit those languages as well as it fits Roc.
On a historical note, `Maybe` may have been thought of as a substitute for null references—as opposed to something that emerged organically based on specific motivating use cases after `Result` already existed. That said, in languages that do not have an equivalent of Roc's tag unions, it's much less ergonomic to write something like `Result a [ListWasEmpty]`, so that design would not fit those languages as well as it fits Roc.
## Why doesn't Roc have higher-kinded polymorphism or arbitrary-rank types?
_Since this is a FAQ answer, I'm going to assume familiarity with higher-kinded types and higher-rank types instead of including a primer on them._
_Since this is a FAQ answer, it assumes familiarity with higher-kinded types and higher-rank types instead of including a primer on them._
A valuable aspect of Roc's type system is that it has decidable [principal](https://en.wikipedia.org/wiki/Principal_type)
type inference. This means that:
* At compile time, Roc can correctly infer the types for every expression in a program, even if you don't annotate any of the types.
* This inference always infers the most general type possible; you couldn't possibly add a valid type annotation that would make the type more flexible than the one that Roc would infer if you deleted the annotation.
- At compile time, Roc can correctly infer the types for every expression in a program, even if you don't annotate any of the types.
- This inference always infers the most general type possible; you couldn't possibly add a valid type annotation that would make the type more flexible than the one that Roc would infer if you deleted the annotation.
It's been proven that any type system which supports either [higher-kinded polymorphism](https://www.cl.cam.ac.uk/~jdy22/papers/lightweight-higher-kinded-polymorphism.pdf) or [arbitrary-rank types](https://www.microsoft.com/en-us/research/wp-content/uploads/2016/02/putting.pdf) cannot have decidable
principal type inference. With either of those features in the language, there will be situations where the compiler
@ -146,40 +165,33 @@ would be unable to infer a type—and you'd have to write a type annotation. Thi
situations where the editor would not be able to reliably tell you the type of part of your program, unlike today
where it can accurately tell you the type of anything, even if you have no type annotations in your entire code base.
### Arbitrary-rank types
This is one factor that higher-rank and higher-kinded types have in common. There are other factors which are specific
to each.
Unlike arbitrary-rank (aka "Rank-N") types, both Rank-1 and Rank-2 type systems are compatible with principal
type inference. Roc currently uses Rank-1 types, and the benefits of Rank-N over Rank-2 don't seem worth
sacrificing principal type inference to attain, so let's focus on the trade-offs between Rank-1 and Rank-2.
### Higher-rank types
Supporting Rank-2 types in Roc has been discussed before, but it has several important downsides:
Supporting higher-rank types in Roc has been discussed before, but it has several important downsides:
* It would increase the complexity of the language.
* It would make some compiler error messages more confusing (e.g. they might mention `forall` because that was the most general type that could be inferred, even if that wasn't helpful or related to the actual problem).
* It would substantially increase the complexity of the type checker, which would necessarily slow it down.
- It would increase the complexity of the language.
- It would make some compiler error messages more confusing (e.g. they might mention `forall` because that was the most general type that could be inferred, even if that wasn't helpful or related to the actual problem).
- It would substantially increase the complexity of the type checker, which would necessarily slow it down.
- It would make some Roc programs run significantly more slowly. Roc compiles programs by [monomorphizing](https://en.wikipedia.org/wiki/Monomorphization), and it's unclear how we could fully monomorphize programs containing Rank-2 types. This means compiling programs which include Rank-2 types (or higher) would require sacrificing monomorphization, which would substantially degrade runtime performance.
No implementation of Rank-2 types can remove any of these downsides. Thus far, we've been able to come up
with sufficiently nice APIs that only require Rank-1 types, and we haven't seen a really compelling use case
where the gap between the Rank-2 and Rank-1 designs was big enough to justify switching to Rank-2.
Since I prefer Roc being simpler and having a faster compiler with nicer error messages, my hope is that Roc
will never get Rank-2 types. However, it may turn out that in the future we learn about currently-unknown
upsides that somehow outweigh these downsides, so I'm open to considering the possibility - while rooting against it.
As such, the plan is for Roc to stick with Rank-1 types indefinitely.
### Higher-kinded polymorphism
I want to be really clear about this one: the explicit plan is that Roc will never support higher-kinded polymorphism.
The explicit plan is that Roc will never support higher-kinded polymorphism.
On the technical side, the reasons for this are ordinary: I understand the practical benefits and
drawbacks of HKP, and I think the drawbacks outweigh the benefits when it comes to Roc. (Those who come to a
different conclusion may think HKP's drawbacks would be less of a big deal in Roc than I do. That's reasonable;
we programmers often weigh the same trade-offs differently.) To be clear, I think this in the specific context of
Roc; there are plenty of other languages where HKP seems like a great fit. For example, it's hard to imagine Haskell
without it. Similarly, I think lifetime annotations are a great fit for Rust, but don't think they'd be right
for Roc either.
On the technical side, the reasons for this are ordinary: like any language feature, HKP has both benefits and drawbacks,
and in the context of Roc, the drawbacks seem to outweigh the benefits. (Those who come to a different conclusion may
think HKP's drawbacks would be less of a big deal in Roc. That's reasonable; we programmers often weigh the same
trade-offs differently.) To be clear, this analysis of HKP is in the specific context of Roc; there are plenty of
other languages where HKP seems like a great fit. For example, it's hard to imagine Haskell without it. Similarly,
lifetime annotations might be a natural fit for Rust, but they wouldn't be a good fit for Roc either.
I also think it's important to consider the cultural implications of deciding whether or not to support HKP.
To illustrate what I mean, imagine this conversation:
It's also important to consider the cultural implications of deciding whether or not to support HKP.
To illustrate these implications, imagine this conversation:
**Programmer 1:** "How do you feel about higher-kinded polymorphism?"
@ -189,9 +201,9 @@ To illustrate what I mean, imagine this conversation:
**Programmer 2:** "OH NO."
I've had several variations of this conversation: I'm talking about higher-kinded types,
another programmer asks what that means, I give monads as an example, and their reaction is strongly negative.
I've also had plenty of conversations with programmers who love HKP and vigorously advocate for its addition
For some, this conversation does not require imagining, because it's so familiar: higher-kinded types come up in
conversation, another programmer asks what that means, monads are given as an example, and their reaction is
strongly negative. On the flip side, plenty of programmers love HKP and vigorously advocate for its addition
to languages they use which don't have it. Feelings about HKP seem strongly divided, maybe more so
than any other type system feature besides static and dynamic types.
@ -201,154 +213,153 @@ language will inevitably follow. If the language does support HKP, one or more a
around monads will inevitably follow, along with corresponding cultural changes. (See Scala for example.)
Culturally, to support HKP is to take a side, and to decline to support it is also to take a side.
Given this, language designers have three options:
Given this, languages have three options:
* Have HKP and have Monad in the standard library. Embrace them and build a culture and ecosystem around them.
* Have HKP and don't have Monad in the standard library. An alternate standard library built around monads will inevitably emerge, and both the community and ecosystem will divide themselves along pro-monad and anti-monad lines.
* Don't have HKP; build a culture and ecosystem around other things.
- Have HKP and have Monad in the standard library. Embrace them and build a culture and ecosystem around them.
- Have HKP and don't have Monad in the standard library. An alternate standard library built around monads will inevitably emerge, and both the community and ecosystem will divide themselves along pro-monad and anti-monad lines.
- Don't have HKP; build a culture and ecosystem around other things.
Considering that these are the only three options, I think the best choice for Roc—not only on a technical
level, but on a cultural level as well—is to make it clear that the plan is for Roc never to support HKP.
I hope this clarity can save a lot of community members' time that would otherwise be spent on advocacy or
arguing between the two sides of the divide. Again, I think it's completely reasonable for anyone to have a
different preference, but given that language designers can only choose one of these options, I'm confident
I've made the right choice for Roc by designing it never to have higher-kinded polymorphism.
## Why do Roc's syntax and standard library differ from Elm's?
Roc is a direct descendant of [Elm](https://elm-lang.org/). However, there are some differences between the two languages.
Syntactic differences are among these. This is a feature, not a bug; if Roc had identical syntax to Elm, then it's
predictable that people would write code that was designed to work in both languages - and would then rely on
that being true, for example by making a package which advertised "Works in both Elm and Roc!" This in turn
would mean that later if either language were to change its syntax in a way that didn't make sense for the other,
the result would be broken code and sadness.
So why does Roc have the specific syntax changes it does? Here are some brief explanations:
* `#` instead of `--` for comments - this allows [hashbang](https://senthilnayagan.medium.com/shebang-hashbang-10966b8f28a8)s to work without needing special syntax. That isn't a use case Elm supports, but it is one Roc is designed to support.
* `{}` instead of `()` for the unit type - Elm has both, and they can both be used as a unit type. Since `{}` has other uses in the type system, but `()` doesn't, I consider it redundant and took it out.
* `when`...`is` instead of `case`...`of` - I predict it will be easier for beginners to pick up, because usually the way I explain `case`...`of` to beginners is by saying the words "when" and "is" out loud - e.g. "when `color` is `Red`, it runs this first branch; when `color` is `Blue`, it runs this other branch..."
* `:` instead of `=` for record field definitions (e.g. `{ foo: bar }` where Elm syntax would be `{ foo = bar }`): I like `=` being reserved for definitions, and `:` is the most popular alternative.
* Backpassing syntax - since Roc is designed to be used for use cases like command-line apps, shell scripts, and servers, I expect chained effects to come up a lot more often than they do in Elm. I think backpassing is nice for those use cases, similarly to how `do` notation is nice for them in Haskell.
* Tag unions instead of Elm's custom types (aka algebraic data types). This isn't just a syntactic change; tag unions are mainly in Roc because they can facilitate errors being accumulated across chained effects, which (as noted a moment ago) I expect to be a lot more common in Roc than in Elm. If you have tag unions, you don't really need a separate language feature for algebraic data types, since closed tag unions essentially work the same way - aside from not giving you a way to selectively expose variants or define phantom types. Roc's opaque types language feature covers those use cases instead.
* No `::` operator, or `::` pattern matching for lists. Both of these are for the same reason: an Elm `List` is a linked list, so both prepending to it and removing an element from the front are very cheap operations. In contrast, a Roc `List` is a flat array, so both prepending to it and removing an element from the front are among the most expensive operations you can possibly do with it! To get good performance, this usage pattern should be encouraged in Elm and discouraged in Roc. Since having special syntax would encourage it, it would not be good for Roc to have that syntax!
* No `<|` operator. In Elm, I almost exclusively found myself wanting to use this in conjunction with anonymous functions (e.g. `foo <| \bar -> ...`) or conditionals (e.g. `foo <| if bar then ...`). In Roc you can do both of these without the `<|`. That means the main remaining use for `<|` is to reduce parentheses, but I tend to think `|>` is better at that (or else the parens are fine), so after the other syntactic changes, I considered `<|` an unnecessary stylistic alternative to `|>` or parens.
* The `|>` operator passes the expression before the `|>` as the *first* argument to the function after the `|>` instead of as the last argument. See the section on currying for details on why this works this way.
* `:` instead of `type alias` - I like to avoid reserved keywords for terms that are desirable in userspace, so that people don't have to name things `typ` because `type` is a reserved keyword, or `clazz` because `class` is reserved. (I couldn't think of satisfactory alternatives for `as`, `when`, `is`, or `if` other than different reserved keywords. I could see an argument for `then`—and maybe even `is`—being replaced with a `->` or `=>` or something, but I don't anticipate missing either of those words much in userspace. `then` is used in JavaScript promises, but I think there are several better names for that function.)
* No underscores in variable names - I've seen Elm beginners reflexively use `snake_case` over `camelCase` and then need to un-learn the habit after the compiler accepted it. I'd rather have the compiler give feedback that this isn't the way to do it in Roc, and suggest a camelCase alternative. I've also seen underscores used for lazy naming, e.g. `foo` and then `foo_`. If lazy naming is the goal, `foo2` is just as concise as `foo_`, but `foo3` is more concise than `foo__`. So in a way, removing `_` is a forcing function for improved laziness. (Of course, more descriptive naming would be even better.)
* Trailing commas - I've seen people walk away (in some cases physically!) from Elm as soon as they saw the leading commas in collection literals. While I think they've made a mistake by not pushing past this aesthetic preference to give the language a chance, I also would prefer not put them in a position to make such a mistake in the first place. Secondarily, while I'm personally fine with either style, between the two I prefer the look of trailing commas.
* The `!` unary prefix operator. I didn't want to have a `Basics` module (more on that in a moment), and without `Basics`, this would either need to be called fully-qualified (`Bool.not`) or else a module import of `Bool.{ not }` would be necessary. Both seemed less nice than supporting the `!` prefix that's common to so many widely-used languages, especially when we already have a unary prefix operator of `-` for negation (e.g. `-x`).
* `!=` for the inequality operator (instead of Elm's `/=`) - this one pairs more naturally with the `!` prefix operator and is also very common in other languages.
Roc also has a different standard library from Elm. Some of the differences come down to platforms and applications (e.g. having `Task` in Roc's standard library wouldn't make sense), but others do not. Here are some brief explanations:
* No `Basics` module. I wanted to have a simple rule of "all modules in the standard library are imported by default, and so are their exposed types," and that's it. Given that I wanted the comparison operators (e.g. `<`) to work only on numbers, it ended up that having `Num` and `Bool` modules meant that almost nothing would be left for a `Basics` equivalent in Roc except `identity` and `Never`. The Roc type `[]` (empty tag union) is equivalent to `Never`, so that wasn't necessary, and I generally think that `identity` is a good concept but a sign of an incomplete API whenever its use comes up in practice. For example, instead of calling `|> List.filterMap identity` I'd rather have access to a more self-descriptive function like `|> List.dropNothings`. With `Num` and `Bool`, and without `identity` and `Never`, there was nothing left in `Basics`.
* `Str` instead of `String` - after using the `str` type in Rust, I realized I had no issue whatsoever with the more concise name, especially since it was used in so many places (similar to `Msg` and `Cmd` in Elm) - so I decided to save a couple of letters.
* No function composition operators - I stopped using these in Elm so long ago, at one point I forgot they were in the language! See the FAQ entry on currying for details about why.
* No `Char`. What most people think of as a "character" is a rendered glyph. However, rendered glyphs are comprised of [grapheme clusters](https://stackoverflow.com/a/27331885), which are a variable number of Unicode code points - and there's no upper bound on how many code points there can be in a single cluster. In a world of emoji, I think this makes `Char` error-prone and it's better to have `Str` be the only first-class unit. For convenience when working with unicode code points (e.g. for performance-critical tasks like parsing), the single-quote syntax is sugar for the corresponding `U32` code point - for example, writing `'鹏'` is exactly the same as writing `40527`. Like Rust, you get a compiler error if you put something in single quotes that's not a valid [Unicode scalar value](http://www.unicode.org/glossary/#unicode_scalar_value).
* No `Debug.log` - the editor can do a better job at this, or you can write `expect x != x` to see what `x` is when the expectation fails. Using the editor means your code doesn't change, and using `expect` gives a natural reminder to remove the debugging code before shipping: the build will fail.
* No `Debug.todo` - instead you can write a type annotation with no implementation below it; the type checker will treat it normally, but attempting to use the value will cause a runtime exception. This is a feature I've often wanted in Elm, because I like prototyping APIs by writing out the types only, but then when I want the compiler to type-check them for me, I end up having to add `Debug.todo` in various places.
* No `Maybe`. See the "Why doesn't Roc have a `Maybe`/`Option`/`Optional` type" FAQ question
Considering that these are the only three options, an early decision in Roc's design—not only on a technical
level, but on a cultural level as well—was to make it clear that the plan is for Roc never to support HKP.
The hope is that this clarity can save a lot of community members' time that would otherwise be spent on advocacy or
arguing between the two sides of the divide. Again, it's completely reasonable for anyone to have a different preference,
but given that languages can only choose one of these options, it seems clear that the right choice for Roc
is for it to never have higher-kinded polymorphism.
## Why aren't Roc functions curried by default?
Although technically any language with first-class functions makes it possible to curry
any function (e.g. I can manually curry a Roc function `\x, y, z ->` by writing `\x -> \y -> \z ->` instead),
any function (e.g. anyone can manually curry a Roc function `\x, y, z ->` by writing `\x -> \y -> \z ->` instead),
typically what people mean when they say Roc isn't a curried language is that Roc functions aren't curried
by default. For the rest of this section, I'll use "currying" as a shorthand for "functions that are curried
by default. The rest of this section will use "currying" as a shorthand for "functions that are curried
by default" for the sake of brevity.
As I see it, currying has one major upside and several major downsides. The upside:
Currying makes function calls more concise in some cases, but it has several significant downsides:
* It makes function calls more concise in some cases.
The downsides:
* It lowers error message quality, because there can no longer be an error for "function called with too few arguments." (Calling a function with fewer arguments is always valid in curried functions; the error you get instead will unavoidably be some other sort of type mismatch, and it will be up to you to figure out that the real problem was that you forgot an argument.)
* It makes the `|>` operator more error-prone in some cases.
* It makes higher-order function calls need more parentheses in some cases.
* It significantly increases the language's learning curve. (More on this later.)
* It facilitates pointfree function composition. (More on why this is listed as a downside later.)
- It lowers error message quality, because there can no longer be an error for "function called with too few arguments." (Calling a function with fewer arguments is always valid in curried functions; the error you get instead will unavoidably be some other sort of type mismatch, and it will be up to you to figure out that the real problem was that you forgot an argument.)
- It makes the `|>` operator more error-prone in some cases.
- It makes higher-order function calls need more parentheses in some cases.
- It significantly increases the language's learning curve. (More on this later.)
- It facilitates pointfree function composition. (More on why this is listed as a downside later.)
There's also a downside that it would make runtime performance of compiled programs worse by default,
but I assume it would be possible to optimize that away at the cost of slightly longer compile times.
but it would most likely be possible to optimize that away at the cost of slightly longer compile times.
I consider the one upside (conciseness in some places) extremely minor, and have almost never missed it in Roc.
Here are some more details about the downsides as I see them.
These downsides seem to outweigh the one upside (conciseness in some places). Here are some more details about each of
the downsides.
### Currying and the `|>` operator
In Roc, this code produces `"Hello, World!"`
In Roc, both of these expressions evaluate to `"Hello, World!"`
```elm
"Hello, World"
|> Str.concat "!"
```elixir
Str.concat "Hello, " "World!"
```
This is because Roc's `|>` operator uses the expression before the `|>` as the *first* argument to the function
after it. For functions where both arguments have the same type, but it's obvious which argument goes where (e.g.
`Str.concat "Hello, " "World!"`, `List.concat [1, 2] [3, 4]`), this works out well. Another example would
be `|> Num.sub 1`, which subtracts 1 from whatever came before the `|>`.
```elixir
"Hello, "
|> Str.concat "World!"
```
For this reason, "pipeline-friendliness" in Roc means that the first argument to each function is typically
the one that's most likely to be built up using a pipeline. For example, `List.map`:
It's unsurprising to most beginners that these work the same way; it's common for a beginner who has recently learned
how `|>` works to assume that `|> Str.concat "!"` would concatenate `!` onto the end of a string.
```elm
This is not how it works in curried languages, however. In curried languages with a `|>` operator, the first expression
still returns `"Hello, World!"` but the second one returns `"World!Hello, "` instead. This can be an unpleasant surprise
for beginners, but even experienced users commonly find that this behavior is less useful than having both of
these expressions evaluate to the same thing.
In Roc, both expressions evaluate to the same thing because Roc's `|>` operator uses the expression before the `|>` as the _first_ argument, whereas in curried languages, `|>` uses it as the _last_ argument. For example, this is how `|>` works in both [F#](https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/symbol-and-operator-reference/#function-symbols-and-operators) and in [Elm](https://package.elm-lang.org/packages/elm/core/1.0.5/Basics#|%3E), both of which are curried languages. In contrast, Roc's `|>` design uses the same argument ordering as [Elixir](https://hexdocs.pm/elixir/1.14.0/Kernel.html#%7C%3E/2) and [Gleam](https://gleam.run/book/tour/functions.html#pipe-operator), neither of which is a curried language.
This comes up in other situations besides string concatenation. For example, consider subtraction and division:
```elixir
someNumber
|> Num.div 2
```
```elixir
someNumber
|> Num.sub 1
```
Again, it's reasonable to expect that `|> Num.div 2` will divide a number by 2, and that
`|> Num.sub 1` will subtract 1 from a number. In Roc, this is how they work, but in
curried languages they work the opposite way: `|> Num.div 2` takes the number 2 and
divides it by a number, and `|> Num.sub 1` takes the number 1 and subtracts a number
from it. This is once again both more surprising to beginners and less useful to
experienced users.
The way `|>` works in Roc has a second benefit when it comes to higher-order functions. Consider these two examples:
```elixir
answer = List.map numbers \num ->
someFunction
"some argument"
num
anotherArg
```
```elixir
numbers
|> List.map Num.abs
|> List.map Num.abs
```
This argument ordering convention also often makes it possible to pass anonymous functions to higher-order
functions without needing parentheses, like so:
In Roc, `List.map` takes a list and then a function. Because of the way `|>` works in Roc, `numbers |> List.map Num.abs` passes `numbers` as the first argument to `List.map`, and `Num.abs` as the second argument. So both of these examples work fine.
```elm
List.map numbers \num -> Num.abs (num - 1)
In a curried language, these two examples couldn't both be valid. In order for `|> List.map Num.abs` to work in a curried language (where `|>` works the other way), `List.map` would have to take its arguments in the opposite order: the function first and the list second.
This means the first example would have to change from this...
```elixir
answer = List.map numbers \num ->
someFunction
"some argument"
num
anotherArg
```
(If the arguments were reversed, this would be `List.map (\num -> Num.abs (num - 1)) numbers` and the
extra parentheses would be required.)
...to this:
Neither of these benefits is compatible with the argument ordering currying encourages. Currying encourages
`List.map` to take the `List` as its second argument instead of the first, so that you can partially apply it
like `(List.map Num.abs)`; if Roc introduced currying but kept the order of `List.map` the same way it is today,
then partially applying `List.map` (e.g. `(List.map numbers)`) would be much less useful than if the arguments
were swapped - but that in turn would make it less useful with `|>` and would require parentheses when passing
it an anonymous function.
```elixir
answer =
List.map
(\num ->
someFunction
"some argument"
num
anotherArg
)
numbers
```
This is a fundamental design tension. One argument order works well with `|>` (at least the way it works in Roc
today) and with passing anonymous functions to higher-order functions, and the other works well with currying.
It's impossible to have both.
The Roc version of this is nicer in that it doesn't require parentheses around the function argument. A curried language
could theoretically adopt Roc's style of `|>` (where it pipes in the first argument instead of the last argument), but
to get this second benefit, the language would also need to have `List.map` take the function as its second argument
instead of the first. However, this would work against currying's one upside; it would no longer work to write
`(List.map negate)` if the `List.map` arguments were flipped, the way they are in Roc. So currying and `|>` are unavoidably
in tension.
Of note, one possible design is to have currying while also having `|>` pass the *last* argument instead of the first.
This is what Elm does, and it makes pipeline-friendliness and curry-friendliness the same thing. However, it also
means that either `|> Str.concat "!"` would add the `"!"` to the front of the string, or else `Str.concat`'s
arguments would have to be flipped - meaning that `Str.concat "Hello, World" "!"` would evaluate to `"!Hello, World"`.
The only way to have `Str.concat` work the way it does in Roc today (where both pipelines and non-pipeline calling
do what you'd want them to) is to order function arguments in a way that is not conducive to currying. This design
tension only exists if there's currying in the language; without it, you can order arguments for pipeline-friendliness
without concern.
As a historical note, these stylistic benefits (of `|> Num.sub 1` working as expected, and being able to write `List.map numbers \num ->`) were not among the original reasons Roc did not have currying. These benefits were discovered after the decision had already been made that Roc would not be a curried language, and they served to reinforce after the fact that the decision was the right one for Roc given the language's goals.
### Currying and learning curve
Prior to designing Roc, I taught a lot of beginner [Elm](https://elm-lang.org/) workshops. Sometimes at
conferences, sometimes for [Frontend Masters](https://frontendmasters.com/courses/intro-elm/),
sometimes for free at local coding bootcamps or meetup groups.
In total I've spent well over 100 hours standing in front of a class, introducing the students to their
first pure functional programming language.
Currying leads to function signatures that look surprising to beginners. For example, in Roc, the
[`Bool.and`](https://www.roc-lang.org/builtins/Bool#and) function has the type `Bool, Bool -> Bool`. If Roc were a
curried language, this function would instead have the type `Bool -> Bool -> Bool`. Since no mainstream programming
languages today are curried, anyone who knows a mainstream language and is learning their first curried language will
require additional explanation about why function types look this way.
Here was my experience teaching currying:
This explanation is nontrivial. It requires explaining partial application, how curried functions facilitate partial
application, how function signatures accurately reflect that they're curried, and going through examples for all of these.
All of it builds up to the punchline that "technically, all functions in this language have a single argument," which
some percentage of learners find interesting, and some percentage still find confusing even after all that explanation.
* The only way to avoid teaching it is to refuse to explain why multi-argument functions have multiple `->`s in them. (If you don't explain it, at least one student will ask about it - and many if not all of the others will wonder.)
* Teaching currying properly takes a solid chunk of time, because it requires explaining partial application, explaining how curried functions facilitate partial application, how function signatures accurately reflect that they're curried, and going through examples for all of these.
* Even after doing all this, and iterating on my approach each time to try to explain it more effectively than I had the time before, I'd estimate that under 50% of the class ended up actually understanding currying. I consistently heard that in practice it only "clicked" for most people after spending significantly more time writing code with it.
This is not the end of the world, especially because it's easy enough to think "okay, I still don't totally get this
even after that explanation, but I can remember that function arguments are separated by `->` in this language
and maybe I'll understand the rest later." (Which they almost always do, if they stick with the language.)
Clearly currying doesn't preclude a language from being easy to learn, because Elm has currying, and Elm's learning
curve is famously gentle.
It's common for beginners to report that currying only "clicked" for them after spending significant time writing code
in a curried language. This is not the end of the world, especially because it's easy enough to think "I still don't
totally get this even after that explanation, but I can remember that function arguments are separated by `->` in this
language and maybe I'll understand the rest later." Clearly currying doesn't preclude a language from being easy to learn,
because Elm has currying, and Elm's learning curve is famously gentle.
That said, beginners who feel confused while learning the language are less likely to continue with it.
And however easy Roc would be to learn if it had currying, the language is certainly easier to learn without it.
@ -367,41 +378,72 @@ compose : (a -> b), (c -> a) -> (c -> b)
compose = \f, g, x -> f (g x)
```
Here's how I would instead write this:
Here's a way to write it without pointfree function composition:
```elm
reverseSort : List elem -> List elem
reverseSort = \list -> List.reverse (List.sort list)
```
I've consistently found that I can more quickly and accurately understand function definitions that use
named arguments, even though the code is longer. I suspect this is because I'm faster at reading than I am at
desugaring, and whenever I read the top version I end up needing to mentally desugar it into the bottom version.
In more complex examples (this is among the tamest pointfree function composition examples I've seen), I make
a mistake in my mental desugaring, and misunderstand what the function is doing - which can cause bugs.
It's common for programmers to build a mental model of what `compose List.reverse List.sort` does by mentally
translating it into `\list -> List.reverse (List.sort list)`. This extra mental translation step makes it take
longer to read and to understand despite being technically more concise. In more complex examples (this
is among the tamest of pointfree function composition examples), the chances increase of making a mistake in
the mental translation step, leading to a misunderstanding of what the function is doing—which can cause bugs.
I assumed I would get faster and more accurate at this over time. However, by now it's been about a decade
since I first learned about the technique, and I'm still slower and less accurate at reading code that uses
pointfree function composition (including if I wrote it - but even moreso if I didn't) than code written with
named arguments. I've asked a lot of other programmers about their experiences with pointfree function
composition over the years, and the overwhelming majority of responses have been consistent with my experience.
As such, my opinion about pointfree function composition has gotten less and less nuanced over time. I've now moved
past "it's the right tool for the job, sometimes" to concluding it's best thought of as an antipattern. This is
because I realized how much time I was spending evaluating on a case-by-case basis whether it might be the
right fit for a given situation. The time spent on this analysis alone vastly outweighed the sum of all the
benefits I got in the rare cases where I concluded it was a fit. So I've found the way to get the most out of
pointfree function composition is to never even think about using it; every other strategy leads to a worse outcome.
Currying facilitates the antipattern of pointfree function composition, which I view as a downside of currying.
Some languages place such a high value on conciseness that they would consider the conciseness upside to outweigh
these downsides, but Roc is not one of those languages. It's considered stylistically better in Roc to write the
second version above. Given this, since currying facilitates pointfree function composition, making Roc a curried
language would have the downside of facilitating an antipattern in the language.
Stacking up all these downsides of currying against the one upside of making certain function calls more concise,
I concluded that it would be a mistake to have it in Roc.
it seems clear that Roc should not be a curried language.
## Why are both rust and zig used?
## Will Roc ever have linear types, dependent types, refinement types, or uniqueness types?
At the start of the project, we did not know Zig well and it was not production ready. Zig entered the project because it has many different backends (wasm, various assembly formats, LLVM IR) and can create code with minimal dependencies.
The plan is for Roc to never have linear types, dependent types, refinement types, or uniqueness types.
Rust has much more overhead in terms of code size. It's objectively not a lot, but it's less with Zig.
Fast compile times are a primary goal for Roc, and a major downside of refinement types is an exponential increase in compile times. This rules out refinement types for Roc.
We think Rust is a nicer language to work in for a project of this size. It has a type system that we're more familiar with, a package ecosystem, and excellent tooling.
If Roc were to have linear types or uniqueness types, they would move things that are currently behind-the-scenes performance optimizations into the type system. For them to be effective across the ecosystem, they couldn't really be opt-in; everyone would have to use them, even those for whom the current system of behind-the-scenes optimizations already met their performance needs without any added type system complexity. Since the overwhelming majority of use cases are expected to fall into that latter group, adding linear types or uniqueness types to Roc would be a net negative for the ecosystem.
Dependent types are too risky of a bet for Roc to take. They have been implemented in programming languages for three decades, and for at least half that time period, it has been easy to find predictions that dependent types will be the future of type systems. Much harder to find are success stories of complex applications built with dependent types, which realized benefits that significantly outweighed the substantial complexity of introducing value semantics to a type system.
Perhaps more success stories will emerge over time, but in the meantime it remains an open question whether dependent types are net beneficial in practice to application development. Further experimentation would be required to answer this question, and Roc is not the right language to do those experiments.
## Will Roc's compiler ever be self-hosted? (That is, will it ever be written in Roc?)
The plan is to never implement Roc's compiler in Roc.
The goal is for Roc's compiler to deliver the best user experience possible. Compiler performance is strongly influenced by how memory is used, and there are many performance benefits to be gained from using a systems language like Rust which offers more direct control over memory than Roc ever should.
Roc isn't trying to be the best possible language for high-performance compiler development, but it is trying to have a high-performance compiler. The best tool for that job is a language other than Roc, so that's what we're using!
## Why does Roc use the license it does?
The short explanation for why Roc is released under the [Universal Permissive License](https://opensource.org/licenses/UPL):
- Like [MIT](https://opensource.org/licenses/MIT), it's permissive and concise
- Like [Apache2](https://opensource.org/licenses/Apache-2.0), it protects against contributors claiming software patents over contributed code after the fact (MIT and BSD do not include protections against this)
- It's compatible with [GPLv2](https://opensource.org/licenses/GPL-2.0) (which [Apache2 is not](https://www.apache.org/licenses/GPL-compatibility.html))
- It's one license, unlike "MIT or Apache2, at your choice" (which is how [Rust addressed the problem](https://internals.rust-lang.org/t/rationale-of-apache-dual-licensing/8952/4) of MIT not having patent protections but Apache2 not being GPLv2 compatible)
- It's been approved by OSI, FSF, and Oracle's lawyers, so it has been not only vetted by three giants in the world of software licensing, but also three giants with competing interests - and they all approved it.
There's also [a longer explanation](https://github.com/roc-lang/roc/issues/1199) with more detail about the motivation and thought process, if you're interested.
## Why does Roc use both Rust and Zig?
Roc's compiler has always been written in [Rust](https://www.rust-lang.org/). Roc's standard library was briefly written in Rust, but was soon rewritten in [Zig](https://ziglang.org/).
There were a few reasons for this rewrite.
1. We struggled to get Rust to emit LLVM bitcode in the format we needed, which is important so that LLVM can do whole-program optimizations across the standard library and compiled application.
2. Since the standard library has to interact with raw generated machine code (or LLVM bitcode), the Rust code unavoidably needed `unsafe` annotations all over the place. This made one of Rust's biggest selling points inapplicable in this particular use case.
3. Given that Rust's main selling points are inapplicable (its package ecosystem being another), Zig's much faster compile times are a welcome benefit.
4. Zig has more tools for working in a memory-unsafe environment, such as reporting memory leaks in tests. These have been helpful in finding bugs that are out of scope for safe Rust.
The split of Rust for the compiler and Zig for the standard library has worked well so far, and there are no plans to change it.
## Why is the website so basic?
The website is very basic on purpose; it helps set expectations that Roc is a work in progress and not yet ready for a first release.

View File

@ -52,7 +52,7 @@ limitations under the License.
* Elm - https://github.com/elm/compiler
This source code can be found in editor/src/lang/solve.rs and compiler/src/solve.rs, and is licensed under the following terms:
This source code can be found in compiler/src/solve.rs, and is licensed under the following terms:
Copyright 2012-present Evan Czaplicki
@ -119,23 +119,6 @@ limitations under the License.
===========================================================
* learn-wgpu - https://github.com/sotrh/learn-wgpu
This source code can be found in editor/src/graphics/lowlevel/buffer.rs, editor/src/graphics/primitives/text.rs, and editor/src/graphics/primitives/lowlevel/vertex.rs, and is licensed under the following terms:
MIT License
Copyright (c) 2020 Benjamin Hansen
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
===========================================================
* pretty - https://github.com/Marwes/pretty.rs
This source code can be found in vendor/pretty/ and is licensed under the following terms:
@ -186,24 +169,9 @@ limitations under the License.
===========================================================
* Ropey - https://github.com/cessen/ropey
This source code can be found in editor/src/ui/text/lines.rs and is licensed under the following terms:
Copyright (c) 2017 Nathan Vegdahl
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
===========================================================
* Zig - https://ziglang.org
This source code can be found in compiler/builtins/bitcode/src/hash.zig, highlight/tests/peg_grammar.rs and highlight/src/highlight_parser.rs and is licensed under the following terms:
This source code can be found in highlight/tests/peg_grammar.rs and highlight/src/highlight_parser.rs and is licensed under the following terms:
The MIT License (Expat)

140
README.md
View File

@ -1,116 +1,48 @@
# The Roc Programming Language
# Work in progress!
Roc is a language for making delightful software.
[Roc](https://www.roc-lang.org) is not ready for a 0.1 release yet, but we do have:
The [tutorial](TUTORIAL.md) is the best place to learn about how to use the language - it assumes no prior knowledge of Roc or similar languages. (If you already know [Elm](https://elm-lang.org/), then [Roc for Elm Programmers](https://github.com/rtfeldman/roc/blob/trunk/roc-for-elm-programmers.md) may be of interest.)
- [**installation** guide](https://github.com/roc-lang/roc/tree/main/getting_started)
- [**tutorial**](https://roc-lang.org/tutorial)
- [**docs** for the standard library](https://www.roc-lang.org/builtins)
- [**examples**](https://github.com/roc-lang/examples/tree/main/examples)
- [**faq**: frequently asked questions](https://github.com/roc-lang/roc/blob/main/FAQ.md)
- [**group chat**](https://roc.zulipchat.com) for help, questions and discussions
There's also a folder of [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) - the [CLI example](https://github.com/rtfeldman/roc/tree/trunk/examples/cli) in particular is a reasonable starting point to build on.
If you have a specific question, the [FAQ](FAQ.md) might have an answer, although [Roc Zulip chat](https://roc.zulipchat.com) is overall the best place to ask questions and get help! It's also where we discuss [ideas](https://roc.zulipchat.com/#narrow/stream/304641-ideas) for the language. If you want to get involved in contributing to the language, Zulip is also a great place to ask about good first projects.
## State of Roc
Roc is not ready for production yet. You are likely to encounter bugs. Publishing packages or documentation is not yet supported.
However, many programs can be compiled correctly. Check out [examples](examples) and [examples/benchmarks](examples/benchmarks). There are minimal platforms for Rust, Zig, C, Swift, and an HTTP server. We are hard at work to make programming in Roc a delightful experience!
## Getting started
- [Linux x86](getting_started/linux_x86.md)
- [Windows](getting_started/windows.md)
- [Other](getting_started/other.md)
### Examples
Run examples as follows:
```
cargo run examples/hello-world/helloWorld.roc
```
Some examples like `examples/benchmarks/NQueens.roc` require input after running.
For NQueens, input 10 in the terminal and press enter.
[examples/benchmarks](examples/benchmarks) contains larger examples.
**Tip:** when programming in Roc, we recommend running `./roc check myproject/Foo.roc` before `./roc myproject/Foo.roc` or `./roc build myproject/Foo.roc`. `./roc check` can produce clear error messages in cases where building/running may panic.
If you'd like to contribute, check out [good first issues](https://github.com/roc-lang/roc/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22). Don't hesitate to ask for help on our [group chat](https://roc.zulipchat.com), we're friendly!
## Sponsors
We are very grateful for our sponsors [NoRedInk](https://www.noredink.com/) and [rwx](https://www.rwx.com).
You can 💜 **sponsor** 💜 Roc on:
- [GitHub](https://github.com/sponsors/roc-lang)
- [Liberapay](https://liberapay.com/roc_lang)
[<img src="https://www.noredink.com/assets/logo-red-black-f6989d7567cf90b349409137595e99c52d036d755b4403d25528e0fd83a3b084.svg" height="60" alt="NoRedInk logo"/>](https://www.noredink.com/)
We are very grateful for our corporate sponsors [Vendr](https://www.vendr.com/), [RWX](https://www.rwx.com), [Tweede golf](https://tweedegolf.nl/en), and [ohne-makler](https://www.ohne-makler.net):
[<img src="https://user-images.githubusercontent.com/1094080/223597445-81755626-a080-4299-a38c-3c92e7548489.png" height="60" alt="Vendr logo"/>](https://www.vendr.com)
&nbsp;&nbsp;&nbsp;&nbsp;
[<img src="https://www.rwx.com/build/_assets/rwx_banner_transparent_cropped-RYV7W2KL.svg" height="60" alt="rwx logo"/>](https://www.rwx.com)
[<img src="https://github.com/roc-lang/roc/assets/1094080/82c0868e-d23f-42a0-ac2d-c6e6b2e16575" height="60" alt="RWX logo"/>](https://www.rwx.com)
&nbsp;&nbsp;&nbsp;&nbsp;
[<img src="https://user-images.githubusercontent.com/1094080/183123052-856815b1-8cc9-410a-83b0-589f03613188.svg" height="60" alt="tweede golf logo"/>](https://tweedegolf.nl/en)
[<img src="https://www.ohne-makler.net/static/img/brand/logo.svg" height="60" alt="ohne-makler logo"/>](https://www.ohne-makler.net)
## Applications and Platforms
If you would like your company to become a corporate sponsor of Roc's development, please [DM Richard Feldman on Zulip](https://roc.zulipchat.com/#narrow/pm-with/281383-user281383)!
Applications are often built on a *framework.* Typically, both application and framework are written in the same language.
* [Rails](https://rubyonrails.org/) applications are written in Ruby, and so is Rails.
* [Angular](https://angularjs.org/) applications are written in TypeScript, and so is Angular.
* [Phoenix](https://phoenixframework.org/) applications are written in Elixir, and so is Phoenix.
We'd also like to express our gratitude to our generous [individual sponsors](https://github.com/sponsors/roc-lang/)! A special thanks to those sponsoring $25/month or more:
Some programs support plugins. Often the plugins are written in the same language as the underlying program.
* [Webpack](https://webpack.js.org/) plugins are written in JavaScript, and so is Webpack.
* [Eclipse](https://www.eclipse.org/ide/) plugins are written in Java, and so is Eclipse.
* [Leiningen](https://leiningen.org/) plugins are written in Clojure, and so is Leiningen.
* [Drew Lazzeri](https://github.com/asteroidb612)
* [Alex Binaei](https://github.com/mrmizz)
* [Jono Mallanyk](https://github.com/jonomallanyk)
* [Chris Packett](https://github.com/chris-packett)
* [James Birtles](https://github.com/jamesbirtles)
* [Ivo Balbaert](https://github.com/Ivo-Balbaert)
* [Lucas Rosa](https://github.com/rvcas)
* [Jonas Schell](https://github.com/Ocupe)
* [Christopher Dolan](https://github.com/cdolan)
* [Nick Gravgaard](https://github.com/nickgravgaard)
* [Zeljko Nesic](https://github.com/popara)
* [Shritesh Bhattarai](https://github.com/shritesh)
* [Richard Feldman](https://github.com/rtfeldman)
* [Ayaz Hafiz](https://github.com/ayazhafiz)
All of these can be considered examples of a platform/application relationship. There is an underlying platform, and many applications are built on top of it. (Plugins are a type of application in this sense.)
Sometimes, platforms and their applications are written in different languages.
* [Neovim](https://neovim.io/) is written in C for performance, and its plugins can be written in languages such as Python, JS, and Ruby.
* [NGINX](https://www.nginx.com/) is written in C for performance, and its plugins can be written in a [subset of JavaScript](https://www.nginx.com/blog/introduction-nginscript/).
* [Unity](https://unity.com/) is written in C++ for performance, and Unity applications (such as games) can be written in C#, Boo, or a JavaScript dialect called UnityScript.
Like in the previous examples, application authors building on these platforms get to use high-level languages with automatic memory management. They make no ergonomics sacrifices, and may not even be aware that the underlying platform is written in a lower-level language.
By using systems-level programming languages like C and C++, platform authors sacrifice development speed, but unlock the highest possible performance characteristics. This is a tradeoff many platform authors are happy to accept, for the sake of having applications built on their platforms run very fast.
## Roc's Design
Roc is designed to make the "systems-level platform, higher-level application" experience as nice as possible.
* **Application** authors code exclusively in Roc. It's a language designed for nice ergonomics. The syntax resembles Ruby or CoffeeScript, and it has a fast compiler with full type inference.
* **Platform** authors code almost exclusively in a systems-level language like C, C++, Rust, Swift or [Zig](https://ziglang.org/), except for the thin Roc API they expose to application authors. Roc application code compiles to machine code, and production builds of Roc apps benefit from the same [LLVM](https://llvm.org/) optimizations that C++, Rust, Swift and Zig do. Roc application authors do not need to know this lower-level code exists; all they have to interact with is the platform's API, which is exposed as an ordinary Roc API.
Every Roc application is built on top of exactly one Roc platform. There is no such thing as a Roc application that runs without a platform, and there is no default platform. You must choose one!
The core Roc language and standard library include no I/O operations, which gives platform authors complete control over which effects they want to support. Some of the implications of this include:
* A high-performance build tool (or text editor) written in Rust can be a Roc platform with a strong plugin security model. For example, it could expose only operations allowing plugin authors to modify the contents of certain files, rather than allowing plugins arbitrary read/write access to the entire filesystem.
* A VR or [Arduino](https://www.arduino.cc/) platform can expose uncommon I/O operations supported by that hardware, while omitting common I/O operations that are unsupported (such as reading keyboard input from a terminal that doesn't exist).
* A high-performance Web server written in Rust can be a Roc platform where all I/O operations are implemented in terms of Streams or Observables rather than a more traditional asynchronous abstraction like Futures or Promises. This would mean all code in that platform's ecosystem would be necessarily built on a common streaming abstraction.
## Project Goals
Roc is in relatively early stages of development. It's currently possible to build both platforms and applications (see the [examples](https://github.com/rtfeldman/roc/tree/trunk/examples) folder for some examples that aren't particularly organized at the moment), although [documentation](https://github.com/rtfeldman/roc/tree/trunk/compiler/builtins/roc) is in even earlier stages than the compiler itself.
Besides the above language design, a separate goal is for Roc to ship with an ambitiously boundary-pushing graphical editor. Not like "an IDE," but rather something that makes people say "I have never seen anything remotely like this outside of Bret Victor demos."
One of the reasons this editor is coupled with the language itself is to allow package authors to include custom editor tooling inside packages.
A trivial example: suppose I'm writing a Roc app for an Arduino platform. I install a platform-specific package for displaying text on a grid of LEDs. Because I've installed this package, at the call site where I call the function to specify the color of the text on the LEDs, my Roc editor displays an inline color picker. As I move a slider around to try out different colors, not only does my code change to reflect that value in realtime, but the physical LEDs in my room change color in realtime as well. As the application author, all I did to get that experience was to install the "text on an LED grid" package, nothing else.
The goal is for this to be one of the most trivial, bare minimum examples of what the editor experience would be like. Hopefully, people in the future will look back on this example and say "that's so embarrassingly basic; why didn't you talk about one of the *actually great* things in the seamless editor plugin ecosystem?"
Finally, some implementation goals:
* The web server for the package manager is written in Roc (with an underlying Rust platform for the web server, for example [warp](https://github.com/seanmonstar/warp)).
* The editor plugins are written in Roc (with an underlying Rust platform for the editor itself, for example using [gfx-hal](https://github.com/gfx-rs/gfx)).
* The CLI (for building Roc projects on CI platforms) has its user interface written in Roc (with an underlying Rust platform for fast compilation and basic CLI interactions).
It's an ambitious project! It'll take a long time to get where it's going, but hopefully it'll be worth the wait.
## Getting Involved
The number of people involved in Roc's development has been steadily increasing
over time - which has been great, because it's meant we've been able to onboard
people at a nice pace. (Most people who have contributed to Roc had previously
never done anything with Rust and also never worked on a compiler, but we've
been able to find beginner-friendly projects to get people up to speed gradually.)
If you're interested in getting involved, check out
[CONTRIBUTING.md](https://github.com/rtfeldman/roc/blob/trunk/CONTRIBUTING.md)!
## Name and Logo
If you're curious about where the language's name and logo came from,
[here's an explanation](https://github.com/rtfeldman/roc/blob/trunk/name-and-logo.md).
Thank you all so much for helping Roc progress!

File diff suppressed because it is too large Load Diff

View File

@ -1,35 +0,0 @@
[package]
name = "roc_ast"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
edition = "2021"
description = "AST as used by the editor and (soon) docs. In contrast to the compiler, these types do not keep track of a location in a file."
[dependencies]
roc_builtins = { path = "../compiler/builtins"}
roc_can = { path = "../compiler/can" }
roc_collections = { path = "../compiler/collections" }
roc_region = { path = "../compiler/region" }
roc_module = { path = "../compiler/module" }
roc_parse = { path = "../compiler/parse" }
roc_problem = { path = "../compiler/problem" }
roc_types = { path = "../compiler/types" }
roc_unify = { path = "../compiler/unify"}
roc_load = { path = "../compiler/load" }
roc_target = { path = "../compiler/roc_target" }
roc_error_macros = { path = "../error_macros" }
roc_reporting = { path = "../reporting" }
arrayvec = "0.7.2"
bumpalo = { version = "3.8.0", features = ["collections"] }
page_size = "0.4.2"
snafu = { version = "0.6.10", features = ["backtraces"] }
ven_graph = { path = "../vendor/pathfinding" }
libc = "0.2.106"
[dev-dependencies]
indoc = "1.0.3"
[target.'cfg(windows)'.dependencies]
winapi = { version = "0.3.9", features = ["memoryapi"]}

View File

@ -1,73 +0,0 @@
use roc_module::{ident::Ident, module_err::ModuleError};
use roc_parse::parser::SyntaxError;
use roc_region::all::{Loc, Region};
use snafu::{Backtrace, Snafu};
use crate::lang::core::ast::ASTNodeId;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum ASTError {
#[snafu(display(
"ASTNodeIdWithoutExprId: The expr_id_opt in ASTNode({:?}) was `None` but I was expecting `Some(ExprId)` .",
ast_node_id
))]
ASTNodeIdWithoutExprId {
ast_node_id: ASTNodeId,
backtrace: Backtrace,
},
#[snafu(display(
"UnexpectedASTNode: required a {} at this position, node was a {}.",
required_node_type,
encountered_node_type
))]
UnexpectedASTNode {
required_node_type: String,
encountered_node_type: String,
backtrace: Backtrace,
},
#[snafu(display(
"UnexpectedPattern2Variant: required a {} at this position, Pattern2 was a {}.",
required_pattern2,
encountered_pattern2,
))]
UnexpectedPattern2Variant {
required_pattern2: String,
encountered_pattern2: String,
backtrace: Backtrace,
},
#[snafu(display("IdentExistsError: {}", msg))]
IdentExistsError { msg: String },
WrapModuleError {
#[snafu(backtrace)]
source: ModuleError,
},
#[snafu(display("SyntaxError: {}", msg))]
SyntaxErrorNoBacktrace { msg: String },
}
pub type ASTResult<T, E = ASTError> = std::result::Result<T, E>;
impl From<ModuleError> for ASTError {
fn from(module_err: ModuleError) -> Self {
Self::WrapModuleError { source: module_err }
}
}
impl From<(Region, Loc<Ident>)> for ASTError {
fn from(ident_exists_err: (Region, Loc<Ident>)) -> Self {
Self::IdentExistsError {
msg: format!("{:?}", ident_exists_err),
}
}
}
impl<'a> From<SyntaxError<'a>> for ASTError {
fn from(syntax_err: SyntaxError) -> Self {
Self::SyntaxErrorNoBacktrace {
msg: format!("{:?}", syntax_err),
}
}
}

View File

@ -1,316 +0,0 @@
use roc_collections::all::MutMap;
use roc_problem::can::Problem;
use roc_region::all::{Loc, Region};
use roc_types::subs::Variable;
use crate::{
lang::{
core::{
def::def::References,
expr::{
expr2::{Expr2, ExprId, WhenBranch},
expr_to_expr2::expr_to_expr2,
output::Output,
record_field::RecordField,
},
pattern::to_pattern2,
},
env::Env,
scope::Scope,
},
mem_pool::{pool_str::PoolStr, pool_vec::PoolVec, shallow_clone::ShallowClone},
};
pub(crate) enum CanonicalizeRecordProblem {
#[allow(dead_code)]
InvalidOptionalValue {
field_name: PoolStr,
field_region: Region,
record_region: Region,
},
}
enum FieldVar {
VarAndExprId(Variable, ExprId),
OnlyVar(Variable),
}
pub(crate) fn canonicalize_fields<'a>(
env: &mut Env<'a>,
scope: &mut Scope,
fields: &'a [Loc<roc_parse::ast::AssignedField<'a, roc_parse::ast::Expr<'a>>>],
) -> Result<(PoolVec<RecordField>, Output), CanonicalizeRecordProblem> {
let mut can_fields: MutMap<&'a str, FieldVar> = MutMap::default();
let mut output = Output::default();
for loc_field in fields.iter() {
match canonicalize_field(env, scope, &loc_field.value) {
Ok(can_field) => {
match can_field {
CanonicalField::LabelAndValue {
label,
value_expr,
value_output,
var,
} => {
let expr_id = env.pool.add(value_expr);
let replaced =
can_fields.insert(label, FieldVar::VarAndExprId(var, expr_id));
if let Some(_old) = replaced {
// env.problems.push(Problem::DuplicateRecordFieldValue {
// field_name: label,
// field_region: loc_field.region,
// record_region: region,
// replaced_region: old.region,
// });
todo!()
}
output.references.union_mut(value_output.references);
}
CanonicalField::InvalidLabelOnly { label, var } => {
let replaced = can_fields.insert(label, FieldVar::OnlyVar(var));
if let Some(_old) = replaced {
todo!()
}
}
}
}
Err(CanonicalizeFieldProblem::InvalidOptionalValue {
field_name: _,
field_region: _,
}) => {
// env.problem(Problem::InvalidOptionalValue {
// field_name: field_name.clone(),
// field_region,
// record_region: region,
// });
// return Err(CanonicalizeRecordProblem::InvalidOptionalValue {
// field_name,
// field_region,
// record_region: region,
// });
todo!()
}
}
}
let pool_vec = PoolVec::with_capacity(can_fields.len() as u32, env.pool);
for (node_id, (string, field_var)) in pool_vec.iter_node_ids().zip(can_fields.into_iter()) {
let name = PoolStr::new(string, env.pool);
match field_var {
FieldVar::VarAndExprId(var, expr_id) => {
env.pool[node_id] = RecordField::LabeledValue(name, var, expr_id);
}
FieldVar::OnlyVar(var) => {
env.pool[node_id] = RecordField::InvalidLabelOnly(name, var);
} // TODO RecordField::LabelOnly
}
}
Ok((pool_vec, output))
}
#[allow(dead_code)]
enum CanonicalizeFieldProblem {
InvalidOptionalValue {
field_name: PoolStr,
field_region: Region,
},
}
// TODO: the `value_output: Output` field takes _a lot_ of space!
#[allow(clippy::large_enum_variant)]
enum CanonicalField<'a> {
LabelAndValue {
label: &'a str,
value_expr: Expr2,
value_output: Output,
var: Variable,
},
InvalidLabelOnly {
label: &'a str,
var: Variable,
}, // TODO make ValidLabelOnly
}
fn canonicalize_field<'a>(
env: &mut Env<'a>,
scope: &mut Scope,
field: &'a roc_parse::ast::AssignedField<'a, roc_parse::ast::Expr<'a>>,
) -> Result<CanonicalField<'a>, CanonicalizeFieldProblem> {
use roc_parse::ast::AssignedField::*;
match field {
// Both a label and a value, e.g. `{ name: "blah" }`
RequiredValue(label, _, loc_expr) => {
let field_var = env.var_store.fresh();
let (loc_can_expr, output) =
expr_to_expr2(env, scope, &loc_expr.value, loc_expr.region);
match loc_can_expr {
Expr2::RuntimeError() => Ok(CanonicalField::InvalidLabelOnly {
label: label.value,
var: field_var,
}),
_ => Ok(CanonicalField::LabelAndValue {
label: label.value,
value_expr: loc_can_expr,
value_output: output,
var: field_var,
}),
}
}
OptionalValue(label, _, loc_expr) => Err(CanonicalizeFieldProblem::InvalidOptionalValue {
field_name: PoolStr::new(label.value, env.pool),
field_region: Region::span_across(&label.region, &loc_expr.region),
}),
// A label with no value, e.g. `{ name }` (this is sugar for { name: name })
LabelOnly(label) => {
let field_var = env.var_store.fresh();
// TODO return ValidLabel if label points to in scope variable
Ok(CanonicalField::InvalidLabelOnly {
label: label.value,
var: field_var,
})
}
SpaceBefore(sub_field, _) | SpaceAfter(sub_field, _) => {
canonicalize_field(env, scope, sub_field)
}
Malformed(_string) => {
panic!("TODO canonicalize malformed record field");
}
}
}
#[inline(always)]
pub(crate) fn canonicalize_when_branch<'a>(
env: &mut Env<'a>,
scope: &mut Scope,
branch: &'a roc_parse::ast::WhenBranch<'a>,
output: &mut Output,
) -> (WhenBranch, References) {
let patterns = PoolVec::with_capacity(branch.patterns.len() as u32, env.pool);
let original_scope = scope;
let mut scope = original_scope.shallow_clone();
// TODO report symbols not bound in all patterns
for (node_id, loc_pattern) in patterns.iter_node_ids().zip(branch.patterns.iter()) {
let (new_output, can_pattern) = to_pattern2(
env,
&mut scope,
roc_parse::pattern::PatternType::WhenBranch,
&loc_pattern.value,
loc_pattern.region,
);
output.union(new_output);
env.set_region(node_id, loc_pattern.region);
env.pool[node_id] = can_pattern;
}
let (value, mut branch_output) =
expr_to_expr2(env, &mut scope, &branch.value.value, branch.value.region);
let value_id = env.pool.add(value);
env.set_region(value_id, branch.value.region);
let guard = match &branch.guard {
None => None,
Some(loc_expr) => {
let (can_guard, guard_branch_output) =
expr_to_expr2(env, &mut scope, &loc_expr.value, loc_expr.region);
let expr_id = env.pool.add(can_guard);
env.set_region(expr_id, loc_expr.region);
branch_output.union(guard_branch_output);
Some(expr_id)
}
};
// Now that we've collected all the references for this branch, check to see if
// any of the new idents it defined were unused. If any were, report it.
for (symbol, region) in scope.symbols() {
let symbol = symbol;
if !output.references.has_lookup(symbol)
&& !branch_output.references.has_lookup(symbol)
&& !original_scope.contains_symbol(symbol)
{
env.problem(Problem::UnusedDef(symbol, region));
}
}
let references = branch_output.references.clone();
output.union(branch_output);
(
WhenBranch {
patterns,
body: value_id,
guard,
},
references,
)
}
pub(crate) fn canonicalize_lookup(
env: &mut Env<'_>,
scope: &mut Scope,
module_name: &str,
ident: &str,
region: Region,
) -> (Expr2, Output) {
use Expr2::*;
let mut output = Output::default();
let can_expr = if module_name.is_empty() {
// Since module_name was empty, this is an unqualified var.
// Look it up in scope!
match scope.lookup(&(*ident).into(), region) {
Ok(symbol) => {
output.references.lookups.insert(symbol);
Var(symbol)
}
Err(problem) => {
env.problem(Problem::RuntimeError(problem));
RuntimeError()
}
}
} else {
// Since module_name was nonempty, this is a qualified var.
// Look it up in the env!
match env.qualified_lookup(module_name, ident, region) {
Ok(symbol) => {
output.references.lookups.insert(symbol);
Var(symbol)
}
Err(problem) => {
// Either the module wasn't imported, or
// it was imported but it doesn't expose this ident.
env.problem(Problem::RuntimeError(problem));
RuntimeError()
}
}
};
// If it's valid, this ident should be in scope already.
(can_expr, output)
}

View File

@ -1,2 +0,0 @@
pub mod canonicalize;
pub mod module;

View File

@ -1,326 +0,0 @@
#![allow(clippy::all)]
#![allow(dead_code)]
#![allow(unused_imports)]
#![allow(unused_variables)]
use bumpalo::Bump;
use roc_can::operator::desugar_def;
use roc_collections::all::{default_hasher, ImMap, ImSet, MutMap, MutSet, SendMap};
use roc_module::ident::Ident;
use roc_module::ident::Lowercase;
use roc_module::symbol::IdentIdsByModule;
use roc_module::symbol::{IdentIds, ModuleId, ModuleIds, Symbol};
use roc_parse::ast;
use roc_parse::pattern::PatternType;
use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Loc, Region};
use roc_types::subs::{VarStore, Variable};
use crate::lang::core::def::def::canonicalize_defs;
use crate::lang::core::def::def::Def;
use crate::lang::core::def::def::{sort_can_defs, Declaration};
use crate::lang::core::expr::expr2::Expr2;
use crate::lang::core::expr::output::Output;
use crate::lang::core::pattern::Pattern2;
use crate::lang::core::types::Alias;
use crate::lang::core::val_def::ValueDef;
use crate::lang::env::Env;
use crate::lang::scope::Scope;
use crate::mem_pool::pool::NodeId;
use crate::mem_pool::pool::Pool;
use crate::mem_pool::pool_vec::PoolVec;
use crate::mem_pool::shallow_clone::ShallowClone;
pub struct ModuleOutput {
pub aliases: MutMap<Symbol, NodeId<Alias>>,
pub rigid_variables: MutMap<Variable, Lowercase>,
pub declarations: Vec<Declaration>,
pub exposed_imports: MutMap<Symbol, Variable>,
pub lookups: Vec<(Symbol, Variable, Region)>,
pub problems: Vec<Problem>,
pub ident_ids: IdentIds,
pub references: MutSet<Symbol>,
}
// TODO trim these down
#[allow(clippy::too_many_arguments)]
pub fn canonicalize_module_defs<'a>(
arena: &Bump,
loc_defs: &'a [Loc<ast::Def<'a>>],
home: ModuleId,
module_ids: &ModuleIds,
exposed_ident_ids: IdentIds,
dep_idents: IdentIdsByModule,
aliases: MutMap<Symbol, Alias>,
exposed_imports: MutMap<Ident, (Symbol, Region)>,
mut exposed_symbols: MutSet<Symbol>,
var_store: &mut VarStore,
) -> Result<ModuleOutput, RuntimeError> {
let mut pool = Pool::with_capacity(1 << 10);
let mut can_exposed_imports = MutMap::default();
let mut scope = Scope::new(home, &mut pool, var_store);
let num_deps = dep_idents.len();
for (name, alias) in aliases.into_iter() {
let vars = PoolVec::with_capacity(alias.targs.len() as u32, &mut pool);
for (node_id, targ_id) in vars.iter_node_ids().zip(alias.targs.iter_node_ids()) {
let (poolstr, var) = &pool[targ_id];
pool[node_id] = (poolstr.shallow_clone(), *var);
}
scope.add_alias(&mut pool, name, vars, alias.actual);
}
// Desugar operators (convert them to Apply calls, taking into account
// operator precedence and associativity rules), before doing other canonicalization.
//
// If we did this *during* canonicalization, then each time we
// visited a BinOp node we'd recursively try to apply this to each of its nested
// operators, and then again on *their* nested operators, ultimately applying the
// rules multiple times unnecessarily.
let mut desugared =
bumpalo::collections::Vec::with_capacity_in(loc_defs.len() + num_deps, arena);
for loc_def in loc_defs.iter() {
desugared.push(&*arena.alloc(Loc {
value: desugar_def(arena, &loc_def.value),
region: loc_def.region,
}));
}
let mut env = Env::new(
home,
arena,
&mut pool,
var_store,
dep_idents,
module_ids,
exposed_ident_ids,
);
let mut lookups = Vec::with_capacity(num_deps);
let rigid_variables = MutMap::default();
// Exposed values are treated like defs that appear before any others, e.g.
//
// imports [Foo.{ bar, baz }]
//
// ...is basically the same as if we'd added these extra defs at the start of the module:
//
// bar = Foo.bar
// baz = Foo.baz
//
// Here we essentially add those "defs" to "the beginning of the module"
// by canonicalizing them right before we canonicalize the actual ast::Def nodes.
for (ident, (symbol, region)) in exposed_imports {
let first_char = ident.as_inline_str().chars().next().unwrap();
if first_char.is_lowercase() {
// this is a value definition
let expr_var = env.var_store.fresh();
match scope.import(ident, symbol, region) {
Ok(()) => {
// Add an entry to exposed_imports using the current module's name
// as the key; e.g. if this is the Foo module and we have
// exposes [Bar.{ baz }] then insert Foo.baz as the key, so when
// anything references `baz` in this Foo module, it will resolve to Bar.baz.
can_exposed_imports.insert(symbol, expr_var);
// This will be used during constraint generation,
// to add the usual Lookup constraint as if this were a normal def.
lookups.push((symbol, expr_var, region));
}
Err((_shadowed_symbol, _region)) => {
panic!("TODO gracefully handle shadowing in imports.")
}
}
} else {
// This is a type alias
// the should already be added to the scope when this module is canonicalized
debug_assert!(scope.contains_alias(symbol));
}
}
let (defs, _scope, output, symbols_introduced) = canonicalize_defs(
&mut env,
Output::default(),
&scope,
&desugared,
PatternType::TopLevelDef,
);
// See if any of the new idents we defined went unused.
// If any were unused and also not exposed, report it.
for (symbol, region) in symbols_introduced {
if !output.references.has_lookup(symbol) && !exposed_symbols.contains(&symbol) {
env.problem(Problem::UnusedDef(symbol, region));
}
}
// TODO register rigids
// for (var, lowercase) in output.introduced_variables.name_by_var.clone() {
// rigid_variables.insert(var, lowercase);
// }
let mut references = MutSet::default();
// Gather up all the symbols that were referenced across all the defs' lookups.
for symbol in output.references.lookups.iter() {
references.insert(*symbol);
}
// Gather up all the symbols that were referenced across all the defs' calls.
for symbol in output.references.calls.iter() {
references.insert(*symbol);
}
// Gather up all the symbols that were referenced from other modules.
for symbol in env.qualified_lookups.iter() {
references.insert(*symbol);
}
// NOTE previously we inserted builtin defs into the list of defs here
// this is now done later, in file.rs.
match sort_can_defs(&mut env, defs, Output::default()) {
(Ok(mut declarations), output) => {
use Declaration::*;
for decl in declarations.iter() {
match decl {
Declare(def) => {
for symbol in def.symbols(env.pool) {
if exposed_symbols.contains(&symbol) {
// Remove this from exposed_symbols,
// so that at the end of the process,
// we can see if there were any
// exposed symbols which did not have
// corresponding defs.
exposed_symbols.remove(&symbol);
}
}
}
DeclareRec(defs) => {
for def in defs {
for symbol in def.symbols(env.pool) {
if exposed_symbols.contains(&symbol) {
// Remove this from exposed_symbols,
// so that at the end of the process,
// we can see if there were any
// exposed symbols which did not have
// corresponding defs.
exposed_symbols.remove(&symbol);
}
}
}
}
InvalidCycle(identifiers, _) => {
panic!("TODO gracefully handle potentially attempting to expose invalid cyclic defs {:?}" , identifiers);
}
Builtin(def) => {
// Builtins cannot be exposed in module declarations.
// This should never happen!
debug_assert!(def
.symbols(env.pool)
.iter()
.all(|symbol| !exposed_symbols.contains(symbol)));
}
}
}
let mut aliases = MutMap::default();
for (symbol, alias) in output.aliases {
// Remove this from exposed_symbols,
// so that at the end of the process,
// we can see if there were any
// exposed symbols which did not have
// corresponding defs.
exposed_symbols.remove(&symbol);
aliases.insert(symbol, alias);
}
// By this point, all exposed symbols should have been removed from
// exposed_symbols and added to exposed_vars_by_symbol. If any were
// not, that means they were declared as exposed but there was
// no actual declaration with that name!
for symbol in exposed_symbols {
env.problem(Problem::ExposedButNotDefined(symbol));
// In case this exposed value is referenced by other modules,
// create a decl for it whose implementation is a runtime error.
let mut pattern_vars = SendMap::default();
pattern_vars.insert(symbol, env.var_store.fresh());
let runtime_error = RuntimeError::ExposedButNotDefined(symbol);
let value_def = {
let pattern_id = env.pool.add(Pattern2::Identifier(symbol));
let expr_id = env.pool.add(Expr2::RuntimeError());
ValueDef::NoAnnotation {
pattern_id,
expr_id,
expr_var: env.var_store.fresh(),
}
};
let def = Def::Value(value_def);
declarations.push(Declaration::Declare(def));
}
// Incorporate any remaining output.lookups entries into references.
for symbol in output.references.lookups {
references.insert(symbol);
}
// Incorporate any remaining output.calls entries into references.
for symbol in output.references.calls {
references.insert(symbol);
}
// Gather up all the symbols that were referenced from other modules.
for symbol in env.qualified_lookups.iter() {
references.insert(*symbol);
}
// TODO find captured variables
// for declaration in declarations.iter_mut() {
// match declaration {
// Declare(def) => fix_values_captured_in_closure_def(def, &mut MutSet::default()),
// DeclareRec(defs) => {
// fix_values_captured_in_closure_defs(defs, &mut MutSet::default())
// }
// InvalidCycle(_, _) | Builtin(_) => {}
// }
// }
// TODO this loops over all symbols in the module, we can speed it up by having an
// iterator over all builtin symbols
// TODO move over the builtins
// for symbol in references.iter() {
// if symbol.is_builtin() {
// // this can fail when the symbol is for builtin types, or has no implementation yet
// if let Some(def) = builtins::builtin_defs_map(*symbol, var_store) {
// declarations.push(Declaration::Builtin(def));
// }
// }
// }
Ok(ModuleOutput {
aliases,
rigid_variables,
declarations,
references,
exposed_imports: can_exposed_imports,
problems: vec![], // TODO env.problems,
lookups,
ident_ids: env.ident_ids,
})
}
(Err(runtime_error), _) => Err(runtime_error),
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,63 +0,0 @@
use crate::{
ast_error::{ASTNodeIdWithoutExprId, ASTResult},
mem_pool::pool::Pool,
};
use super::{
def::def2::{def2_to_string, DefId},
expr::{expr2::ExprId, expr2_to_string::expr2_to_string},
header::AppHeader,
};
/// The root of an editor document: the app header plus the pooled ids of
/// every top-level def, in source order.
#[derive(Debug)]
pub struct AST {
    pub header: AppHeader,
    pub def_ids: Vec<DefId>,
}

impl AST {
    /// Insert an already-pooled def at `index`, shifting later defs down.
    pub fn insert_def_at_index(&mut self, new_def_id: DefId, index: usize) {
        self.def_ids.insert(index, new_def_id);
    }

    // TODO print in tree shape, similar to linux tree command
    /// Render every def in order, each followed by a blank line.
    pub fn ast_to_string(&self, pool: &Pool) -> String {
        self.def_ids
            .iter()
            .map(|def_id| format!("{}\n\n", def2_to_string(*def_id, pool)))
            .collect()
    }
}
/// Identifies a node in the AST: either a whole def, or an expression
/// nested somewhere inside one.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum ASTNodeId {
    ADefId(DefId),
    AExprId(ExprId),
}
impl ASTNodeId {
    /// Unwrap this id as an expression id; fails with
    /// `ASTNodeIdWithoutExprId` when it refers to a def instead.
    pub fn to_expr_id(&self) -> ASTResult<ExprId> {
        if let ASTNodeId::AExprId(expr_id) = self {
            Ok(*expr_id)
        } else {
            ASTNodeIdWithoutExprId { ast_node_id: *self }.fail()?
        }
    }

    /// Unwrap this id as a def id; fails with `ASTNodeIdWithoutExprId`
    /// when it refers to an expression instead.
    pub fn to_def_id(&self) -> ASTResult<DefId> {
        if let ASTNodeId::ADefId(def_id) = self {
            Ok(*def_id)
        } else {
            ASTNodeIdWithoutExprId { ast_node_id: *self }.fail()?
        }
    }
}
/// Render any AST node (def or expression) as a debug string.
pub fn ast_node_to_string(node_id: ASTNodeId, pool: &Pool) -> String {
    match node_id {
        ASTNodeId::AExprId(expr_id) => expr2_to_string(expr_id, pool),
        ASTNodeId::ADefId(def_id) => def2_to_string(def_id, pool),
    }
}

View File

@ -1,70 +0,0 @@
use roc_types::subs::VarStore;
use crate::{
lang::core::{def::def::Def, expr::expr2::Expr2},
mem_pool::{pool::Pool, pool_vec::PoolVec},
};
use super::def::def::Declaration;
/// Convert a canonicalized top-level `Declaration` into the matching
/// let-expression whose body is `ret`.
pub(crate) fn decl_to_let(
    pool: &mut Pool,
    var_store: &mut VarStore,
    decl: Declaration,
    ret: Expr2,
) -> Expr2 {
    match decl {
        Declaration::Declare(Def::AnnotationOnly { .. }) => todo!(),
        Declaration::Declare(Def::Value(value_def)) => {
            let def_id = pool.add(value_def);
            let body_id = pool.add(ret);

            Expr2::LetValue {
                def_id,
                body_id,
                body_var: var_store.fresh(),
            }
        }
        Declaration::Declare(Def::Function(function_def)) => {
            let def_id = pool.add(function_def);
            let body_id = pool.add(ret);

            Expr2::LetFunction {
                def_id,
                body_id,
                body_var: var_store.fresh(),
            }
        }
        Declaration::DeclareRec(defs) => {
            // Only function defs are expected in a recursive group here;
            // anything else is a bug upstream.
            let function_defs: Vec<_> = defs
                .into_iter()
                .map(|def| match def {
                    Def::Function(function_def) => function_def,
                    Def::AnnotationOnly { .. } => todo!(),
                    Def::Value(_) => unreachable!(),
                })
                .collect();

            let body_id = pool.add(ret);

            Expr2::LetRec {
                defs: PoolVec::new(function_defs.into_iter(), pool),
                body_var: var_store.fresh(),
                body_id,
            }
        }
        Declaration::InvalidCycle(_entries, _) => {
            // TODO: replace with something from Expr2
            // Expr::RuntimeError(RuntimeError::CircularDef(entries))
            todo!()
        }
        Declaration::Builtin(_) => {
            // Builtins should only be added to top-level decls, not to let-exprs!
            unreachable!()
        }
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,58 +0,0 @@
use roc_module::symbol::IdentId;
use crate::{
lang::core::expr::{expr2::Expr2, expr2_to_string::expr2_to_string},
mem_pool::pool::{NodeId, Pool},
};
// A top level definition, not inside a function. For example: `main = "Hello, world!"`
#[derive(Debug)]
pub enum Def2 {
    // ValueDef example: `main = "Hello, world!"`. identifier -> `main`, expr -> "Hello, world!"
    ValueDef {
        identifier_id: IdentId,
        expr_id: NodeId<Expr2>,
    },
    // An empty def slot (no content yet).
    Blank,
    // A def with comments preceding it; `def_id` points at the wrapped def.
    CommentsBefore {
        comments: String,
        def_id: DefId,
    },
    // A def with comments following it; `def_id` points at the wrapped def.
    CommentsAfter {
        comments: String,
        def_id: DefId,
    },
}
// Index of a `Def2` node in the pool.
pub type DefId = NodeId<Def2>;
/// Render the pooled def `node_id` as a human-readable debug string.
pub fn def2_to_string(node_id: DefId, pool: &Pool) -> String {
    match pool.get(node_id) {
        Def2::ValueDef {
            identifier_id,
            expr_id,
        } => format!(
            "Def2::ValueDef(identifier_id: >>{:?}), expr_id: >>{:?})",
            identifier_id,
            expr2_to_string(*expr_id, pool)
        ),
        Def2::Blank => String::from("Def2::Blank"),
        // Comment wrappers render as just their comment text.
        Def2::CommentsBefore { comments, .. } => comments.clone(),
        Def2::CommentsAfter { comments, .. } => comments.clone(),
    }
}

View File

@ -1,215 +0,0 @@
use bumpalo::collections::Vec as BumpVec;
use bumpalo::Bump;
use roc_module::ident::{Ident, IdentStr};
use roc_parse::{ast::CommentOrNewline, parser::SyntaxError};
use roc_region::all::Region;
use crate::lang::{core::expr::expr_to_expr2::loc_expr_to_expr2, env::Env, scope::Scope};
use super::def2::Def2;
/// Collapse the comments in `spaces` into a single string, skipping newlines.
/// Returns `None` when the slice is empty or contains only newlines.
fn spaces_to_comments(spaces: &[CommentOrNewline]) -> Option<String> {
    if spaces.is_empty() || all_newlines(spaces) {
        return None;
    }

    let mut collected = String::new();

    for comment in spaces.iter().filter(|c_or_nl| !c_or_nl.is_newline()) {
        collected.push_str(&comment.to_string_repr());
    }

    Some(collected)
}
/// Lower every top-level parsed def into a `Def2`, attaching any surrounding
/// comments as `CommentsBefore`/`CommentsAfter` wrapper nodes.
pub fn toplevel_defs_to_defs2<'a>(
    arena: &'a Bump,
    env: &mut Env<'a>,
    scope: &mut Scope,
    parsed_defs: roc_parse::ast::Defs<'a>,
    region: Region,
) -> Vec<Def2> {
    let mut defs2 = Vec::with_capacity(parsed_defs.tags.len());

    for (index, parsed_def) in parsed_defs.defs().enumerate() {
        let base_def = match parsed_def {
            Err(roc_parse::ast::ValueDef::Body(&loc_pattern, &loc_expr)) => {
                let (expr2, _output) = loc_expr_to_expr2(arena, loc_expr, env, scope, region);
                let expr_id = env.pool.add(expr2);

                use roc_parse::ast::Pattern::*;

                match loc_pattern.value {
                    Identifier(id_str) => {
                        let identifier_id =
                            env.ident_ids.get_or_insert(&Ident(IdentStr::from(id_str)));

                        // TODO support with annotation
                        Def2::ValueDef {
                            identifier_id,
                            expr_id,
                        }
                    }
                    other => {
                        unimplemented!(
                            "I don't yet know how to convert the pattern {:?} into an expr2",
                            other
                        )
                    }
                }
            }
            other => {
                unimplemented!(
                    "I don't know how to make an expr2 from this def yet: {:?}",
                    other
                )
            }
        };

        let spaces_before = &parsed_defs.spaces[parsed_defs.space_before[index].indices()];
        let spaces_after = &parsed_defs.spaces[parsed_defs.space_after[index].indices()];

        // Wrap with leading comments first, then trailing ones, so any
        // CommentsAfter node ends up outermost.
        let mut def2 = base_def;

        if let Some(comments) = spaces_to_comments(spaces_before) {
            let def_id = env.pool.add(def2);
            def2 = Def2::CommentsBefore { comments, def_id };
        }

        if let Some(comments) = spaces_to_comments(spaces_after) {
            let def_id = env.pool.add(def2);
            def2 = Def2::CommentsAfter { comments, def_id };
        }

        defs2.push(def2);
    }

    defs2
}
/// Lower a parsed list of located defs into `Def2` nodes, in order.
pub fn defs_to_defs2<'a>(
    arena: &'a Bump,
    env: &mut Env<'a>,
    scope: &mut Scope,
    parsed_defs: &'a BumpVec<roc_region::all::Loc<roc_parse::ast::Def<'a>>>,
    region: Region,
) -> Vec<Def2> {
    let mut defs2 = Vec::with_capacity(parsed_defs.len());

    for loc_def in parsed_defs.iter() {
        defs2.push(def_to_def2(arena, env, scope, &loc_def.value, region));
    }

    defs2
}
pub fn def_to_def2<'a>(
arena: &'a Bump,
env: &mut Env<'a>,
scope: &mut Scope,
parsed_def: &'a roc_parse::ast::Def<'a>,
region: Region,
) -> Def2 {
use roc_parse::ast::Def::*;
//dbg!(parsed_def);
match parsed_def {
SpaceBefore(inner_def, comments) => {
// filter comments
if !comments.is_empty() && !all_newlines(comments) {
let inner_def = def_to_def2(arena, env, scope, inner_def, region);
let inner_def_id = env.pool.add(inner_def);
let mut all_comments_str = String::new();
for comment in comments.iter().filter(|c_or_nl| !c_or_nl.is_newline()) {
all_comments_str.push_str(&comment.to_string_repr());
}
Def2::CommentsBefore {
comments: all_comments_str,
def_id: inner_def_id,
}
} else {
def_to_def2(arena, env, scope, inner_def, region)
}
}
SpaceAfter(inner_def, comments) => {
// filter comments
if !comments.is_empty() && !all_newlines(comments) {
let inner_def = def_to_def2(arena, env, scope, inner_def, region);
let inner_def_id = env.pool.add(inner_def);
let mut all_comments_str = String::new();
for comment in comments.iter().filter(|c_or_nl| !c_or_nl.is_newline()) {
all_comments_str.push_str(&comment.to_string_repr());
}
Def2::CommentsAfter {
def_id: inner_def_id,
comments: all_comments_str,
}
} else {
def_to_def2(arena, env, scope, inner_def, region)
}
}
Value(roc_parse::ast::ValueDef::Body(&loc_pattern, &loc_expr)) => {
let expr2 = loc_expr_to_expr2(arena, loc_expr, env, scope, region).0;
let expr_id = env.pool.add(expr2);
use roc_parse::ast::Pattern::*;
match loc_pattern.value {
Identifier(id_str) => {
let identifier_id = env.ident_ids.get_or_insert(&Ident(IdentStr::from(id_str)));
// TODO support with annotation
Def2::ValueDef {
identifier_id,
expr_id,
}
}
other => {
unimplemented!(
"I don't yet know how to convert the pattern {:?} into an expr2",
other
)
}
}
}
other => {
unimplemented!(
"I don't know how to make an expr2 from this def yet: {:?}",
other
)
}
}
}
fn all_newlines(comments: &[CommentOrNewline]) -> bool {
comments
.iter()
.all(|com_or_newline| com_or_newline.is_newline())
}
/// Parse `input` (trimmed) as a sequence of defs and lower each to a `Def2`.
/// Parse failures are returned untouched as `SyntaxError`.
pub fn str_to_def2<'a>(
    arena: &'a Bump,
    input: &'a str,
    env: &mut Env<'a>,
    scope: &mut Scope,
    region: Region,
) -> Result<Vec<Def2>, SyntaxError<'a>> {
    roc_parse::test_helpers::parse_defs_with(arena, input.trim())
        .map(|vec_loc_def| defs_to_defs2(arena, env, scope, arena.alloc(vec_loc_def), region))
}

View File

@ -1,3 +0,0 @@
pub mod def;
pub mod def2;
pub mod def_to_def2;

View File

@ -1,228 +0,0 @@
use arrayvec::ArrayString;
use roc_types::subs::Variable;
use crate::{
lang::core::{fun_def::FunctionDef, pattern::Pattern2, val_def::ValueDef},
mem_pool::{pool::NodeId, pool_str::PoolStr, pool_vec::PoolVec},
};
use roc_can::expr::Recursive;
use roc_module::called_via::CalledVia;
use roc_module::low_level::LowLevel;
use roc_module::symbol::Symbol;
use super::record_field::RecordField;
// Maximum length of a string literal stored inline in `Expr2::SmallStr`
// (longer strings go through the pool as `Expr2::Str`).
pub const ARR_STRING_CAPACITY: usize = 24;
pub type ArrString = ArrayString<ARR_STRING_CAPACITY>;
// TODO make the inner types private?
// Index of an `Expr2` node in the pool.
pub type ExprId = NodeId<Expr2>;
/// An Expr that fits in 32B.
/// It has a 1B discriminant and variants which hold payloads of at most 31B.
#[derive(Debug)]
pub enum Expr2 {
    /// A number literal without a dot
    SmallInt {
        number: IntVal, // 16B
        var: Variable, // 4B
        style: IntStyle, // 1B
        text: PoolStr, // 8B
    },
    // TODO(rvcas): rename this eventually
    /// A large (over 64-bit) negative number literal without a dot.
    /// This variant can't use IntVal because if IntVal stored 128-bit
    /// integers, it would be 32B on its own because of alignment.
    I128 {
        number: i128, // 16B
        var: Variable, // 4B
        style: IntStyle, // 1B
        text: PoolStr, // 8B
    },
    // TODO(rvcas): rename this eventually
    /// A large (over 64-bit) nonnegative number literal without a dot
    /// This variant can't use IntVal because if IntVal stored 128-bit
    /// integers, it would be 32B on its own because of alignment.
    U128 {
        number: u128, // 16B
        var: Variable, // 4B
        style: IntStyle, // 1B
        text: PoolStr, // 8B
    },
    /// A floating-point literal (with a dot)
    Float {
        number: FloatVal, // 16B
        var: Variable, // 4B
        text: PoolStr, // 8B
    },
    /// string literals of length up to 30B
    SmallStr(ArrString), // 31B
    /// string literals of length 31B or more
    Str(PoolStr), // 8B
    // Lookups
    /// Reference to a name in scope
    Var(Symbol), // 8B
    /// A lookup that did not resolve; keeps the raw name for error reporting
    InvalidLookup(PoolStr), // 8B
    /// List literal, e.g. `[1, 2, 3]`
    List {
        elem_var: Variable, // 4B
        elems: PoolVec<ExprId>, // 8B
    },
    /// `if` expression: (condition, then) pairs plus a final else branch
    If {
        cond_var: Variable, // 4B
        expr_var: Variable, // 4B
        branches: PoolVec<(ExprId, ExprId)>, // 8B
        final_else: ExprId, // 4B
    },
    /// `when` (pattern match) expression
    When {
        cond_var: Variable, // 4B
        expr_var: Variable, // 4B
        branches: PoolVec<WhenBranch>, // 8B
        cond: ExprId, // 4B
    },
    /// A group of recursive function defs followed by a body
    LetRec {
        defs: PoolVec<FunctionDef>, // 8B
        body_var: Variable, // 8B
        body_id: ExprId, // 4B
    },
    /// A single (non-recursive) function def followed by a body
    LetFunction {
        def_id: NodeId<FunctionDef>, // 4B
        body_var: Variable, // 8B
        body_id: ExprId, // 4B
    },
    /// A single value def followed by a body
    LetValue {
        def_id: NodeId<ValueDef>, // 4B
        body_id: ExprId, // 4B
        body_var: Variable, // 4B
    },
    /// Function application
    Call {
        args: PoolVec<(Variable, ExprId)>, // 8B
        expr_id: ExprId, // 4B
        expr_var: Variable, // 4B
        fn_var: Variable, // 4B
        closure_var: Variable, // 4B
        called_via: CalledVia, // 2B
    },
    /// Invocation of a low-level (builtin) operation
    RunLowLevel {
        op: LowLevel, // 1B
        args: PoolVec<(Variable, ExprId)>, // 8B
        ret_var: Variable, // 4B
    },
    /// Anonymous function; payload beyond 32B overflows into `ClosureExtra`
    Closure {
        args: PoolVec<(Variable, NodeId<Pattern2>)>, // 8B
        uniq_symbol: Symbol, // 8B This is a globally unique symbol for the closure
        body_id: ExprId, // 4B
        function_type: Variable, // 4B
        recursive: Recursive, // 1B
        extra: NodeId<ClosureExtra>, // 4B
    },
    // Product Types
    /// Record literal, e.g. `{ x: 1, y: 2 }`
    Record {
        record_var: Variable, // 4B
        fields: PoolVec<RecordField>, // 8B
    },
    /// Empty record constant
    EmptyRecord,
    /// Look up exactly one field on a record, e.g. (expr).foo.
    Access {
        field: PoolStr, // 4B
        expr: ExprId, // 4B
        record_var: Variable, // 4B
        ext_var: Variable, // 4B
        field_var: Variable, // 4B
    },
    /// field accessor as a function, e.g. (.foo) expr
    Accessor {
        function_var: Variable, // 4B
        closure_var: Variable, // 4B
        field: PoolStr, // 4B
        record_var: Variable, // 4B
        ext_var: Variable, // 4B
        field_var: Variable, // 4B
    },
    /// Record update; `symbol` is the record being updated
    Update {
        symbol: Symbol, // 8B
        updates: PoolVec<RecordField>, // 8B
        record_var: Variable, // 4B
        ext_var: Variable, // 4B
    },
    // Sum Types
    /// Tag application, possibly with payload arguments
    Tag {
        name: PoolStr, // 4B
        variant_var: Variable, // 4B
        ext_var: Variable, // 4B
        arguments: PoolVec<(Variable, ExprId)>, // 8B
    },
    Blank, // Rendered as empty box in editor
    // Compiles, but will crash if reached
    RuntimeError(/* TODO make a version of RuntimeError that fits in 15B */),
}
/// Errors that can occur while allocating AST nodes.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Problem {
    RanOutOfNodeIds,
}
/// Result alias for node-allocation operations.
pub type Res<T> = Result<T, Problem>;
/// The textual base an integer literal was written in.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum IntStyle {
    Decimal,
    Octal,
    Hex,
    Binary,
}
impl IntStyle {
    /// Map a parser numeric base onto the matching literal style.
    pub fn from_base(base: roc_parse::ast::Base) -> Self {
        use roc_parse::ast::Base::*;

        match base {
            Decimal => Self::Decimal,
            Octal => Self::Octal,
            Hex => Self::Hex,
            Binary => Self::Binary,
        }
    }
}
/// An integer literal's value, tagged with its bit-width and signedness.
/// Payloads are at most 8B so the whole enum stays 16B (see `size_of_intval`).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum IntVal {
    I64(i64),
    U64(u64),
    I32(i32),
    U32(u32),
    I16(i16),
    U16(u16),
    I8(i8),
    U8(u8),
}
// Guards the 16B budget that keeps `Expr2::SmallInt` within the 32B Expr2 size.
#[test]
fn size_of_intval() {
    assert_eq!(std::mem::size_of::<IntVal>(), 16);
}
/// A float literal's value, tagged with its precision.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum FloatVal {
    F64(f64),
    F32(f32),
}
/// One branch of a `when` expression: one or more patterns, a body,
/// and an optional guard expression.
#[derive(Debug)]
pub struct WhenBranch {
    pub patterns: PoolVec<Pattern2>, // 8B
    pub body: ExprId, // 4B
    pub guard: Option<ExprId>, // 4B
}
/// This is overflow data from a Closure variant, which needs to store
/// more than 32B of total data
#[derive(Debug)]
pub struct ClosureExtra {
    pub return_type: Variable, // 4B
    // Values the closure captures from enclosing scopes, with their type vars
    pub captured_symbols: PoolVec<(Symbol, Variable)>, // 8B
    pub closure_type: Variable, // 4B
    pub closure_ext_var: Variable, // 4B
}

View File

@ -1,162 +0,0 @@
use crate::{
lang::core::{expr::record_field::RecordField, val_def::value_def_to_string},
mem_pool::pool::Pool,
};
use super::expr2::{Expr2, ExprId};
use roc_types::subs::Variable;
/// Render the pooled expression `node_id` as a human-readable debug string.
pub fn expr2_to_string(node_id: ExprId, pool: &Pool) -> String {
    let mut rendered = String::new();

    expr2_to_string_helper(pool.get(node_id), 0, pool, &mut rendered);

    rendered
}
/// Return `indent_level` spaces (one space per level of nesting).
fn get_spacing(indent_level: usize) -> String {
    // `str::repeat` sizes the String in one allocation, instead of
    // collecting an intermediate Vec<&str> and joining it as before.
    " ".repeat(indent_level)
}
/// Recursive worker for `expr2_to_string`: writes `expr2` onto `out_string`,
/// indented by `indent_level` (one space per level), descending into record
/// fields and list elements.
fn expr2_to_string_helper(
    expr2: &Expr2,
    indent_level: usize,
    pool: &Pool,
    out_string: &mut String,
) {
    // Every node starts on a freshly indented column.
    out_string.push_str(&get_spacing(indent_level));
    match expr2 {
        Expr2::SmallStr(arr_string) => out_string.push_str(&format!(
            "{}{}{}",
            "SmallStr(\"",
            arr_string.as_str(),
            "\")",
        )),
        Expr2::Str(pool_str) => {
            out_string.push_str(&format!("{}{}{}", "Str(\"", pool_str.as_str(pool), "\")",))
        }
        Expr2::Blank => out_string.push_str("Blank"),
        Expr2::EmptyRecord => out_string.push_str("EmptyRecord"),
        Expr2::Record { record_var, fields } => {
            out_string.push_str("Record:\n");
            out_string.push_str(&var_to_string(record_var, indent_level + 1));
            out_string.push_str(&format!("{}fields: [\n", get_spacing(indent_level + 1)));
            // Comma-separate the fields; only the first one skips the ", ".
            let mut first_child = true;
            for field in fields.iter(pool) {
                if !first_child {
                    out_string.push_str(", ")
                } else {
                    first_child = false;
                }
                match field {
                    RecordField::InvalidLabelOnly(pool_str, var) => {
                        out_string.push_str(&format!(
                            "{}({}, Var({:?})",
                            get_spacing(indent_level + 2),
                            pool_str.as_str(pool),
                            var,
                        ));
                    }
                    RecordField::LabelOnly(pool_str, var, symbol) => {
                        out_string.push_str(&format!(
                            "{}({}, Var({:?}), Symbol({:?})",
                            get_spacing(indent_level + 2),
                            pool_str.as_str(pool),
                            var,
                            symbol
                        ));
                    }
                    RecordField::LabeledValue(pool_str, var, val_node_id) => {
                        out_string.push_str(&format!(
                            "{}({}, Var({:?}), Expr2(\n",
                            get_spacing(indent_level + 2),
                            pool_str.as_str(pool),
                            var,
                        ));
                        // Recurse into the field's value, one level deeper.
                        let val_expr2 = pool.get(*val_node_id);
                        expr2_to_string_helper(val_expr2, indent_level + 3, pool, out_string);
                        out_string.push_str(&format!("{})\n", get_spacing(indent_level + 2)));
                    }
                }
            }
            out_string.push_str(&format!("{}]\n", get_spacing(indent_level + 1)));
        }
        Expr2::List { elem_var, elems } => {
            out_string.push_str("List:\n");
            out_string.push_str(&var_to_string(elem_var, indent_level + 1));
            out_string.push_str(&format!("{}elems: [\n", get_spacing(indent_level + 1)));
            let mut first_elt = true;
            for elem_expr2_id in elems.iter(pool) {
                if !first_elt {
                    out_string.push_str(", ")
                } else {
                    first_elt = false;
                }
                let elem_expr2 = pool.get(*elem_expr2_id);
                expr2_to_string_helper(elem_expr2, indent_level + 2, pool, out_string)
            }
            out_string.push_str(&format!("{}]\n", get_spacing(indent_level + 1)));
        }
        Expr2::InvalidLookup(pool_str) => {
            out_string.push_str(&format!("InvalidLookup({})", pool_str.as_str(pool)));
        }
        Expr2::SmallInt { text, .. } => {
            out_string.push_str(&format!("SmallInt({})", text.as_str(pool)));
        }
        Expr2::LetValue {
            def_id, body_id, ..
        } => {
            out_string.push_str(&format!(
                "LetValue(def_id: >>{:?}), body_id: >>{:?})",
                value_def_to_string(pool.get(*def_id), pool),
                pool.get(*body_id)
            ));
        }
        Expr2::Call { .. } => {
            out_string.push_str(&format!("Call({:?})", expr2,));
        }
        Expr2::Closure { args, .. } => {
            out_string.push_str("Closure:\n");
            out_string.push_str(&format!("{}args: [\n", get_spacing(indent_level + 1)));
            for (_, pattern_id) in args.iter(pool) {
                let arg_pattern2 = pool.get(*pattern_id);
                out_string.push_str(&format!(
                    "{}{:?}\n",
                    get_spacing(indent_level + 2),
                    arg_pattern2
                ));
            }
        }
        &Expr2::Var { .. } => {
            out_string.push_str(&format!("{:?}", expr2,));
        }
        Expr2::RuntimeError { .. } => {
            out_string.push_str("RuntimeError\n");
        }
        // Remaining variants don't have a renderer yet.
        other => todo!("Implement for {:?}", other),
    }
    out_string.push('\n');
}
/// Render a type variable as an indented `Var(..)` line.
fn var_to_string(some_var: &Variable, indent_level: usize) -> String {
    let mut line = get_spacing(indent_level + 1);
    line.push_str(&format!("Var({:?})\n", some_var));
    line
}

View File

@ -1,707 +0,0 @@
use bumpalo::Bump;
use roc_can::expr::{IntValue, Recursive};
use roc_can::num::{
finish_parsing_base, finish_parsing_float, finish_parsing_num, ParsedNumResult,
};
use roc_can::operator::desugar_expr;
use roc_collections::all::MutSet;
use roc_module::symbol::Symbol;
use roc_parse::{ast::Expr, pattern::PatternType};
use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Loc, Region};
use super::{expr2::Expr2, output::Output};
use crate::canonicalization::canonicalize::{
canonicalize_fields, canonicalize_lookup, canonicalize_when_branch, CanonicalizeRecordProblem,
};
use crate::lang::core::declaration::decl_to_let;
use crate::lang::core::def::def::{canonicalize_defs, sort_can_defs};
use crate::lang::core::expr::expr2::ClosureExtra;
use crate::lang::core::pattern::to_pattern2;
use crate::lang::core::str::flatten_str_literal;
use crate::mem_pool::shallow_clone::ShallowClone;
use crate::{
lang::{
core::expr::expr2::{ExprId, FloatVal, IntStyle, IntVal},
env::Env,
scope::Scope,
},
mem_pool::{pool_str::PoolStr, pool_vec::PoolVec},
};
/// Desugar a parsed, located expression and canonicalize it into an `Expr2`
/// plus the `Output` gathered during canonicalization.
pub fn loc_expr_to_expr2<'a>(
    arena: &'a Bump,
    loc_expr: Loc<Expr<'a>>,
    env: &mut Env<'a>,
    scope: &mut Scope,
    region: Region,
) -> (Expr2, Output) {
    let desugared = desugar_expr(arena, arena.alloc(loc_expr));

    expr_to_expr2(env, scope, arena.alloc(desugared.value), region)
}
const ZERO: Region = Region::zero();
pub fn expr_to_expr2<'a>(
env: &mut Env<'a>,
scope: &mut Scope,
parse_expr: &'a roc_parse::ast::Expr<'a>,
region: Region,
) -> (Expr2, self::Output) {
use roc_parse::ast::Expr::*;
//dbg!("{:?}", parse_expr);
match parse_expr {
Float(string) => {
match finish_parsing_float(string) {
Ok((string_without_suffix, float, _bound)) => {
let expr = Expr2::Float {
number: FloatVal::F64(float),
var: env.var_store.fresh(),
text: PoolStr::new(string_without_suffix, env.pool),
};
(expr, Output::default())
}
Err((raw, error)) => {
// emit runtime error
let runtime_error = RuntimeError::InvalidFloat(error, ZERO, raw.into());
env.problem(Problem::RuntimeError(runtime_error));
//
// Expr::RuntimeError(runtime_error)
todo!()
}
}
}
Num(string) => {
match finish_parsing_num(string) {
Ok((
parsed,
ParsedNumResult::UnknownNum(int, _) | ParsedNumResult::Int(int, _),
)) => {
let expr = Expr2::SmallInt {
number: IntVal::I64(match int {
IntValue::U128(_) => todo!(),
IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
}),
var: env.var_store.fresh(),
// TODO non-hardcode
style: IntStyle::Decimal,
text: PoolStr::new(parsed, env.pool),
};
(expr, Output::default())
}
Ok((parsed, ParsedNumResult::Float(float, _))) => {
let expr = Expr2::Float {
number: FloatVal::F64(float),
var: env.var_store.fresh(),
text: PoolStr::new(parsed, env.pool),
};
(expr, Output::default())
}
Err((raw, error)) => {
// emit runtime error
let runtime_error = RuntimeError::InvalidInt(
error,
roc_parse::ast::Base::Decimal,
ZERO,
raw.into(),
);
env.problem(Problem::RuntimeError(runtime_error));
//
// Expr::RuntimeError(runtime_error)
todo!()
}
}
}
NonBase10Int {
string,
base,
is_negative,
} => {
match finish_parsing_base(string, *base, *is_negative) {
Ok((int, _bound)) => {
let expr = Expr2::SmallInt {
number: IntVal::I64(match int {
IntValue::U128(_) => todo!(),
IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
}),
var: env.var_store.fresh(),
// TODO non-hardcode
style: IntStyle::from_base(*base),
text: PoolStr::new(string, env.pool),
};
(expr, Output::default())
}
Err((raw, error)) => {
// emit runtime error
let runtime_error = RuntimeError::InvalidInt(error, *base, ZERO, raw.into());
env.problem(Problem::RuntimeError(runtime_error));
//
// Expr::RuntimeError(runtime_error)
todo!()
}
}
}
Str(literal) => flatten_str_literal(env, scope, literal),
List(items) => {
let mut output = Output::default();
let output_ref = &mut output;
let elems: PoolVec<ExprId> = PoolVec::with_capacity(items.len() as u32, env.pool);
for (node_id, item) in elems.iter_node_ids().zip(items.iter()) {
let (expr, sub_output) = expr_to_expr2(env, scope, &item.value, item.region);
output_ref.union(sub_output);
let expr_id = env.pool.add(expr);
env.pool[node_id] = expr_id;
}
let expr = Expr2::List {
elem_var: env.var_store.fresh(),
elems,
};
(expr, output)
}
Tag(tag) => {
// a tag without any arguments
(
Expr2::Tag {
name: PoolStr::new(tag, env.pool),
variant_var: env.var_store.fresh(),
ext_var: env.var_store.fresh(),
arguments: PoolVec::empty(env.pool),
},
Output::default(),
)
}
RecordUpdate {
fields,
update: loc_update,
} => {
let (can_update, update_out) =
expr_to_expr2(env, scope, &loc_update.value, loc_update.region);
if let Expr2::Var(symbol) = &can_update {
match canonicalize_fields(env, scope, fields.items) {
Ok((can_fields, mut output)) => {
output.references.union_mut(update_out.references);
let answer = Expr2::Update {
record_var: env.var_store.fresh(),
ext_var: env.var_store.fresh(),
symbol: *symbol,
updates: can_fields,
};
(answer, output)
}
Err(CanonicalizeRecordProblem::InvalidOptionalValue {
field_name: _,
field_region: _,
record_region: _,
}) => {
// let runtime_error = roc_problem::can::RuntimeError::InvalidOptionalValue {
// field_name,
// field_region,
// record_region,
// };
//
// env.problem(Problem::RuntimeError(runtime_error));
todo!()
}
}
} else {
// only (optionally qualified) variables can be updated, not arbitrary expressions
// let error = roc_problem::can::RuntimeError::InvalidRecordUpdate {
// region: can_update.region,
// };
//
// let answer = Expr::RuntimeError(error.clone());
//
// env.problems.push(Problem::RuntimeError(error));
//
// (answer, Output::default())
todo!("{:?}", &can_update)
}
}
Record(fields) => {
if fields.is_empty() {
(Expr2::EmptyRecord, Output::default())
} else {
match canonicalize_fields(env, scope, fields.items) {
Ok((can_fields, output)) => (
Expr2::Record {
record_var: env.var_store.fresh(),
fields: can_fields,
},
output,
),
Err(CanonicalizeRecordProblem::InvalidOptionalValue {
field_name: _,
field_region: _,
record_region: _,
}) => {
// let runtime_error = RuntimeError::InvalidOptionalValue {
// field_name,
// field_region,
// record_region,
// };
//
// env.problem(runtime_error);
// (
// Expr::RuntimeError(
// ),
// Output::default(),
//
// )
todo!()
}
}
}
}
Access(record_expr, field) => {
// TODO
let region = ZERO;
let (record_expr_id, output) = to_expr_id(env, scope, record_expr, region);
(
Expr2::Access {
record_var: env.var_store.fresh(),
field_var: env.var_store.fresh(),
ext_var: env.var_store.fresh(),
expr: record_expr_id,
field: PoolStr::new(field, env.pool),
},
output,
)
}
AccessorFunction(field) => (
Expr2::Accessor {
function_var: env.var_store.fresh(),
record_var: env.var_store.fresh(),
ext_var: env.var_store.fresh(),
closure_var: env.var_store.fresh(),
field_var: env.var_store.fresh(),
field: PoolStr::new(field, env.pool),
},
Output::default(),
),
If(branches, final_else) => {
let mut new_branches = Vec::with_capacity(branches.len());
let mut output = Output::default();
for (condition, then_branch) in branches.iter() {
let (cond, cond_output) =
expr_to_expr2(env, scope, &condition.value, condition.region);
let (then_expr, then_output) =
expr_to_expr2(env, scope, &then_branch.value, then_branch.region);
output.references.union_mut(cond_output.references);
output.references.union_mut(then_output.references);
new_branches.push((env.pool.add(cond), env.pool.add(then_expr)));
}
let (else_expr, else_output) =
expr_to_expr2(env, scope, &final_else.value, final_else.region);
output.references.union_mut(else_output.references);
let expr = Expr2::If {
cond_var: env.var_store.fresh(),
expr_var: env.var_store.fresh(),
branches: PoolVec::new(new_branches.into_iter(), env.pool),
final_else: env.pool.add(else_expr),
};
(expr, output)
}
When(loc_cond, branches) => {
// Infer the condition expression's type.
let cond_var = env.var_store.fresh();
let (can_cond, mut output) =
expr_to_expr2(env, scope, &loc_cond.value, loc_cond.region);
// the condition can never be a tail-call
output.tail_call = None;
let can_branches = PoolVec::with_capacity(branches.len() as u32, env.pool);
for (node_id, branch) in can_branches.iter_node_ids().zip(branches.iter()) {
let (can_when_branch, branch_references) =
canonicalize_when_branch(env, scope, *branch, &mut output);
output.references.union_mut(branch_references);
env.pool[node_id] = can_when_branch;
}
// A "when" with no branches is a runtime error, but it will mess things up
// if code gen mistakenly thinks this is a tail call just because its condition
// happened to be one. (The condition gave us our initial output value.)
if branches.is_empty() {
output.tail_call = None;
}
// Incorporate all three expressions into a combined Output value.
let expr = Expr2::When {
expr_var: env.var_store.fresh(),
cond_var,
cond: env.pool.add(can_cond),
branches: can_branches,
};
(expr, output)
}
Closure(loc_arg_patterns, loc_body_expr) => {
// The globally unique symbol that will refer to this closure once it gets converted
// into a top-level procedure for code gen.
//
// In the Foo module, this will look something like Foo.$1 or Foo.$2.
let symbol = env
.closure_name_symbol
.unwrap_or_else(|| env.gen_unique_symbol());
env.closure_name_symbol = None;
// The body expression gets a new scope for canonicalization.
// Shadow `scope` to make sure we don't accidentally use the original one for the
// rest of this block, but keep the original around for later diffing.
let original_scope = scope;
let mut scope = original_scope.shallow_clone();
let can_args = PoolVec::with_capacity(loc_arg_patterns.len() as u32, env.pool);
let mut output = Output::default();
let mut bound_by_argument_patterns = MutSet::default();
for (node_id, loc_pattern) in can_args.iter_node_ids().zip(loc_arg_patterns.iter()) {
let (new_output, can_arg) = to_pattern2(
env,
&mut scope,
roc_parse::pattern::PatternType::FunctionArg,
&loc_pattern.value,
loc_pattern.region,
);
bound_by_argument_patterns
.extend(new_output.references.bound_symbols.iter().copied());
output.union(new_output);
let pattern_id = env.add(can_arg, loc_pattern.region);
env.pool[node_id] = (env.var_store.fresh(), pattern_id);
}
let (body_expr, new_output) =
expr_to_expr2(env, &mut scope, &loc_body_expr.value, loc_body_expr.region);
let mut captured_symbols: MutSet<Symbol> =
new_output.references.lookups.iter().copied().collect();
// filter out the closure's name itself
captured_symbols.remove(&symbol);
// symbols bound either in this pattern or deeper down are not captured!
captured_symbols.retain(|s| !new_output.references.bound_symbols.contains(s));
captured_symbols.retain(|s| !bound_by_argument_patterns.contains(s));
// filter out top-level symbols
// those will be globally available, and don't need to be captured
captured_symbols.retain(|s| !env.top_level_symbols.contains(s));
// filter out imported symbols
// those will be globally available, and don't need to be captured
captured_symbols.retain(|s| s.module_id() == env.home);
// TODO any Closure that has an empty `captured_symbols` list could be excluded!
output.union(new_output);
// filter out aliases
captured_symbols.retain(|s| !output.references.referenced_aliases.contains(s));
// filter out functions that don't close over anything
captured_symbols.retain(|s| !output.non_closures.contains(s));
// Now that we've collected all the references, check to see if any of the args we defined
// went unreferenced. If any did, report them as unused arguments.
for (sub_symbol, region) in scope.symbols() {
if !original_scope.contains_symbol(sub_symbol) {
if !output.references.has_lookup(sub_symbol) {
// The body never referenced this argument we declared. It's an unused argument!
env.problem(Problem::UnusedArgument(symbol, sub_symbol, region));
}
// We shouldn't ultimately count arguments as referenced locals. Otherwise,
// we end up with weird conclusions like the expression (\x -> x + 1)
// references the (nonexistent) local variable x!
output.references.lookups.remove(&sub_symbol);
}
}
env.register_closure(symbol, output.references.clone());
let mut captured_symbols: Vec<_> = captured_symbols
.into_iter()
.map(|s| (s, env.var_store.fresh()))
.collect();
// sort symbols, so we know the order in which they're stored in the closure record
captured_symbols.sort();
// store that this function doesn't capture anything. It will be promoted to a
// top-level function, and does not need to be captured by other surrounding functions.
if captured_symbols.is_empty() {
output.non_closures.insert(symbol);
}
let captured_symbols = PoolVec::new(captured_symbols.into_iter(), env.pool);
let extra = ClosureExtra {
return_type: env.var_store.fresh(), // 4B
captured_symbols, // 8B
closure_type: env.var_store.fresh(), // 4B
closure_ext_var: env.var_store.fresh(), // 4B
};
(
Expr2::Closure {
function_type: env.var_store.fresh(),
uniq_symbol: symbol,
recursive: Recursive::NotRecursive,
args: can_args,
body_id: env.add(body_expr, loc_body_expr.region),
extra: env.pool.add(extra),
},
output,
)
}
Apply(loc_fn, loc_args, application_style) => {
// The expression that evaluates to the function being called, e.g. `foo` in
// (foo) bar baz
let fn_region = loc_fn.region;
// Canonicalize the function expression and its arguments
let (fn_expr, mut output) = expr_to_expr2(env, scope, &loc_fn.value, fn_region);
// The function's return type
let args = PoolVec::with_capacity(loc_args.len() as u32, env.pool);
for (node_id, loc_arg) in args.iter_node_ids().zip(loc_args.iter()) {
let (arg_expr_id, arg_out) = to_expr_id(env, scope, &loc_arg.value, loc_arg.region);
env.pool[node_id] = (env.var_store.fresh(), arg_expr_id);
output.references.union_mut(arg_out.references);
}
// Default: We're not tail-calling a symbol (by name), we're tail-calling a function value.
output.tail_call = None;
let expr = match fn_expr {
Expr2::Var(ref symbol) => {
output.references.calls.insert(*symbol);
// we're tail-calling a symbol by name, check if it's the tail-callable symbol
output.tail_call = match &env.tailcallable_symbol {
Some(tc_sym) if *tc_sym == *symbol => Some(*symbol),
Some(_) | None => None,
};
// IDEA: Expr2::CallByName?
let fn_expr_id = env.add(fn_expr, fn_region);
Expr2::Call {
args,
expr_id: fn_expr_id,
expr_var: env.var_store.fresh(),
fn_var: env.var_store.fresh(),
closure_var: env.var_store.fresh(),
called_via: *application_style,
}
}
Expr2::RuntimeError() => {
// We can't call a runtime error; bail out by propagating it!
return (fn_expr, output);
}
Expr2::Tag {
variant_var,
ext_var,
name,
..
} => Expr2::Tag {
variant_var,
ext_var,
name,
arguments: args,
},
_ => {
// This could be something like ((if True then fn1 else fn2) arg1 arg2).
let fn_expr_id = env.add(fn_expr, fn_region);
Expr2::Call {
args,
expr_id: fn_expr_id,
expr_var: env.var_store.fresh(),
fn_var: env.var_store.fresh(),
closure_var: env.var_store.fresh(),
called_via: *application_style,
}
}
};
(expr, output)
}
Defs(loc_defs, loc_ret) => {
let (unsorted, mut scope, defs_output, symbols_introduced) = canonicalize_defs(
env,
Output::default(),
scope,
loc_defs,
PatternType::DefExpr,
);
// The def as a whole is a tail call iff its return expression is a tail call.
// Use its output as a starting point because its tail_call already has the right answer!
let (ret_expr, mut output) =
expr_to_expr2(env, &mut scope, &loc_ret.value, loc_ret.region);
output
.introduced_variables
.union(&defs_output.introduced_variables);
output.references.union_mut(defs_output.references);
// Now that we've collected all the references, check to see if any of the new idents
// we defined went unused by the return expression. If any were unused, report it.
for (symbol, region) in symbols_introduced {
if !output.references.has_lookup(symbol) {
env.problem(Problem::UnusedDef(symbol, region));
}
}
let (can_defs, output) = sort_can_defs(env, unsorted, output);
match can_defs {
Ok(decls) => {
let mut expr = ret_expr;
for declaration in decls.into_iter().rev() {
expr = decl_to_let(env.pool, env.var_store, declaration, expr);
}
(expr, output)
}
Err(_err) => {
// TODO: fix this to be something from Expr2
// (RuntimeError(err), output)
todo!()
}
}
}
PrecedenceConflict { .. } => {
// use roc_problem::can::RuntimeError::*;
//
// let problem = PrecedenceProblem::BothNonAssociative(
// *whole_region,
// binop1.clone(),
// binop2.clone(),
// );
//
// env.problem(Problem::PrecedenceProblem(problem.clone()));
//
// (
// RuntimeError(InvalidPrecedence(problem, region)),
// Output::default(),
// )
todo!()
}
MalformedClosure => {
// use roc_problem::can::RuntimeError::*;
// (RuntimeError(MalformedClosure(region)), Output::default())
todo!()
}
MalformedIdent(_name, _problem) => {
// use roc_problem::can::RuntimeError::*;
//
// let problem = MalformedIdentifier((*name).into(), region);
// env.problem(Problem::RuntimeError(problem.clone()));
//
// (RuntimeError(problem), Output::default())
todo!()
}
Var {
module_name, // module_name will only be filled if the original Roc code stated something like `5 + SomeModule.myVar`, module_name will be blank if it was `5 + myVar`
ident,
} => canonicalize_lookup(env, scope, module_name, ident, region),
// Below this point, we shouln't see any of these nodes anymore because
// operator desugaring should have removed them!
bad_expr @ ParensAround(_) => {
panic!(
"A ParensAround did not get removed during operator desugaring somehow: {:#?}",
bad_expr
);
}
bad_expr @ SpaceBefore(_, _) => {
panic!(
"A SpaceBefore did not get removed during operator desugaring somehow: {:#?}",
bad_expr
);
}
bad_expr @ SpaceAfter(_, _) => {
panic!(
"A SpaceAfter did not get removed during operator desugaring somehow: {:#?}",
bad_expr
);
}
bad_expr @ BinOps { .. } => {
panic!(
"A binary operator chain did not get desugared somehow: {:#?}",
bad_expr
);
}
bad_expr @ UnaryOp(_, _) => {
panic!(
"A unary operator did not get desugared somehow: {:#?}",
bad_expr
);
}
rest => todo!("not yet implemented {:?}", rest),
}
}
/// Canonicalize a parsed expression, store the resulting `Expr2` in the pool,
/// and return the pool id together with the canonicalization `Output`.
pub fn to_expr_id<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    parse_expr: &'a roc_parse::ast::Expr<'a>,
    region: Region,
) -> (ExprId, Output) {
    let (expr2, out) = expr_to_expr2(env, scope, parse_expr, region);
    // Allocate a fresh pool node for the expression, tagged with its region.
    let expr_id = env.add(expr2, region);
    (expr_id, out)
}

View File

@ -1,51 +0,0 @@
use roc_collections::all::MutMap;
use roc_module::ident::Lowercase;
use roc_module::symbol::Symbol;
use roc_types::subs::Variable;
/// Type variables introduced while canonicalizing type annotations.
#[derive(Clone, Debug, PartialEq, Default)]
pub struct IntroducedVariables {
    // Rigids must be unique within a type annotation.
    // E.g. in `identity : a -> a`, there should only be one
    // variable (a rigid one, with name "a").
    // Hence `rigids : Map<Lowercase, Variable>`
    //
    // But then between annotations, the same name can occur multiple times,
    // but a variable can only have one name. Therefore
    // `ftv : Map<Variable, Lowercase>`.
    // Fresh variables introduced for wildcard (`*`) annotations.
    pub wildcards: Vec<Variable>,
    // Named annotation variable -> its type variable.
    pub var_by_name: MutMap<Lowercase, Variable>,
    // Reverse lookup: type variable -> its annotation name.
    pub name_by_var: MutMap<Variable, Lowercase>,
    // Variables backing aliases exposed to the host.
    pub host_exposed_aliases: MutMap<Symbol, Variable>,
}
impl IntroducedVariables {
    /// Record a named (rigid) variable, keeping both lookup maps in sync.
    pub fn insert_named(&mut self, name: Lowercase, var: Variable) {
        self.name_by_var.insert(var, name.clone());
        self.var_by_name.insert(name, var);
    }

    /// Record a fresh variable introduced by a wildcard annotation.
    pub fn insert_wildcard(&mut self, var: Variable) {
        self.wildcards.push(var);
    }

    /// Record the variable backing a host-exposed alias.
    pub fn insert_host_exposed_alias(&mut self, symbol: Symbol, var: Variable) {
        self.host_exposed_aliases.insert(symbol, var);
    }

    /// Merge every variable recorded in `other` into `self`.
    pub fn union(&mut self, other: &Self) {
        self.wildcards.extend_from_slice(&other.wildcards);
        self.var_by_name
            .extend(other.var_by_name.iter().map(|(k, v)| (k.clone(), *v)));
        self.name_by_var
            .extend(other.name_by_var.iter().map(|(k, v)| (*k, v.clone())));
        self.host_exposed_aliases
            .extend(other.host_exposed_aliases.iter().map(|(k, v)| (*k, *v)));
    }

    /// Look up the type variable for a named annotation variable.
    pub fn var_by_name(&self, name: &Lowercase) -> Option<&Variable> {
        self.var_by_name.get(name)
    }

    /// Look up the annotation name for a type variable.
    pub fn name_by_var(&self, var: Variable) -> Option<&Lowercase> {
        self.name_by_var.get(&var)
    }
}

View File

@ -1,6 +0,0 @@
pub mod expr2; // The canonical expression IR (`Expr2`).
pub mod expr2_to_string; // Debug string rendering for `Expr2`.
pub mod expr_to_expr2; // Canonicalization: parse AST -> `Expr2`.
mod introduced_vars; // Variables introduced by type annotations.
pub(crate) mod output; // Canonicalization bookkeeping (`Output`).
pub mod record_field; // Record field representation.

View File

@ -1,30 +0,0 @@
use crate::{
lang::core::{def::def::References, types::Alias},
mem_pool::pool::NodeId,
};
use roc_collections::all::{MutMap, MutSet};
use roc_module::symbol::Symbol;
use super::introduced_vars::IntroducedVariables;
/// Bookkeeping accumulated while canonicalizing an expression.
#[derive(Clone, Default, Debug, PartialEq)]
pub struct Output {
    // Symbols looked up, called, and bound during canonicalization.
    pub references: References,
    // The symbol this expression tail-calls, if any.
    pub tail_call: Option<Symbol>,
    // Type variables introduced by annotations seen so far.
    pub introduced_variables: IntroducedVariables,
    // Aliases defined during canonicalization, keyed by symbol.
    pub aliases: MutMap<Symbol, NodeId<Alias>>,
    // Functions known to capture nothing.
    pub non_closures: MutSet<Symbol>,
}
impl Output {
    /// Fold `other`'s bookkeeping into `self`.
    ///
    /// NOTE(review): `introduced_variables` is *not* merged here; callers
    /// appear to union that field separately — confirm before relying on
    /// this method to combine it.
    pub fn union(&mut self, other: Self) {
        self.references.union_mut(other.references);
        // Keep an existing tail call; otherwise adopt whatever `other` found.
        self.tail_call = self.tail_call.or(other.tail_call);
        self.aliases.extend(other.aliases);
        self.non_closures.extend(other.non_closures);
    }
}

View File

@ -1,49 +0,0 @@
use roc_types::subs::Variable;
use crate::mem_pool::pool_str::PoolStr;
use roc_module::symbol::Symbol;
use super::expr2::ExprId;
/// A single field of a canonicalized record expression.
#[derive(Debug)]
pub enum RecordField {
    // A label with no usable value (invalid field) — label and var only.
    InvalidLabelOnly(PoolStr, Variable),
    // A label with an associated symbol but no explicit value expression
    // (presumably record shorthand like `{ x }` — confirm with callers).
    LabelOnly(PoolStr, Variable, Symbol),
    // A regular `label: value` field; the value lives in the pool.
    LabeledValue(PoolStr, Variable, ExprId),
}
use RecordField::*;
impl RecordField {
    /// The type variable attached to this field (present in every variant).
    pub fn get_record_field_var(&self) -> &Variable {
        match self {
            InvalidLabelOnly(_, var) | LabelOnly(_, var, _) | LabeledValue(_, var, _) => var,
        }
    }

    /// The field's label (present in every variant).
    pub fn get_record_field_pool_str(&self) -> &PoolStr {
        match self {
            InvalidLabelOnly(pool_str, _)
            | LabelOnly(pool_str, _, _)
            | LabeledValue(pool_str, _, _) => pool_str,
        }
    }

    /// Mutable access to the field's label.
    pub fn get_record_field_pool_str_mut(&mut self) -> &mut PoolStr {
        match self {
            InvalidLabelOnly(pool_str, _)
            | LabelOnly(pool_str, _, _)
            | LabeledValue(pool_str, _, _) => pool_str,
        }
    }

    /// The pool id of the field's value expression, if it has one.
    pub fn get_record_field_val_node_id(&self) -> Option<ExprId> {
        match self {
            LabeledValue(_, _, field_val_id) => Some(*field_val_id),
            InvalidLabelOnly(_, _) | LabelOnly(_, _, _) => None,
        }
    }
}

View File

@ -1,61 +0,0 @@
use crate::{
lang::rigids::Rigids,
mem_pool::{pool::NodeId, pool_vec::PoolVec, shallow_clone::ShallowClone},
};
use roc_module::symbol::Symbol;
use roc_types::subs::Variable;
use super::{
expr::expr2::ExprId,
pattern::PatternId,
types::{Type2, TypeId},
};
/// A function definition, with or without a type annotation.
/// Trailing comments record the approximate on-disk size of each field.
#[derive(Debug)]
pub enum FunctionDef {
    // A function whose type was written out in an annotation.
    WithAnnotation {
        name: Symbol,                                   // 8B
        arguments: PoolVec<(NodeId<Type2>, PatternId)>, // 8B
        rigids: NodeId<Rigids>,                         // 4B
        return_type: TypeId,                            // 4B
        body_id: ExprId,                                // 4B
    },
    // A function whose types must be fully inferred.
    NoAnnotation {
        name: Symbol,                              // 8B
        arguments: PoolVec<(Variable, PatternId)>, // 8B
        return_var: Variable,                      // 4B
        body_id: ExprId,                           // 4B
    },
}
impl ShallowClone for FunctionDef {
    /// Cheap clone: the `PoolVec` of arguments is shallow-cloned; every
    /// other field is `Copy` and is simply copied over.
    fn shallow_clone(&self) -> Self {
        match *self {
            Self::WithAnnotation {
                name,
                ref arguments,
                rigids,
                return_type,
                body_id,
            } => Self::WithAnnotation {
                name,
                arguments: arguments.shallow_clone(),
                rigids,
                return_type,
                body_id,
            },
            Self::NoAnnotation {
                name,
                ref arguments,
                return_var,
                body_id,
            } => Self::NoAnnotation {
                name,
                arguments: arguments.shallow_clone(),
                return_var,
                body_id,
            },
        }
    }
}

View File

@ -1,10 +0,0 @@
use super::expr::expr2::ExprId;
/// Data parsed out of an `app` module header.
#[derive(Debug)]
pub struct AppHeader {
    // The application's name as written in the header.
    pub app_name: String,
    // The packages base entry (path/URL string) from the header.
    pub packages_base: String,
    // Raw `imports` entries, unparsed.
    pub imports: Vec<String>,
    // Raw `provides` entries, unparsed.
    pub provides: Vec<String>,
    pub ast_node_id: ExprId, // TODO probably want to create and use HeaderId
}

View File

@ -1,10 +0,0 @@
pub mod ast; // Whole-module AST container.
mod declaration; // Declaration sorting/grouping (crate-internal).
pub mod def; // Value and function definitions.
pub mod expr; // Expressions (`Expr2`) and their canonicalization.
pub mod fun_def; // `FunctionDef` representation.
pub mod header; // Module headers (e.g. `app`).
pub mod pattern; // Patterns (`Pattern2`) and their canonicalization.
pub mod str; // String literal canonicalization helpers.
pub mod types; // Type representation (`Type2`).
pub mod val_def; // Value definition representation.

View File

@ -1,651 +0,0 @@
#![allow(clippy::all)]
#![allow(dead_code)]
#![allow(unused_imports)]
use bumpalo::collections::Vec as BumpVec;
use roc_can::expr::{unescape_char, IntValue};
use roc_can::num::{
finish_parsing_base, finish_parsing_float, finish_parsing_num, ParsedNumResult,
};
use roc_collections::all::BumpMap;
use roc_error_macros::todo_opaques;
use roc_module::symbol::{Interns, Symbol};
use roc_parse::ast::{StrLiteral, StrSegment};
use roc_parse::pattern::PatternType;
use roc_problem::can::{MalformedPatternProblem, Problem, RuntimeError, ShadowKind};
use roc_region::all::Region;
use roc_types::subs::Variable;
use crate::ast_error::{ASTResult, UnexpectedPattern2Variant};
use crate::constrain::Constraint;
use crate::lang::core::expr::expr_to_expr2::to_expr_id;
use crate::lang::env::Env;
use crate::lang::scope::Scope;
use crate::mem_pool::pool::{NodeId, Pool};
use crate::mem_pool::pool_str::PoolStr;
use crate::mem_pool::pool_vec::PoolVec;
use crate::mem_pool::shallow_clone::ShallowClone;
use super::expr::expr2::{ExprId, FloatVal, IntVal};
use super::expr::output::Output;
use super::types::Type2;
pub type PatternId = NodeId<Pattern2>;
/// A canonicalized pattern, stored in the AST memory pool.
/// Trailing comments record the approximate size of each variant's payload.
#[derive(Debug)]
pub enum Pattern2 {
    Identifier(Symbol),        // 8B
    NumLiteral(Variable, i64), // 4B + 8B
    IntLiteral(IntVal),        // 16B
    FloatLiteral(FloatVal),    // 16B
    StrLiteral(PoolStr),       // 8B
    CharacterLiteral(char),    // 4B
    Underscore,                // 0B
    // A tag pattern, possibly with argument sub-patterns.
    Tag {
        whole_var: Variable,                       // 4B
        ext_var: Variable,                         // 4B
        tag_name: PoolStr,                         // 8B
        arguments: PoolVec<(Variable, PatternId)>, // 8B
    },
    // A record destructure such as `{ x, y: 0 }`.
    RecordDestructure {
        whole_var: Variable,                // 4B
        ext_var: Variable,                  // 4B
        destructs: PoolVec<RecordDestruct>, // 8B
    },
    // Runtime Exceptions
    // TODO: figure out how to better handle regions
    // to keep this member under 32. With 2 Regions
    // it ends up at size 40
    Shadowed {
        shadowed_ident: PoolStr,
        // definition: Region,
        // shadowed_at: Region,
    },
    /// Example: (5 = 1 + 2) is an unsupported pattern in an assignment; Int patterns aren't allowed in assignments!
    UnsupportedPattern(Region),
    // parse error patterns
    MalformedPattern(MalformedPatternProblem, Region),
}
impl ShallowClone for Pattern2 {
    // Not implemented yet; panics if ever called.
    fn shallow_clone(&self) -> Self {
        todo!()
    }
}
/// Accumulator used while constraining a pattern: the symbols it binds plus
/// the type variables and constraints generated along the way.
#[derive(Debug)]
pub struct PatternState2<'a> {
    // Bound symbol -> the type of the value it binds.
    pub headers: BumpMap<Symbol, Type2>,
    // Fresh variables introduced for this pattern.
    pub vars: BumpVec<'a, Variable>,
    // Constraints generated for this pattern.
    pub constraints: BumpVec<'a, Constraint<'a>>,
}
/// One field of a record-destructure pattern, e.g. the `x` in `{ x, y: 0 }`.
/// Trailing comments record approximate field sizes.
#[derive(Debug)]
pub struct RecordDestruct {
    pub var: Variable,             // 4B
    pub label: PoolStr,            // 8B
    pub symbol: Symbol,            // 8B
    pub typ: NodeId<DestructType>, // 4B
}
/// How a destructured record field binds its value.
#[derive(Clone, Debug)]
pub enum DestructType {
    // Plain `{ x }` — binds the label's value directly.
    Required,
    // `{ x ? default }` — binds with a fallback expression.
    Optional(Variable, ExprId), // 4B + 4B
    // `{ x: pattern }` — matches the value against a sub-pattern.
    Guard(Variable, PatternId), // 4B + 4B
}
/// Canonicalize `pattern` into the already-reserved pool slot `pattern_id`,
/// returning the canonicalization `Output`.
pub fn as_pattern_id<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    pattern_id: PatternId,
    pattern_type: PatternType,
    pattern: &roc_parse::ast::Pattern<'a>,
    region: Region,
) -> Output {
    let (out, pattern2) = to_pattern2(env, scope, pattern_type, pattern, region);
    // Write the canonical pattern into the pre-allocated node and tag its region.
    env.pool[pattern_id] = pattern2;
    env.set_region(pattern_id, region);
    out
}
/// Canonicalize `pattern`, add it to the pool, and return its new pool id.
pub fn to_pattern_id<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    pattern_type: PatternType,
    pattern: &roc_parse::ast::Pattern<'a>,
    region: Region,
) -> (Output, PatternId) {
    let (out, pattern2) = to_pattern2(env, scope, pattern_type, pattern, region);
    // Allocate a fresh pool node for the pattern and tag it with its region.
    let pattern_id = env.pool.add(pattern2);
    env.set_region(pattern_id, region);
    (out, pattern_id)
}
/// Canonicalize a parsed pattern into a `Pattern2`, introducing any bound
/// identifiers into `scope` and recording bindings in the returned `Output`.
///
/// `pattern_type` controls which forms are legal: e.g. number/string literal
/// patterns are only allowed in `when` branches, and `_` is rejected in defs.
pub fn to_pattern2<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    pattern_type: PatternType,
    pattern: &roc_parse::ast::Pattern<'a>,
    region: Region,
) -> (Output, Pattern2) {
    use roc_parse::ast::Pattern::*;
    use PatternType::*;
    let mut output = Output::default();
    let can_pattern = match pattern {
        // A bare identifier binds a new symbol — unless it shadows one.
        Identifier(name) => match scope.introduce(
            (*name).into(),
            &env.exposed_ident_ids,
            &mut env.ident_ids,
            region,
        ) {
            Ok(symbol) => {
                output.references.bound_symbols.insert(symbol);
                Pattern2::Identifier(symbol)
            }
            Err((original_region, shadow)) => {
                // Shadowing: report the problem and produce a Shadowed pattern.
                env.problem(Problem::RuntimeError(RuntimeError::Shadowing {
                    original_region,
                    shadow: shadow.clone(),
                    kind: ShadowKind::Variable,
                }));
                let name: &str = shadow.value.as_ref();
                Pattern2::Shadowed {
                    shadowed_ident: PoolStr::new(name, env.pool),
                }
            }
        },
        QualifiedIdentifier { .. } => {
            // Qualified names (Module.x) are never valid patterns.
            let problem = MalformedPatternProblem::QualifiedIdentifier;
            malformed_pattern(env, problem, region)
        }
        Underscore(_) => match pattern_type {
            WhenBranch | FunctionArg => Pattern2::Underscore,
            TopLevelDef | DefExpr => underscore_in_def(env, region),
        },
        // Literal patterns are only legal inside `when` branches.
        FloatLiteral(ref string) => match pattern_type {
            WhenBranch => match finish_parsing_float(string) {
                Err(_error) => {
                    let problem = MalformedPatternProblem::MalformedFloat;
                    malformed_pattern(env, problem, region)
                }
                Ok((_, float, _bound)) => Pattern2::FloatLiteral(FloatVal::F64(float)),
            },
            ptype => unsupported_pattern(env, ptype, region),
        },
        NumLiteral(string) => match pattern_type {
            WhenBranch => match finish_parsing_num(string) {
                Err(_error) => {
                    let problem = MalformedPatternProblem::MalformedInt;
                    malformed_pattern(env, problem, region)
                }
                Ok((_, ParsedNumResult::UnknownNum(int, _bound))) => {
                    Pattern2::NumLiteral(
                        env.var_store.fresh(),
                        match int {
                            IntValue::U128(_) => todo!(),
                            IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
                        },
                    )
                }
                Ok((_, ParsedNumResult::Int(int, _bound))) => {
                    Pattern2::IntLiteral(IntVal::I64(match int {
                        IntValue::U128(_) => todo!(),
                        IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
                    }))
                }
                Ok((_, ParsedNumResult::Float(int, _bound))) => {
                    Pattern2::FloatLiteral(FloatVal::F64(int))
                }
            },
            ptype => unsupported_pattern(env, ptype, region),
        },
        NonBase10Literal {
            string,
            base,
            is_negative,
        } => match pattern_type {
            WhenBranch => match finish_parsing_base(string, *base, *is_negative) {
                Err(_error) => {
                    let problem = MalformedPatternProblem::MalformedBase(*base);
                    malformed_pattern(env, problem, region)
                }
                Ok((int, _bound)) => {
                    let int = match int {
                        IntValue::U128(_) => todo!(),
                        IntValue::I128(n) => i128::from_ne_bytes(n) as i64, // FIXME
                    };
                    // The parser keeps the sign separate from the digits.
                    if *is_negative {
                        Pattern2::IntLiteral(IntVal::I64(-int))
                    } else {
                        Pattern2::IntLiteral(IntVal::I64(int))
                    }
                }
            },
            ptype => unsupported_pattern(env, ptype, region),
        },
        StrLiteral(literal) => match pattern_type {
            WhenBranch => flatten_str_literal(env.pool, literal),
            ptype => unsupported_pattern(env, ptype, region),
        },
        SingleQuote(string) => match pattern_type {
            WhenBranch => {
                // A char literal must contain exactly one character.
                let mut it = string.chars().peekable();
                if let Some(char) = it.next() {
                    if it.peek().is_none() {
                        Pattern2::CharacterLiteral(char)
                    } else {
                        // multiple chars is found
                        let problem = MalformedPatternProblem::MultipleCharsInSingleQuote;
                        malformed_pattern(env, problem, region)
                    }
                } else {
                    // no characters found
                    let problem = MalformedPatternProblem::EmptySingleQuote;
                    malformed_pattern(env, problem, region)
                }
            }
            ptype => unsupported_pattern(env, ptype, region),
        },
        Tag(name) => {
            // Canonicalize the tag's name.
            Pattern2::Tag {
                whole_var: env.var_store.fresh(),
                ext_var: env.var_store.fresh(),
                tag_name: PoolStr::new(name, env.pool),
                arguments: PoolVec::empty(env.pool),
            }
        }
        OpaqueRef(..) => todo_opaques!(),
        // A tag applied to argument patterns, e.g. `Ok value`.
        Apply(tag, patterns) => {
            let can_patterns = PoolVec::with_capacity(patterns.len() as u32, env.pool);
            for (loc_pattern, node_id) in (*patterns).iter().zip(can_patterns.iter_node_ids()) {
                let (new_output, can_pattern) = to_pattern2(
                    env,
                    scope,
                    pattern_type,
                    &loc_pattern.value,
                    loc_pattern.region,
                );
                output.union(new_output);
                let can_pattern_id = env.pool.add(can_pattern);
                env.pool[node_id] = (env.var_store.fresh(), can_pattern_id);
            }
            match tag.value {
                Tag(name) => Pattern2::Tag {
                    whole_var: env.var_store.fresh(),
                    ext_var: env.var_store.fresh(),
                    tag_name: PoolStr::new(name, env.pool),
                    arguments: can_patterns,
                },
                _ => unreachable!("Other patterns cannot be applied"),
            }
        }
        RecordDestructure(patterns) => {
            let ext_var = env.var_store.fresh();
            let whole_var = env.var_store.fresh();
            let destructs = PoolVec::with_capacity(patterns.len() as u32, env.pool);
            // NOTE(review): never set to Some — the shadowing branches below
            // currently `todo!()` instead of recording an erroneous pattern.
            let opt_erroneous = None;
            for (node_id, loc_pattern) in destructs.iter_node_ids().zip((*patterns).iter()) {
                match loc_pattern.value {
                    Identifier(label) => {
                        match scope.introduce(
                            label.into(),
                            &env.exposed_ident_ids,
                            &mut env.ident_ids,
                            region,
                        ) {
                            Ok(symbol) => {
                                output.references.bound_symbols.insert(symbol);
                                let destruct = RecordDestruct {
                                    var: env.var_store.fresh(),
                                    label: PoolStr::new(label, env.pool),
                                    symbol,
                                    typ: env.pool.add(DestructType::Required),
                                };
                                env.pool[node_id] = destruct;
                                env.set_region(node_id, loc_pattern.region);
                            }
                            Err((original_region, shadow)) => {
                                env.problem(Problem::RuntimeError(RuntimeError::Shadowing {
                                    original_region,
                                    shadow: shadow.clone(),
                                    kind: ShadowKind::Variable,
                                }));
                                // let shadowed = Pattern2::Shadowed {
                                // definition: original_region,
                                // shadowed_at: loc_pattern.region,
                                // shadowed_ident: shadow.value,
                                // };
                                // No matter what the other patterns
                                // are, we're definitely shadowed and will
                                // get a runtime exception as soon as we
                                // encounter the first bad pattern.
                                // opt_erroneous = Some();
                                // env.pool[node_id] = sha;
                                // env.set_region(node_id, loc_pattern.region);
                                todo!("we must both report/store the problem, but also not lose any information")
                            }
                        };
                    }
                    RequiredField(label, loc_guard) => {
                        // a guard does not introduce the label into scope!
                        let symbol = scope.ignore(label.into(), &mut env.ident_ids);
                        let (new_output, can_guard) = to_pattern_id(
                            env,
                            scope,
                            pattern_type,
                            &loc_guard.value,
                            loc_guard.region,
                        );
                        let destruct = RecordDestruct {
                            var: env.var_store.fresh(),
                            label: PoolStr::new(label, env.pool),
                            symbol,
                            typ: env
                                .pool
                                .add(DestructType::Guard(env.var_store.fresh(), can_guard)),
                        };
                        output.union(new_output);
                        env.pool[node_id] = destruct;
                        env.set_region(node_id, loc_pattern.region);
                    }
                    OptionalField(label, loc_default) => {
                        // an optional DOES introduce the label into scope!
                        match scope.introduce(
                            label.into(),
                            &env.exposed_ident_ids,
                            &mut env.ident_ids,
                            region,
                        ) {
                            Ok(symbol) => {
                                // The default value is an expression, canonicalized here.
                                let (can_default, expr_output) =
                                    to_expr_id(env, scope, &loc_default.value, loc_default.region);
                                // an optional field binds the symbol!
                                output.references.bound_symbols.insert(symbol);
                                output.union(expr_output);
                                let destruct = RecordDestruct {
                                    var: env.var_store.fresh(),
                                    label: PoolStr::new(label, env.pool),
                                    symbol,
                                    typ: env.pool.add(DestructType::Optional(
                                        env.var_store.fresh(),
                                        can_default,
                                    )),
                                };
                                env.pool[node_id] = destruct;
                                env.set_region(node_id, loc_pattern.region);
                            }
                            Err((original_region, shadow)) => {
                                env.problem(Problem::RuntimeError(RuntimeError::Shadowing {
                                    original_region,
                                    shadow: shadow.clone(),
                                    kind: ShadowKind::Variable,
                                }));
                                // No matter what the other patterns
                                // are, we're definitely shadowed and will
                                // get a runtime exception as soon as we
                                // encounter the first bad pattern.
                                // opt_erroneous = Some(Pattern::Shadowed(original_region, shadow));
                                todo!("must report problem but also not loose any information")
                            }
                        };
                    }
                    _ => unreachable!("Any other pattern should have given a parse error"),
                }
            }
            // If we encountered an erroneous pattern (e.g. one with shadowing),
            // use the resulting RuntimeError. Otherwise, return a successful record destructure.
            opt_erroneous.unwrap_or(Pattern2::RecordDestructure {
                whole_var,
                ext_var,
                destructs,
            })
        }
        RequiredField(_name, _loc_pattern) => {
            unreachable!("should have been handled in RecordDestructure");
        }
        OptionalField(_name, _loc_pattern) => {
            unreachable!("should have been handled in RecordDestructure");
        }
        Malformed(_str) => {
            let problem = MalformedPatternProblem::Unknown;
            malformed_pattern(env, problem, region)
        }
        MalformedIdent(_str, bad_ident) => {
            let problem = MalformedPatternProblem::BadIdent(*bad_ident);
            malformed_pattern(env, problem, region)
        }
        // Whitespace wrappers are transparent: recurse on the inner pattern.
        SpaceBefore(sub_pattern, _) | SpaceAfter(sub_pattern, _) => {
            return to_pattern2(env, scope, pattern_type, sub_pattern, region)
        }
    };
    (output, can_pattern)
}
/// Collect every symbol bound by `initial`, walking nested tag and record
/// patterns iteratively with an explicit stack (same order as a stack walk).
pub fn symbols_from_pattern(pool: &Pool, initial: &Pattern2) -> Vec<Symbol> {
    use Pattern2::*;
    let mut found = Vec::new();
    let mut to_visit = vec![initial];
    while let Some(pattern) = to_visit.pop() {
        match pattern {
            Identifier(symbol) => found.push(*symbol),
            Tag { arguments, .. } => {
                // Visit each argument sub-pattern.
                to_visit.extend(arguments.iter(pool).map(|(_, pat_id)| pool.get(*pat_id)));
            }
            RecordDestructure { destructs, .. } => {
                for destruct in destructs.iter(pool) {
                    match pool.get(destruct.typ) {
                        // A guarded field binds only what its sub-pattern binds.
                        DestructType::Guard(_, subpattern_id) => {
                            to_visit.push(pool.get(*subpattern_id));
                        }
                        _ => found.push(destruct.symbol),
                    }
                }
            }
            // Literals and error patterns bind nothing.
            NumLiteral(_, _)
            | IntLiteral(_)
            | FloatLiteral(_)
            | StrLiteral(_)
            | CharacterLiteral(_)
            | Underscore
            | MalformedPattern(_, _)
            | Shadowed { .. }
            | UnsupportedPattern(_) => {}
        }
    }
    found
}
/// Return the name of an identifier pattern; error on any other variant.
pub fn get_identifier_string(pattern: &Pattern2, interns: &Interns) -> ASTResult<String> {
    if let Pattern2::Identifier(symbol) = pattern {
        Ok(symbol.as_str(interns).to_string())
    } else {
        UnexpectedPattern2Variant {
            required_pattern2: "Identifier".to_string(),
            encountered_pattern2: format!("{:?}", pattern),
        }
        .fail()?
    }
}
/// Like `symbols_from_pattern`, but pairs each bound symbol with the type
/// variable of the position it was bound in, starting from `initial_var`.
pub fn symbols_and_variables_from_pattern(
    pool: &Pool,
    initial: &Pattern2,
    initial_var: Variable,
) -> Vec<(Symbol, Variable)> {
    use Pattern2::*;
    let mut symbols = Vec::new();
    // Explicit stack of (variable, pattern) pairs to visit.
    let mut stack = vec![(initial_var, initial)];
    while let Some((variable, pattern)) = stack.pop() {
        match pattern {
            Identifier(symbol) => {
                symbols.push((*symbol, variable));
            }
            Tag { arguments, .. } => {
                // Each tag argument carries its own variable.
                for (var, pat_id) in arguments.iter(pool) {
                    let pat = pool.get(*pat_id);
                    stack.push((*var, pat));
                }
            }
            RecordDestructure { destructs, .. } => {
                for destruct in destructs.iter(pool) {
                    let destruct_type = pool.get(destruct.typ);
                    if let DestructType::Guard(_, subpattern_id) = &destruct_type {
                        // A guarded field binds via its sub-pattern only.
                        let subpattern = pool.get(*subpattern_id);
                        stack.push((destruct.var, subpattern));
                    } else {
                        symbols.push((destruct.symbol, destruct.var));
                    }
                }
            }
            // Literals and error patterns bind nothing.
            NumLiteral(_, _)
            | IntLiteral(_)
            | FloatLiteral(_)
            | StrLiteral(_)
            | CharacterLiteral(_)
            | Underscore
            | MalformedPattern(_, _)
            | Shadowed { .. }
            | UnsupportedPattern(_) => {}
        }
    }
    symbols
}
/// When we detect an unsupported pattern type (e.g. 5 = 1 + 2 is unsupported because you can't
/// assign to Int patterns), report it to Env and return an UnsupportedPattern runtime error pattern.
fn unsupported_pattern<'a>(
    env: &mut Env<'a>,
    pattern_type: PatternType,
    region: Region,
) -> Pattern2 {
    use roc_problem::can::BadPattern;
    // Record the problem, then substitute a runtime-error pattern.
    let bad_pattern = BadPattern::Unsupported(pattern_type);
    env.problem(Problem::UnsupportedPattern(bad_pattern, region));
    Pattern2::UnsupportedPattern(region)
}
/// Report an `_` used in a def (where it is not allowed) and return a
/// runtime-error pattern in its place.
fn underscore_in_def<'a>(env: &mut Env<'a>, region: Region) -> Pattern2 {
    use roc_problem::can::BadPattern;
    let bad_pattern = BadPattern::UnderscoreInDef;
    env.problem(Problem::UnsupportedPattern(bad_pattern, region));
    Pattern2::UnsupportedPattern(region)
}
/// Turn a parsed string literal into a string pattern, flattening multi-line
/// and segmented literals first.
pub(crate) fn flatten_str_literal(pool: &mut Pool, literal: &StrLiteral<'_>) -> Pattern2 {
    use roc_parse::ast::StrLiteral::*;
    match literal {
        Block(lines) => flatten_str_lines(pool, lines),
        Line(segments) => flatten_str_lines(pool, &[segments]),
        // No escapes or interpolation: map straight to a string pattern.
        PlainLine(str_slice) => Pattern2::StrLiteral(PoolStr::new(str_slice, pool)),
    }
}
/// Concatenate the plaintext/escaped segments of a string literal into one
/// string pattern. Interpolation is not a valid pattern and short-circuits
/// to an `UnsupportedPattern`.
pub(crate) fn flatten_str_lines(pool: &mut Pool, lines: &[&[StrSegment<'_>]]) -> Pattern2 {
    use StrSegment::*;
    let mut text = String::new();
    for line in lines.iter() {
        for segment in *line {
            match segment {
                Plaintext(string) => text.push_str(string),
                EscapedChar(escaped) => text.push(unescape_char(escaped)),
                Interpolated(loc_expr) => {
                    // Interpolation cannot appear in a pattern; bail out.
                    return Pattern2::UnsupportedPattern(loc_expr.region);
                }
                Unicode(loc_digits) => {
                    todo!("parse unicode digits {:?}", loc_digits);
                }
            }
        }
    }
    Pattern2::StrLiteral(PoolStr::new(&text, pool))
}
/// When we detect a malformed pattern like `3.X` or `0b5`,
/// report it to Env and return an UnsupportedPattern runtime error pattern.
fn malformed_pattern<'a>(
    env: &mut Env<'a>,
    problem: MalformedPatternProblem,
    region: Region,
) -> Pattern2 {
    // Record the runtime error, then substitute a malformed-pattern node.
    let runtime_error = RuntimeError::MalformedPattern(problem, region);
    env.problem(Problem::RuntimeError(runtime_error));
    Pattern2::MalformedPattern(problem, region)
}

View File

@ -1,252 +0,0 @@
use roc_error_macros::internal_error;
use roc_module::{called_via::CalledVia, symbol::Symbol};
use roc_parse::ast::StrLiteral;
use crate::{
ast_error::{ASTResult, UnexpectedASTNode},
lang::{
core::expr::{
expr2::{ArrString, ARR_STRING_CAPACITY},
expr_to_expr2::expr_to_expr2,
},
env::Env,
scope::Scope,
},
mem_pool::{pool::Pool, pool_str::PoolStr, pool_vec::PoolVec},
};
use super::expr::{
expr2::{Expr2, ExprId},
output::Output,
};
/// Canonicalize a parsed string literal into an `Expr2`, flattening
/// multi-line and segmented (possibly interpolated) literals.
pub(crate) fn flatten_str_literal<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    literal: &StrLiteral<'a>,
) -> (Expr2, Output) {
    use roc_parse::ast::StrLiteral::*;
    match literal {
        Block(lines) => flatten_str_lines(env, scope, lines),
        Line(segments) => flatten_str_lines(env, scope, &[segments]),
        PlainLine(str_slice) => {
            // TODO use smallstr
            (
                Expr2::Str(PoolStr::new(str_slice, env.pool)),
                Output::default(),
            )
        }
    }
}
/// A piece of a string literal after splitting on interpolations.
enum StrSegment {
    // An interpolated sub-expression, already canonicalized.
    Interpolation(Expr2),
    // A literal run of text.
    Plaintext(PoolStr),
}
/// Walk every segment of a string literal, accumulating plaintext into a
/// buffer and canonicalizing interpolated expressions, then desugar the
/// resulting segments into nested `Str.concat` calls.
fn flatten_str_lines<'a>(
    env: &mut Env<'a>,
    scope: &mut Scope,
    lines: &[&[roc_parse::ast::StrSegment<'a>]],
) -> (Expr2, Output) {
    use roc_parse::ast::StrSegment::*;
    // Running plaintext buffer; flushed into `segments` at each interpolation.
    let mut buf = String::new();
    let mut segments = Vec::new();
    let mut output = Output::default();
    for line in lines {
        for segment in line.iter() {
            match segment {
                Plaintext(string) => {
                    buf.push_str(string);
                }
                // `\u(...)` escape: parse the hex digits into a code point.
                Unicode(loc_hex_digits) => match u32::from_str_radix(loc_hex_digits.value, 16) {
                    Ok(code_pt) => match std::char::from_u32(code_pt) {
                        Some(ch) => {
                            buf.push(ch);
                        }
                        None => {
                            // env.problem(Problem::InvalidUnicodeCodePt(loc_hex_digits.region));
                            //
                            // return (
                            //     Expr::RuntimeError(RuntimeError::InvalidUnicodeCodePt(
                            //         loc_hex_digits.region,
                            //     )),
                            //     output,
                            // );
                            todo!()
                        }
                    },
                    Err(_) => {
                        // env.problem(Problem::InvalidHexadecimal(loc_hex_digits.region));
                        //
                        // return (
                        //     Expr::RuntimeError(RuntimeError::InvalidHexadecimal(
                        //         loc_hex_digits.region,
                        //     )),
                        //     output,
                        // );
                        todo!()
                    }
                },
                Interpolated(loc_expr) => {
                    if roc_can::expr::is_valid_interpolation(loc_expr.value) {
                        // Interpolations desugar to Str.concat calls
                        output.references.calls.insert(Symbol::STR_CONCAT);
                        // Flush any pending plaintext before the interpolation.
                        if !buf.is_empty() {
                            segments.push(StrSegment::Plaintext(PoolStr::new(&buf, env.pool)));
                            buf = String::new();
                        }
                        let (loc_expr, new_output) =
                            expr_to_expr2(env, scope, loc_expr.value, loc_expr.region);
                        output.union(new_output);
                        segments.push(StrSegment::Interpolation(loc_expr));
                    } else {
                        // env.problem(Problem::InvalidInterpolation(loc_expr.region));
                        //
                        // return (
                        //     Expr::RuntimeError(RuntimeError::InvalidInterpolation(loc_expr.region)),
                        //     output,
                        // );
                        todo!()
                    }
                }
                EscapedChar(escaped) => buf.push(roc_can::expr::unescape_char(escaped)),
            }
        }
    }
    // Flush any trailing plaintext after the last interpolation.
    if !buf.is_empty() {
        segments.push(StrSegment::Plaintext(PoolStr::new(&buf, env.pool)));
    }
    (desugar_str_segments(env, segments), output)
}
/// Resolve string interpolations by desugaring a sequence of StrSegments
/// into nested calls to Str.concat
fn desugar_str_segments(env: &mut Env, segments: Vec<StrSegment>) -> Expr2 {
use StrSegment::*;
let pool = &mut env.pool;
let var_store = &mut env.var_store;
let mut iter = segments.into_iter().rev();
let mut expr = match iter.next() {
Some(Plaintext(pool_str)) => Expr2::Str(pool_str),
Some(Interpolation(expr_id)) => expr_id,
None => {
// No segments? Empty string!
let pool_str = PoolStr::new("", pool);
Expr2::Str(pool_str)
}
};
for seg in iter {
let new_expr = match seg {
Plaintext(string) => Expr2::Str(string),
Interpolation(expr_id) => expr_id,
};
let concat_expr_id = pool.add(Expr2::Var(Symbol::STR_CONCAT));
let args = vec![
(var_store.fresh(), pool.add(new_expr)),
(var_store.fresh(), pool.add(expr)),
];
let args = PoolVec::new(args.into_iter(), pool);
let new_call = Expr2::Call {
args,
expr_id: concat_expr_id,
expr_var: var_store.fresh(),
fn_var: var_store.fresh(),
closure_var: var_store.fresh(),
called_via: CalledVia::Space,
};
expr = new_call
}
expr
}
/// Insert `new_char` into the string expression at `node_id`, at byte offset
/// `insert_index`.
///
/// A `SmallStr` stays inline while the result still fits the `ArrString`
/// buffer and is otherwise promoted to a pool-allocated `Expr2::Str`; a `Str`
/// stays a `Str`.
///
/// Returns an `UnexpectedASTNode` error if the node is neither `SmallStr`
/// nor `Str`.
///
/// Panics if `insert_index` is not on a `char` boundary of the existing
/// string (the `String::insert` contract).
pub fn update_str_expr(
    node_id: ExprId,
    new_char: char,
    insert_index: usize,
    pool: &mut Pool,
) -> ASTResult<()> {
    let str_expr = pool.get_mut(node_id);

    // What the node should become. Computed first because building a PoolStr
    // needs `pool` mutably, which `str_expr` still borrows above.
    enum Either {
        MyArrString(ArrString),
        OldPoolStr(PoolStr),
        NewPoolStr(PoolStr),
    }

    let insert_either = match str_expr {
        Expr2::SmallStr(arr_string) => {
            // Build the new contents with `String::insert`, which handles
            // multi-byte `char`s correctly. The previous byte-splice approach
            // truncated non-ASCII chars (`new_char as u8`) and could produce
            // invalid UTF-8 via `from_utf8_unchecked`.
            let mut new_string = arr_string.as_str().to_owned();
            new_string.insert(insert_index, new_char);

            match ArrString::from(new_string.as_str()) {
                // Still fits the inline buffer: keep it a SmallStr.
                Ok(new_arr_string) => Either::MyArrString(new_arr_string),
                // Outgrew the inline buffer: promote to a pool string.
                Err(_) => Either::NewPoolStr(PoolStr::new(&new_string, pool)),
            }
        }
        Expr2::Str(old_pool_str) => Either::OldPoolStr(*old_pool_str),
        other => UnexpectedASTNode {
            required_node_type: "SmallStr or Str",
            encountered_node_type: format!("{:?}", other),
        }
        .fail()?,
    };

    match insert_either {
        Either::MyArrString(arr_string) => {
            pool.set(node_id, Expr2::SmallStr(arr_string));
        }
        Either::OldPoolStr(old_pool_str) => {
            let mut new_string = old_pool_str.as_str(pool).to_owned();
            new_string.insert(insert_index, new_char);

            let new_pool_str = PoolStr::new(&new_string, pool);
            pool.set(node_id, Expr2::Str(new_pool_str))
        }
        Either::NewPoolStr(new_pool_str) => pool.set(node_id, Expr2::Str(new_pool_str)),
    }

    Ok(())
}

View File

@ -1,871 +0,0 @@
#![allow(clippy::all)]
#![allow(dead_code)]
#![allow(unused_imports)]
// use roc_can::expr::Output;
use roc_collections::all::{MutMap, MutSet};
use roc_error_macros::todo_abilities;
use roc_module::ident::{Ident, Lowercase, TagName, Uppercase};
use roc_module::symbol::Symbol;
use roc_region::all::{Loc, Region};
use roc_types::types::{Problem, RecordField};
use roc_types::{subs::Variable, types::ErrorType};
use crate::lang::env::Env;
use crate::lang::scope::Scope;
use crate::mem_pool::pool::{NodeId, Pool};
use crate::mem_pool::pool_str::PoolStr;
use crate::mem_pool::pool_vec::PoolVec;
use crate::mem_pool::shallow_clone::ShallowClone;
pub type TypeId = NodeId<Type2>;
const TYPE2_SIZE: () = assert!(std::mem::size_of::<Type2>() == 3 * 8 + 4);
/// A canonicalized type stored in the `Pool`.
///
/// Variants carry pool ids / pool vectors instead of boxed children so the
/// whole tree lives in the `Pool`. The trailing comments record each
/// variant's payload size, which the `TYPE2_SIZE` assertion keeps honest.
#[derive(Debug)]
pub enum Type2 {
    Variable(Variable), // 4B

    Alias(Symbol, PoolVec<TypeId>, TypeId),  // 24B = 8B + 8B + 4B + pad
    Opaque(Symbol, PoolVec<TypeId>, TypeId), // 24B = 8B + 8B + 4B + pad
    // An `as` alias, e.g. `{ x : F64, y : F64 } as Point`
    AsAlias(Symbol, PoolVec<(PoolStr, TypeId)>, TypeId), // 24B = 8B + 8B + 4B + pad

    // 24B
    HostExposedAlias {
        name: Symbol,                          // 8B
        arguments: PoolVec<(PoolStr, TypeId)>, // 8B
        actual_var: Variable,                  // 4B
        actual: TypeId,                        // 4B
    },

    EmptyTagUnion,
    TagUnion(PoolVec<(TagName, PoolVec<Type2>)>, TypeId), // 12B = 8B + 4B
    RecursiveTagUnion(Variable, PoolVec<(TagName, PoolVec<Type2>)>, TypeId), // 16B = 4B + 8B + 4B

    EmptyRec,
    Record(PoolVec<(PoolStr, RecordField<TypeId>)>, TypeId), // 12B = 8B + 4B

    // Arguments, closure type, return type.
    Function(PoolVec<Type2>, TypeId, TypeId), // 16B = 8B + 4B + 4B
    Apply(Symbol, PoolVec<Type2>),            // 16B = 8B + 8B

    Erroneous(Problem2), // 24B
}
/// Problems that can make a `Type2` erroneous (see `Type2::Erroneous`).
#[derive(Debug)]
pub enum Problem2 {
    CanonicalizationProblem,
    CircularType(Symbol, NodeId<ErrorType>), // 12B = 8B + 4B
    CyclicAlias(Symbol, PoolVec<Symbol>),    // 20B = 8B + 12B
    UnrecognizedIdent(PoolStr),              // 8B
    Shadowed(Loc<PoolStr>),
    // An alias applied to the wrong number of type arguments.
    BadTypeArguments {
        symbol: Symbol,  // 8B
        type_got: u8,    // 1B
        alias_needs: u8, // 1B
    },
    InvalidModule,
    SolvedTypeError,
}
impl ShallowClone for Type2 {
    /// Cheap copy: duplicates pool-vec handles and node ids, never the
    /// pooled contents themselves.
    ///
    /// NOTE(review): only a subset of variants is implemented; the rest
    /// panic via `todo!` if ever shallow-cloned.
    fn shallow_clone(&self) -> Self {
        match self {
            Self::Variable(var) => Self::Variable(*var),
            Self::Alias(symbol, args, alias_type_id) => {
                Self::Alias(*symbol, args.shallow_clone(), alias_type_id.clone())
            }
            Self::Opaque(symbol, args, alias_type_id) => {
                Self::Opaque(*symbol, args.shallow_clone(), alias_type_id.clone())
            }
            Self::Record(fields, ext_id) => Self::Record(fields.shallow_clone(), ext_id.clone()),
            Self::Function(args, closure_type_id, ret_type_id) => Self::Function(
                args.shallow_clone(),
                closure_type_id.clone(),
                ret_type_id.clone(),
            ),
            rest => todo!("{:?}", rest),
        }
    }
}
impl Type2 {
    /// Substitute type variables per `_subs` within the type at `_type_id`.
    /// Not yet implemented — panics via `todo!`.
    fn substitute(_pool: &mut Pool, _subs: &MutMap<Variable, TypeId>, _type_id: TypeId) {
        todo!()
    }

    /// Collect every type variable reachable from this type.
    ///
    /// Iterative worklist traversal: children are fetched from `pool` and
    /// pushed onto `stack` rather than recursing, so deep types cannot blow
    /// the call stack.
    pub fn variables(&self, pool: &mut Pool) -> MutSet<Variable> {
        use Type2::*;

        let mut stack = vec![self];
        let mut result = MutSet::default();

        while let Some(this) = stack.pop() {
            match this {
                Variable(v) => {
                    result.insert(*v);
                }
                // Aliases contribute whatever their actual type contributes.
                Alias(_, _, actual) | AsAlias(_, _, actual) | Opaque(_, _, actual) => {
                    stack.push(pool.get(*actual));
                }
                HostExposedAlias {
                    actual_var, actual, ..
                } => {
                    result.insert(*actual_var);
                    stack.push(pool.get(*actual));
                }
                EmptyTagUnion | EmptyRec | Erroneous(_) => {}
                TagUnion(tags, ext) => {
                    for (_, args) in tags.iter(pool) {
                        stack.extend(args.iter(pool));
                    }
                    stack.push(pool.get(*ext));
                }
                RecursiveTagUnion(rec, tags, ext) => {
                    for (_, args) in tags.iter(pool) {
                        stack.extend(args.iter(pool));
                    }
                    stack.push(pool.get(*ext));
                    // The recursion variable itself counts too.
                    result.insert(*rec);
                }
                Record(fields, ext) => {
                    for (_, field) in fields.iter(pool) {
                        stack.push(pool.get(*field.as_inner()));
                    }
                    stack.push(pool.get(*ext));
                }
                Function(args, closure, result) => {
                    stack.extend(args.iter(pool));
                    stack.push(pool.get(*closure));
                    stack.push(pool.get(*result));
                }
                Apply(_, args) => {
                    stack.extend(args.iter(pool));
                }
            }
        }

        result
    }

    /// Whether `_needle` occurs anywhere in this type.
    /// Not yet implemented — panics via `todo!`.
    pub fn contains_symbol(&self, _pool: &mut Pool, _needle: Symbol) -> bool {
        todo!()
    }

    /// Replace occurrences of the alias `_needle` with `_actual`.
    /// Not yet implemented — panics via `todo!`.
    pub fn substitute_alias(&self, _pool: &mut Pool, _needle: Symbol, _actual: Self) {
        todo!()
    }
}
impl NodeId<Type2> {
    /// Collect the type variables of the pointed-to type.
    /// Not yet implemented — panics via `todo!`; see `Type2::variables`.
    pub fn variables(&self, _pool: &mut Pool) -> MutSet<Variable> {
        todo!()
    }
}
/// A temporary data structure to return a bunch of values to Def construction.
///
/// Distinguishes function annotations (with and without surrounding aliases)
/// from plain value annotations.
pub enum Signature {
    // A function annotation that was wrapped in one or more aliases;
    // `annotation` keeps the original (aliased) type.
    FunctionWithAliases {
        annotation: Type2,
        arguments: PoolVec<Type2>,
        closure_type_id: TypeId,
        return_type_id: TypeId,
    },
    Function {
        arguments: PoolVec<Type2>,
        closure_type_id: TypeId,
        return_type_id: TypeId,
    },
    Value {
        annotation: Type2,
    },
}
/// The result of canonicalizing a type annotation: either the signature plus
/// the rigid variables and symbols it references, or an error.
pub enum Annotation2 {
    Annotation {
        named_rigids: MutMap<Lowercase, Variable>,
        unnamed_rigids: MutSet<Variable>,
        symbols: MutSet<Symbol>,
        signature: Signature,
    },
    Erroneous(roc_types::types::Problem),
}
/// Canonicalize a parsed type annotation into an `Annotation2`, classifying
/// it as a function or value signature and collecting the rigid variables
/// and symbols it references.
pub fn to_annotation2<'a>(
    env: &mut Env,
    scope: &mut Scope,
    annotation: &'a roc_parse::ast::TypeAnnotation<'a>,
    region: Region,
) -> Annotation2 {
    let mut references = References::default();

    let annotation = to_type2(env, scope, &mut references, annotation, region);

    // we dealias until we hit a non-alias, then we either hit a function type (and produce a
    // function annotation) or anything else (and produce a value annotation)
    match annotation {
        Type2::Function(arguments, closure_type_id, return_type_id) => {
            let References {
                named,
                unnamed,
                symbols,
                ..
            } = references;

            let signature = Signature::Function {
                arguments,
                closure_type_id,
                return_type_id,
            };

            Annotation2::Annotation {
                named_rigids: named,
                unnamed_rigids: unnamed,
                symbols,
                signature,
            }
        }
        Type2::Alias(_, _, _) => {
            // most of the time, the annotation is not an alias, so this case is comparatively
            // less efficient
            shallow_dealias(env, references, annotation)
        }
        _ => {
            let References {
                named,
                unnamed,
                symbols,
                ..
            } = references;

            let signature = Signature::Value { annotation };

            Annotation2::Annotation {
                named_rigids: named,
                unnamed_rigids: unnamed,
                symbols,
                signature,
            }
        }
    }
}
/// Strip any chain of leading aliases from `annotation`, then classify the
/// underlying type as a function or a plain value annotation.
///
/// When the dealiased type turns out to be a function, the ORIGINAL (still
/// aliased) annotation is preserved inside `Signature::FunctionWithAliases`
/// so the alias information is not lost.
///
/// (The previous signature declared an unused `<'a>` lifetime parameter,
/// removed here; this fn is private and callers pass no explicit lifetime.)
fn shallow_dealias(env: &mut Env, references: References, annotation: Type2) -> Annotation2 {
    let References {
        named,
        unnamed,
        symbols,
        ..
    } = references;
    let mut inner = &annotation;

    loop {
        match inner {
            Type2::Alias(_, _, actual) => {
                // Follow the alias one level down.
                inner = env.pool.get(*actual);
            }
            Type2::Function(arguments, closure_type_id, return_type_id) => {
                let signature = Signature::FunctionWithAliases {
                    arguments: arguments.shallow_clone(),
                    closure_type_id: *closure_type_id,
                    return_type_id: *return_type_id,
                    annotation,
                };

                return Annotation2::Annotation {
                    named_rigids: named,
                    unnamed_rigids: unnamed,
                    symbols,
                    signature,
                };
            }
            _ => {
                let signature = Signature::Value { annotation };

                return Annotation2::Annotation {
                    named_rigids: named,
                    unnamed_rigids: unnamed,
                    symbols,
                    signature,
                };
            }
        }
    }
}
/// Type variables and symbols collected while canonicalizing an annotation.
#[derive(Default)]
pub struct References {
    // Rigid variables the user named, e.g. the `a` in `List a`.
    named: MutMap<Lowercase, Variable>,
    // Rigid variables with no user-facing name (wildcards, malformed types).
    unnamed: MutSet<Variable>,
    // Variables not visible in the annotation text — presumably e.g. closure
    // variables; TODO confirm against `Alias::hidden_variables` usage.
    hidden: MutSet<Variable>,
    // Type constructors / aliases mentioned by the annotation.
    symbols: MutSet<Symbol>,
}
/// Canonicalize `annotation` into a `Type2`, store it in the pool, and
/// return its id (`env.add` also records the region for the new node).
pub fn to_type_id<'a>(
    env: &mut Env,
    scope: &mut Scope,
    rigids: &mut References,
    annotation: &roc_parse::ast::TypeAnnotation<'a>,
    region: Region,
) -> TypeId {
    let typ = to_type2(env, scope, rigids, annotation, region);

    env.add(typ, region)
}
/// Canonicalize `annotation` into the PRE-ALLOCATED pool slot `type_id`
/// (overwriting its contents) and record `region` for it.
///
/// Used when the caller has reserved node ids up front, e.g. for the
/// argument list of a function type.
pub fn as_type_id<'a>(
    env: &mut Env,
    scope: &mut Scope,
    rigids: &mut References,
    type_id: TypeId,
    annotation: &roc_parse::ast::TypeAnnotation<'a>,
    region: Region,
) {
    let type2 = to_type2(env, scope, rigids, annotation, region);

    env.pool[type_id] = type2;
    env.set_region(type_id, region);
}
/// Canonicalize a parsed type annotation into a `Type2`, recording every
/// rigid variable and referenced symbol in `references`.
///
/// NOTE(review): several error paths (erroneous type application, shadowed
/// `as`-alias name) currently panic via `todo!()`; the commented-out code
/// shows the intended problem reporting.
pub fn to_type2<'a>(
    env: &mut Env,
    scope: &mut Scope,
    references: &mut References,
    annotation: &roc_parse::ast::TypeAnnotation<'a>,
    region: Region,
) -> Type2 {
    use roc_parse::ast::Pattern;
    use roc_parse::ast::TypeAnnotation::*;
    use roc_parse::ast::TypeHeader;

    match annotation {
        // A type application, e.g. `List Str` — may resolve to an alias.
        Apply(module_name, ident, targs) => {
            match to_type_apply(env, scope, references, module_name, ident, targs, region) {
                TypeApply::Apply(symbol, args) => {
                    references.symbols.insert(symbol);
                    Type2::Apply(symbol, args)
                }
                TypeApply::Alias(symbol, args, actual) => {
                    references.symbols.insert(symbol);
                    Type2::Alias(symbol, args, actual)
                }
                TypeApply::Erroneous(_problem) => {
                    // Type2::Erroneous(problem)
                    todo!()
                }
            }
        }
        Function(argument_types, return_type) => {
            // Reserve argument slots first, then canonicalize each annotation
            // directly into its slot via `as_type_id`.
            let arguments = PoolVec::with_capacity(argument_types.len() as u32, env.pool);

            for (type_id, loc_arg) in arguments.iter_node_ids().zip(argument_types.iter()) {
                as_type_id(
                    env,
                    scope,
                    references,
                    type_id,
                    &loc_arg.value,
                    loc_arg.region,
                );
            }

            let return_type_id = to_type_id(
                env,
                scope,
                references,
                &return_type.value,
                return_type.region,
            );

            // Every function type gets a fresh closure variable.
            let closure_type = Type2::Variable(env.var_store.fresh());
            let closure_type_id = env.pool.add(closure_type);

            Type2::Function(arguments, closure_type_id, return_type_id)
        }
        BoundVariable(v) => {
            // A rigid type variable. The parser should have already ensured that the name is indeed a lowercase.
            let v = Lowercase::from(*v);
            // Reuse the variable if this rigid name was seen before.
            match references.named.get(&v) {
                Some(var) => Type2::Variable(*var),
                None => {
                    let var = env.var_store.fresh();

                    references.named.insert(v, var);

                    Type2::Variable(var)
                }
            }
        }
        Inferred => {
            let var = env.var_store.fresh();

            Type2::Variable(var)
        }
        Wildcard | Malformed(_) => {
            let var = env.var_store.fresh();

            references.unnamed.insert(var);

            Type2::Variable(var)
        }
        Record { fields, ext, .. } => {
            let field_types_map =
                can_assigned_fields(env, scope, references, &fields.items, region);

            let field_types = PoolVec::with_capacity(field_types_map.len() as u32, env.pool);

            for (node_id, (label, field)) in field_types.iter_node_ids().zip(field_types_map) {
                let poolstr = PoolStr::new(label.as_str(), env.pool);

                // Re-wrap each field with its type moved into the pool,
                // preserving the Optional/Demanded/Required flavor.
                let rec_field = match field {
                    RecordField::Optional(_) => {
                        let field_id = env.pool.add(field.into_inner());
                        RecordField::Optional(field_id)
                    }
                    RecordField::Demanded(_) => {
                        let field_id = env.pool.add(field.into_inner());
                        RecordField::Demanded(field_id)
                    }
                    RecordField::Required(_) => {
                        let field_id = env.pool.add(field.into_inner());
                        RecordField::Required(field_id)
                    }
                };
                env.pool[node_id] = (poolstr, rec_field);
            }

            // No explicit extension means the closed record `{}`.
            let ext_type = match ext {
                Some(loc_ann) => to_type_id(env, scope, references, &loc_ann.value, region),
                None => env.add(Type2::EmptyRec, region),
            };

            Type2::Record(field_types, ext_type)
        }
        TagUnion { tags, ext, .. } => {
            let tag_types_vec = can_tags(env, scope, references, tags.items, region);

            let tag_types = PoolVec::with_capacity(tag_types_vec.len() as u32, env.pool);

            for (node_id, (tag_name, field)) in tag_types.iter_node_ids().zip(tag_types_vec) {
                env.pool[node_id] = (tag_name, field);
            }

            // No explicit extension means the closed tag union `[]`.
            let ext_type = match ext {
                Some(loc_ann) => to_type_id(env, scope, references, &loc_ann.value, region),
                None => env.add(Type2::EmptyTagUnion, region),
            };

            Type2::TagUnion(tag_types, ext_type)
        }
        As(
            loc_inner,
            _spaces,
            TypeHeader {
                name,
                vars: loc_vars,
            },
        ) => {
            // e.g. `{ x : Int, y : Int } as Point`
            let symbol = match scope.introduce(
                name.value.into(),
                &env.exposed_ident_ids,
                &mut env.ident_ids,
                region,
            ) {
                Ok(symbol) => symbol,

                Err((_original_region, _shadow)) => {
                    // let problem = Problem2::Shadowed(original_region, shadow.clone());

                    // env.problem(roc_problem::can::Problem::ShadowingInAnnotation {
                    //     original_region,
                    //     shadow,
                    // });

                    // return Type2::Erroneous(problem);
                    todo!();
                }
            };

            let inner_type = to_type2(env, scope, references, &loc_inner.value, region);
            let vars = PoolVec::with_capacity(loc_vars.len() as u32, env.pool);
            let lowercase_vars = PoolVec::with_capacity(loc_vars.len() as u32, env.pool);

            // Fill `vars` (name → TypeId pairs for the alias) and
            // `lowercase_vars` (name → Variable pairs for scope bookkeeping)
            // in lockstep, reusing previously-seen rigid names.
            for ((loc_var, named_id), var_id) in loc_vars
                .iter()
                .zip(lowercase_vars.iter_node_ids())
                .zip(vars.iter_node_ids())
            {
                let var = match loc_var.value {
                    Pattern::Identifier(name) if name.chars().next().unwrap().is_lowercase() => {
                        name
                    }
                    _ => unreachable!("I thought this was validated during parsing"),
                };
                let var_name = Lowercase::from(var);

                if let Some(var) = references.named.get(&var_name) {
                    let poolstr = PoolStr::new(var_name.as_str(), env.pool);

                    let type_id = env.pool.add(Type2::Variable(*var));
                    env.pool[var_id] = (poolstr.shallow_clone(), type_id);

                    env.pool[named_id] = (poolstr, *var);
                    env.set_region(named_id, loc_var.region);
                } else {
                    let var = env.var_store.fresh();

                    references.named.insert(var_name.clone(), var);

                    let poolstr = PoolStr::new(var_name.as_str(), env.pool);

                    let type_id = env.pool.add(Type2::Variable(var));
                    env.pool[var_id] = (poolstr.shallow_clone(), type_id);

                    env.pool[named_id] = (poolstr, var);
                    env.set_region(named_id, loc_var.region);
                }
            }

            let alias_actual = inner_type;
            // TODO instantiate recursive tag union
            // let alias_actual = if let Type2::TagUnion(tags, ext) = inner_type {
            //     let rec_var = env.var_store.fresh();
            //
            //     let mut new_tags = Vec::with_capacity(tags.len());
            //     for (tag_name, args) in tags {
            //         let mut new_args = Vec::with_capacity(args.len());
            //         for arg in args {
            //             let mut new_arg = arg.clone();
            //             new_arg.substitute_alias(symbol, &Type2::Variable(rec_var));
            //             new_args.push(new_arg);
            //         }
            //         new_tags.push((tag_name.clone(), new_args));
            //     }
            //     Type2::RecursiveTagUnion(rec_var, new_tags, ext)
            // } else {
            //     inner_type
            // };

            // Variables in the actual type that are not alias parameters
            // are hidden.
            let mut hidden_variables = MutSet::default();
            hidden_variables.extend(alias_actual.variables(env.pool));

            for (_, var) in lowercase_vars.iter(env.pool) {
                hidden_variables.remove(var);
            }

            let alias_actual_id = env.pool.add(alias_actual);
            scope.add_alias(env.pool, symbol, lowercase_vars, alias_actual_id);

            let alias = scope.lookup_alias(symbol).unwrap();
            // local_aliases.insert(symbol, alias.clone());

            // TODO host-exposed
            // if vars.is_empty() && env.home == symbol.module_id() {
            //     let actual_var = env.var_store.fresh();
            //     rigids.host_exposed.insert(symbol, actual_var);
            //     Type::HostExposedAlias {
            //         name: symbol,
            //         arguments: vars,
            //         actual: Box::new(alias.typ.clone()),
            //         actual_var,
            //     }
            // } else {
            //     Type::Alias(symbol, vars, Box::new(alias.typ.clone()))
            // }
            Type2::AsAlias(symbol, vars, alias.actual)
        }
        Where { .. } => todo_abilities!(),
        SpaceBefore(nested, _) | SpaceAfter(nested, _) => {
            to_type2(env, scope, references, nested, region)
        }
    }
}
// TODO trim down these arguments!
/// Canonicalize the fields of a record type annotation into a map from
/// field name to `RecordField<Type2>`.
///
/// Duplicate field names are reported as problems; the right-most
/// occurrence wins. Malformed fields are skipped silently (TODO below).
///
/// NOTE(review): `fields` is a double reference (`&&[...]`) because the
/// caller passes `&fields.items`; a single slice ref would do — confirm
/// before changing the caller.
#[allow(clippy::too_many_arguments)]
fn can_assigned_fields<'a>(
    env: &mut Env,
    scope: &mut Scope,
    rigids: &mut References,
    fields: &&[Loc<roc_parse::ast::AssignedField<'a, roc_parse::ast::TypeAnnotation<'a>>>],
    region: Region,
) -> MutMap<Lowercase, RecordField<Type2>> {
    use roc_parse::ast::AssignedField::*;
    use roc_types::types::RecordField::*;

    // SendMap doesn't have a `with_capacity`
    let mut field_types = MutMap::default();

    // field names we've seen so far in this record
    let mut seen = std::collections::HashMap::with_capacity(fields.len());

    'outer: for loc_field in fields.iter() {
        let mut field = &loc_field.value;

        // use this inner loop to unwrap the SpaceAfter/SpaceBefore
        // when we find the name of this field, break out of the loop
        // with that value, so we can check whether the field name is
        // a duplicate
        let new_name = 'inner: loop {
            match field {
                RequiredValue(field_name, _, annotation) => {
                    let field_type =
                        to_type2(env, scope, rigids, &annotation.value, annotation.region);

                    let label = Lowercase::from(field_name.value);
                    field_types.insert(label.clone(), Required(field_type));

                    break 'inner label;
                }
                OptionalValue(field_name, _, annotation) => {
                    let field_type =
                        to_type2(env, scope, rigids, &annotation.value, annotation.region);

                    let label = Lowercase::from(field_name.value);
                    field_types.insert(label.clone(), Optional(field_type));

                    break 'inner label;
                }
                LabelOnly(loc_field_name) => {
                    // Interpret { a, b } as { a : a, b : b }
                    let field_name = Lowercase::from(loc_field_name.value);
                    let field_type = {
                        if let Some(var) = rigids.named.get(&field_name) {
                            Type2::Variable(*var)
                        } else {
                            let field_var = env.var_store.fresh();
                            rigids.named.insert(field_name.clone(), field_var);
                            Type2::Variable(field_var)
                        }
                    };

                    field_types.insert(field_name.clone(), Required(field_type));

                    break 'inner field_name;
                }
                SpaceBefore(nested, _) | SpaceAfter(nested, _) => {
                    // check the nested field instead
                    field = nested;
                    continue 'inner;
                }
                Malformed(_) => {
                    // TODO report this?
                    // completely skip this element, advance to the next tag
                    continue 'outer;
                }
            }
        };

        // ensure that the new name is not already in this record:
        // note that the right-most tag wins when there are two with the same name
        if let Some(replaced_region) = seen.insert(new_name.clone(), loc_field.region) {
            env.problem(roc_problem::can::Problem::DuplicateRecordFieldType {
                field_name: new_name.into(),
                record_region: region,
                field_region: loc_field.region,
                replaced_region,
            });
        }
    }

    field_types
}
/// Canonicalize the tags of a tag-union type annotation into
/// `(TagName, argument types)` pairs.
///
/// Duplicate tag names are reported as problems; the right-most occurrence
/// wins. Malformed tags are skipped silently (TODO below).
fn can_tags<'a>(
    env: &mut Env,
    scope: &mut Scope,
    rigids: &mut References,
    tags: &'a [Loc<roc_parse::ast::Tag<'a>>],
    region: Region,
) -> Vec<(TagName, PoolVec<Type2>)> {
    use roc_parse::ast::Tag;

    let mut tag_types = Vec::with_capacity(tags.len());

    // tag names we've seen so far in this tag union
    let mut seen = std::collections::HashMap::with_capacity(tags.len());

    'outer: for loc_tag in tags.iter() {
        let mut tag = &loc_tag.value;

        // use this inner loop to unwrap the SpaceAfter/SpaceBefore
        // when we find the name of this tag, break out of the loop
        // with that value, so we can check whether the tag name is
        // a duplicate
        let new_name = 'inner: loop {
            match tag {
                Tag::Apply { name, args } => {
                    // Reserve slots for the payload types, then canonicalize
                    // each argument directly into its slot.
                    let arg_types = PoolVec::with_capacity(args.len() as u32, env.pool);

                    for (type_id, loc_arg) in arg_types.iter_node_ids().zip(args.iter()) {
                        as_type_id(env, scope, rigids, type_id, &loc_arg.value, loc_arg.region);
                    }

                    let tag_name = TagName(name.value.into());
                    tag_types.push((tag_name.clone(), arg_types));

                    break 'inner tag_name;
                }
                Tag::SpaceBefore(nested, _) | Tag::SpaceAfter(nested, _) => {
                    // check the nested tag instead
                    tag = nested;
                    continue 'inner;
                }
                Tag::Malformed(_) => {
                    // TODO report this?
                    // completely skip this element, advance to the next tag
                    continue 'outer;
                }
            }
        };

        // ensure that the new name is not already in this tag union:
        // note that the right-most tag wins when there are two with the same name
        if let Some(replaced_region) = seen.insert(new_name.clone(), loc_tag.region) {
            env.problem(roc_problem::can::Problem::DuplicateTag {
                tag_region: loc_tag.region,
                tag_union_region: region,
                replaced_region,
                tag_name: new_name,
            });
        }
    }

    tag_types
}
/// The result of resolving a type application: a plain application of a
/// type constructor, an instantiated alias, or an error.
enum TypeApply {
    Apply(Symbol, PoolVec<Type2>),
    Alias(Symbol, PoolVec<TypeId>, TypeId),
    Erroneous(roc_types::types::Problem),
}
/// Resolve a type application like `List Str` or `Json.Decoder a`.
///
/// Looks the head symbol up (locally or in the named module), canonicalizes
/// the type arguments, and — if the symbol names a known alias — instantiates
/// the alias by substituting its parameters and freshening its recursion and
/// hidden variables.
#[inline(always)]
fn to_type_apply<'a>(
    env: &mut Env,
    scope: &mut Scope,
    rigids: &mut References,
    module_name: &str,
    ident: &str,
    type_arguments: &[Loc<roc_parse::ast::TypeAnnotation<'a>>],
    region: Region,
) -> TypeApply {
    let symbol = if module_name.is_empty() {
        // Since module_name was empty, this is an unqualified type.
        // Look it up in scope!
        let ident: Ident = (*ident).into();

        match scope.lookup(&ident, region) {
            Ok(symbol) => symbol,
            Err(problem) => {
                env.problem(roc_problem::can::Problem::RuntimeError(problem));
                return TypeApply::Erroneous(Problem::UnrecognizedIdent(ident.into()));
            }
        }
    } else {
        match env.qualified_lookup(module_name, ident, region) {
            Ok(symbol) => symbol,
            Err(problem) => {
                // Either the module wasn't imported, or
                // it was imported but it doesn't expose this ident.
                env.problem(roc_problem::can::Problem::RuntimeError(problem));
                return TypeApply::Erroneous(Problem::UnrecognizedIdent((*ident).into()));
            }
        }
    };

    // Canonicalize each type argument into a reserved pool slot.
    let argument_type_ids = PoolVec::with_capacity(type_arguments.len() as u32, env.pool);

    for (type_id, loc_arg) in argument_type_ids.iter_node_ids().zip(type_arguments.iter()) {
        as_type_id(env, scope, rigids, type_id, &loc_arg.value, loc_arg.region);
    }

    let args = type_arguments;
    let opt_alias = scope.lookup_alias(symbol);
    match opt_alias {
        Some(ref alias) => {
            // use a known alias
            let actual = alias.actual;
            let mut substitutions: MutMap<Variable, TypeId> = MutMap::default();

            // Arity check before instantiating.
            if alias.targs.len() != args.len() {
                let error = TypeApply::Erroneous(Problem::BadTypeArguments {
                    symbol,
                    region,
                    alias_needs: alias.targs.len() as u8,
                    type_got: args.len() as u8,
                });
                return error;
            }

            // Pair each alias parameter with the supplied argument and
            // record the substitution parameter-var -> argument-type.
            let arguments = PoolVec::with_capacity(type_arguments.len() as u32, env.pool);

            let it = arguments.iter_node_ids().zip(
                argument_type_ids
                    .iter_node_ids()
                    .zip(alias.targs.iter_node_ids()),
            );

            for (node_id, (type_id, loc_var_id)) in it {
                let loc_var = &env.pool[loc_var_id];
                let name = loc_var.0.shallow_clone();
                let var = loc_var.1;

                env.pool[node_id] = (name, type_id);

                substitutions.insert(var, type_id);
            }

            // make sure the recursion variable is freshly instantiated
            // have to allocate these outside of the if for lifetime reasons...
            let new = env.var_store.fresh();
            let fresh = env.pool.add(Type2::Variable(new));
            if let Type2::RecursiveTagUnion(rvar, ref tags, ext) = &mut env.pool[actual] {
                substitutions.insert(*rvar, fresh);

                env.pool[actual] = Type2::RecursiveTagUnion(new, tags.shallow_clone(), *ext);
            }

            // make sure hidden variables are freshly instantiated
            for var_id in alias.hidden_variables.iter_node_ids() {
                let var = env.pool[var_id];
                let fresh = env.pool.add(Type2::Variable(env.var_store.fresh()));
                substitutions.insert(var, fresh);
            }

            // instantiate variables
            Type2::substitute(env.pool, &substitutions, actual);

            // Copy the (name, type) pairs into the PoolVec the Alias carries.
            let type_arguments = PoolVec::with_capacity(arguments.len() as u32, env.pool);

            for (node_id, type_id) in arguments
                .iter_node_ids()
                .zip(type_arguments.iter_node_ids())
            {
                let typ = env.pool[node_id].1;
                env.pool[type_id] = typ;
            }

            TypeApply::Alias(symbol, type_arguments, actual)
        }
        None => TypeApply::Apply(symbol, argument_type_ids),
    }
}
/// A type alias as stored in scope: its parameters, its actual type, and
/// the variables hidden inside the actual type.
#[derive(Debug)]
pub struct Alias {
    // The alias's type parameters: (name, variable) pairs.
    pub targs: PoolVec<(PoolStr, Variable)>,
    // The type the alias stands for.
    pub actual: TypeId,

    /// hidden type variables, like the closure variable in `a -> b`
    pub hidden_variables: PoolVec<Variable>,
}
impl ShallowClone for Alias {
    /// Cheap copy: duplicates the pool-vec handles and the actual-type id
    /// without touching the pooled contents.
    fn shallow_clone(&self) -> Self {
        let Self {
            targs,
            actual,
            hidden_variables,
        } = self;

        Self {
            targs: targs.shallow_clone(),
            actual: *actual,
            hidden_variables: hidden_variables.shallow_clone(),
        }
    }
}

View File

@ -1,101 +0,0 @@
use crate::{
lang::{core::expr::expr2_to_string::expr2_to_string, rigids::Rigids},
mem_pool::{
pool::{NodeId, Pool},
shallow_clone::ShallowClone,
},
};
use roc_types::subs::Variable;
use super::{
expr::expr2::ExprId,
pattern::{Pattern2, PatternId},
types::TypeId,
};
/// A value definition, with or without a type annotation.
///
/// All payloads are pool ids, so a `ValueDef` itself stays small.
#[derive(Debug)]
pub enum ValueDef {
    WithAnnotation {
        pattern_id: PatternId, // 4B
        expr_id: ExprId,       // 4B
        type_id: TypeId,
        rigids: Rigids,
        expr_var: Variable, // 4B
    },
    NoAnnotation {
        pattern_id: PatternId, // 4B
        expr_id: ExprId,       // 4B
        expr_var: Variable,    // 4B
    },
}
impl ShallowClone for ValueDef {
    /// Cheap copy: pool ids and variables are `Copy`; only the `Rigids`
    /// handle needs an explicit shallow clone.
    fn shallow_clone(&self) -> Self {
        match self {
            Self::WithAnnotation {
                pattern_id,
                expr_id,
                type_id,
                rigids,
                expr_var,
            } => Self::WithAnnotation {
                pattern_id: *pattern_id,
                expr_id: *expr_id,
                type_id: *type_id,
                rigids: rigids.shallow_clone(),
                expr_var: *expr_var,
            },
            Self::NoAnnotation {
                pattern_id,
                expr_id,
                expr_var,
            } => Self::NoAnnotation {
                pattern_id: *pattern_id,
                expr_id: *expr_id,
                expr_var: *expr_var,
            },
        }
    }
}
impl ValueDef {
    /// The id of this def's body expression.
    pub fn get_expr_id(&self) -> ExprId {
        match self {
            ValueDef::WithAnnotation { expr_id, .. }
            | ValueDef::NoAnnotation { expr_id, .. } => *expr_id,
        }
    }

    /// The id of the pattern this def binds.
    pub fn get_pattern_id(&self) -> NodeId<Pattern2> {
        match self {
            ValueDef::WithAnnotation { pattern_id, .. }
            | ValueDef::NoAnnotation { pattern_id, .. } => *pattern_id,
        }
    }
}
/// Render a `ValueDef` — including its pooled pattern, expression, and type
/// contents — as a debug string.
pub fn value_def_to_string(val_def: &ValueDef, pool: &Pool) -> String {
    match val_def {
        ValueDef::WithAnnotation {
            pattern_id,
            expr_id,
            type_id,
            rigids,
            expr_var,
        } => format!(
            "WithAnnotation {{ pattern_id: {:?}, expr_id: {:?}, type_id: {:?}, rigids: {:?}, expr_var: {:?}}}",
            pool.get(*pattern_id),
            expr2_to_string(*expr_id, pool),
            pool.get(*type_id),
            rigids,
            expr_var
        ),
        ValueDef::NoAnnotation {
            pattern_id,
            expr_id,
            expr_var,
        } => format!(
            "NoAnnotation {{ pattern_id: {:?}, expr_id: {:?}, expr_var: {:?}}}",
            pool.get(*pattern_id),
            expr2_to_string(*expr_id, pool),
            expr_var
        ),
    }
}

View File

@ -1,189 +0,0 @@
use crate::mem_pool::pool::{NodeId, Pool};
use bumpalo::{collections::Vec as BumpVec, Bump};
use roc_collections::all::{MutMap, MutSet};
use roc_module::ident::{Ident, Lowercase, ModuleName};
use roc_module::symbol::{IdentIds, IdentIdsByModule, ModuleId, ModuleIds, Symbol};
use roc_problem::can::{Problem, RuntimeError};
use roc_region::all::{Loc, Region};
use roc_types::subs::VarStore;
use super::core::def::def::References;
/// The canonicalization environment for a single module: the pool and var
/// store everything is allocated into, ident/module id tables, and
/// accumulators for problems, closures, and lookups discovered while
/// canonicalizing.
#[derive(Debug)]
pub struct Env<'a> {
    // The module being canonicalized.
    pub home: ModuleId,
    pub var_store: &'a mut VarStore,
    pub pool: &'a mut Pool,
    pub arena: &'a Bump,

    // Problems accumulated via `Env::problem`, arena-allocated.
    pub problems: BumpVec<'a, Problem>,

    // Ident ids exposed by each dependency module.
    pub dep_idents: IdentIdsByModule,
    pub module_ids: &'a ModuleIds,
    // Starts as a clone of `exposed_ident_ids`; grows via Scope.introduce.
    pub ident_ids: IdentIds,
    pub exposed_ident_ids: IdentIds,

    // References captured per closure, keyed by the closure's symbol.
    pub closures: MutMap<Symbol, References>,
    /// Symbols which were referenced by qualified lookups.
    pub qualified_lookups: MutSet<Symbol>,

    pub top_level_symbols: MutSet<Symbol>,

    pub closure_name_symbol: Option<Symbol>,
    pub tailcallable_symbol: Option<Symbol>,
}
impl<'a> Env<'a> {
    /// Create a fresh canonicalization environment for module `home`.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        home: ModuleId,
        arena: &'a Bump,
        pool: &'a mut Pool,
        var_store: &'a mut VarStore,
        dep_idents: IdentIdsByModule,
        module_ids: &'a ModuleIds,
        exposed_ident_ids: IdentIds,
    ) -> Env<'a> {
        Env {
            home,
            arena,
            pool,
            problems: BumpVec::new_in(arena),
            var_store,
            dep_idents,
            module_ids,
            ident_ids: exposed_ident_ids.clone(), // we start with these, but will add more later using Scope.introduce
            exposed_ident_ids,
            closures: MutMap::default(),
            qualified_lookups: MutSet::default(),
            tailcallable_symbol: None,
            closure_name_symbol: None,
            top_level_symbols: MutSet::default(),
        }
    }

    /// Add `item` to the pool and record `region` for the new node.
    pub fn add<T>(&mut self, item: T, region: Region) -> NodeId<T> {
        let id = self.pool.add(item);
        self.set_region(id, region);

        id
    }

    /// Record a canonicalization problem to be reported later.
    pub fn problem(&mut self, problem: Problem) {
        self.problems.push(problem);
    }

    /// Associate `region` with `node_id`.
    ///
    /// TODO: region tracking is not implemented yet, so this is a no-op.
    /// (A leftover `dbg!` here used to print to stderr on every `add` call;
    /// it has been removed.)
    pub fn set_region<T>(&mut self, _node_id: NodeId<T>, _region: Region) {}

    /// Remember the references captured by the closure named `symbol`.
    pub fn register_closure(&mut self, symbol: Symbol, references: References) {
        self.closures.insert(symbol, references);
    }

    /// Generates a unique, new symbol like "$1" or "$5",
    /// using the home module as the module_id.
    ///
    /// This is used, for example, during canonicalization of an Expr::Closure
    /// to generate a unique symbol to refer to that closure.
    pub fn gen_unique_symbol(&mut self) -> Symbol {
        let ident_id = self.ident_ids.gen_unique();

        Symbol::new(self.home, ident_id)
    }

    /// Returns Err if the symbol resolved, but it was not exposed by the given module
    pub fn qualified_lookup(
        &mut self,
        module_name: &str,
        ident: &str,
        region: Region,
    ) -> Result<Symbol, RuntimeError> {
        debug_assert!(
            !module_name.is_empty(),
            "Called env.qualified_lookup with an unqualified ident: {:?}",
            ident
        );

        let module_name: ModuleName = module_name.into();

        match self.module_ids.get_id(&module_name) {
            Some(&module_id) => {
                let ident: Ident = ident.into();

                // You can do qualified lookups on your own module, e.g.
                // if I'm in the Foo module, I can do a `Foo.bar` lookup.
                if module_id == self.home {
                    match self.ident_ids.get_id(&ident) {
                        Some(ident_id) => {
                            let symbol = Symbol::new(module_id, ident_id);

                            self.qualified_lookups.insert(symbol);

                            Ok(symbol)
                        }
                        // Not in scope: report every ident we DO know, for
                        // a "did you mean" style error.
                        None => Err(RuntimeError::LookupNotInScope(
                            Loc {
                                value: ident,
                                region,
                            },
                            self.ident_ids
                                .ident_strs()
                                .map(|(_, string)| string.into())
                                .collect(),
                        )),
                    }
                } else {
                    match self.dep_idents.get(&module_id) {
                        Some(exposed_ids) => match exposed_ids.get_id(&ident) {
                            Some(ident_id) => {
                                let symbol = Symbol::new(module_id, ident_id);

                                self.qualified_lookups.insert(symbol);

                                Ok(symbol)
                            }
                            // The module exists but doesn't expose this
                            // ident; report what it DOES expose.
                            None => {
                                let exposed_values = exposed_ids
                                    .ident_strs()
                                    .filter(|(_, ident)| {
                                        ident.starts_with(|c: char| c.is_lowercase())
                                    })
                                    .map(|(_, ident)| Lowercase::from(ident))
                                    .collect();
                                Err(RuntimeError::ValueNotExposed {
                                    module_name,
                                    ident,
                                    region,
                                    exposed_values,
                                })
                            }
                        },
                        // Known module, but not imported by this one.
                        None => Err(RuntimeError::ModuleNotImported {
                            module_name,
                            imported_modules: self
                                .dep_idents
                                .keys()
                                .filter_map(|module_id| self.module_ids.get_name(*module_id))
                                .map(|module_name| module_name.as_ref().into())
                                .collect(),
                            region,
                            module_exists: true,
                        }),
                    }
                }
            }
            // No module by that name anywhere.
            None => Err(RuntimeError::ModuleNotImported {
                module_name,
                imported_modules: self
                    .module_ids
                    .available_modules()
                    .map(|string| string.as_ref().into())
                    .collect(),
                region,
                module_exists: false,
            }),
        }
    }
}

View File

@ -1,4 +0,0 @@
pub mod core;
pub mod env;
mod rigids;
pub mod scope;

View File

@ -1,83 +0,0 @@
use std::{
collections::{HashMap, HashSet},
hash::BuildHasherDefault,
};
use crate::mem_pool::{
pool::Pool, pool_str::PoolStr, pool_vec::PoolVec, shallow_clone::ShallowClone,
};
use roc_collections::all::WyHash;
use roc_module::ident::Lowercase;
use roc_types::subs::Variable;
#[derive(Debug)]
pub struct Rigids {
    // Rigid type variable = type variable where type is specified by the programmer
    // Entries with a `Some(PoolStr)` were named by the user; `None` entries are
    // anonymous rigids. The data lives in the Pool; this struct is just a header.
    pub names: PoolVec<(Option<PoolStr>, Variable)>, // 8B
    // NOTE(review): appears to pad the struct for pool storage — confirm purpose.
    padding: [u8; 1],
}
#[allow(clippy::needless_collect)]
impl Rigids {
    /// Build a `Rigids` from the named and unnamed rigid type variables collected
    /// during canonicalization. Both sets are flattened into the single pooled
    /// `names` vec; unnamed rigids get a `None` name.
    pub fn new(
        named: HashMap<Lowercase, Variable, BuildHasherDefault<WyHash>>,
        unnamed: HashSet<Variable, BuildHasherDefault<WyHash>>,
        pool: &mut Pool,
    ) -> Self {
        let names = PoolVec::with_capacity((named.len() + unnamed.len()) as u32, pool);
        // Collect into a temporary Vec first: we cannot allocate PoolStrs while
        // iterating the pooled vec, since both operations need `pool`.
        let mut temp_names = Vec::new();
        temp_names.extend(named.iter().map(|(name, var)| (Some(name.as_str()), *var)));
        temp_names.extend(unnamed.iter().map(|var| (None, *var)));
        for (node_id, (opt_name, variable)) in names.iter_node_ids().zip(temp_names) {
            let poolstr = opt_name.map(|name| PoolStr::new(name, pool));
            pool[node_id] = (poolstr, variable);
        }
        Self {
            names,
            padding: Default::default(),
        }
    }
    /// The subset of rigids that carry a user-written name.
    /// Allocates a fresh PoolVec on each call.
    pub fn named(&self, pool: &mut Pool) -> PoolVec<(PoolStr, Variable)> {
        let named = self
            .names
            .iter(pool)
            .filter_map(|(opt_pool_str, var)| {
                opt_pool_str.as_ref().map(|pool_str| (*pool_str, *var))
            })
            .collect::<Vec<(PoolStr, Variable)>>();
        PoolVec::new(named.into_iter(), pool)
    }
    /// The subset of rigids with no name (anonymous type variables).
    /// Allocates a fresh PoolVec on each call.
    pub fn unnamed(&self, pool: &mut Pool) -> PoolVec<Variable> {
        let unnamed = self
            .names
            .iter(pool)
            .filter_map(|(opt_pool_str, var)| {
                if opt_pool_str.is_none() {
                    Some(*var)
                } else {
                    None
                }
            })
            .collect::<Vec<Variable>>();
        PoolVec::new(unnamed.into_iter(), pool)
    }
}
impl ShallowClone for Rigids {
fn shallow_clone(&self) -> Self {
Self {
names: self.names.shallow_clone(),
padding: self.padding,
}
}
}

View File

@ -1,353 +0,0 @@
#![allow(clippy::all)]
#![allow(dead_code)]
#![allow(unused_imports)]
use std::fmt;
use crate::ast_error::ASTResult;
use crate::mem_pool::pool::Pool;
use crate::mem_pool::pool_str::PoolStr;
use crate::mem_pool::pool_vec::PoolVec;
use crate::mem_pool::shallow_clone::ShallowClone;
use roc_collections::all::{MutMap, MutSet};
use roc_module::ident::{Ident, Lowercase};
use roc_module::symbol::{
get_module_ident_ids, get_module_ident_ids_mut, IdentIds, IdentIdsByModule, Interns, ModuleId,
Symbol,
};
use roc_problem::can::RuntimeError;
use roc_region::all::{Loc, Region};
use roc_types::{
builtin_aliases,
solved_types::{BuiltinAlias, FreeVars, SolvedType},
subs::{VarId, VarStore, Variable},
};
use super::core::types::{Alias, Type2, TypeId};
use super::env::Env;
/// Convert a `SolvedType` to a `Type2` and store it in the pool, returning its id.
fn solved_type_to_type_id(
    pool: &mut Pool,
    solved_type: &SolvedType,
    free_vars: &mut FreeVars,
    var_store: &mut VarStore,
) -> TypeId {
    let converted = to_type2(pool, solved_type, free_vars, var_store);
    pool.add(converted)
}
/// Convert a `SolvedType` (from the builtin alias tables) into a pooled `Type2`.
///
/// Only the variants that builtin aliases actually use are handled; any other
/// variant hits the `todo!` at the bottom.
fn to_type2(
    pool: &mut Pool,
    solved_type: &SolvedType,
    free_vars: &mut FreeVars,
    var_store: &mut VarStore,
) -> Type2 {
    match solved_type {
        // TODO(opaques): take opaques into account
        SolvedType::Alias(symbol, solved_type_variables, _todo, solved_actual, _kind) => {
            // Convert each type argument, storing each converted node in the pool.
            let type_variables = PoolVec::with_capacity(solved_type_variables.len() as u32, pool);
            for (type_variable_node_id, solved_arg) in type_variables
                .iter_node_ids()
                .zip(solved_type_variables.iter())
            {
                let typ2 = to_type2(pool, solved_arg, free_vars, var_store);
                let node = pool.add(typ2);
                pool[type_variable_node_id] = node;
            }
            let actual_typ2 = to_type2(pool, solved_actual, free_vars, var_store);
            let actual = pool.add(actual_typ2);
            // Fixed: return the expression directly instead of binding it to a
            // temporary and returning that (clippy::let_and_return).
            Type2::Alias(*symbol, type_variables, actual)
        }
        SolvedType::TagUnion(tags, ext) => {
            // Convert every tag and its payload types into pooled vecs.
            let new_tags = PoolVec::with_capacity(tags.len() as u32, pool);
            for (tag_node_id, (tag_name, args)) in new_tags.iter_node_ids().zip(tags.iter()) {
                let new_args: PoolVec<Type2> = PoolVec::with_capacity(args.len() as u32, pool);
                for (arg_node_id, arg) in new_args.iter_node_ids().zip(args.iter()) {
                    let node = to_type2(pool, arg, free_vars, var_store);
                    pool[arg_node_id] = node;
                }
                pool[tag_node_id] = (tag_name.clone(), new_args);
            }
            let actual_typ2 = to_type2(pool, ext, free_vars, var_store);
            let actual = pool.add(actual_typ2);
            Type2::TagUnion(new_tags, actual)
        }
        SolvedType::Flex(var_id) => {
            Type2::Variable(var_id_to_flex_var(*var_id, free_vars, var_store))
        }
        SolvedType::EmptyTagUnion => Type2::EmptyTagUnion,
        rest => todo!("{:?}", rest),
    }
}
/// Map a solved-type `VarId` to a concrete `Variable`, reusing the variable if
/// this id has been seen before and minting a fresh one otherwise.
fn var_id_to_flex_var(
    var_id: VarId,
    free_vars: &mut FreeVars,
    var_store: &mut VarStore,
) -> Variable {
    match free_vars.unnamed_vars.get(&var_id) {
        Some(existing) => *existing,
        None => {
            let fresh = var_store.fresh();
            free_vars.unnamed_vars.insert(var_id, fresh);
            fresh
        }
    }
}
#[derive(Debug)]
pub struct Scope {
    /// All the identifiers in scope, mapped to where they were defined and
    /// the Symbol they resolve to.
    idents: MutMap<Ident, (Symbol, Region)>,
    /// A cache of all the symbols in scope. This makes lookups much
    /// faster when checking for unused defs and unused arguments.
    symbols: MutMap<Symbol, Region>,
    /// The type aliases currently in scope
    aliases: MutMap<Symbol, Alias>,
    /// The current module being processed. This will be used to turn
    /// unqualified idents into Symbols.
    home: ModuleId,
}
impl Scope {
    /// Create a fresh Scope for `home`, pre-populated with the builtin type
    /// aliases and the default idents that are in scope in every module.
    pub fn new(home: ModuleId, pool: &mut Pool, var_store: &mut VarStore) -> Scope {
        let solved_aliases = builtin_aliases::aliases();
        let mut aliases = MutMap::default();
        for (symbol, builtin_alias) in solved_aliases {
            // let BuiltinAlias { region, vars, typ } = builtin_alias;
            let BuiltinAlias { vars, typ, .. } = builtin_alias;
            let mut free_vars = FreeVars::default();
            // roc_types::solved_types::to_type(&typ, &mut free_vars, var_store);
            let actual = solved_type_to_type_id(pool, &typ, &mut free_vars, var_store);
            // make sure to sort these variables to make them line up with the type arguments
            let mut type_variables: Vec<_> = free_vars.unnamed_vars.into_iter().collect();
            type_variables.sort();
            debug_assert_eq!(vars.len(), type_variables.len());
            let variables = PoolVec::with_capacity(vars.len() as u32, pool);
            let it = variables
                .iter_node_ids()
                .zip(vars.iter())
                .zip(type_variables);
            for ((node_id, loc_name), (_, var)) in it {
                // TODO region is ignored, but "fake" anyway. How to resolve?
                let name = PoolStr::new(loc_name.value.as_str(), pool);
                pool[node_id] = (name, var);
            }
            let alias = Alias {
                actual,
                // We know that builtin aliases have no hidden variables (e.g. in closures)
                // (was a `///` doc comment, which is unused in expression position)
                hidden_variables: PoolVec::empty(pool),
                targs: variables,
            };
            aliases.insert(symbol, alias);
        }
        let idents = Symbol::default_in_scope();
        let idents: MutMap<_, _> = idents.into_iter().collect();
        Scope {
            home,
            idents,
            symbols: MutMap::default(),
            aliases,
        }
    }
    /// Iterate all idents in scope with the (Symbol, Region) each resolves to.
    pub fn idents(&self) -> impl Iterator<Item = (&Ident, &(Symbol, Region))> {
        self.idents.iter()
    }
    /// Iterate all symbols in scope with the region they were introduced at.
    pub fn symbols(&self) -> impl Iterator<Item = (Symbol, Region)> + '_ {
        self.symbols.iter().map(|(x, y)| (*x, *y))
    }
    pub fn contains_ident(&self, ident: &Ident) -> bool {
        self.idents.contains_key(ident)
    }
    pub fn contains_symbol(&self, symbol: Symbol) -> bool {
        self.symbols.contains_key(&symbol)
    }
    pub fn num_idents(&self) -> usize {
        self.idents.len()
    }
    /// Resolve an unqualified ident. On failure, report all idents currently in
    /// scope so the error can offer "did you mean" suggestions.
    pub fn lookup(&mut self, ident: &Ident, region: Region) -> Result<Symbol, RuntimeError> {
        match self.idents.get(ident) {
            Some((symbol, _)) => Ok(*symbol),
            None => Err(RuntimeError::LookupNotInScope(
                Loc {
                    region,
                    value: ident.clone().into(),
                },
                self.idents.keys().map(|v| v.as_ref().into()).collect(),
            )),
        }
    }
    pub fn lookup_alias(&self, symbol: Symbol) -> Option<&Alias> {
        self.aliases.get(&symbol)
    }
    /// Introduce a new ident to scope.
    ///
    /// Returns Err if this would shadow an existing ident, including the
    /// Symbol and Region of the ident we already had in scope under that name.
    pub fn introduce(
        &mut self,
        ident: Ident,
        exposed_ident_ids: &IdentIds,
        all_ident_ids: &mut IdentIds,
        region: Region,
    ) -> Result<Symbol, (Region, Loc<Ident>)> {
        match self.idents.get(&ident) {
            Some((_, original_region)) => {
                let shadow = Loc {
                    value: ident,
                    region,
                };
                Err((*original_region, shadow))
            }
            None => {
                // If this IdentId was already added previously
                // when the value was exposed in the module header,
                // use that existing IdentId. Otherwise, create a fresh one.
                let ident_id = match exposed_ident_ids.get_id(&ident) {
                    Some(ident_id) => ident_id,
                    None => all_ident_ids.add_str(ident.as_str()),
                };
                let symbol = Symbol::new(self.home, ident_id);
                self.symbols.insert(symbol, region);
                self.idents.insert(ident, (symbol, region));
                Ok(symbol)
            }
        }
    }
    /// Ignore an identifier.
    ///
    /// Used for record guards like { x: Just _ }
    // Note: allocates an IdentId but does NOT add the ident to scope maps.
    pub fn ignore(&mut self, ident: Ident, all_ident_ids: &mut IdentIds) -> Symbol {
        let ident_id = all_ident_ids.add_str(ident.as_str());
        Symbol::new(self.home, ident_id)
    }
    /// Import a Symbol from another module into this module's top-level scope.
    ///
    /// Returns Err if this would shadow an existing ident, including the
    /// Symbol and Region of the ident we already had in scope under that name.
    pub fn import(
        &mut self,
        ident: Ident,
        symbol: Symbol,
        region: Region,
    ) -> Result<(), (Symbol, Region)> {
        match self.idents.get(&ident) {
            Some(shadowed) => Err(*shadowed),
            None => {
                self.symbols.insert(symbol, region);
                self.idents.insert(ident, (symbol, region));
                Ok(())
            }
        }
    }
    /// Register a type alias under `name`. Variables in the alias body that are
    /// not listed in `vars` are recorded as hidden variables.
    pub fn add_alias(
        &mut self,
        pool: &mut Pool,
        name: Symbol,
        vars: PoolVec<(PoolStr, Variable)>,
        typ: TypeId,
    ) {
        let mut hidden_variables = MutSet::default();
        hidden_variables.extend(typ.variables(pool));
        for loc_var in vars.iter(pool) {
            hidden_variables.remove(&loc_var.1);
        }
        let hidden_variables_vec = PoolVec::with_capacity(hidden_variables.len() as u32, pool);
        for (node_id, var) in hidden_variables_vec.iter_node_ids().zip(hidden_variables) {
            pool[node_id] = var;
        }
        let alias = Alias {
            targs: vars,
            hidden_variables: hidden_variables_vec,
            actual: typ,
        };
        self.aliases.insert(name, alias);
    }
    pub fn contains_alias(&mut self, name: Symbol) -> bool {
        self.aliases.contains_key(&name)
    }
    /// Introduce every ident the module already knows about into this scope.
    pub fn fill_scope(&mut self, env: &Env, all_ident_ids: &mut IdentIdsByModule) -> ASTResult<()> {
        // Clone the id list up front so we can mutate `all_ident_ids` while iterating.
        let ident_ids = get_module_ident_ids(all_ident_ids, &env.home)?.clone();
        for (_, ident_ref) in ident_ids.ident_strs() {
            self.introduce(
                ident_ref.into(),
                &env.exposed_ident_ids,
                get_module_ident_ids_mut(all_ident_ids, &env.home)?,
                Region::zero(),
            )?;
        }
        Ok(())
    }
}
impl ShallowClone for Scope {
fn shallow_clone(&self) -> Self {
Self {
idents: self.idents.clone(),
symbols: self.symbols.clone(),
aliases: self
.aliases
.iter()
.map(|(s, a)| (*s, a.shallow_clone()))
.collect(),
home: self.home,
}
}
}

View File

@ -1,8 +0,0 @@
pub mod ast_error;
mod canonicalization;
pub mod constrain;
pub mod lang;
pub mod mem_pool;
pub mod module;
pub mod parse;
pub mod solve_type;

View File

@ -1,4 +0,0 @@
pub mod pool;
pub mod pool_str;
pub mod pool_vec;
pub mod shallow_clone;

View File

@ -1,269 +0,0 @@
/// A memory pool of 32-byte nodes. The node value 0 is reserved for the pool's
/// use, and valid nodes may never have that value.
///
/// Internally, the pool is divided into pages of 4096 bytes. It stores nodes
/// into one page at a time, and when it runs out, it uses mmap to reserve an
/// anonymous memory page in which to store nodes.
///
/// Since nodes are 32 bytes, one page can store 128 nodes; you can access a
/// particular node by its NodeId, which is an opaque wrapper around a pointer.
///
/// Pages also use the node value 0 (all 0 bits) to mark nodes as unoccupied.
/// This is important for performance.
use std::any::type_name;
use std::ffi::c_void;
use std::marker::PhantomData;
use std::mem::{align_of, size_of, MaybeUninit};
pub const NODE_BYTES: usize = 32;
// Each page has 128 slots. Each slot holds one 32B node
// This means each page is 4096B, which is the size of a memory page
// on typical systems where the compiler will be run.
//
// Nice things about this system include:
// * Allocating a new page is as simple as asking the OS for a memory page.
// * Since each node is 32B, each node's memory address will be a multiple of 16.
// * Thanks to the free lists and our consistent chunk sizes, we should
// end up with very little fragmentation.
// * Finding a slot for a given node should be very fast: see if the relevant
// free list has any openings; if not, try the next size up.
//
// Less nice things include:
// * This system makes it very hard to ever give a page back to the OS.
// We could try doing the Mesh Allocator strategy: whenever we allocate
// something, assign it to a random slot in the page, and then periodically
// try to merge two pages into one (by locking and remapping them in the OS)
// and then returning the redundant physical page back to the OS. This should
// work in theory, but is pretty complicated, and we'd need to schedule it.
// Keep in mind that we can't use the Mesh Allocator itself because it returns
// usize pointers, which would be too big for us to have 16B nodes.
// On the plus side, we could be okay with higher memory usage early on,
// and then later use the Mesh strategy to reduce long-running memory usage.
//
// With this system, we can allocate up to 4B nodes. If we wanted to keep
// a generational index in there, like https://crates.io/crates/sharded-slab
// does, we could use some of the 32 bits for that. For example, if we wanted
// to have a 5-bit generational index (supporting up to 32 generations), then
// we would have 27 bits remaining, meaning we could only support at most
// 134M nodes. Since the editor has a separate Pool for each module, is that
// enough for any single module we'll encounter in practice? Probably, and
// especially if we allocate super large collection literals on the heap instead
// of in the pool.
//
// Another possible design is to try to catch reuse bugs using an "ASan" like
// approach: in development builds, whenever we "free" a particular slot, we
// can add it to a dev-build-only "freed nodes" list and don't hand it back
// out (so, we leak the memory.) Then we can (again, in development builds only)
// check to see if we're about to store something in zeroed-out memory; if so, check
// to see if it was
/// Typed handle to a slot in the Pool. Only the u32 slot index is stored;
/// the PhantomData records the element type so reads/writes stay typed.
#[derive(Debug, Eq)]
pub struct NodeId<T> {
    pub(super) index: u32,
    pub(super) _phantom: PhantomData<T>,
}
impl<T> Clone for NodeId<T> {
fn clone(&self) -> Self {
NodeId {
index: self.index,
_phantom: PhantomData::default(),
}
}
}
impl<T> PartialEq for NodeId<T> {
    // Node ids compare by slot index only; the phantom type plays no part.
    fn eq(&self, other: &Self) -> bool {
        self.index == other.index
    }
}
impl<T> Copy for NodeId<T> {}
#[derive(Debug)]
pub struct Pool {
    // Base pointer of the mmap'd region, viewed as an array of 32-byte slots.
    pub(super) nodes: *mut [MaybeUninit<u8>; NODE_BYTES],
    // Number of slots handed out so far (bump allocation).
    num_nodes: u32,
    // Total slots available in the mapped region.
    capacity: u32,
    // free_1node_slots: Vec<NodeId<T>>,
}
impl Pool {
    /// Allocate a pool with room for at least `nodes` nodes, rounding the
    /// allocation up to a whole number of OS pages.
    pub fn with_capacity(nodes: u32) -> Self {
        // round up number of nodes requested to nearest page size in bytes
        let bytes_per_page = page_size::get();
        let node_bytes = NODE_BYTES * nodes as usize;
        let leftover = node_bytes % bytes_per_page;
        let bytes_to_mmap = if leftover == 0 {
            node_bytes
        } else {
            node_bytes + bytes_per_page - leftover
        };
        let nodes = unsafe {
            // mmap anonymous memory pages - that is, contiguous virtual memory
            // addresses from the OS which will be lazily translated into
            // physical memory one 4096-byte page at a time, once we actually
            // try to read or write in that page's address range.
            #[cfg(unix)]
            {
                use libc::{MAP_ANONYMOUS, MAP_PRIVATE, PROT_READ, PROT_WRITE};
                // NOTE(review): POSIX recommends fd = -1 with MAP_ANONYMOUS (fd 0
                // happens to work on Linux), and the MAP_FAILED return value is
                // never checked — confirm both are acceptable here.
                libc::mmap(
                    std::ptr::null_mut(),
                    bytes_to_mmap,
                    PROT_READ | PROT_WRITE,
                    MAP_PRIVATE | MAP_ANONYMOUS,
                    0,
                    0,
                )
            }
            #[cfg(windows)]
            {
                use winapi::um::memoryapi::VirtualAlloc;
                use winapi::um::winnt::PAGE_READWRITE;
                use winapi::um::winnt::{MEM_COMMIT, MEM_RESERVE};
                VirtualAlloc(
                    std::ptr::null_mut(),
                    bytes_to_mmap,
                    MEM_COMMIT | MEM_RESERVE,
                    PAGE_READWRITE,
                )
            }
        } as *mut [MaybeUninit<u8>; NODE_BYTES];
        // This is our actual capacity, in nodes.
        // It might be higher than the requested capacity due to rounding up
        // to nearest page size.
        let capacity = (bytes_to_mmap / NODE_BYTES) as u32;
        Pool {
            nodes,
            num_nodes: 0,
            capacity,
        }
    }
    /// Store `node` in the next free slot and return its typed id.
    pub fn add<T>(&mut self, node: T) -> NodeId<T> {
        // It's only safe to store this if T fits in S.
        debug_assert!(
            size_of::<T>() <= NODE_BYTES,
            "{} has a size of {}, but it needs to be at most {}",
            type_name::<T>(),
            size_of::<T>(),
            NODE_BYTES
        );
        let node_id = self.reserve(1);
        let node_ptr = self.get_ptr(node_id);
        // SAFETY: reserve() guarantees the slot is in-bounds and unoccupied.
        unsafe { node_ptr.write(MaybeUninit::new(node)) };
        node_id
    }
    /// Reserves the given number of contiguous node slots, and returns
    /// the NodeId of the first one. We only allow reserving 2^32 in a row.
    pub(super) fn reserve<T>(&mut self, nodes: u32) -> NodeId<T> {
        // TODO once we have a free list, look in there for an open slot first!
        let index = self.num_nodes;
        // NOTE(review): this only checks that the *first* slot is within
        // capacity; a multi-slot reservation can run past the end of the
        // mapped region — confirm callers never over-reserve.
        if index < self.capacity {
            self.num_nodes = index + nodes;
            NodeId {
                index,
                _phantom: PhantomData::default(),
            }
        } else {
            todo!("pool ran out of capacity. TODO reallocate the nodes pointer to map to a bigger space. Can use mremap on Linux, but must memcpy lots of bytes on macOS and Windows.");
        }
    }
    /// Borrow the node at `node_id`.
    // NOTE(review): the returned lifetime 'b is not tied to &'a self, so the
    // reference can outlive the borrow of the pool — confirm this is intended.
    pub fn get<'a, 'b, T>(&'a self, node_id: NodeId<T>) -> &'b T {
        unsafe {
            let node_ptr = self.get_ptr(node_id) as *const T;
            &*node_ptr
        }
    }
    pub fn get_mut<T>(&mut self, node_id: NodeId<T>) -> &mut T {
        unsafe {
            let node_ptr = self.get_ptr(node_id) as *mut T;
            &mut *node_ptr
        }
    }
    /// Overwrite the node at `node_id`. The previous value is not dropped.
    pub fn set<T>(&mut self, node_id: NodeId<T>, element: T) {
        unsafe {
            let node_ptr = self.get_ptr(node_id);
            node_ptr.write(MaybeUninit::new(element));
        }
    }
    // Compute the address of a node's slot, asserting T's alignment holds.
    fn get_ptr<T>(&self, node_id: NodeId<T>) -> *mut MaybeUninit<T> {
        let node_offset = unsafe { self.nodes.offset(node_id.index as isize) };
        // This checks if the node_offset is aligned to T
        assert!(0 == (node_offset as usize) & (align_of::<T>() - 1));
        node_offset as *mut MaybeUninit<T>
    }
    // A node is available iff its bytes are all zeroes
    #[allow(dead_code)]
    fn is_available<T>(&self, node_id: NodeId<T>) -> bool {
        debug_assert_eq!(size_of::<T>(), NODE_BYTES);
        unsafe {
            let node_ptr = self.nodes.offset(node_id.index as isize) as *const [u8; NODE_BYTES];
            *node_ptr == [0; NODE_BYTES]
        }
    }
}
impl<T> std::ops::Index<NodeId<T>> for Pool {
    type Output = T;
    // `pool[node_id]` sugar for Pool::get.
    fn index(&self, node_id: NodeId<T>) -> &Self::Output {
        self.get(node_id)
    }
}
impl<T> std::ops::IndexMut<NodeId<T>> for Pool {
    // `pool[node_id] = value` sugar for Pool::get_mut.
    fn index_mut(&mut self, node_id: NodeId<T>) -> &mut Self::Output {
        self.get_mut(node_id)
    }
}
impl Drop for Pool {
    // Return the entire mapped region to the OS. Individual nodes are never
    // dropped; the pool only holds data that doesn't need Drop.
    fn drop(&mut self) {
        unsafe {
            #[cfg(unix)]
            {
                libc::munmap(
                    self.nodes as *mut c_void,
                    NODE_BYTES * self.capacity as usize,
                );
            }
            #[cfg(windows)]
            {
                use winapi::um::memoryapi::VirtualFree;
                use winapi::um::winnt::MEM_RELEASE;
                VirtualFree(
                    self.nodes as *mut c_void,
                    NODE_BYTES * self.capacity as usize,
                    MEM_RELEASE,
                );
            }
        }
    }
}

View File

@ -1,86 +0,0 @@
use super::pool::{NodeId, Pool, NODE_BYTES};
use super::shallow_clone::ShallowClone;
use std::ffi::c_void;
use std::marker::PhantomData;
use std::mem::size_of;
/// A string containing at most 2^32 pool-allocated bytes.
#[derive(Debug, Copy, Clone)]
pub struct PoolStr {
    // First pool node holding the bytes; `len` is the byte length.
    first_node_id: NodeId<()>,
    len: u32,
}
#[test]
fn pool_str_size() {
    // PoolStr must stay 8 bytes (4B node id + 4B length).
    assert_eq!(size_of::<PoolStr>(), 8);
}
impl PoolStr {
    /// Copy `string`'s UTF-8 bytes into freshly reserved pool nodes.
    pub fn new(string: &str, pool: &mut Pool) -> Self {
        debug_assert!(string.len() <= u32::MAX as usize);
        // NOTE(review): this sizes the reservation by 8 `char`s (4B each) per
        // 32B node, which reserves ~4x more nodes than the raw byte count
        // needs — confirm whether the over-reservation is intentional.
        let chars_per_node = NODE_BYTES / size_of::<char>();
        let number_of_nodes = f64::ceil(string.len() as f64 / chars_per_node as f64) as u32;
        if number_of_nodes > 0 {
            let first_node_id = pool.reserve(number_of_nodes);
            let index = first_node_id.index as isize;
            let next_node_ptr = unsafe { pool.nodes.offset(index) } as *mut c_void;
            // SAFETY: the reserved slots hold at least `string.len()` bytes.
            unsafe {
                libc::memcpy(
                    next_node_ptr,
                    string.as_ptr() as *const c_void,
                    string.len(),
                );
            }
            PoolStr {
                first_node_id,
                len: string.len() as u32,
            }
        } else {
            // Empty string: node index 0 is a sentinel that is never
            // dereferenced, because len is 0.
            PoolStr {
                first_node_id: NodeId {
                    index: 0,
                    _phantom: PhantomData::default(),
                },
                len: 0,
            }
        }
    }
    /// View the pooled bytes as a &str borrowed from `pool`.
    pub fn as_str(&self, pool: &Pool) -> &str {
        // SAFETY: the bytes were copied from a valid &str in `new`, so they
        // are valid UTF-8 of length `self.len`.
        unsafe {
            let node_ptr = pool.nodes.offset(self.first_node_id.index as isize) as *const u8;
            let node_slice: &[u8] = std::slice::from_raw_parts(node_ptr, self.len as usize);
            std::str::from_utf8_unchecked(&node_slice[0..self.len as usize])
        }
    }
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self, pool: &Pool) -> usize {
        let contents = self.as_str(pool);
        contents.len()
    }
    pub fn is_empty(&self, pool: &Pool) -> bool {
        self.len(pool) == 0
    }
}
impl ShallowClone for PoolStr {
    fn shallow_clone(&self) -> Self {
        // Question: should this fully clone, or is a shallow copy
        // (and the aliasing it entails) OK?
        Self {
            first_node_id: self.first_node_id,
            len: self.len,
        }
    }
}

View File

@ -1,323 +0,0 @@
use super::pool::{NodeId, Pool, NODE_BYTES};
use super::shallow_clone::ShallowClone;
use std::any::type_name;
use std::cmp::Ordering;
use std::ffi::c_void;
use std::marker::PhantomData;
use std::mem::size_of;
/// An array of at most 2^32 pool-allocated nodes.
#[derive(Debug)]
pub struct PoolVec<T> {
    // Elements live contiguously in the pool starting at this node id.
    first_node_id: NodeId<T>,
    len: u32,
}
#[test]
fn pool_vec_size() {
    // PoolVec must stay 8 bytes (4B node id + 4B length).
    assert_eq!(size_of::<PoolVec<()>>(), 8);
}
impl<'a, T: 'a + Sized> PoolVec<T> {
    pub fn empty(pool: &mut Pool) -> Self {
        Self::new(std::iter::empty(), pool)
    }
    /// Reserve `len` contiguous slots WITHOUT initializing them; callers are
    /// expected to fill every slot (typically via `iter_node_ids`).
    pub fn with_capacity(len: u32, pool: &mut Pool) -> Self {
        debug_assert!(
            size_of::<T>() <= NODE_BYTES,
            "{} has a size of {}",
            type_name::<T>(),
            size_of::<T>()
        );
        if len == 0 {
            Self::empty(pool)
        } else {
            let first_node_id = pool.reserve(len);
            PoolVec { first_node_id, len }
        }
    }
    pub fn len(&self) -> usize {
        self.len as usize
    }
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// Move every element of `nodes` into freshly reserved pool slots.
    pub fn new<I: ExactSizeIterator<Item = T>>(nodes: I, pool: &mut Pool) -> Self {
        debug_assert!(nodes.len() <= u32::MAX as usize);
        debug_assert!(size_of::<T>() <= NODE_BYTES);
        let len = nodes.len() as u32;
        if len > 0 {
            let first_node_id = pool.reserve(len);
            let index = first_node_id.index as isize;
            let mut next_node_ptr = unsafe { pool.nodes.offset(index) } as *mut T;
            for (indx_inc, node) in nodes.enumerate() {
                // SAFETY: reserve() gave us `len` contiguous slots and the
                // ExactSizeIterator yields exactly `len` items.
                unsafe {
                    *next_node_ptr = node;
                    next_node_ptr = pool.nodes.offset(index + (indx_inc as isize) + 1) as *mut T;
                }
            }
            PoolVec { first_node_id, len }
        } else {
            // Empty vec: node index 0 is a sentinel that is never
            // dereferenced, because len is 0.
            PoolVec {
                first_node_id: NodeId {
                    index: 0,
                    _phantom: PhantomData::default(),
                },
                len: 0,
            }
        }
    }
    pub fn iter(&self, pool: &'a Pool) -> impl ExactSizeIterator<Item = &'a T> {
        self.pool_list_iter(pool)
    }
    pub fn iter_mut(&self, pool: &'a mut Pool) -> impl ExactSizeIterator<Item = &'a mut T> {
        self.pool_list_iter_mut(pool)
    }
    pub fn iter_node_ids(&self) -> impl ExactSizeIterator<Item = NodeId<T>> {
        self.pool_list_iter_node_ids()
    }
    /// Private version of into_iter which exposes the implementation detail
    /// of PoolVecIter. We don't want that struct to be public, but we
    /// actually do want to have this separate function for code reuse
    /// in the iterator's next() method.
    #[inline(always)]
    fn pool_list_iter(&self, pool: &'a Pool) -> PoolVecIter<'a, T> {
        PoolVecIter {
            pool,
            current_node_id: self.first_node_id,
            len_remaining: self.len,
        }
    }
    #[inline(always)]
    fn pool_list_iter_mut(&self, pool: &'a Pool) -> PoolVecIterMut<'a, T> {
        PoolVecIterMut {
            pool,
            current_node_id: self.first_node_id,
            len_remaining: self.len,
        }
    }
    #[inline(always)]
    fn pool_list_iter_node_ids(&self) -> PoolVecIterNodeIds<T> {
        PoolVecIterNodeIds {
            current_node_id: self.first_node_id,
            len_remaining: self.len,
        }
    }
    // NOTE(review): the `S` type parameter is unused — confirm it can be removed.
    pub fn free<S>(self, pool: &'a mut Pool) {
        // zero out the memory
        unsafe {
            let index = self.first_node_id.index as isize;
            let node_ptr = pool.nodes.offset(index) as *mut c_void;
            let bytes = self.len as usize * NODE_BYTES;
            libc::memset(node_ptr, 0, bytes);
        }
        // TODO insert it into the pool's free list
    }
}
impl<T> ShallowClone for PoolVec<T> {
    fn shallow_clone(&self) -> Self {
        // Question: should this fully clone, or is a shallow copy
        // (and the aliasing it entails) OK?
        Self {
            first_node_id: self.first_node_id,
            len: self.len,
        }
    }
}
// Borrowing iterator over a PoolVec's contiguous elements.
struct PoolVecIter<'a, T> {
    pool: &'a Pool,
    current_node_id: NodeId<T>,
    len_remaining: u32,
}
impl<'a, T> ExactSizeIterator for PoolVecIter<'a, T>
where
    T: 'a,
{
    fn len(&self) -> usize {
        self.len_remaining as usize
    }
}
impl<'a, T> Iterator for PoolVecIter<'a, T>
where
    T: 'a,
{
    type Item = &'a T;
    fn next(&mut self) -> Option<Self::Item> {
        let len_remaining = self.len_remaining;
        // Elements are contiguous, so advancing is just an index increment.
        match len_remaining.cmp(&1) {
            Ordering::Greater => {
                // Get the current node
                let index = self.current_node_id.index;
                let node_ptr = unsafe { self.pool.nodes.offset(index as isize) } as *const T;
                // Advance the node pointer to the next node in the current page
                self.current_node_id = NodeId {
                    index: index + 1,
                    _phantom: PhantomData::default(),
                };
                self.len_remaining = len_remaining - 1;
                Some(unsafe { &*node_ptr })
            }
            Ordering::Equal => {
                // Last element: yield it without advancing.
                self.len_remaining = 0;
                // Don't advance the node pointer's node, because that might
                // advance past the end of the page!
                let index = self.current_node_id.index;
                let node_ptr = unsafe { self.pool.nodes.offset(index as isize) } as *const T;
                Some(unsafe { &*node_ptr })
            }
            Ordering::Less => {
                // len_remaining was 0
                None
            }
        }
    }
}
// Mutable-borrowing iterator over a PoolVec's contiguous elements.
// NOTE(review): the field is `&'a Pool` (shared), yet `next()` hands out
// `&'a mut T` via a raw-pointer cast — confirm the aliasing story here.
struct PoolVecIterMut<'a, T> {
    pool: &'a Pool,
    current_node_id: NodeId<T>,
    len_remaining: u32,
}
impl<'a, T> ExactSizeIterator for PoolVecIterMut<'a, T>
where
    T: 'a,
{
    fn len(&self) -> usize {
        self.len_remaining as usize
    }
}
impl<'a, T> Iterator for PoolVecIterMut<'a, T>
where
    T: 'a,
{
    type Item = &'a mut T;
    fn next(&mut self) -> Option<Self::Item> {
        let len_remaining = self.len_remaining;
        match len_remaining.cmp(&1) {
            Ordering::Greater => {
                // Get the current node
                let index = self.current_node_id.index;
                let node_ptr = unsafe { self.pool.nodes.offset(index as isize) } as *mut T;
                // Advance the node pointer to the next node in the current page
                self.current_node_id = NodeId {
                    index: index + 1,
                    _phantom: PhantomData::default(),
                };
                self.len_remaining = len_remaining - 1;
                Some(unsafe { &mut *node_ptr })
            }
            Ordering::Equal => {
                // Last element: yield it without advancing.
                self.len_remaining = 0;
                // Don't advance the node pointer's node, because that might
                // advance past the end of the page!
                let index = self.current_node_id.index;
                let node_ptr = unsafe { self.pool.nodes.offset(index as isize) } as *mut T;
                Some(unsafe { &mut *node_ptr })
            }
            Ordering::Less => {
                // len_remaining was 0
                None
            }
        }
    }
}
// Iterator over a PoolVec's node ids only; needs no borrow of the Pool.
struct PoolVecIterNodeIds<T> {
    current_node_id: NodeId<T>,
    len_remaining: u32,
}
impl<T> ExactSizeIterator for PoolVecIterNodeIds<T> {
    fn len(&self) -> usize {
        self.len_remaining as usize
    }
}
impl<T> Iterator for PoolVecIterNodeIds<T> {
    type Item = NodeId<T>;
    fn next(&mut self) -> Option<Self::Item> {
        let len_remaining = self.len_remaining;
        match len_remaining.cmp(&1) {
            Ordering::Greater => {
                // Get the current node
                let current = self.current_node_id;
                let index = current.index;
                // Advance the node pointer to the next node in the current page
                self.current_node_id = NodeId {
                    index: index + 1,
                    _phantom: PhantomData::default(),
                };
                self.len_remaining = len_remaining - 1;
                Some(current)
            }
            Ordering::Equal => {
                // Last id: yield it without advancing.
                self.len_remaining = 0;
                // Don't advance the node pointer's node, because that might
                // advance past the end of the page!
                Some(self.current_node_id)
            }
            Ordering::Less => {
                // len_remaining was 0
                None
            }
        }
    }
}
#[test]
fn pool_vec_iter_test() {
    // Round-trip: elements written via PoolVec::new come back in order.
    let expected_vec: Vec<usize> = vec![2, 4, 8, 16];
    let mut test_pool = Pool::with_capacity(1024);
    let pool_vec = PoolVec::new(expected_vec.clone().into_iter(), &mut test_pool);
    let current_vec: Vec<usize> = pool_vec.iter(&test_pool).copied().collect();
    assert_eq!(current_vec, expected_vec);
}

View File

@ -1,35 +0,0 @@
use roc_can::expected::Expected;
use roc_can::expected::PExpected;
/// Clones the outer node, but does not clone any nodeids
pub trait ShallowClone {
    /// Produce a copy whose pool-allocated contents are shared with `self`.
    fn shallow_clone(&self) -> Self;
}
impl<T> ShallowClone for Expected<T>
where
T: ShallowClone,
{
fn shallow_clone(&self) -> Self {
use Expected::*;
match self {
NoExpectation(t) => NoExpectation(t.shallow_clone()),
ForReason(reason, t, region) => ForReason(reason.clone(), t.shallow_clone(), *region),
FromAnnotation(loc_pat, n, source, t) => {
FromAnnotation(loc_pat.clone(), *n, *source, t.shallow_clone())
}
}
}
}
impl<T: ShallowClone> ShallowClone for PExpected<T> {
fn shallow_clone(&self) -> Self {
use PExpected::*;
match self {
NoExpectation(t) => NoExpectation(t.shallow_clone()),
ForReason(reason, t, region) => ForReason(reason.clone(), t.shallow_clone(), *region),
}
}
}

View File

@ -1,38 +0,0 @@
use bumpalo::Bump;
use roc_load::{LoadedModule, Threading};
use roc_target::TargetInfo;
use std::path::Path;
/// Load and typecheck the Roc module at `src_file`, panicking on any failure.
///
/// Intended for tooling and tests: errors are rendered and surfaced via
/// `panic!` rather than returned to the caller.
pub fn load_module(src_file: &Path, threading: Threading) -> LoadedModule {
    let subs_by_module = Default::default();
    let arena = Bump::new();
    let loaded = roc_load::load_and_typecheck(
        &arena,
        src_file.to_path_buf(),
        // The file's parent directory is used as the source root.
        src_file.parent().unwrap_or_else(|| {
            panic!(
                "src_file {:?} did not have a parent directory but I need to have one.",
                src_file
            )
        }),
        subs_by_module,
        TargetInfo::default_x86_64(),
        roc_reporting::report::RenderTarget::ColorTerminal,
        threading,
    );
    match loaded {
        Ok(x) => x,
        // A formatted report means user-facing compile errors; show them verbatim.
        Err(roc_load::LoadingProblem::FormattedReport(report)) => {
            panic!(
                "Failed to load module from src_file {:?}. Report: {}",
                src_file, report
            );
        }
        Err(e) => panic!(
            "Failed to load module from src_file {:?}: {:?}",
            src_file, e
        ),
    }
}

View File

@ -1,2 +0,0 @@
pub mod parse_ast;
pub mod parse_header;

View File

@ -1,54 +0,0 @@
use bumpalo::Bump;
use roc_module::symbol::Interns;
use roc_region::all::Region;
use crate::{
ast_error::ASTResult,
lang::{
core::{
ast::AST,
def::{def2::DefId, def_to_def2::str_to_def2},
expr::expr2::Expr2,
},
env::Env,
scope::Scope,
},
};
use super::parse_header;
/// Parse a complete source string (header + defs) into an `AST`.
///
/// The code is split at the first blank line: everything before it is treated
/// as the module header, everything after as the defs.
///
/// Panics if the string contains no blank line.
pub fn parse_from_string<'a>(
    code_str: &'a str,
    env: &mut Env<'a>,
    ast_arena: &'a Bump,
    interns: &mut Interns,
) -> ASTResult<AST> {
    let blank_line_indx = code_str
        .find("\n\n")
        .expect("I was expecting two newline chars to split header and rest of code.");
    let header_str = &code_str[0..blank_line_indx];
    // Note: tail_str still begins with the two newline chars that were found.
    let tail_str = &code_str[blank_line_indx..];
    let mut scope = Scope::new(env.home, env.pool, env.var_store);
    scope.fill_scope(env, &mut interns.all_ident_ids)?;
    let region = Region::zero();
    let mut def_ids = Vec::<DefId>::new();
    let def2_vec = str_to_def2(ast_arena, tail_str, env, &mut scope, region)?;
    for def2 in def2_vec {
        let def_id = env.pool.add(def2);
        def_ids.push(def_id);
    }
    // The header parser is currently a mock; it only needs a blank node id.
    let ast_node_id = env.pool.add(Expr2::Blank);
    Ok(AST {
        header: parse_header::parse_from_string(header_str, ast_node_id),
        def_ids,
    })
}

View File

@ -1,12 +0,0 @@
use crate::lang::core::{expr::expr2::ExprId, header::AppHeader};
// TODO don't use mock struct and actually parse string
/// Returns a hard-coded mock `AppHeader`; `_header_str` is currently ignored.
pub fn parse_from_string(_header_str: &str, ast_node_id: ExprId) -> AppHeader {
    let app_name = String::from("\"untitled-app\"");
    let packages_base = String::from("\"c-platform\"");
    AppHeader {
        app_name,
        packages_base,
        imports: Vec::new(),
        provides: vec![String::from("main")],
        ast_node_id,
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,42 +0,0 @@
[package]
name = "roc-bindgen"
version = "0.1.0"
authors = ["The Roc Contributors"]
license = "UPL-1.0"
repository = "https://github.com/rtfeldman/roc"
edition = "2021"
description = "A CLI for roc-bindgen"
[[bin]]
name = "roc-bindgen"
path = "src/main.rs"
test = false
bench = false
[dependencies]
roc_std = { path = "../roc_std" }
roc_can = { path = "../compiler/can" }
roc_mono = { path = "../compiler/mono" }
roc_load = { path = "../compiler/load" }
roc_reporting = { path = "../reporting" }
roc_types = { path = "../compiler/types" }
roc_builtins = { path = "../compiler/builtins" }
roc_module = { path = "../compiler/module" }
roc_collections = { path = "../compiler/collections" }
roc_target = { path = "../compiler/roc_target" }
roc_error_macros = { path = "../error_macros" }
bumpalo = { version = "3.8.0", features = ["collections"] }
target-lexicon = "0.12.3"
clap = { version = "3.1.15", default-features = false, features = ["std", "color", "suggestions", "derive"] }
strum = "0.24.0"
strum_macros = "0.24"
indexmap = "1.8.1"
[dev-dependencies]
pretty_assertions = "1.0.0"
tempfile = "3.2.0"
indoc = "1.0.3"
cli_utils = { path = "../cli_utils" }
roc_test_utils = { path = "../test_utils" }
dircpy = "0.3.9"
ctor = "0.1.22"

View File

@ -1,495 +0,0 @@
use crate::structs::Structs;
use crate::types::{RocTagUnion, TypeId, Types};
use crate::{
enums::Enums,
types::{RocNum, RocType},
};
use bumpalo::Bump;
use roc_builtins::bitcode::{FloatWidth::*, IntWidth::*};
use roc_collections::VecMap;
use roc_module::ident::TagName;
use roc_module::symbol::{Interns, Symbol};
use roc_mono::layout::{
cmp_fields, ext_var_is_empty_tag_union, Builtin, Layout, LayoutCache, TagOrClosure,
};
use roc_types::subs::{Label, UnionLabels};
use roc_types::{
subs::{Content, FlatType, LambdaSet, Subs, Variable},
types::RecordField,
};
use std::fmt::Display;
/// Shared context threaded through the var-to-`Types` conversion.
pub struct Env<'a> {
    pub arena: &'a Bump,
    pub subs: &'a Subs,
    pub layout_cache: &'a mut LayoutCache<'a>,
    pub interns: &'a Interns,
    /// Name generator/registry for structs that need a synthesized name.
    pub struct_names: Structs,
    /// Name generator/registry for enums that need a synthesized name.
    pub enum_names: Enums,
    /// Recursive pointers registered while walking types; their target
    /// `TypeId` is still the `TypeId::PENDING` sentinel until resolved.
    pub pending_recursive_types: VecMap<TypeId, Variable>,
    /// Maps a recursion variable to the `TypeId` of its completed union type.
    pub known_recursive_types: VecMap<Variable, TypeId>,
}
impl<'a> Env<'a> {
    /// Convert each variable into a `RocType`, collecting them all into one
    /// `Types`, then patch up any recursive pointers discovered along the way.
    pub fn vars_to_types<I>(&mut self, variables: I) -> Types
    where
        I: IntoIterator<Item = Variable>,
    {
        let mut types = Types::default();
        for var in variables {
            self.add_type(var, &mut types);
        }
        self.resolve_pending_recursive_types(&mut types);
        types
    }
    /// Look up the variable's layout and delegate to `add_type_help`.
    /// Panics if the variable has no valid layout.
    fn add_type(&mut self, var: Variable, types: &mut Types) -> TypeId {
        let layout = self
            .layout_cache
            .from_var(self.arena, var, self.subs)
            .expect("Something weird ended up in the content");
        add_type_help(self, layout, var, None, types)
    }
    /// Replace every `RecursivePointer(TypeId::PENDING)` placeholder with the
    /// real `TypeId` recorded in `known_recursive_types` for its variable.
    /// Assumes every pending variable was eventually registered as known.
    fn resolve_pending_recursive_types(&mut self, types: &mut Types) {
        // TODO if VecMap gets a drain() method, use that instead of doing take() and into_iter
        let pending = core::mem::take(&mut self.pending_recursive_types);
        for (type_id, var) in pending.into_iter() {
            let actual_type_id = self.known_recursive_types.get(&var).unwrap_or_else(|| {
                unreachable!(
                    "There was no known recursive TypeId for the pending recursive variable {:?}",
                    var
                );
            });
            // Sanity check: the placeholder stored earlier must still be the
            // PENDING sentinel before we overwrite it.
            debug_assert!(
                matches!(types.get(type_id), RocType::RecursivePointer(TypeId::PENDING)),
                "The TypeId {:?} was registered as a pending recursive pointer, but was not stored in Types as one.",
                type_id
            );
            types.replace(type_id, RocType::RecursivePointer(*actual_type_id));
        }
    }
}
/// Translate the type behind `var` (whose layout is `layout`) into a
/// `RocType` registered in `types`, returning its `TypeId`.
///
/// `opt_name` is a symbol to use for naming the resulting type (e.g. from a
/// type alias); when `None`, a name is synthesized from the name registries.
/// Many content forms are still `todo!()` — this only handles records, tag
/// unions, lambda sets, builtin `Apply`s, aliases, and recursion vars.
fn add_type_help<'a>(
    env: &mut Env<'a>,
    layout: Layout<'a>,
    var: Variable,
    opt_name: Option<Symbol>,
    types: &mut Types,
) -> TypeId {
    let subs = env.subs;
    match subs.get_content_without_compacting(var) {
        Content::FlexVar(_)
        | Content::RigidVar(_)
        | Content::FlexAbleVar(_, _)
        | Content::RigidAbleVar(_, _) => {
            todo!("TODO give a nice error message for a non-concrete type being passed to the host")
        }
        Content::Structure(FlatType::Record(fields, ext)) => {
            // Required/Demanded fields become struct fields; Optional fields
            // are dropped (they are not present at runtime).
            let it = fields
                .unsorted_iterator(subs, *ext)
                .expect("something weird in content")
                .flat_map(|(label, field)| {
                    match field {
                        RecordField::Required(field_var) | RecordField::Demanded(field_var) => {
                            Some((label.to_string(), field_var))
                        }
                        RecordField::Optional(_) => {
                            // drop optional fields
                            None
                        }
                    }
                });
            let name = match opt_name {
                Some(sym) => sym.as_str(env.interns).to_string(),
                None => env.struct_names.get_name(var),
            };
            add_struct(env, name, it, types, |name, fields| RocType::Struct {
                name,
                fields,
            })
        }
        Content::LambdaSet(LambdaSet {
            solved,
            recursion_var: _,
            unspecialized,
        }) => {
            debug_assert!(
                unspecialized.is_empty(),
                "unspecialized lambda sets left over"
            );
            add_union(env, opt_name, solved, var, types)
        }
        Content::Structure(FlatType::TagUnion(tags, ext_var)) => {
            debug_assert!(ext_var_is_empty_tag_union(subs, *ext_var));
            add_union(env, opt_name, tags, var, types)
        }
        Content::Structure(FlatType::RecursiveTagUnion(_rec_var, tag_vars, ext_var)) => {
            debug_assert!(ext_var_is_empty_tag_union(subs, *ext_var));
            add_union(env, opt_name, tag_vars, var, types)
        }
        Content::Structure(FlatType::Apply(symbol, _)) => match layout {
            Layout::Builtin(builtin) => add_builtin_type(env, builtin, var, opt_name, types),
            _ => {
                if symbol.is_builtin() {
                    todo!(
                        "Handle Apply for builtin symbol {:?} and layout {:?}",
                        symbol,
                        layout
                    )
                } else {
                    todo!(
                        "Handle non-builtin Apply for symbol {:?} and layout {:?}",
                        symbol,
                        layout
                    )
                }
            }
        },
        Content::Structure(FlatType::Func(_, _, _)) => {
            todo!()
        }
        Content::Structure(FlatType::FunctionOrTagUnion(_, _, _)) => {
            todo!()
        }
        Content::Structure(FlatType::Erroneous(_)) => todo!(),
        Content::Structure(FlatType::EmptyRecord) => todo!(),
        Content::Structure(FlatType::EmptyTagUnion) => {
            // This can happen when unwrapping a tag union; don't do anything.
            todo!()
        }
        Content::Alias(name, _, real_var, _) => {
            if name.is_builtin() {
                match layout {
                    Layout::Builtin(builtin) => {
                        add_builtin_type(env, builtin, var, opt_name, types)
                    }
                    _ => {
                        unreachable!()
                    }
                }
            } else {
                // If this was a non-builtin type alias, we can use that alias name
                // in the generated bindings.
                add_type_help(env, layout, *real_var, Some(*name), types)
            }
        }
        Content::RangedNumber(_, _) => todo!(),
        Content::Error => todo!(),
        Content::RecursionVar { structure, .. } => {
            // Register a placeholder recursive pointer; it is patched to the
            // real TypeId later by `resolve_pending_recursive_types`.
            let type_id = types.add(RocType::RecursivePointer(TypeId::PENDING));
            env.pending_recursive_types.insert(type_id, *structure);
            type_id
        }
    }
}
fn add_builtin_type<'a>(
env: &mut Env<'a>,
builtin: Builtin<'a>,
var: Variable,
opt_name: Option<Symbol>,
types: &mut Types,
) -> TypeId {
match builtin {
Builtin::Int(width) => match width {
U8 => types.add(RocType::Num(RocNum::U8)),
U16 => types.add(RocType::Num(RocNum::U16)),
U32 => types.add(RocType::Num(RocNum::U32)),
U64 => types.add(RocType::Num(RocNum::U64)),
U128 => types.add(RocType::Num(RocNum::U128)),
I8 => types.add(RocType::Num(RocNum::I8)),
I16 => types.add(RocType::Num(RocNum::I16)),
I32 => types.add(RocType::Num(RocNum::I32)),
I64 => types.add(RocType::Num(RocNum::I64)),
I128 => types.add(RocType::Num(RocNum::I128)),
},
Builtin::Float(width) => match width {
F32 => types.add(RocType::Num(RocNum::F32)),
F64 => types.add(RocType::Num(RocNum::F64)),
F128 => types.add(RocType::Num(RocNum::F128)),
},
Builtin::Decimal => types.add(RocType::Num(RocNum::Dec)),
Builtin::Bool => types.add(RocType::Bool),
Builtin::Str => types.add(RocType::RocStr),
Builtin::Dict(key_layout, val_layout) => {
// TODO FIXME this `var` is wrong - should have a different `var` for key and for val
let key_id = add_type_help(env, *key_layout, var, opt_name, types);
let val_id = add_type_help(env, *val_layout, var, opt_name, types);
let dict_id = types.add(RocType::RocDict(key_id, val_id));
types.depends(dict_id, key_id);
types.depends(dict_id, val_id);
dict_id
}
Builtin::Set(elem_layout) => {
let elem_id = add_type_help(env, *elem_layout, var, opt_name, types);
let set_id = types.add(RocType::RocSet(elem_id));
types.depends(set_id, elem_id);
set_id
}
Builtin::List(elem_layout) => {
let elem_id = add_type_help(env, *elem_layout, var, opt_name, types);
let list_id = types.add(RocType::RocList(elem_id));
types.depends(list_id, elem_id);
list_id
}
}
}
/// Register a struct type: sort the labeled fields into their in-memory field
/// order (via `cmp_fields`), convert each field's type, then build the final
/// `RocType` with `to_type` and add it to `types`.
fn add_struct<I, L, F>(
    env: &mut Env<'_>,
    name: String,
    fields: I,
    types: &mut Types,
    to_type: F,
) -> TypeId
where
    I: IntoIterator<Item = (L, Variable)>,
    L: Display + Ord,
    F: FnOnce(String, Vec<(L, TypeId)>) -> RocType,
{
    let subs = env.subs;
    let field_iter = &mut fields.into_iter();
    let mut sortables =
        bumpalo::collections::Vec::with_capacity_in(field_iter.size_hint().0, env.arena);

    // Pair each field with its layout so we can sort by memory layout below.
    for (label, field_var) in field_iter {
        let field_layout = env
            .layout_cache
            .from_var(env.arena, field_var, subs)
            .unwrap();

        sortables.push((label, field_var, field_layout));
    }

    sortables.sort_by(|(label_a, _, layout_a), (label_b, _, layout_b)| {
        cmp_fields(
            label_a,
            layout_a,
            label_b,
            layout_b,
            env.layout_cache.target_info,
        )
    });

    // Now that order is fixed, resolve each field's TypeId.
    let mut resolved_fields = Vec::with_capacity(sortables.len());

    for (label, field_var, field_layout) in sortables {
        let field_type_id = add_type_help(env, field_layout, field_var, None, types);

        resolved_fields.push((label, field_type_id));
    }

    types.add(to_type(name, resolved_fields))
}
/// Register a `RocType` for a tag union (or lambda set) and return its id.
///
/// Tags are sorted alphabetically; payloads with 0 fields get `None`, a single
/// non-recursive payload is stored unwrapped, and everything else gets its own
/// payload struct. The final representation follows the union's memory layout
/// (enumeration, non-recursive, recursive, nullable-unwrapped, etc.).
/// Recursive unions are additionally recorded in `known_recursive_types`.
fn add_union<L>(
    env: &mut Env<'_>,
    opt_name: Option<Symbol>,
    union_tags: &UnionLabels<L>,
    var: Variable,
    types: &mut Types,
) -> TypeId
where
    L: Label + Into<TagOrClosure>,
{
    let subs = env.subs;
    let mut tags: Vec<(String, Vec<Variable>)> = union_tags
        .iter_from_subs(subs)
        .map(|(label, payload_vars)| {
            let name_str = match label.clone().into() {
                TagOrClosure::Tag(TagName(uppercase)) => uppercase.as_str().to_string(),
                TagOrClosure::Closure(_) => unreachable!(),
            };
            (name_str, payload_vars.to_vec())
        })
        .collect();
    let layout = env.layout_cache.from_var(env.arena, var, subs).unwrap();
    let name = match opt_name {
        Some(sym) => sym.as_str(env.interns).to_string(),
        None => env.enum_names.get_name(var),
    };
    // Sort tags alphabetically by tag name
    tags.sort_by(|(name1, _), (name2, _)| name1.cmp(name2));
    let is_recursive = is_recursive_tag_union(&layout);
    // Rebind `tags`: pair each tag name with an optional payload TypeId.
    let mut tags: Vec<_> = tags
        .into_iter()
        .map(|(tag_name, payload_vars)| {
            match struct_fields_needed(env, payload_vars.iter().copied()) {
                0 => {
                    // no payload
                    (tag_name, None)
                }
                1 if !is_recursive => {
                    // this isn't recursive and there's 1 payload item, so it doesn't
                    // need its own struct - e.g. for `[Foo Str, Bar Str]` both of them
                    // can have payloads of plain old Str, no struct wrapper needed.
                    let payload_var = payload_vars.get(0).unwrap();
                    let layout = env
                        .layout_cache
                        .from_var(env.arena, *payload_var, env.subs)
                        .expect("Something weird ended up in the content");
                    let payload_id = add_type_help(env, layout, *payload_var, None, types);
                    (tag_name, Some(payload_id))
                }
                _ => {
                    // create a RocType for the payload and save it
                    let struct_name = format!("{}_{}", name, tag_name); // e.g. "MyUnion_MyVariant"
                    let fields = payload_vars.iter().copied().enumerate();
                    let struct_id = add_struct(env, struct_name, fields, types, |name, fields| {
                        RocType::TagUnionPayload { name, fields }
                    });
                    (tag_name, Some(struct_id))
                }
            }
        })
        .collect();
    let typ = match layout {
        Layout::Union(union_layout) => {
            use roc_mono::layout::UnionLayout::*;
            match union_layout {
                // A non-recursive tag union
                // e.g. `Result ok err : [Ok ok, Err err]`
                NonRecursive(_) => RocType::TagUnion(RocTagUnion::NonRecursive { name, tags }),
                // A recursive tag union (general case)
                // e.g. `Expr : [Sym Str, Add Expr Expr]`
                Recursive(_) => RocType::TagUnion(RocTagUnion::Recursive { name, tags }),
                // A recursive tag union with just one constructor
                // Optimization: No need to store a tag ID (the payload is "unwrapped")
                // e.g. `RoseTree a : [Tree a (List (RoseTree a))]`
                NonNullableUnwrapped(_) => {
                    todo!()
                }
                // A recursive tag union that has an empty variant
                // Optimization: Represent the empty variant as null pointer => no memory usage & fast comparison
                // It has more than one other variant, so they need tag IDs (payloads are "wrapped")
                // e.g. `FingerTree a : [Empty, Single a, More (Some a) (FingerTree (Tuple a)) (Some a)]`
                // see also: https://youtu.be/ip92VMpf_-A?t=164
                NullableWrapped { .. } => {
                    todo!()
                }
                // A recursive tag union with only two variants, where one is empty.
                // Optimizations: Use null for the empty variant AND don't store a tag ID for the other variant.
                // e.g. `ConsList a : [Nil, Cons a (ConsList a)]`
                NullableUnwrapped {
                    nullable_id: null_represents_first_tag,
                    other_fields: _, // TODO use this!
                } => {
                    // NullableUnwrapped tag unions should always have exactly 2 tags.
                    debug_assert_eq!(tags.len(), 2);
                    let null_tag;
                    let non_null;
                    // NOTE(review): the variable name says the null tag is
                    // FIRST when true, but the comment below says it is
                    // second — verify which is correct against UnionLayout's
                    // `nullable_id` semantics.
                    if null_represents_first_tag {
                        // If nullable_id is true, then the null tag is second, which means
                        // pop() will return it because it's at the end of the vec.
                        null_tag = tags.pop().unwrap().0;
                        non_null = tags.pop().unwrap();
                    } else {
                        // The null tag is first, which means the tag with the payload is second.
                        non_null = tags.pop().unwrap();
                        null_tag = tags.pop().unwrap().0;
                    }
                    let (non_null_tag, non_null_payload) = non_null;
                    RocType::TagUnion(RocTagUnion::NullableUnwrapped {
                        name,
                        null_tag,
                        non_null_tag,
                        non_null_payload: non_null_payload.unwrap(),
                        null_represents_first_tag,
                    })
                }
            }
        }
        // An integer layout means no tag has a payload: a plain enumeration.
        Layout::Builtin(Builtin::Int(_)) => RocType::TagUnion(RocTagUnion::Enumeration {
            name,
            tags: tags.into_iter().map(|(tag_name, _)| tag_name).collect(),
        }),
        Layout::Builtin(_)
        | Layout::Struct { .. }
        | Layout::Boxed(_)
        | Layout::LambdaSet(_)
        | Layout::RecursivePointer => {
            // These must be single-tag unions. Bindgen ordinary nonrecursive
            // tag unions for them, and let Rust do the unwrapping.
            //
            // This should be a very rare use case, and it's not worth overcomplicating
            // the rest of bindgen to make it do something different.
            RocType::TagUnion(RocTagUnion::NonRecursive { name, tags })
        }
    };
    let type_id = types.add(typ);
    if is_recursive {
        // Record the finished id so pending RecursivePointers can be patched.
        env.known_recursive_types.insert(var, type_id);
    }
    type_id
}
/// True iff this layout is a tag union with a recursive representation,
/// i.e. any union layout other than `NonRecursive`.
fn is_recursive_tag_union(layout: &Layout) -> bool {
    use roc_mono::layout::UnionLayout::*;
    matches!(
        layout,
        Layout::Union(
            Recursive(_)
                | NonNullableUnwrapped(_)
                | NullableWrapped { .. }
                | NullableUnwrapped { .. }
        )
    )
}
/// Count how many of the given payload variables would actually occupy a
/// field in a payload struct — layouts that are dropped because they are
/// empty contribute nothing.
fn struct_fields_needed<I: IntoIterator<Item = Variable>>(env: &mut Env<'_>, vars: I) -> usize {
    let subs = env.subs;
    let arena = env.arena;
    vars.into_iter()
        .filter(|var| {
            let layout = env.layout_cache.from_var(arena, *var, subs).unwrap();
            !layout.is_dropped_because_empty()
        })
        .count()
}

View File

@ -1,40 +0,0 @@
use std::io;
static TEMPLATE: &[u8] = include_bytes!("../templates/template.c");
/// Copy the bundled C template verbatim into `writer`.
pub fn write_template(writer: &mut impl io::Write) -> io::Result<()> {
    // `write_all` already returns `io::Result<()>`, so forward it directly.
    writer.write_all(TEMPLATE)
}
// pub fn write_bindings(_writer: &mut impl io::Write) -> io::Result<()> {
// extern struct RocStr roc__mainForHost_1_exposed();
// int main() {
// struct RocStr str = roc__mainForHost_1_exposed();
// // Determine str_len and the str_bytes pointer,
// // taking into account the small string optimization.
// size_t str_len = roc_str_len(str);
// char* str_bytes;
// if (is_small_str(str)) {
// str_bytes = (char*)&str;
// } else {
// str_bytes = str.bytes;
// }
// // Write to stdout
// if (write(1, str_bytes, str_len) >= 0) {
// // Writing succeeded!
// return 0;
// } else {
// printf("Error writing to stdout: %s\n", strerror(errno));
// return 1;
// }
// }
// Ok(())
// }

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More