Merge branch 'kylegoetz-udp' of https://github.com/unisonweb/unison into kylegoetz-udp

This commit is contained in:
Kyle Goetz 2024-04-02 10:51:08 -05:00
commit 178fa7d96e
156 changed files with 5885 additions and 3830 deletions

261
.github/workflows/bundle-ucm.yaml vendored Normal file
View File

@ -0,0 +1,261 @@
name: bundle ucm

# build optimized ucm
# package racket lib
# build/dist unison-runtime

on:
  workflow_call:
    inputs:
      ref:
        description: Git ref to check out for this build, e.g. `trunk` or `release/0.5.19`
        type: string
        required: true

env:
  racket_version: "8.7"

defaults:
  run:
    shell: bash

jobs:
  # Build an optimized ucm binary on each platform and upload it as an artifact.
  build-ucm:
    name: build ucm
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-20.04, macos-12, windows-2019]
    runs-on: ${{matrix.os}}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{inputs.ref}}
      - name: restore stack caches
        uses: unisonweb/actions/stack/cache/restore@main
        with:
          cache-prefix: release
      - name: install stack
        uses: unisonweb/actions/stack/install@main
      - name: build
        run: |
          # unison-cli-main embeds version numbers using TH
          # so it needs to be forced to rebuild to ensure those are updated.
          stack clean unison-cli-main
          mkdir ucm-bin
          # Windows will crash on build intermittently because the filesystem
          # sucks at managing concurrent file access;
          # Just keep retrying on these failures.
          tries=5
          for (( i = 0; i < $tries; i++ )); do
            stack build :unison \
              --flag unison-parser-typechecker:optimized \
              --local-bin-path ucm-bin \
              --copy-bins \
            && break;
          done
          # `where` on Windows / `which` elsewhere locate the built binary.
          if [[ ${{runner.os}} = 'Windows' ]]; then
            ucm=$(stack exec where unison)
          else
            ucm=$(stack exec which unison)
          fi
          echo ucm="$ucm" >> $GITHUB_ENV
          ls -l $ucm
      - name: save stack caches
        uses: unisonweb/actions/stack/cache/save@main
        with:
          cache-prefix: release
      - name: upload ucm
        uses: actions/upload-artifact@v4
        with:
          name: unison-${{matrix.os}}
          path: ${{ env.ucm }}
          if-no-files-found: error

  # Generate the racket sources with ucm and package them as a raco archive.
  # Platform-independent, so it only runs on one OS.
  package-racket-lib:
    strategy:
      matrix:
        os: [ubuntu-20.04]
    needs: build-ucm
    name: package racket lib
    runs-on: ${{matrix.os}}
    steps:
      - name: set up environment
        run: echo "ucm=${{ runner.temp }}/unison" >> $GITHUB_ENV
      - name: download racket `unison` source
        uses: actions/checkout@v4
        with:
          ref: ${{inputs.ref}}
      - name: download ucm artifact
        uses: actions/download-artifact@v4
        with:
          name: unison-${{matrix.os}}
          path: ${{ runner.temp }}
      - name: generate source
        run: |
          # artifacts lose their permission bits, so restore the executable bit
          chmod +x ${{ env.ucm }}
          ${{ env.ucm }} transcript unison-src/transcripts-manual/gen-racket-libs.md
      - uses: Bogdanp/setup-racket@v1.11
        with:
          architecture: "x64"
          distribution: "full"
          variant: "CS"
          version: ${{env.racket_version}}
      - name: create racket lib
        run: |
          raco pkg create scheme-libs/racket/unison
          ls -l scheme-libs/racket/unison.zip{,.CHECKSUM}
      - name: upload racket lib
        uses: actions/upload-artifact@v4
        with:
          name: racket-lib
          path: |
            scheme-libs/racket/unison.zip
            scheme-libs/racket/unison.zip.CHECKSUM
          if-no-files-found: error

  # Compile the racket lib into a distributable unison-runtime on each platform.
  build-dist-unison-runtime:
    needs: package-racket-lib
    name: build unison-runtime
    strategy:
      fail-fast: false
      matrix:
        os:
          - ubuntu-20.04
          - macos-12
          - windows-2019
    runs-on: ${{matrix.os}}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{inputs.ref}}
      - name: download racket lib
        uses: actions/download-artifact@v4
        with:
          name: racket-lib
          path: scheme-libs/racket/
      - name: Cache Racket dependencies
        id: cache-racket-deps
        uses: actions/cache@v4
        with:
          path: |
            ~/.cache/racket
            ~/.local/share/racket
            ~/Library/Racket/${{env.racket_version}}
          # This isn't right because unison.zip is going to include different dates each time.
          # Maybe we can unpack it and hash the contents.
          key: ${{ runner.os }}-racket-${{env.racket_version}}-${{hashFiles('scheme-libs/racket/unison.zip')}}
      - uses: Bogdanp/setup-racket@v1.11
        with:
          architecture: "x64"
          distribution: "full"
          variant: "CS"
          version: ${{env.racket_version}}
      - uses: awalsh128/cache-apt-pkgs-action@latest
        if: runner.os == 'Linux'
        with:
          packages: libb2-dev
          version: "1.0" # cache key version afaik
      - name: install unison racket lib
        if: steps.cache-racket-deps.outputs.cache-hit != 'true'
        run: raco pkg install --auto scheme-libs/racket/unison.zip
      - name: build unison-runtime
        run: |
          raco exe --embed-dlls --orig-exe scheme-libs/racket/unison-runtime.rkt
          mkdir runtime
          # `raco exe` appends .exe on Windows; account for that when distributing.
          if [[ ${{runner.os}} = 'Windows' ]]; then exe=".exe"; else exe=""; fi
          raco distribute runtime scheme-libs/racket/unison-runtime$exe
          ls -l runtime/
      - name: upload unison-runtime
        uses: actions/upload-artifact@v4
        with:
          name: unison-runtime-${{matrix.os}}
          path: runtime/
          if-no-files-found: error

  # Combine ucm, the racket lib, unison-runtime, and the local UI into one archive.
  bundle:
    name: bundle ucm, jit, and ui
    needs: [build-ucm, package-racket-lib, build-dist-unison-runtime]
    runs-on: ${{matrix.os}}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-20.04, macos-12, windows-2019]
    steps:
      - name: set up environment
        run: |
          # normalize Windows backslashes so later bash steps can use the paths
          staging_dir="${RUNNER_TEMP//\\//}/ucm-staging"
          artifact_os="$(echo $RUNNER_OS | tr '[:upper:]' '[:lower:]')"
          echo "staging_dir=$staging_dir" >> $GITHUB_ENV
          echo "artifact_os=$artifact_os" >> $GITHUB_ENV
      - name: download ucm
        uses: actions/download-artifact@v4
        with:
          name: unison-${{matrix.os}}
          path: ${{env.staging_dir}}/unison/
      - name: restore permissions on ucm
        run: chmod +x ${{env.staging_dir}}/unison/unison
      - name: download racket lib
        uses: actions/download-artifact@v4
        with:
          name: racket-lib
          path: ${{env.staging_dir}}/racket/
      - name: download unison-runtime
        uses: actions/download-artifact@v4
        with:
          name: unison-runtime-${{matrix.os}}
          path: ${{env.staging_dir}}/runtime
      - name: restore permissions on unison-runtime
        # here we have the `if:` not because of the missing .exe on Windows,
        # nor the lack of need to chmod, but because /runtime/bin/ probably doesn't exist
        # due to differences in `raco distribute` on Windows vs macOS and Linux.
        if: runner.os != 'Windows'
        run: chmod +x ${{env.staging_dir}}/runtime/bin/unison-runtime
      - name: download latest unison-local-ui
        # --fail makes curl exit non-zero on an HTTP error instead of saving
        # the error page as unisonLocal.zip and failing confusingly at unzip.
        run: |
          curl -L --fail -o /tmp/unisonLocal.zip \
            https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip
          unzip -d ${{env.staging_dir}}/ui /tmp/unisonLocal.zip
      - name: create startup script (non-Windows)
        if: runner.os != 'Windows'
        uses: 1arp/create-a-file-action@0.4.4
        with:
          path: ${{env.staging_dir}}
          file: ucm
          content: |
            #!/bin/bash
            $(dirname "$0")/unison/unison --runtime-path $(dirname "$0")/runtime/bin/unison-runtime "$@"
      - name: create startup script (Windows)
        if: runner.os == 'Windows'
        uses: 1arp/create-a-file-action@0.4.4
        with:
          path: ${{env.staging_dir}}
          file: ucm.cmd
          content: |
            @echo off
            "%~dp0unison\unison.exe" --runtime-path "%~dp0runtime\unison-runtime.exe" %*
      - name: package everything together
        run: |
          if [[ ${{runner.os}} = 'Windows' ]]; then
            artifact_archive=ucm-${{env.artifact_os}}.zip
            7z a -r -tzip ${artifact_archive} ${{env.staging_dir}}/*
          else
            chmod +x ${{env.staging_dir}}/ucm
            artifact_archive=ucm-${{env.artifact_os}}.tar.gz
            tar -c -z -f ${artifact_archive} -C ${{env.staging_dir}} .
          fi
          echo "artifact_archive=${artifact_archive}" >> $GITHUB_ENV
      - name: upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: bundle-${{env.artifact_os}}
          path: ${{env.artifact_archive}}
          if-no-files-found: error

103
.github/workflows/ci.md vendored Normal file
View File

@ -0,0 +1,103 @@
The new CI workflow builds `ucm`, generates racket source, and generates `unison-runtime` (aka `ucr`), saving them all as build artifacts.
At a high level, the CI process is:
1. On all platforms, build `unisonweb/unison` Haskell program and run tests; save the resulting binaries as build artifacts
2. On Ubuntu, generate and save the Racket sources as a build artifact
3. On all platforms, build the `unison-runtime` Racket program and save the resulting binaries as build artifacts.
### `env` vars at the top of `CI.yaml`:
Some version numbers that are used during CI:
- `ormolu_version: "0.5.0.1"`
- `racket_version: "8.7"`
- `jit_version: "@unison/internal/releases/0.0.11"`
Some cached directories:
- `ucm_local_bin` a temp path for caching a built `ucm`
- `jit_src_scheme` a temp path for caching generated jit sources
- `unison-jit-dist`
- `base-codebase` a codebase path for caching a codebase generated by `unison-src/builtin-tests/base.md`
- `unison_src_test_results` a temp path for caching the result of passing tests that depend on `unison-src/`, which includes:
- `round-trip-tests`
- `transcripts`
- `unison-src/builtin-tests/interpreter-tests.md`
`jit_generator_os: ubuntu-20.04`
- afaik, the jit sources are generated in a platform-independent way, so we just choose one platform to generate them on.
### Cached directories:
One reason for this change is to reduce the CI time for commits that only change docs, or yaml or other uninteresting things.
#### `.stack`
Caches build dependencies needed by unison packages.
- The **cache key** includes the os, the stackage resolver, `stack.yaml`, and any `package.yaml`.
This currently will re-save on success or failure, but only on a cache miss (source changed). If we find we want to re-save even on a cache key miss (e.g. due to `stack` weirdness), we can change the condition.
#### `.stack-work`
Caches build outputs for unison packages themselves.
- The **cache key** includes the os, the stackage resolver, `stack.yaml`, and any `package.yaml`.
This currently will re-save on success or failure, but only on a cache miss (source changed). If we find we want to re-save even on a cache key miss (e.g. due to `stack` weirdness), we can change the condition.
#### `ucm_local_bin`
A built `ucm` is cached in `ucm_local_bin` after a successful build and Haskell tests pass.
- The **cache key** includes the os, `stack.yaml`, any `package.yaml`, and any `.hs` file.
- On an exact cache hit, these steps are skipped, otherwise they are run:
- restore `.stack`
- restore `.stack-work`
- install `stack`
- build `ucm` dependencies
- build `ucm`
- `unison-cli` tests
- `unison-core` tests
- `unison-parser-typechecker` tests
- `unison-sqlite` tests
- `unison-syntax` tests
- `unison-util-bytes` tests
- `unison-util-cache` tests
- `unison-util-relation` tests
- `cli-integration-tests`
- verification of `stack ghci` startup
- `interpreter-tests.md`
#### `unison_src_test_results`
A bit is cached in `unison_src_test_results` after non-Haskell tests in the `unison` repo pass.
- The **cache key** includes os, `stack.yaml`, any `package.yaml`, any `.hs` file, and any file in `unison-src/`
- On an exact cache hit, these steps are skipped, otherwise they are run:
- `round-trip-tests`
- `transcripts`
- `unison-src/builtin-tests/interpreter-tests.md`
- If all steps succeed, the `unison_src_test_results` bit is saved.
#### `base-codebase`
This stores the result of `base.md`, which can be reused later to save the cost of a `pull`.
No steps are skipped on a cache hit; however, a second `pull` will mostly be a no-op.
#### `jit_src_scheme`
JIT sources are cached in `jit_src_scheme` if the `generate-jit-source` job completes.
- The **cache key** includes the version of Racket, and the release version of `@unison/internal`.
- If the cache contains `{data-info, boot-generated, simple-wrappers, builtin-generated, compound-wrappers}.ss`, then these steps are skipped, otherwise they are run:
- "create transcript" to pull `@unison/internal` and run `generateSchemeBoot`.
- download `ucm artifact` saved in the previous step
- set `ucm` permissions
- checkout `unison` repo, which includes some static scheme and racket files.
- run the previously generated transcript
- If all steps succeed, the `jit_src_scheme` cache is saved.
#### `jit_dist`
JIT binaries are cached in `jit_dist` if the `build-jit-binary` job completes.
- The **cache key** includes the version of Racket, and the release version of `@unison/internal`.
- On an exact cache hit, these steps are skipped, otherwise they are run:
- Restore Racket dependencies
- setup Racket
- restore apt cache (Linux only)
- download jit source from previous job
- use `raco` to build jit binary
- download `ucm` artifact from previous job
- set `ucm` permissions
- restore `base` codebase saved in previous job
- jit integration test
- If all of these steps succeed, the `jit_dist` cache is saved.

View File

@ -17,17 +17,24 @@ on:
- release/*
workflow_dispatch:
env:
ormolu_version: "0.5.2.0"
racket_version: "8.7"
ucm_local_bin: "ucm-local-bin"
jit_version: "@unison/internal/releases/0.0.13"
jit_src_scheme: "unison-jit-src/scheme-libs/racket"
jit_dist: "unison-jit-dist"
jit_generator_os: ubuntu-20.04
base-codebase: "~/.cache/unisonlanguage/base.unison"
# refers to all tests that depend on **/unison-src/**
unison_src_test_results: "unison-src-test-results"
jobs:
ormolu:
runs-on: ubuntu-20.04
# Only run formatting on trunk commits
# This is because the job won't have permission to push back to
# contributor forks on contributor PRs.
if: github.ref_name == 'trunk'
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v41
@ -37,26 +44,25 @@ jobs:
**/*.hs
**/*.hs-boot
separator: "\n"
- uses: haskell-actions/run-ormolu@v14
- uses: haskell-actions/run-ormolu@v15
with:
version: "0.5.0.1"
version: ${{ env.ormolu_version }}
mode: inplace
pattern: ${{ steps.changed-files.outputs.all_changed_files }}
- name: apply formatting changes
uses: stefanzweifel/git-auto-commit-action@v4
if: ${{ always() }}
uses: stefanzweifel/git-auto-commit-action@v5
# Only try to commit formatting changes if we're running within the repo containing the PR,
# and not on a protected branch.
# The job doesn't have permission to push back to contributor forks on contributor PRs.
if: |
always()
&& !github.ref_protected
&& github.event.pull_request.base.repo.full_name == github.event.pull_request.head.repo.full_name
with:
commit_message: automatically run ormolu
build:
name: ${{ matrix.os }}
build-ucm:
name: Build UCM ${{ matrix.os }}
runs-on: ${{ matrix.os }}
# The 'always()' causes this to build even if the ormolu job is skipped.
if: ${{ always() }}
needs: ormolu
defaults:
run:
shell: bash
strategy:
# Run each build to completion, regardless of if any have failed
fail-fast: false
@ -66,128 +72,61 @@ jobs:
- ubuntu-20.04
- macOS-12
- windows-2019
# - windows-2022
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
# The number towards the beginning of the cache keys allow you to manually avoid using a previous cache.
# GitHub will automatically delete caches that haven't been accessed in 7 days, but there is no way to
# purge one manually.
- id: stackage-resolver
name: record stackage resolver
# https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files
# looks for `resolver: nightly-yyyy-mm-dd` or `resolver: lts-xx.yy` in `stack.yaml` and splits it into
# `nightly` or `lts-xx`. the whole resolver string is put into resolver_long as a backup cache key
# ${{ steps.stackage-resolver.outputs.resolver_short }}
# ${{ steps.stackage-resolver.outputs.resolver_long }}
- name: tweak environment
run: |
grep resolver stack.yaml | awk '{ x="resolver_short="; if (split($2,a,"-") > 2) print x a[1]; else {split($2,b,"."); print x b[1]}}' >> "$GITHUB_OUTPUT"
grep resolver stack.yaml | awk '{print "resolver_long="$2}' >> "$GITHUB_OUTPUT"
# Cache ~/.stack, keyed by the contents of 'stack.yaml'.
- uses: actions/cache@v3
name: cache ~/.stack (unix)
if: runner.os != 'Windows'
ucm_local_bin="${RUNNER_TEMP//\\//}/${ucm_local_bin}"
unison_src_test_results="${RUNNER_TEMP//\\//}/${unison_src_test_results}"
echo "ucm_local_bin=$ucm_local_bin" >> $GITHUB_ENV
if [[ ${{runner.os}} = "Windows" ]]; then
echo "ucm=$ucm_local_bin/unison.exe" >> $GITHUB_ENV
echo "transcripts=$ucm_local_bin/transcripts.exe" >> $GITHUB_ENV
else
echo "ucm=$ucm_local_bin/unison" >> $GITHUB_ENV
echo "transcripts=$ucm_local_bin/transcripts" >> $GITHUB_ENV
fi
- name: cache ucm binaries
id: cache-ucm-binaries
uses: actions/cache@v4
with:
path: ~/.stack
key: stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-${{github.sha}}
# Fall-back to use the most recent cache for the stack.yaml, or failing that the OS
restore-keys: |
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}.
stack-1_${{matrix.os}}-
path: ${{env.ucm_local_bin}}
key: ucm-${{ matrix.os }}-${{ hashFiles('**/stack.yaml', '**/package.yaml', '**/*.hs')}}
# Cache ~/.stack, keyed by the contents of 'stack.yaml'.
- uses: actions/cache@v3
name: cache ~/.stack (Windows)
if: runner.os == 'Windows'
- name: cache unison-src test results
id: cache-unison-src-test-results
uses: actions/cache@v4
with:
path: |
C:\Users\runneradmin\AppData\Roaming\stack
C:\Users\runneradmin\AppData\Local\Programs\stack
key: stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-${{github.sha}}
# Fall-back to use the most recent cache for the stack.yaml, or failing that the OS
restore-keys: |
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}.
stack-1_${{matrix.os}}-
path: ${{env.unison_src_test_results}}
key: unison-src-test-results-${{ matrix.os }}-${{ hashFiles('**/ci.yaml', '**/stack.yaml', '**/package.yaml', '**/*.hs')}}-${{ hashFiles('**/unison-src/**') }}
# Cache each local package's ~/.stack-work for fast incremental builds in CI.
- uses: actions/cache@v3
name: cache .stack-work
- name: restore stack caches
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
id: restore-stack-caches
uses: unisonweb/actions/stack/cache/restore@main
with:
path: |
**/.stack-work
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-${{hashFiles('**/stack.yaml')}}-${{github.sha}}
restore-keys: |
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}.
stack-work-4_${{matrix.os}}-
cache-prefix: ci
# Install stack by downloading the binary from GitHub.
# The installation process differs by OS.
- name: install stack (Linux)
if: runner.os == 'Linux'
working-directory: ${{ runner.temp }}
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: install stack (macOS)
working-directory: ${{ runner.temp }}
if: runner.os == 'macOS'
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-osx-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: install stack (windows)
working-directory: ${{ runner.temp }}
if: runner.os == 'Windows'
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-windows-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: install stack
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
uses: unisonweb/actions/stack/install@main
# One of the transcripts fails if the user's git name hasn't been set.
## (Which transcript? -AI)
- name: set git user info
run: |
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
- name: remove ~/.stack/setup-exe-cache on macOS
if: runner.os == 'macOS'
run: rm -rf ~/.stack/setup-exe-cache
- name: install stack-clean-old (to scan or clean up old stackage caches)
run: |
if ! stack exec -- which stack-clean-old; then
stack install stack-clean-old
fi
- name: check initial stackage cache size
run: |
echo global .stack
stack exec -- stack-clean-old list -G || true
echo project .stack-work
stack exec -- stack-clean-old list -P || true
# Build deps, then build local code. Splitting it into two steps just allows us to see how much time each step
# takes.
- name: build dependencies
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
# Run up to 5 times in a row before giving up.
# It's very unlikely that our build-dependencies step will fail on most builds,
# so if it fails its almost certainly due to a race condition on the Windows
@ -202,122 +141,326 @@ jobs:
fi
for (( i = 0; i < $tries; i++ )); do
stack --no-terminal build --fast --only-dependencies && break;
stack build --fast --only-dependencies --test --bench && break;
done
- name: build
run: stack --no-terminal build --fast --no-run-tests --test
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: |
stack build \
--fast \
--test \
--no-run-tests \
--local-bin-path ${{env.ucm_local_bin}} \
--copy-bins
# Run each test suite (tests and transcripts)
- name: check disk space before
if: ${{ always() }}
run: df -h
- name: unison-cli test
run: stack --no-terminal build --fast --test unison-cli
- name: check disk space after
if: ${{ always() }}
run: df -h
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: stack build --fast --test unison-cli
- name: unison-core tests
run: stack --no-terminal build --fast --test unison-core
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: stack build --fast --test unison-core
- name: unison-parser-typechecker tests
run: stack --no-terminal build --fast --test unison-parser-typechecker
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: stack build --fast --test unison-parser-typechecker
- name: unison-sqlite tests
run: stack --no-terminal build --fast --test unison-sqlite
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: stack build --fast --test unison-sqlite
- name: unison-syntax tests
run: stack --no-terminal build --fast --test unison-syntax
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: stack build --fast --test unison-syntax
- name: unison-util-bytes tests
run: stack --no-terminal build --fast --test unison-util-bytes
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: stack build --fast --test unison-util-bytes
- name: unison-util-cache tests
run: stack --no-terminal build --fast --test unison-util-cache
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: stack build --fast --test unison-util-cache
- name: unison-util-relation tests
run: stack --no-terminal build --fast --test unison-util-relation
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: stack build --fast --test unison-util-relation
- name: round-trip-tests
if: steps.cache-unison-src-test-results.outputs.cache-hit != 'true'
run: |
stack --no-terminal exec unison transcript unison-src/transcripts-round-trip/main.md
git add unison-src/transcripts-round-trip/main.output.md
${{env.ucm}} transcript unison-src/transcripts-round-trip/main.md
${{env.ucm}} transcript unison-src/transcripts-manual/rewrites.md
# Fail if any transcripts cause git diffs.
git diff --cached --ignore-cr-at-eol --exit-code
stack --no-terminal exec unison transcript unison-src/transcripts-manual/rewrites.md
git add unison-src/transcripts-manual/rewrites.output.md
# Fail if any transcripts cause git diffs.
git diff --cached --ignore-cr-at-eol --exit-code
git diff --ignore-cr-at-eol --exit-code \
unison-src/transcripts-round-trip/main.output.md \
unison-src/transcripts-manual/rewrites.output.md
- name: transcripts
if: steps.cache-unison-src-test-results.outputs.cache-hit != 'true'
run: |
stack --no-terminal exec transcripts
# Add all changes to the index for when we diff.
git add --all
${{env.transcripts}}
# Fail if any transcripts cause git diffs.
git diff --cached --ignore-cr-at-eol --exit-code
git diff --ignore-cr-at-eol --exit-code unison-src/transcripts
- name: cli-integration-tests
run: stack --no-terminal exec cli-integration-tests
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: stack exec cli-integration-tests
- name: verify stack ghci startup
if: runner.os == 'macOS' && steps.cache-ucm-binaries.outputs.cache-hit != 'true'
run: echo | stack ghci
- name: cache base codebase
id: cache-base-codebase
uses: actions/cache@v4
with:
path: ${{ env.base-codebase }}
# key = base transcript contents + sqlite schema version
key: base.unison-${{hashFiles('**/unison-src/builtin-tests/base.md', '**/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs')}}.
- name: create base.md codebase
if: steps.cache-base-codebase.outputs.cache-hit != 'true'
run: ${{env.ucm}} transcript.fork -C ${{env.base-codebase}} -S ${{env.base-codebase}} unison-src/builtin-tests/base.md
- name: interpreter tests
# this one should be re-run if the ucm binaries have changed or unison-src/ has changed
if: |
runner.os != 'Windows'
&& (steps.cache-ucm-binaries.outputs.cache-hit != 'true'
|| steps.cache-unison-src-test-results.outputs.cache-hit != 'true')
run: |
${{ env.ucm }} transcript.fork -c ${{env.base-codebase}} unison-src/builtin-tests/interpreter-tests.md
cat unison-src/builtin-tests/interpreter-tests.output.md
git diff --exit-code unison-src/builtin-tests/interpreter-tests.output.md
- name: mark transcripts as passing
if: steps.cache-unison-src-test-results.outputs.cache-hit != 'true'
run: |
echo "passing=true" >> "${{env.unison_src_test_results}}"
- name: save ucm artifact
uses: actions/upload-artifact@v4
with:
name: unison-${{ matrix.os }}
path: ${{ env.ucm }}
if-no-files-found: error
- name: save stack caches
if: |
!cancelled()
&& steps.restore-stack-caches.outputs.cache-hit != 'true'
&& steps.cache-ucm-binaries.outputs.cache-hit != 'true'
uses: unisonweb/actions/stack/cache/save@main
with:
cache-prefix: ci
generate-jit-source:
if: always() && needs.build-ucm.result == 'success'
name: Generate JIT source
needs: build-ucm
runs-on: ubuntu-20.04
steps:
- name: set up environment
run: |
echo "jit_src_scheme=${{ runner.temp }}/${{ env.jit_src_scheme }}" >> $GITHUB_ENV
echo "ucm=${{ runner.temp }}/unison" >> $GITHUB_ENV
- uses: actions/cache@v4
name: cache jit source
with:
path: ${{ env.jit_src_scheme }}
key: jit_src_scheme-racket_${{env.racket_version}}.jit_${{env.jit_version}}-${{hashFiles('**/scheme-libs/**')}}
- name: check source exists
id: jit_src_exists
if: steps.cache-jit-source.outputs.cache-hit != 'true'
run: |
files=(data-info boot-generated simple-wrappers builtin-generated compound-wrappers)
all_exist=true
for file in "${files[@]}"; do
if [[ ! -f "${{ env.jit_src_scheme }}/unison/$file.ss" ]]; then
echo "$file does not exist."
all_exist=false
# Uncomment the next line if you want to stop checking after the first missing file
# break
fi
done
if $all_exist; then
echo "files_exists=true" >> $GITHUB_OUTPUT
else
echo "files_exists=false" >> $GITHUB_OUTPUT
fi
- name: create transcript
if: steps.jit_src_exists.outputs.files_exists == 'false'
uses: DamianReeves/write-file-action@v1.3
with:
path: ${{ runner.temp }}/setup-jit.md
write-mode: overwrite
contents: |
```ucm
.> project.create-empty jit-setup
jit-setup/main> pull ${{ env.jit_version }} lib.jit
```
```unison
go = generateSchemeBoot "${{ env.jit_src_scheme }}"
```
```ucm
jit-setup/main> run go
```
- name: download ucm artifact
if: steps.jit_src_exists.outputs.files_exists == 'false'
uses: actions/download-artifact@v4
with:
name: unison-${{ env.jit_generator_os }}
path: ${{ runner.temp }}
- name: set ucm permissions
if: steps.jit_src_exists.outputs.files_exists == 'false'
run: chmod +x ${{ env.ucm }}
- name: download scheme-libs
if: steps.jit_src_exists.outputs.files_exists == 'false'
uses: actions/checkout@v4
- name: generate source
if: steps.jit_src_exists.outputs.files_exists == 'false'
run: |
mkdir -p ${{ env.jit_src_scheme }}
cp -R scheme-libs/racket/* ${{ env.jit_src_scheme }}
${{ env.ucm }} transcript ${{ runner.temp }}/setup-jit.md
- name: save jit source
if: always()
uses: actions/upload-artifact@v4
with:
name: jit-source
path: ${{ env.jit_src_scheme }}/**
if-no-files-found: error
build-jit-binary:
if: always() && needs.generate-jit-source.result == 'success'
name: Build JIT binary ${{ matrix.os }}
needs: generate-jit-source
runs-on: ${{ matrix.os }}
strategy:
# Run each build to completion, regardless of if any have failed
fail-fast: false
matrix:
os:
# While iterating on this file, you can disable one or more of these to speed things up
- ubuntu-20.04
- macOS-12
- windows-2019
steps:
- name: set up environment
id: checks
run: |
jit_src_scheme="${{ runner.temp }}/${{ env.jit_src_scheme }}" # scheme source
jit_exe="${jit_src_scheme}/unison-runtime" # initially built jit
jit_dist="${{ runner.temp }}/${{ env.jit_dist }}" # jit binary with libraries destination
jit_dist_exe="${jit_dist}/bin/unison-runtime" # jit binary itself
ucm="${{ runner.temp }}/unison"
if [[ ${{runner.os}} = "Windows" ]]; then
jit_src_scheme="${jit_src_scheme//\\//}"
jit_dist="${jit_dist//\\//}"
jit_exe="${jit_exe//\\//}.exe"
jit_dist_exe="${jit_dist//\\//}/unison-runtime.exe"
ucm="${ucm//\\//}.exe"
fi
echo "jit_src_scheme=$jit_src_scheme" >> $GITHUB_ENV
echo "jit_exe=$jit_exe" >> $GITHUB_ENV
echo "jit_dist=$jit_dist" >> $GITHUB_ENV
echo "jit_dist_exe=$jit_dist_exe" >> $GITHUB_ENV
echo "ucm=$ucm" >> $GITHUB_ENV
- name: restore jit binaries
id: restore-jit-binaries
uses: actions/cache@v4
with:
path: ${{ env.jit_dist }}
key: jit_dist-racket_${{ env.racket_version }}.jit_${{ env.jit_version }}
- name: Cache Racket dependencies
uses: actions/cache@v2
if: runner.os == 'Linux'
if: steps.restore-jit-binaries.outputs.cache-hit != 'true'
uses: actions/cache@v4
with:
path: |
~/.cache/racket
~/.local/share/racket
key: ${{ runner.os }}-racket-8.7
- uses: Bogdanp/setup-racket@v1.10
if: runner.os == 'Linux'
key: ${{ runner.os }}-racket-${{env.racket_version}}
- uses: Bogdanp/setup-racket@v1.11
if: steps.restore-jit-binaries.outputs.cache-hit != 'true'
with:
architecture: 'x64'
distribution: 'full'
variant: 'CS'
version: '8.7' # match with cache key above
- run: raco pkg install --auto --skip-installed --batch x509-lib
if: runner.os == 'Linux'
version: ${{env.racket_version}}
- uses: awalsh128/cache-apt-pkgs-action@latest
if: runner.os == 'Linux' && steps.restore-jit-binaries.outputs.cache-hit != 'true'
# read this if a package isn't installing correctly
# https://github.com/awalsh128/cache-apt-pkgs-action#caveats
if: runner.os == 'Linux'
with:
packages: libb2-dev
version: 1.0 # cache key version afaik
- uses: actions/cache@v3
name: cache base.md codebase (unix)
if: runner.os == 'Linux'
- name: download jit source
if: steps.restore-jit-binaries.outputs.cache-hit != 'true'
uses: actions/download-artifact@v4
with:
path: ~/.cache/unisonlanguage/base.unison
key: base.unison_${{hashFiles('**/unison-src/builtin-tests/base.md','**/unison-cli/src/Unison/JitInfo.hs')}}-${{github.sha}}
restore-keys: base.unison_${{hashFiles('**/unison-src/builtin-tests/base.md','**/unison-cli/src/Unison/JitInfo.hs')}}-
name: jit-source
path: ${{ env.jit_src_scheme }}
- name: set up `base` codebase
if: runner.os == 'Linux'
run: |
./unison-src/builtin-tests/setup-base-codebase.sh
- uses: actions/checkout@v4 # checkout scheme-libs from unison repo
- name: jit tests
# if: false # temporarily disabled
if: runner.os == 'Linux'
- name: build jit binary
if: steps.restore-jit-binaries.outputs.cache-hit != 'true'
shell: bash
run: |
./unison-src/builtin-tests/jit-tests.sh
cat ./unison-src/builtin-tests/jit-tests.output.md
CHANGE=$(git diff unison-src/builtin-tests/jit-tests.output.md)
if [ -n "$CHANGE" ]; then
echo "The jit-tests output has changed"
exit 1
fi
cp -R scheme-libs/racket/* "$jit_src_scheme"
raco pkg install --auto --skip-installed "$jit_src_scheme"/unison
raco exe --embed-dlls "$jit_src_scheme"/unison-runtime.rkt
raco distribute "$jit_dist" "$jit_exe"
- name: interpreter tests
# if: false # temporarily disabled
if: runner.os == 'Linux'
run: |
./unison-src/builtin-tests/interpreter-tests.sh
cat ./unison-src/builtin-tests/interpreter-tests.output.md
CHANGE=$(git diff unison-src/builtin-tests/interpreter-tests.output.md)
if [ -n "$CHANGE" ]; then
echo "The interpreter-tests output has changed"
exit 1
fi
- name: save jit binary
uses: actions/upload-artifact@v4
with:
name: jit-binary-${{ matrix.os }}
path: ${{ env.jit_dist }}/**
- name: verify stack ghci startup
if: runner.os == 'macOS'
run: echo | stack ghci
- name: check final stackage cache size
- name: download ucm
if: steps.restore-jit-binaries.outputs.cache-hit != 'true'
uses: actions/download-artifact@v4
with:
name: unison-${{ matrix.os }}
path: ${{ runner.temp }}
- name: set ucm permissions
if: steps.restore-jit-binaries.outputs.cache-hit != 'true'
run: chmod +x ${{ env.ucm }}
- name: get base codebase
if: steps.restore-jit-binaries.outputs.cache-hit != 'true'
uses: actions/cache/restore@v4
with:
path: ${{ env.base-codebase}}
key: base.unison-${{hashFiles('**/unison-src/builtin-tests/base.md', '**/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs')}}.
- name: jit integration test ${{ matrix.os }}
if: runner.os != 'Windows' && steps.restore-jit-binaries.outputs.cache-hit != 'true'
run: |
echo global .stack
stack exec -- stack-clean-old list -G || true
echo project .stack-work
stack exec -- stack-clean-old list -P || true
${{ env.ucm }} transcript.fork --runtime-path ${{ env.jit_dist_exe }} -c ${{env.base-codebase}} unison-src/builtin-tests/jit-tests.md
cat unison-src/builtin-tests/jit-tests.output.md
git diff --exit-code unison-src/builtin-tests/jit-tests.output.md
# - name: Setup tmate session
# uses: mxschmitt/action-tmate@v3
# if: ${{ failure() }}
# timeout-minutes: 15

View File

@ -15,76 +15,45 @@ jobs:
name: Haddocks
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
path: unison
# The number towards the beginning of the cache keys allow you to manually avoid using a previous cache.
# GitHub will automatically delete caches that haven't been accessed in 7 days, but there is no way to
# purge one manually.
# Cache ~/.stack, keyed by the contents of 'stack.yaml'.
- uses: actions/cache@v2
name: cache ~/.stack
- name: restore stack caches
uses: unisonweb/actions/stack/cache/restore@main
with:
path: ~/.stack
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-0-haddocks-${{github.sha}}
# Fall-back to the most recent haddocks build, or to a standard Linux build failing that.
restore-keys: |
stack-0-haddocks
stack-0-Linux
cache-prefix: haddocks
stack-yaml-dir: unison
# Cache each local package's ~/.stack-work for fast incremental builds in CI.
- uses: actions/cache@v2
name: cache .stack-work
- name: install stack
uses: unisonweb/actions/stack/install@main
- name: build with haddocks
working-directory: unison
run: stack build --fast --haddock
- name: save stack caches
uses: unisonweb/actions/stack/cache/save@main
with:
path: |
**/.stack-work
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-work-2-haddocks-${{github.sha}}
# Fall-back to the most recent haddocks build, or to a standard Linux build failing that.
restore-keys: |
stack-work-2_Linux-haddocks
stack-work-2_Linux
cache-prefix: haddocks
stack-yaml-dir: unison
- name: install stack (Linux)
working-directory: ${{ github.workspace }}
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
# Haddocks
- name: Checkout haddocks branch
uses: actions/checkout@v4
with:
ref: haddocks
path: haddocks
# One of the transcripts fails if the user's git name hasn't been set.
# Needed for `git commit` below
- name: set git user info
working-directory: unison
run: |
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
- name: build with haddocks
working-directory: unison
run: stack --no-terminal build --fast --haddock
# Haddocks
- name: Checkout haddocks branch
uses: actions/checkout@v2
with:
ref: 'haddocks'
path: 'haddocks'
- name: Copy haddocks
working-directory: 'unison'
working-directory: unison
run: |
docs_root="$(stack path --local-doc-root)"
# Erase any stale files

View File

@ -22,7 +22,7 @@ jobs:
- ubuntu-20.04
- macOS-12
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: cachix/install-nix-action@v22
with:
extra_nix_config: |
@ -34,4 +34,3 @@ jobs:
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- name: build all packages and development shells
run: nix -L build --accept-flake-config --no-link --keep-going '.#build-tools'

27
.github/workflows/ormolu.yaml vendored Normal file
View File

@ -0,0 +1,27 @@
# This workflow runs ormolu on all Haskell files in the branch and creates a PR with the result.
# (The ormolu job in CI.yaml only runs ormolu on Haskell files that have changed in that PR.)
name: ormolu everything
on:
workflow_dispatch:
env:
ormolu_version: "0.5.2.0"
jobs:
ormolu:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v4
- uses: haskell-actions/run-ormolu@v15
with:
version: ${{ env.ormolu_version }}
mode: inplace
- name: create pull request with formatting changes
uses: peter-evans/create-pull-request@v6
with:
commit_message: automatically run ormolu
branch: autoformat/${{github.ref_name}}
# branch_suffix: random
title: format `${{github.ref_name}}` with ormolu ${{env.ormolu_version}}

View File

@ -1,170 +1,52 @@
name: "pre-release"
name: pre-release
run-name: pre-release ${{github.ref_name}}
defaults:
run:
shell: bash
on:
# run on each merge to `trunk`
workflow_run:
workflows: ["CI"]
branches: [ trunk ]
branches: [trunk]
types:
- completed
# run manually
workflow_dispatch:
jobs:
build_linux:
name: "build_linux"
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: install stack (Linux)
working-directory: ${{ github.workspace }}
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
# One of the transcripts fails if the user's git name hasn't been set.
- name: set git user info
run: |
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
- name: build
run: stack --no-terminal build --flag unison-parser-typechecker:optimized
- name: fetch latest Unison Local UI and package with ucm
run: |
mkdir -p /tmp/ucm/ui
UCM=$(stack path | awk '/local-install-root/{print $2}')/bin/unison
cp $UCM /tmp/ucm/ucm
wget -O/tmp/unisonLocal.zip https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip
unzip -d /tmp/ucm/ui /tmp/unisonLocal.zip
tar -c -z -f ucm-linux.tar.gz -C /tmp/ucm .
- name: Upload linux artifact
uses: actions/upload-artifact@v2
with:
if-no-files-found: error
name: build-linux
path: ucm-linux.tar.gz
build_macos:
name: "build_macos"
runs-on: macos-12
steps:
- uses: actions/checkout@v2
- name: install stack (macOS)
working-directory: ${{ github.workspace }}
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-osx-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
# One of the transcripts fails if the user's git name hasn't been set.
- name: set git user info
run: |
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
- name: remove ~/.stack/setup-exe-cache on macOS
run: rm -rf ~/.stack/setup-exe-cache
- name: build
run: stack --no-terminal build --flag unison-parser-typechecker:optimized
- name: fetch latest Unison Local UI and package with ucm
run: |
mkdir -p /tmp/ucm/ui
UCM=$(stack path | awk '/local-install-root/{print $2}')/bin/unison
cp $UCM /tmp/ucm/ucm
wget -O/tmp/unisonLocal.zip https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip
unzip -d /tmp/ucm/ui /tmp/unisonLocal.zip
tar -c -z -f ucm-macos.tar.gz -C /tmp/ucm .
- name: Upload macos artifact
uses: actions/upload-artifact@v2
with:
if-no-files-found: error
name: build-macos
path: ucm-macos.tar.gz
build_windows:
name: "build_windows"
runs-on: windows-2019
steps:
- uses: actions/checkout@v2
- name: install stack (windows)
working-directory: ${{ github.workspace }}
if: runner.os == 'Windows'
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-windows-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: build
# Run up to 5 times in a row before giving up.
# Builds sometimes fail due to a race condition on the Windows
# file-system API that stack runs into. Since any successful packages are
# cached within a single build, it should get further along on each re-start
# and should hopefully finish.
run: |
tries=5
for (( i = 0; i < $tries; i++ )); do
stack --no-terminal build --flag unison-parser-typechecker:optimized && break;
done
- name: fetch latest codebase-ui and package with ucm
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Powershell
shell: pwsh
run: |
mkdir -p tmp\ui
mkdir -p release\ui
$UCM = .\stack\stack-2.9.1-windows-x86_64\stack.exe exec -- where unison
cp $UCM .\release\ucm.exe
Invoke-WebRequest -Uri https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip -OutFile tmp\unisonLocal.zip
Expand-Archive -Path tmp\unisonLocal.zip -DestinationPath release\ui
Compress-Archive -Path .\release\* -DestinationPath ucm-windows.zip
- name: Upload windows artifact
uses: actions/upload-artifact@v2
with:
if-no-files-found: error
name: build-windows
path: ucm-windows.zip
bundle-ucm:
name: build and bundle ucm
uses: ./.github/workflows/bundle-ucm.yaml
with:
ref: ${{ github.ref }}
release:
name: "create_release"
name: create release
runs-on: ubuntu-20.04
needs:
- build_linux
- build_macos
- build_windows
- bundle-ucm
steps:
- name: make download dir
run: "mkdir /tmp/ucm"
run: mkdir /tmp/ucm
- name: "download artifacts"
uses: actions/download-artifact@v2
uses: actions/download-artifact@v4
with:
path: /tmp/ucm
- name: derive release tag
run: echo "ref_name=$(echo ${{ github.ref_name }} | awk -F'/' '{print $NF}')" >> $GITHUB_ENV
- uses: actions/checkout@v2
- uses: "marvinpinto/action-automatic-releases@latest"
with:
repo_token: "${{ secrets.GITHUB_TOKEN }}"
automatic_release_tag: "trunk-build"
automatic_release_tag: ${{ env.ref_name }}-build
prerelease: true
title: "Development Build"
title: Development Build (${{ env.ref_name }})
files: |
/tmp/ucm/**/*.tar.gz
/tmp/ucm/**/*.zip
/tmp/ucm/**/ucm-*.tar.gz
/tmp/ucm/**/ucm-*.zip

View File

@ -1,6 +1,6 @@
name: "release"
name: release
run-name: "release ${{inputs.version}}"
run-name: release ${{inputs.version}}
defaults:
run:
@ -10,34 +10,29 @@ on:
workflow_dispatch:
inputs:
version:
description: 'Release Version (E.g. M4 or M4a or 0.4.1)'
description: Release version; e.g. `0.5.19`. We'll create tag `release/${version}`.
required: true
type: string
target:
description: 'Ref to use for this release, defaults to trunk'
required: true
default: 'trunk'
type: string
jobs:
bundle-ucm:
name: build and bundle ucm
uses: ./.github/workflows/bundle-ucm.yaml
with:
ref: ${{github.ref}}
release:
name: "create_release"
name: create release
runs-on: ubuntu-20.04
needs:
- build_linux
- build_macos
- build_windows
- bundle-ucm
steps:
- uses: actions/checkout@v2
with:
ref: release/${{inputs.version}}
- name: make download dir
run: "mkdir /tmp/ucm"
run: mkdir /tmp/ucm
- name: "download artifacts"
uses: actions/download-artifact@v2
uses: actions/download-artifact@v4
with:
path: /tmp/ucm
@ -45,238 +40,20 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
version="${{inputs.version}}"
target="${{inputs.target}}"
# E.g. M4a -> M4, M4c -> M4b, M4 -> M3
prev_version="$(${{ github.workspace }}/scripts/previous-tag.sh "${version}")"
prev_tag="$( \
gh release view \
--repo unisonweb/unison \
--json tagName -t '{{printf .tagName}}' \
)"
if [ -z "$prev_tag" ]; then echo "No previous release found"; exit 1; fi
echo "Creating a release from these artifacts:"
ls -R /tmp/ucm
ls -R /tmp/ucm/**/ucm-*.{zip,tar.gz}
gh release create "release/${version}" --target "${target}" --generate-notes --notes-start-tag "release/${prev_version}" /tmp/ucm/**/*.tar.gz /tmp/ucm/**/*.zip
build_linux:
name: "build_linux"
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
with:
ref: release/${{inputs.version}}
# Cache ~/.stack, keyed by the contents of 'stack.yaml'.
- uses: actions/cache@v3
name: cache ~/.stack (linux)
with:
path: ~/.stack
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-0_ubuntu-20.04-${{hashFiles('stack.yaml')}}-${{github.sha}}
# Fall-back to use the most recent cache for the stack.yaml, or failing that the OS
restore-keys: |
stack-0_ubuntu-20.04-${{hashFiles('stack.yaml')}}
stack-0_ubuntu-20.04
# Cache each local package's ~/.stack-work for fast incremental builds in CI.
- uses: actions/cache@v3
name: cache .stack-work
with:
path: |
**/.stack-work
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-work-3_ubuntu-20.04-${{github.sha}}
restore-keys: stack-work-3_ubuntu-20.04
- name: install stack (Linux)
working-directory: ${{ github.workspace }}
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: build
run: |
# unison-cli embeds version numbers using TH
# so it needs to be forced to rebuild to ensure those are updated.
stack clean unison-cli
stack --no-terminal build --flag unison-parser-typechecker:optimized
- name: fetch latest Unison Local UI and package with ucm
run: |
mkdir -p /tmp/ucm/ui
UCM=$(stack path | awk '/local-install-root/{print $2}')/bin/unison
cp $UCM /tmp/ucm/ucm
wget -O/tmp/unisonLocal.zip https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip
unzip -d /tmp/ucm/ui /tmp/unisonLocal.zip
tar -c -z -f ucm-linux.tar.gz -C /tmp/ucm .
- name: Upload linux artifact
uses: actions/upload-artifact@v2
with:
if-no-files-found: error
name: build-linux
path: ucm-linux.tar.gz
build_macos:
name: "build_macos"
runs-on: macos-12
steps:
- uses: actions/checkout@v2
with:
ref: release/${{inputs.version}}
# Cache ~/.stack, keyed by the contents of 'stack.yaml'.
- uses: actions/cache@v3
name: cache ~/.stack (mac)
with:
path: ~/.stack
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-0_macOS-12-${{hashFiles('stack.yaml')}}-${{github.sha}}
# Fall-back to use the most recent cache for the stack.yaml, or failing that the OS
restore-keys: |
stack-0_macOS-12-${{hashFiles('stack.yaml')}}
stack-0_macOS-12
# Cache each local package's ~/.stack-work for fast incremental builds in CI.
- uses: actions/cache@v3
name: cache .stack-work
with:
path: |
**/.stack-work
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-work-3_macOS-12-${{github.sha}}
restore-keys: stack-work-3_macOS-12
- name: install stack (macOS)
working-directory: ${{ github.workspace }}
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-osx-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: remove ~/.stack/setup-exe-cache on macOS
run: rm -rf ~/.stack/setup-exe-cache
- name: build
run: |
# unison-cli embeds version numbers using TH
# so it needs to be forced to rebuild to ensure those are updated.
stack clean unison-cli
stack --no-terminal build --flag unison-parser-typechecker:optimized
- name: fetch latest Unison Local UI and package with ucm
run: |
mkdir -p /tmp/ucm/ui
UCM=$(stack path | awk '/local-install-root/{print $2}')/bin/unison
cp $UCM /tmp/ucm/ucm
wget -O/tmp/unisonLocal.zip https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip
unzip -d /tmp/ucm/ui /tmp/unisonLocal.zip
tar -c -z -f ucm-macos.tar.gz -C /tmp/ucm .
- name: Upload macos artifact
uses: actions/upload-artifact@v2
with:
if-no-files-found: error
name: build-macos
path: ucm-macos.tar.gz
build_windows:
name: "build_windows"
runs-on: windows-2019
steps:
- uses: actions/checkout@v2
with:
ref: release/${{inputs.version}}
# Cache ~/.stack, keyed by the contents of 'stack.yaml'.
- uses: actions/cache@v3
name: cache ~/.stack (Windows)
with:
path: "C:\\Users\\runneradmin\\AppData\\Roaming\\stack"
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-0_windows-2019-${{hashFiles('stack.yaml')}}-${{github.sha}}
# Fall-back to use the most recent cache for the stack.yaml, or failing that the OS
restore-keys: |
stack-0_windows-2019-${{hashFiles('stack.yaml')}}
stack-0_windows-2019
# Cache each local package's ~/.stack-work for fast incremental builds in CI.
- uses: actions/cache@v3
name: cache .stack-work
with:
path: |
**/.stack-work
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-work-3_windows-2019-${{github.sha}}
restore-keys: stack-work-3_windows-2019
- name: install stack (windows)
working-directory: ${{ github.workspace }}
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-windows-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: build
run: |
# unison-cli embeds version numbers using TH
# so it needs to be forced to rebuild to ensure those are updated.
stack clean unison-cli
# Windows will crash on build intermittently because the filesystem
# sucks at managing concurrent file access;
# Just keep retrying on these failures.
tries=5
for (( i = 0; i < $tries; i++ )); do
stack --no-terminal build --flag unison-parser-typechecker:optimized && break;
done
- name: fetch latest Unison Local UI and package with ucm
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Powershell
shell: pwsh
run: |
mkdir -p tmp\ui
mkdir -p release\ui
$UCM = .\stack\stack-2.9.1-windows-x86_64\stack.exe exec -- where unison
cp $UCM .\release\ucm.exe
Invoke-WebRequest -Uri https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip -OutFile tmp\unisonLocal.zip
Expand-Archive -Path tmp\unisonLocal.zip -DestinationPath release\ui
Compress-Archive -Path .\release\* -DestinationPath ucm-windows.zip
- name: Upload windows artifact
uses: actions/upload-artifact@v2
with:
if-no-files-found: error
name: build-windows
path: ucm-windows.zip
gh release create "release/${{inputs.version}}" \
--repo unisonweb/unison \
--target "${{github.ref}}" \
--generate-notes \
--notes-start-tag "${prev_tag}" \
\
/tmp/ucm/**/ucm-*.{zip,tar.gz}

View File

@ -15,60 +15,13 @@ jobs:
- macOS-12
steps:
- uses: actions/checkout@v4
- id: stackage-resolver
name: record stackage resolver
# https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files
# looks for `resolver: nightly-yyyy-mm-dd` or `resolver: lts-xx.yy` in `stack.yaml` and splits it into
# `nightly` or `lts-xx`. the whole resolver string is put into resolver_long as a backup cache key
# ${{ steps.stackage-resolver.outputs.resolver_short }}
# ${{ steps.stackage-resolver.outputs.resolver_long }}
run: |
grep resolver stack.yaml | awk '{ x="resolver_short="; if (split($2,a,"-") > 2) print x a[1]; else {split($2,b,"."); print x b[1]}}' >> "$GITHUB_OUTPUT"
grep resolver stack.yaml | awk '{print "resolver_long="$2}' >> "$GITHUB_OUTPUT"
# Cache ~/.stack, keyed by the contents of 'stack.yaml'.
- uses: actions/cache@v3
name: cache ~/.stack (unix)
if: runner.os != 'Windows'
- uses: unisonweb/actions/stack/cache/restore@main
with:
path: ~/.stack
key: stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-${{github.sha}}
# Fall-back to use the most recent cache for the stack.yaml, or failing that the OS
restore-keys: |
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}.
stack-1_${{matrix.os}}-
# take cache from the ci job, read-only
cache-prefix: ci
# Cache each local package's ~/.stack-work for fast incremental builds in CI.
- uses: actions/cache@v3
name: cache .stack-work
with:
path: |
**/.stack-work
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-${{hashFiles('**/stack.yaml')}}-${{github.sha}}
restore-keys: |
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}.
stack-work-4_${{matrix.os}}-
# Install stack by downloading the binary from GitHub.
# The installation process differs by OS.
- name: install stack (Linux)
if: runner.os == 'Linux'
working-directory: ${{ runner.temp }}
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: install stack
uses: unisonweb/actions/stack/install@main
# One of the transcripts fails if the user's git name hasn't been set.
- name: set git user info
@ -76,14 +29,14 @@ jobs:
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
- name: build
run: stack --no-terminal build --fast --no-run-tests --test
run: stack build --fast --no-run-tests --test
- name: round-trip-tests
run: |
stack --no-terminal exec unison transcript unison-src/transcripts-round-trip/main.md
stack --no-terminal exec unison transcript unison-src/transcripts-manual/rewrites.md
stack exec unison transcript unison-src/transcripts-round-trip/main.md
stack exec unison transcript unison-src/transcripts-manual/rewrites.md
- name: transcripts
run: stack --no-terminal exec transcripts
run: stack exec transcripts
- name: save transcript changes
uses: stefanzweifel/git-auto-commit-action@v4
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: rerun transcripts (reminder to rerun CI!)

View File

@ -31,7 +31,7 @@ Other resources:
* [Learn about the big idea behind Unison](https://www.unison-lang.org/learn/the-big-idea/)
* Check out [the project website](https://unison-lang.org)
* Say hello or lurk [in the Slack chat](https://unison-lang.org/slack)
* Say hello or lurk [in the Discord chat](https://unison-lang.org/discord)
* Explore [the Unison ecosystem](https://share.unison-lang.org/)
* [Learn Unison](https://www.unison-lang.org/learn/)

View File

@ -2791,7 +2791,7 @@ ancestorSql h =
SELECT self_hash_id
FROM causal
WHERE self_hash_id = :h
UNION ALL
UNION
SELECT parent_id
FROM causal_parent
JOIN ancestor ON ancestor.id = causal_id

View File

@ -23,6 +23,8 @@ packages:
lib/unison-util-cache
lib/unison-util-relation
lib/unison-util-rope
lib/unison-util-file-embed
lib/unison-util-nametree
parser-typechecker
unison-core
@ -46,7 +48,6 @@ source-repository-package
constraints:
fsnotify < 0.4,
crypton-x509-store <= 1.6.8,
lsp < 2.0.0.0,
servant <= 0.19.1,
optparse-applicative <= 0.17.1.0

View File

@ -2,7 +2,7 @@ These are commands that will likely be useful during development.
__General:__ `./scripts/test.sh` compiles and builds the Haskell code and runs all tests. Recommended that you run this before pushing any code to a branch that others might be working on.
_Disclaimer_ If you have trouble getting started, please get in touch via [Slack](https://unison-lang.org/community) so we can help. If you have any fixes to the process, please send us a PR!
_Disclaimer_ If you have trouble getting started, please get in touch via [Discord](https://unison-lang.org/discord) so we can help. If you have any fixes to the process, please send us a PR!
## Running Unison
@ -126,9 +126,9 @@ This is specified with the normal
Some examples:
```
nix build '.#unison-cli:lib:unison-cli'
nix build '.#unison-syntax:test:syntax-tests'
nix build '.#unison-cli:exe:transcripts'
nix build '.#haskell-nix.unison-cli:lib:unison-cli'
nix build '.#haskell-nix.unison-syntax:test:syntax-tests'
nix build '.#haskell-nix.unison-cli:exe:transcripts'
```
### Development environments
@ -154,7 +154,7 @@ all non-local haskell dependencies (including profiling dependencies)
are provided in the nix shell.
```
nix develop '.#local'
nix develop '.#haskell-nix.local'
```
#### Get into a development environment for building a specific package
@ -164,17 +164,17 @@ all haskell dependencies of this package are provided by the nix shell
(including profiling dependencies).
```
nix develop '.#<package-name>'
nix develop '.#haskell-nix.<package-name>'
```
for example:
```
nix develop '.#unison-cli'
nix develop '.#haskell-nix.unison-cli'
```
or
```
nix develop '.#unison-parser-typechecker'
nix develop '.#haskell-nix.unison-parser-typechecker'
```
This is useful if you wanted to profile a package. For example, if you

View File

@ -117,6 +117,33 @@ Defaults to enabled.
$ UNISON_ENTITY_VALIDATION="false" ucm
```
### `UNISON_PULL_WORKERS`
Allows setting the number of workers to use when pulling from a codebase server.
Defaults to 5.
```sh
$ UNISON_PULL_WORKERS=6 ucm
```
### `UNISON_PUSH_WORKERS`
Allows setting the number of workers to use when pushing to a codebase server.
Defaults to 1.
```sh
$ UNISON_PUSH_WORKERS=2 ucm
```
### `UNISON_SYNC_CHUNK_SIZE`
Allows setting the chunk size used in requests when syncing a codebase.
Defaults to 50.
```sh
$ UNISON_SYNC_CHUNK_SIZE=100 ucm
```
### Local Codebase Server
The port, host and token to be used for the local codebase server can all be configured by providing environment

View File

@ -0,0 +1,85 @@
## Some things I wish I'd known about Github Actions
You can't have an `env:` key defined in terms of another `env` key, but you can use `$GITHUB_ENV` to get around this.
You can't define a `matrix` at the top level, it has to be defined within a `job`'s `strategy`.
`runs-on:` doesn't allow `env` for some reason.
Strings don't need quotes, unless you need to force something to be a string.
A `@ref` is always needed on a remote action.
Windows doesn't seem to honor the `default: run: shell:` setting, so you need to set the `shell:` on `run:` manually?
Don't hesitate to do a lot with `run:` blocks aka bash scripts — at least bash is mature and well documented.
e.g.
echo "bar=whatever" >> $GITHUB_OUTPUT
# access with `steps.<name>.outputs.bar` in yaml strings
echo "foo=whatever" >> $GITHUB_ENV
# access with `env.foo` in yaml strings, or `$foo` in bash
`$GITHUB_ENV` updates the `env` context between steps, but not in the middle of a step. Obvious in retrospect.
It's not clear to me when to use `$GITHUB_OUTPUT` vs `$GITHUB_ENV`, but I have been favoring `$GITHUB_ENV` because it requires fewer characters to access.
However, it seems a little wrong.
### `if:`
Although the type rules don't totally make sense in Github Actions, `if:` takes a Boolean.
Therefore, I think the String interpolation in `if: ${{runner.os}} != 'Windows'` causes the whole expression to become a String, which is coerced to `true`, when you definitely didn't mean `if: true`. So don't use `${{}}` here.
### Job names
Job names will automatically get `(${{matrix.os}})` if you don't use `${{matrix.os}}` somewhere in the name.
### Windows
The whole thing with `.exe` is a mess. Unix commands typically drop and add `.exe` correctly as needed, but Github Actions (e.g. `actions/upload-artifact`?) don't.
### Cache
When using the `cache` action, getting a cache hit on the primary key means you won't update the cache with any changes.
When picking a key, you have to ask, "Which key, if exactly matched, would mean that I'm already so done that I don't even want to save anything new from this run."
Similarly, `save-always: true` only if a key hit means there will be nothing new to save, even if a previous run failed AND a failed result is worth starting with.
Backup restore keys: "Is there a prior run that would be worth starting out from? With the caveat that any irrelevant garbage it includes will be saved into this run too."
### Upload Artifact
I suspect on Windows it can't support paths that select a drive in a Unix-y way,
like `/c/asdf` or `/d/asdf`. It's got to be `C:/asdf` or `C:\asdf` etc.
Upload will complain if any
Upload and Download plugin versions have to match.
### Reusability
Github supports splitting off "reusable workflows" (`jobs` that can be imported into another workflow), and "composite actions" (multi-step `steps` that can be imported into another `job`).
#### Composite actions
Needs to have `shell:` specified on every `run:`
#### Reusable workflows
These have to be in `.github/workflows`, you can't organize them deeper, or elsewhere.
### Reference
Default Environment Variables:
https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables
Workflow syntax:
https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions
Reusable workflows:
https://docs.github.com/en/actions/using-workflows/reusing-workflows
Composite actions:
https://docs.github.com/en/actions/creating-actions/creating-a-composite-action

View File

@ -173,6 +173,10 @@ Simply install the [Unison Language VSCode extension](https://marketplace.visual
To `~/.config/helix/languages.toml` append this code:
```toml
[language-server.ucm]
command = "nc" # or 'ncat' or 'netcat'
args = ["localhost", "5757"]
[[language]]
name = "unison"
scope = "source.unison"
@ -183,7 +187,8 @@ roots = []
auto-format = false
comment-token = "--"
indent = { tab-width = 4, unit = " " }
language-server = { command = "ncat", args = ["localhost", "5757"] }
language-servers = [ "ucm" ]
```
or follow the instructions for Unison in "[How to install the default language servers](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers#unison)" wiki page.

View File

@ -21,18 +21,24 @@ cradle:
- path: "codebase2/util-term/./"
component: "unison-util-term:lib"
- path: "lib/orphans/network-uri-orphans-sqlite/src"
component: "network-uri-orphans-sqlite:lib"
- path: "lib/orphans/unison-core-orphans-sqlite/src"
component: "unison-core-orphans-sqlite:lib"
- path: "lib/unison-hash/src"
component: "unison-hash:lib"
- path: "lib/orphans/unison-hash-orphans-aeson/src"
component: "unison-hash-orphans-aeson:lib"
- path: "lib/orphans/unison-hash-orphans-sqlite/src"
component: "unison-hash-orphans-sqlite:lib"
- path: "lib/orphans/uuid-orphans-sqlite/src"
component: "uuid-orphans-sqlite:lib"
- path: "lib/unison-hash/src"
component: "unison-hash:lib"
- path: "lib/unison-hashing/src"
component: "unison-hashing:lib"
@ -72,6 +78,9 @@ cradle:
- path: "lib/unison-util-file-embed/src"
component: "unison-util-file-embed:lib"
- path: "lib/unison-util-nametree/src"
component: "unison-util-nametree:lib"
- path: "lib/unison-util-relation/src"
component: "unison-util-relation:lib"
@ -84,42 +93,36 @@ cradle:
- path: "lib/unison-util-rope/src"
component: "unison-util-rope:lib"
- path: "lib/orphans/uuid-orphans-sqlite/src"
component: "uuid-orphans-sqlite:lib"
- path: "parser-typechecker/src"
component: "unison-parser-typechecker:lib"
- path: "parser-typechecker/tests"
component: "unison-parser-typechecker:test:parser-typechecker-tests"
- path: "unison-cli/src"
- path: "unison-cli/unison"
component: "unison-cli:lib"
- path: "unison-cli/integration-tests/Suite.hs"
component: "unison-cli:exe:cli-integration-tests"
- path: "unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs"
component: "unison-cli:exe:cli-integration-tests"
- path: "unison-cli/src"
component: "unison-cli:lib:unison-cli-lib"
- path: "unison-cli/transcripts/Transcripts.hs"
component: "unison-cli:exe:transcripts"
- path: "unison-cli/unison/Main.hs"
component: "unison-cli:exe:unison"
- path: "unison-cli/unison/ArgParse.hs"
component: "unison-cli:exe:unison"
- path: "unison-cli/unison/System/Path.hs"
component: "unison-cli:exe:unison"
- path: "unison-cli/unison/Version.hs"
component: "unison-cli:exe:unison"
- path: "unison-cli/tests"
component: "unison-cli:test:cli-tests"
- path: "unison-cli-integration/integration-tests/Suite.hs"
component: "unison-cli-integration:exe:cli-integration-tests"
- path: "unison-cli-integration/integration-tests/IntegrationTests/ArgumentParsing.hs"
component: "unison-cli-integration:exe:cli-integration-tests"
- path: "unison-cli-main/unison/Main.hs"
component: "unison-cli-main:exe:unison"
- path: "unison-cli-main/unison/Version.hs"
component: "unison-cli-main:exe:unison"
- path: "unison-core/src"
component: "unison-core1:lib"

View File

@ -65,7 +65,7 @@ let
};
local = shellFor {
packages = hpkgs: (map (p: hpkgs."${p}") localPackageNames);
withHoogle = true;
withHoogle = false;
};
} // localPackageDevShells;
in

View File

@ -509,6 +509,7 @@ builtinsSrc =
B "Text.patterns.notCharIn" $ list char --> pat text,
-- Pattern.many : Pattern a -> Pattern a
B "Pattern.many" $ forall1 "a" (\a -> pat a --> pat a),
B "Pattern.many.corrected" $ forall1 "a" (\a -> pat a --> pat a),
B "Pattern.replicate" $ forall1 "a" (\a -> nat --> nat --> pat a --> pat a),
B "Pattern.capture" $ forall1 "a" (\a -> pat a --> pat a),
B "Pattern.captureAs" $ forall1 "a" (\a -> a --> pat a --> pat a),

View File

@ -66,8 +66,12 @@ module Unison.Codebase.Branch
modifyAt,
modifyAtM,
children0,
-- *** Libdep manipulations
withoutLib,
withoutTransitiveLibs,
deleteLibdep,
deleteLibdeps,
-- * Branch terms/types/edits
@ -172,6 +176,16 @@ withoutTransitiveLibs Branch0 {..} =
)
in branch0 _terms _types newChildren _edits
-- | @deleteLibdep name branch@ deletes the libdep named @name@ from @branch@, if it exists.
deleteLibdep :: NameSegment -> Branch0 m -> Branch0 m
deleteLibdep dep =
over (children . ix NameSegment.libSegment . head_ . children) (Map.delete dep)
-- | @deleteLibdeps branch@ deletes all libdeps from @branch@.
deleteLibdeps :: Branch0 m -> Branch0 m
deleteLibdeps =
over children (Map.delete NameSegment.libSegment)
deepReferents :: Branch0 m -> Set Referent
deepReferents = R.dom . deepTerms

View File

@ -8,11 +8,11 @@ import Unison.Codebase.Path (Path)
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.ShortCausalHash (ShortCausalHash)
import Unison.Codebase.ShortCausalHash qualified as SCH
import Unison.NameSegment qualified as NameSegment
import Unison.Prelude
import Unison.Project (ProjectAndBranch (..), ProjectBranchName, ProjectName)
import Unison.Share.Types
import Unison.Util.Monoid qualified as Monoid
import qualified Unison.NameSegment as NameSegment
data ReadRepo
= ReadRepoGit ReadGitRepo

View File

@ -95,9 +95,9 @@ hashFieldAccessors ppe declName vars declRef dd = do
let typecheckingEnv :: Typechecker.Env v ()
typecheckingEnv =
Typechecker.Env
{ Typechecker._ambientAbilities = mempty,
Typechecker._typeLookup = typeLookup,
Typechecker._termsByShortname = mempty
{ ambientAbilities = mempty,
typeLookup,
termsByShortname = mempty
}
accessorsWithTypes :: [(v, Term.Term v (), Type.Type v ())] <-
for accessors \(v, _a, trm) ->

View File

@ -26,7 +26,7 @@ import Unison.Reference (Reference)
import Unison.Referent qualified as Referent
import Unison.Result (CompilerBug (..), Note (..), ResultT, pattern Result)
import Unison.Result qualified as Result
import Unison.Syntax.Name qualified as Name (toText, unsafeParseVar)
import Unison.Syntax.Name qualified as Name (unsafeParseVar)
import Unison.Syntax.Parser qualified as Parser
import Unison.Term qualified as Term
import Unison.Type qualified as Type
@ -85,18 +85,19 @@ computeTypecheckingEnvironment shouldUseTndr ambientAbilities typeLookupf uf =
tl <- typeLookupf (UF.dependencies uf)
pure
Typechecker.Env
{ _ambientAbilities = ambientAbilities,
_typeLookup = tl,
_termsByShortname = Map.empty
{ ambientAbilities = ambientAbilities,
typeLookup = tl,
termsByShortname = Map.empty
}
ShouldUseTndr'Yes parsingEnv -> do
let preexistingNames = Parser.names parsingEnv
tm = UF.typecheckingTerm uf
possibleDeps =
[ (Name.toText name, Var.name v, r)
[ (name, shortname, r)
| (name, r) <- Rel.toList (Names.terms preexistingNames),
v <- Set.toList (Term.freeVars tm),
name `Name.endsWithReverseSegments` List.NonEmpty.toList (Name.reverseSegments (Name.unsafeParseVar v))
let shortname = Name.unsafeParseVar v,
name `Name.endsWithReverseSegments` List.NonEmpty.toList (Name.reverseSegments shortname)
]
possibleRefs = Referent.toReference . view _3 <$> possibleDeps
tl <- fmap (UF.declsToTypeLookup uf <>) (typeLookupf (UF.dependencies uf <> Set.fromList possibleRefs))
@ -115,22 +116,23 @@ computeTypecheckingEnvironment shouldUseTndr ambientAbilities typeLookupf uf =
[ (shortname, nr)
| (name, shortname, r) <- possibleDeps,
typ <- toList $ TL.typeOfReferent tl r,
let nr = Typechecker.NamedReference name typ (Right r)
let nr = Typechecker.NamedReference name typ (Context.ReplacementRef r)
]
<>
-- local file TDNR possibilities
[ (Var.name v, nr)
[ (shortname, nr)
| (name, r) <- Rel.toList (Names.terms $ UF.toNames uf),
v <- Set.toList (Term.freeVars tm),
name `Name.endsWithReverseSegments` List.NonEmpty.toList (Name.reverseSegments (Name.unsafeParseVar v)),
let shortname = Name.unsafeParseVar v,
name `Name.endsWithReverseSegments` List.NonEmpty.toList (Name.reverseSegments shortname),
typ <- toList $ TL.typeOfReferent tl r,
let nr = Typechecker.NamedReference (Name.toText name) typ (Right r)
let nr = Typechecker.NamedReference name typ (Context.ReplacementRef r)
]
pure
Typechecker.Env
{ _ambientAbilities = ambientAbilities,
_typeLookup = tl,
_termsByShortname = fqnsByShortName
{ ambientAbilities = ambientAbilities,
typeLookup = tl,
termsByShortname = fqnsByShortName
}
synthesizeFile ::

View File

@ -8,7 +8,7 @@
-- decl to discover constraints on the decl vars. These constraints
-- are then given to a constraint solver that determines a unique kind
-- for each type variable. Unconstrained variables are defaulted to
-- kind * (just like Haskell 98). This is done by 'inferDecls'.
-- kind Type (just like Haskell 98). This is done by 'inferDecls'.
--
-- Afterwards, the 'SolveState' holds the kinds of all decls and we
-- can check that type annotations in terms that may mention the

View File

@ -8,15 +8,15 @@ where
import Control.Lens (Traversal, Traversal')
import Unison.KindInference.Constraint.Provenance (Provenance)
import Unison.KindInference.Constraint.Provenance qualified as Provenance
import Unison.KindInference.Constraint.StarProvenance (StarProvenance)
import Unison.KindInference.Constraint.StarProvenance qualified as SP
import Unison.KindInference.Constraint.TypeProvenance (TypeProvenance)
import Unison.KindInference.Constraint.TypeProvenance qualified as TP
-- | Solved constraints
--
-- These constraints are associated with unification variables during
-- kind inference.
data Constraint uv v loc
= IsType (StarProvenance v loc)
= IsType (TypeProvenance v loc)
| IsAbility (Provenance v loc)
| IsArr (Provenance v loc) uv uv
deriving stock (Show, Eq, Ord)
@ -28,7 +28,7 @@ prov ::
(Provenance v loc)
(Provenance v loc')
prov f = \case
IsType x -> IsType <$> SP.prov f x
IsType x -> IsType <$> TP.prov f x
IsAbility x -> IsAbility <$> f x
IsArr l a b -> (\x -> IsArr x a b) <$> f l
{-# INLINE prov #-}

View File

@ -1,5 +1,5 @@
module Unison.KindInference.Constraint.StarProvenance
( StarProvenance (..),
module Unison.KindInference.Constraint.TypeProvenance
( TypeProvenance (..),
prov,
)
where
@ -11,15 +11,15 @@ import Unison.KindInference.Constraint.Provenance (Provenance)
-- in constraint generation (in which case it will have a
-- @Provenance@) and also in the solver through kind-defaulting on
-- unconstrained unification variables.
data StarProvenance v loc
data TypeProvenance v loc
= NotDefault (Provenance v loc)
| Default
deriving stock (Show, Eq, Ord)
prov ::
Traversal
(StarProvenance v loc)
(StarProvenance v loc')
(TypeProvenance v loc)
(TypeProvenance v loc')
(Provenance v loc)
(Provenance v loc')
prov f = \case

View File

@ -1,6 +1,6 @@
module Unison.KindInference.Constraint.Unsolved
( Constraint (..),
starProv,
typeProv,
prov,
loc,
)
@ -14,29 +14,29 @@ import Unison.KindInference.Constraint.Provenance qualified as Provenance
--
-- These are produced during constraint generation and given as input
-- to the constraint solver.
data Constraint uv v loc starProv
data Constraint uv v loc typeProv
= -- | An IsType constraint may arise from generation or from the
-- solver. During generation the provenance is always a real
-- source code location, but the solver defaults unconstrained
-- kind vars to Star.
IsType uv (starProv v loc)
IsType uv (typeProv v loc)
| IsArr uv (Provenance v loc) uv uv
| IsAbility uv (Provenance v loc)
| Unify (Provenance v loc) uv uv
deriving stock (Show, Eq, Ord)
starProv ::
typeProv ::
Traversal
(Constraint uv v loc prov)
(Constraint uv v loc prov')
(prov v loc)
(prov' v loc)
starProv f = \case
typeProv f = \case
IsType x l -> IsType x <$> f l
IsAbility x l -> pure (IsAbility x l)
IsArr s l a b -> pure (IsArr s l a b)
Unify l a b -> pure (Unify l a b)
{-# INLINE starProv #-}
{-# INLINE typeProv #-}
prov ::
Lens

View File

@ -1,3 +1,5 @@
-- | Handles generating kind constraints to be fed to the kind
-- constraint solver (found in "Unison.KindInference.Solve").
module Unison.KindInference.Generate
( typeConstraints,
termConstraints,
@ -28,40 +30,16 @@ import Unison.Term qualified as Term
import Unison.Type qualified as Type
import Unison.Var (Type (User), Var (typed), freshIn)
data ConstraintTree v loc
= Node [ConstraintTree v loc]
| Constraint (GeneratedConstraint v loc) (ConstraintTree v loc)
| ParentConstraint (GeneratedConstraint v loc) (ConstraintTree v loc)
| StrictOrder (ConstraintTree v loc) (ConstraintTree v loc)
newtype TreeWalk = TreeWalk (forall a. ([a] -> [a]) -> [([a] -> [a], [a] -> [a])] -> [a] -> [a])
--------------------------------------------------------------------------------
-- Constraints arising from Types
--------------------------------------------------------------------------------
bottomUp :: TreeWalk
bottomUp = TreeWalk \down pairs0 -> foldr (\(d, u) b -> d . u . b) id pairs0 . down
flatten :: TreeWalk -> ConstraintTree v loc -> [GeneratedConstraint v loc]
flatten (TreeWalk f) = ($ []) . flattenTop
where
flattenTop :: ConstraintTree v loc -> [GeneratedConstraint v loc] -> [GeneratedConstraint v loc]
flattenTop t0 =
f id [flattenRec id t0]
flattenRec ::
([GeneratedConstraint v loc] -> [GeneratedConstraint v loc]) ->
ConstraintTree v loc ->
([GeneratedConstraint v loc] -> [GeneratedConstraint v loc], [GeneratedConstraint v loc] -> [GeneratedConstraint v loc])
flattenRec down = \case
Node cts ->
let pairs = map (flattenRec id) cts
in (f down pairs, id)
Constraint c ct -> flattenRec (down . (c :)) ct
ParentConstraint c ct ->
let (down', up) = flattenRec down ct
in (down', up . (c :))
StrictOrder a b ->
let as = flattenTop a
bs = flattenTop b
in (f down [(as . bs, id)], id)
-- | Generate kind constraints arising from a given type. The given
-- @UVar@ is constrained to have the kind of the given type.
typeConstraints :: (Var v, Ord loc) => UVar v loc -> Type.Type v loc -> Gen v loc [GeneratedConstraint v loc]
typeConstraints resultVar typ =
flatten bottomUp <$> typeConstraintTree resultVar typ
typeConstraintTree :: (Var v, Ord loc) => UVar v loc -> Type.Type v loc -> Gen v loc (ConstraintTree v loc)
typeConstraintTree resultVar term@ABT.Term {annotation, out} = do
@ -130,11 +108,6 @@ typeConstraintTree resultVar term@ABT.Term {annotation, out} = do
effConstraints <- typeConstraintTree effKind eff
pure $ ParentConstraint (IsAbility effKind (Provenance EffectsList $ ABT.annotation eff)) effConstraints
-- | Generate kind constraints arising from a given type. The given
-- @UVar@ is constrained to have the kind of the given type.
typeConstraints :: (Var v, Ord loc) => UVar v loc -> Type.Type v loc -> Gen v loc [GeneratedConstraint v loc]
typeConstraints resultVar typ =
flatten bottomUp <$> typeConstraintTree resultVar typ
handleIntroOuter :: Var v => v -> loc -> (GeneratedConstraint v loc -> Gen v loc r) -> Gen v loc r
handleIntroOuter v loc k = do
@ -146,6 +119,29 @@ handleIntroOuter v loc k = do
Just a -> pure a
k (Unify (Provenance ScopeReference loc) new orig)
--------------------------------------------------------------------------------
-- Constraints arising from Type annotations
--------------------------------------------------------------------------------
-- | Check that all annotations in a term are well-kinded
termConstraints :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc [GeneratedConstraint v loc]
termConstraints x = flatten bottomUp <$> termConstraintTree x
termConstraintTree :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc (ConstraintTree v loc)
termConstraintTree = fmap Node . dfAnns processAnn cons nil . hackyStripAnns
where
processAnn :: loc -> Type.Type v loc -> Gen v loc [ConstraintTree v loc] -> Gen v loc [ConstraintTree v loc]
processAnn ann typ mrest = do
instantiateType typ \typ gcs -> do
typKind <- freshVar typ
annConstraints <- ParentConstraint (IsType typKind (Provenance TypeAnnotation ann)) <$> typeConstraintTree typKind typ
let annConstraints' = foldr Constraint annConstraints gcs
rest <- mrest
pure (annConstraints' : rest)
cons mlhs mrhs = (++) <$> mlhs <*> mrhs
nil = pure []
-- | Helper for @termConstraints@ that instantiates the outermost
-- foralls and keeps the type in scope (in the type map) while
-- checking lexically nested type annotations.
@ -165,24 +161,6 @@ instantiateType type0 k =
t -> k t (reverse acc)
in go [] type0
-- | Check that all annotations in a term are well-kinded
termConstraints :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc [GeneratedConstraint v loc]
termConstraints x = flatten bottomUp <$> termConstraintTree x
termConstraintTree :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc (ConstraintTree v loc)
termConstraintTree = fmap Node . dfAnns processAnn cons nil . hackyStripAnns
where
processAnn :: loc -> Type.Type v loc -> Gen v loc [ConstraintTree v loc] -> Gen v loc [ConstraintTree v loc]
processAnn ann typ mrest = do
instantiateType typ \typ gcs -> do
typKind <- freshVar typ
annConstraints <- ParentConstraint (IsType typKind (Provenance TypeAnnotation ann)) <$> typeConstraintTree typKind typ
let annConstraints' = foldr Constraint annConstraints gcs
rest <- mrest
pure (annConstraints' : rest)
cons mlhs mrhs = (++) <$> mlhs <*> mrhs
nil = pure []
-- | Process type annotations depth-first. Allows processing
-- annotations with lexical scoping.
dfAnns :: (loc -> Type.Type v loc -> b -> b) -> (b -> b -> b) -> b -> Term.Term v loc -> b
@ -222,6 +200,10 @@ hackyStripAnns =
Term.Ann trm _typ -> trm
t -> ABT.tm ann t
--------------------------------------------------------------------------------
-- Constraints arising from Decls
--------------------------------------------------------------------------------
-- | Generate kind constraints for a mutally recursive component of
-- decls
declComponentConstraints ::
@ -345,6 +327,12 @@ withInstantiatedConstructorType declType tyParams0 constructorType0 k =
pure (Unify (Provenance DeclDefinition (ABT.annotation typ)) x tp)
in goForall constructorType0
--------------------------------------------------------------------------------
-- Constraints on builtins
--------------------------------------------------------------------------------
-- | Constraints on language builtins, used to initialize the kind
-- inference state ('Unison.KindInference.Solve.initialState')
builtinConstraints :: forall v loc. (Ord loc, BuiltinAnnotation loc, Var v) => Gen v loc [GeneratedConstraint v loc]
builtinConstraints = flatten bottomUp <$> builtinConstraintTree
@ -420,6 +408,11 @@ builtinConstraintTree =
kindVar <- pushType (t builtinAnnotation)
foldr Constraint (Node []) <$> constrainToKind (Provenance Builtin builtinAnnotation) kindVar k
--------------------------------------------------------------------------------
-- Helpers for constructing constraints
--------------------------------------------------------------------------------
-- | Constrain a @UVar@ to the provided @Kind@
constrainToKind :: (Var v) => Provenance v loc -> UVar v loc -> Kind -> Gen v loc [GeneratedConstraint v loc]
constrainToKind prov resultVar0 = fmap ($ []) . go resultVar0
where
@ -441,7 +434,52 @@ data Kind = Type | Ability | Kind :-> Kind
infixr 9 :->
-- | Convert the 'Unison.Kind' annotation type to our internal 'Kind'
fromUnisonKind :: Unison.Kind -> Kind
fromUnisonKind = \case
Unison.Star -> Type
Unison.Arrow a b -> fromUnisonKind a :-> fromUnisonKind b
--------------------------------------------------------------------------------
-- Constraint ordering
--------------------------------------------------------------------------------
-- | The order in which constraints are generated has a great impact
-- on the error observed. To separate the concern of constraint
-- generation and constraint ordering the constraints are generated as
-- a constraint tree, and the flattening of this tree determines the
-- generated constraint order.
data ConstraintTree v loc
= Node [ConstraintTree v loc]
| Constraint (GeneratedConstraint v loc) (ConstraintTree v loc)
| ParentConstraint (GeneratedConstraint v loc) (ConstraintTree v loc)
| StrictOrder (ConstraintTree v loc) (ConstraintTree v loc)
newtype TreeWalk = TreeWalk (forall a. ([a] -> [a]) -> [([a] -> [a], [a] -> [a])] -> [a] -> [a])
bottomUp :: TreeWalk
bottomUp = TreeWalk \down pairs0 -> foldr (\(d, u) b -> d . u . b) id pairs0 . down
flatten :: TreeWalk -> ConstraintTree v loc -> [GeneratedConstraint v loc]
flatten (TreeWalk f) = ($ []) . flattenTop
where
flattenTop :: ConstraintTree v loc -> [GeneratedConstraint v loc] -> [GeneratedConstraint v loc]
flattenTop t0 =
f id [flattenRec id t0]
flattenRec ::
([GeneratedConstraint v loc] -> [GeneratedConstraint v loc]) ->
ConstraintTree v loc ->
([GeneratedConstraint v loc] -> [GeneratedConstraint v loc], [GeneratedConstraint v loc] -> [GeneratedConstraint v loc])
flattenRec down = \case
Node cts ->
let pairs = map (flattenRec id) cts
in (f down pairs, id)
Constraint c ct -> flattenRec (down . (c :)) ct
ParentConstraint c ct ->
let (down', up) = flattenRec down ct
in (down', up . (c :))
StrictOrder a b ->
let as = flattenTop a
bs = flattenTop b
in (f down [(as . bs, id)], id)

View File

@ -25,8 +25,10 @@ import Unison.Symbol
import Unison.Type qualified as T
import Unison.Var
-- | A generated constraint
type GeneratedConstraint v loc = Constraint (UVar v loc) v loc Provenance
-- | The @Gen@ monad state
data GenState v loc = GenState
{ unifVars :: !(Set Symbol),
typeMap :: !(Map (T.Type v loc) (NonEmpty (UVar v loc))),
@ -45,6 +47,7 @@ newtype Gen v loc a = Gen
)
via State (GenState v loc)
-- | @Gen@ monad runner
run :: Gen v loc a -> GenState v loc -> (a, GenState v loc)
run (Gen ma) st0 = ma st0
@ -71,11 +74,13 @@ pushType t = do
modify \st -> st {typeMap = newTypeMap}
pure var
-- | Lookup the @UVar@ associated with a @Type@
lookupType :: Var v => T.Type v loc -> Gen v loc (Maybe (UVar v loc))
lookupType t = do
GenState {typeMap} <- get
pure (NonEmpty.head <$> Map.lookup t typeMap)
-- | Remove a @Type@ from the context
popType :: Var v => T.Type v loc -> Gen v loc ()
popType t = do
modify \st -> st {typeMap = del (typeMap st)}
@ -88,6 +93,7 @@ popType t = do
x : xs -> Just (x :| xs)
in Map.alter f t m
-- | Helper to run an action with the given @Type@ in the context
scopedType :: Var v => T.Type v loc -> (UVar v loc -> Gen v loc r) -> Gen v loc r
scopedType t m = do
s <- pushType t

View File

@ -1,3 +1,5 @@
-- | Handles solving kind constraints generated by
-- "Unison.KindInference.Generate".
module Unison.KindInference.Solve
( step,
verify,
@ -19,7 +21,7 @@ import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation)
import Unison.Debug (DebugFlag (KindInference), shouldDebug)
import Unison.KindInference.Constraint.Provenance (Provenance (..))
import Unison.KindInference.Constraint.Solved qualified as Solved
import Unison.KindInference.Constraint.StarProvenance (StarProvenance (..))
import Unison.KindInference.Constraint.TypeProvenance (TypeProvenance (..))
import Unison.KindInference.Constraint.Unsolved qualified as Unsolved
import Unison.KindInference.Error (ConstraintConflict (..), KindError (..), improveError)
import Unison.KindInference.Generate (builtinConstraints)
@ -43,10 +45,16 @@ import Unison.Syntax.TypePrinter qualified as TP
import Unison.Util.Pretty qualified as P
import Unison.Var (Var)
type UnsolvedConstraint v loc = Unsolved.Constraint (UVar v loc) v loc StarProvenance
-- | Like 'GeneratedConstraint' but the provenance of @IsType@
-- constraints may be due to kind defaulting. (See 'defaultUnconstrainedVars')
type UnsolvedConstraint v loc = Unsolved.Constraint (UVar v loc) v loc TypeProvenance
-- | We feed both @UnsolvedConstraint@ and @GeneratedConstraint@ to
-- our constraint solver, so it is useful to convert
-- @GeneratedConstraint@ into @UnsolvedConstraint@ to avoid code
-- duplication.
_Generated :: forall v loc. Prism' (UnsolvedConstraint v loc) (GeneratedConstraint v loc)
_Generated = prism' (Unsolved.starProv %~ NotDefault) \case
_Generated = prism' (Unsolved.typeProv %~ NotDefault) \case
Unsolved.IsType s l -> case l of
Default -> Nothing
NotDefault l -> Just (Unsolved.IsType s l)
@ -54,8 +62,9 @@ _Generated = prism' (Unsolved.starProv %~ NotDefault) \case
Unsolved.IsArr s l a b -> Just (Unsolved.IsArr s l a b)
Unsolved.Unify l a b -> Just (Unsolved.Unify l a b)
-- | Apply some generated constraints to a solve state, returning a
-- kind error if detected or a new solve state.
-- | This is the primary function in the exposed API. @step@ applies
-- some generated constraints to a solve state, returning a kind error
-- if detected or a new solve state.
step ::
(Var v, Ord loc, Show loc) =>
Env ->
@ -79,7 +88,7 @@ step e st cs =
Left e -> Left e
Right () -> Right finalState
-- | Default any unconstrained vars to *
-- | Default any unconstrained vars to @Type@
defaultUnconstrainedVars :: Var v => SolveState v loc -> SolveState v loc
defaultUnconstrainedVars st =
let newConstraints = foldl' phi (constraints st) (newUnifVars st)
@ -90,124 +99,12 @@ defaultUnconstrainedVars st =
Just _ -> U.Canonical ecSize d
in st {constraints = newConstraints, newUnifVars = []}
prettyConstraintD' :: Show loc => Var v => PrettyPrintEnv -> UnsolvedConstraint v loc -> P.Pretty P.ColorText
prettyConstraintD' ppe =
P.wrap . \case
Unsolved.IsType v p -> prettyUVar ppe v <> " ~ Type" <> prettyProv p
Unsolved.IsAbility v p -> prettyUVar ppe v <> " ~ Ability" <> prettyProv p
Unsolved.IsArr v p a b -> prettyUVar ppe v <> " ~ " <> prettyUVar ppe a <> " -> " <> prettyUVar ppe b <> prettyProv p
Unsolved.Unify p a b -> prettyUVar ppe a <> " ~ " <> prettyUVar ppe b <> prettyProv p
where
prettyProv x =
"[" <> P.string (show x) <> "]"
prettyConstraints :: Show loc => Var v => PrettyPrintEnv -> [UnsolvedConstraint v loc] -> P.Pretty P.ColorText
prettyConstraints ppe = P.sep "\n" . map (prettyConstraintD' ppe)
prettyUVar :: Var v => PrettyPrintEnv -> UVar v loc -> P.Pretty P.ColorText
prettyUVar ppe (UVar s t) = TP.pretty ppe t <> " :: " <> P.prettyVar s
tracePretty :: P.Pretty P.ColorText -> a -> a
tracePretty p = trace (P.toAnsiUnbroken p)
data OccCheckState v loc = OccCheckState
{ visitingSet :: Set (UVar v loc),
visitingStack :: [UVar v loc],
solvedSet :: Set (UVar v loc),
solvedConstraints :: ConstraintMap v loc,
kindErrors :: [KindError v loc]
}
markVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) CycleCheck
markVisiting x = do
OccCheckState {visitingSet, visitingStack} <- M.get
case Set.member x visitingSet of
True -> do
OccCheckState {solvedConstraints} <- M.get
let loc = case U.lookupCanon x solvedConstraints of
Just (_, _, Descriptor {descriptorConstraint = Just (Solved.IsArr (Provenance _ loc) _ _)}, _) -> loc
_ -> error "cycle without IsArr constraint"
addError (CycleDetected loc x solvedConstraints)
pure Cycle
False -> do
M.modify \st ->
st
{ visitingSet = Set.insert x visitingSet,
visitingStack = x : visitingStack
}
pure NoCycle
unmarkVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) ()
unmarkVisiting x = M.modify \st ->
st
{ visitingSet = Set.delete x (visitingSet st),
visitingStack = tail (visitingStack st),
solvedSet = Set.insert x (solvedSet st)
}
addError :: KindError v loc -> M.State (OccCheckState v loc) ()
addError ke = M.modify \st -> st {kindErrors = ke : kindErrors st}
isSolved :: Var v => UVar v loc -> M.State (OccCheckState v loc) Bool
isSolved x = do
OccCheckState {solvedSet} <- M.get
pure $ Set.member x solvedSet
data CycleCheck
= Cycle
| NoCycle
-- | occurence check and report any errors
occCheck ::
forall v loc.
Var v =>
ConstraintMap v loc ->
Either (NonEmpty (KindError v loc)) (ConstraintMap v loc)
occCheck constraints0 =
let go ::
[(UVar v loc)] ->
M.State (OccCheckState v loc) ()
go = \case
[] -> pure ()
u : us -> do
isSolved u >>= \case
True -> go us
False -> do
markVisiting u >>= \case
Cycle -> pure ()
NoCycle -> do
st@OccCheckState {solvedConstraints} <- M.get
let handleNothing = error "impossible"
handleJust _canonK ecSize d = case descriptorConstraint d of
Nothing -> ([], U.Canonical ecSize d {descriptorConstraint = Just $ Solved.IsType Default})
Just v ->
let descendants = case v of
Solved.IsType _ -> []
Solved.IsAbility _ -> []
Solved.IsArr _ a b -> [a, b]
in (descendants, U.Canonical ecSize d)
let (descendants, solvedConstraints') = U.alterF u handleNothing handleJust solvedConstraints
M.put st {solvedConstraints = solvedConstraints'}
go descendants
unmarkVisiting u
go us
OccCheckState {solvedConstraints, kindErrors} =
M.execState
(go (U.keys constraints0))
OccCheckState
{ visitingSet = Set.empty,
visitingStack = [],
solvedSet = Set.empty,
solvedConstraints = constraints0,
kindErrors = []
}
in case kindErrors of
[] -> Right solvedConstraints
e : es -> Left (e :| es)
-- | loop through the constraints, eliminating constraints until we
-- have some set that cannot be reduced
-- | Loop through the constraints, eliminating constraints until we
-- have some set that cannot be reduced. There isn't any strong reason
-- to avoid halting at the first error -- we don't have constraints
-- that error but may succeed with more information or anything. The
-- idea of looping was to resolve as much as possible so that the
-- error message can be as filled out as possible.
reduce ::
forall v loc.
(Show loc, Var v, Ord loc) =>
@ -224,36 +121,50 @@ reduce cs0 = dbg "reduce" cs0 (go False [])
Right () -> error "impossible"
c : cs ->
addConstraint c >>= \case
-- If an error occurs then push it back onto the unsolved
-- stack
Left _ -> go b (c : acc) cs
-- Signal that we solved something on this pass (by passing
-- @True@) and continue
Right () -> go True acc cs
-- | tracing helper
dbg ::
forall a.
-- | A hanging prefix or header
P.Pretty P.ColorText ->
-- | The constraints to print
[GeneratedConstraint v loc] ->
([GeneratedConstraint v loc] -> Solve v loc a) ->
Solve v loc a
dbg hdr cs f =
case shouldDebug KindInference of
True -> do
ppe <- asks prettyPrintEnv
tracePretty (P.hang (P.bold hdr) (prettyConstraints ppe (map (review _Generated) cs))) (f cs)
False -> f cs
dbg = traceApp \ppe cs -> prettyConstraints ppe (map (review _Generated) cs)
-- | Like @dbg@, but for a single constraint
dbgSingle ::
forall a.
P.Pretty P.ColorText ->
GeneratedConstraint v loc ->
(GeneratedConstraint v loc -> Solve v loc a) ->
Solve v loc a
dbgSingle hdr c f =
dbgSingle = traceApp \ppe c -> prettyConstraintD' ppe (review _Generated c)
-- | A helper for @dbg*@
traceApp ::
forall a b.
(PrettyPrintEnv -> a -> P.Pretty P.ColorText) ->
P.Pretty P.ColorText ->
a ->
(a -> Solve v loc b) ->
Solve v loc b
traceApp prettyA hdr a ab =
case shouldDebug KindInference of
False -> ab a
True -> do
ppe <- asks prettyPrintEnv
tracePretty (P.hang (P.bold hdr) (prettyConstraintD' ppe (review _Generated c))) (f c)
False -> f c
tracePretty (P.hang (P.bold hdr) (prettyA ppe a)) (ab a)
-- | Add a single constraint, returning an error if there is a
-- contradictory constraint
-- contradictory constraint.
addConstraint ::
forall v loc.
Ord loc =>
@ -284,6 +195,9 @@ addConstraint constraint = do
processPostAction . fmap concat =<< runExceptT ((traverse (ExceptT . addConstraint') (x : xs)))
processPostAction =<< addConstraint' (review _Generated constraint)
-- | Decompose the unsolved constraint into implied constraints,
-- returning a constraint conflict if the constraint cannot be
-- satisfied.
addConstraint' ::
forall v loc.
Ord loc =>
@ -291,11 +205,21 @@ addConstraint' ::
UnsolvedConstraint v loc ->
Solve v loc (Either (ConstraintConflict v loc) [UnsolvedConstraint v loc])
addConstraint' = \case
-- @IsAbility@ and @IsType@ constraints are very straightforward,
-- they are satisfied of the constraint already exists or no
-- constraint exists.
Unsolved.IsAbility s p0 -> do
handleConstraint s (Solved.IsAbility p0) \case
Solved.IsAbility _ -> Just (Solved.IsAbility p0, [])
_ -> Nothing
Unsolved.IsType s p0 -> do
handleConstraint s (Solved.IsType p0) \case
Solved.IsType _ -> Just (Solved.IsType p0, [])
_ -> Nothing
Unsolved.IsArr s p0 a b -> do
-- If an @IsArr@ constraint is already present then we need to unify
-- the left and right hand sides of the input constraints and the
-- existing constraints, so we return those as implied constraints.
handleConstraint s (Solved.IsArr p0 a b) \case
Solved.IsArr _p1 c d ->
let implied =
@ -305,18 +229,23 @@ addConstraint' = \case
prov = p0
in Just (Solved.IsArr prov a b, implied)
_ -> Nothing
Unsolved.IsType s p0 -> do
handleConstraint s (Solved.IsType p0) \case
Solved.IsType _ -> Just (Solved.IsType p0, [])
_ -> Nothing
Unsolved.Unify l a b -> Right <$> union l a b
where
-- | A helper for solving various @Is*@ constraints. In each case
-- we want to lookup any existing constraints on the constrained
-- variable. If none exist then we simply add the new constraint,
-- as it can't conflict with anything. If there is an existing
-- constraint we defer to the passed in function.
handleConstraint ::
-- | The variable mentioned in the input constraint
UVar v loc ->
-- | The new constraint
Solved.Constraint (UVar v loc) v loc ->
-- | How to handle the an existing constraint
( Solved.Constraint (UVar v loc) v loc ->
Maybe (Solved.Constraint (UVar v loc) v loc, [UnsolvedConstraint v loc])
) ->
-- | An error or a list of implied constraints
Solve v loc (Either (ConstraintConflict v loc) [UnsolvedConstraint v loc])
handleConstraint s solvedConstraint phi = do
st@SolveState {constraints} <- M.get
@ -384,6 +313,16 @@ verify st =
Left e -> Left e
Right m -> Right st {constraints = m}
--------------------------------------------------------------------------------
-- @SolveState@ initialization
--------------------------------------------------------------------------------
-- | Build the starting 'SolveState': run 'initializeState' (which
-- installs the builtin constraints) against the empty state and keep
-- the resulting state.
initialState :: forall v loc. (BuiltinAnnotation loc, Show loc, Ord loc, Var v) => Env -> SolveState v loc
initialState env =
  let ((), finalState) = run env emptyState initializeState
   in finalState
initializeState :: forall v loc. (BuiltinAnnotation loc, Ord loc, Show loc, Var v) => Solve v loc ()
initializeState = assertGen do
builtinConstraints
@ -399,10 +338,129 @@ assertGen gen = do
st <- step env st cs
verify st
case comp of
Left _ -> error "[assertGen]: constraint failure in among builtin constraints"
Left _ -> error "[assertGen]: constraint failure in builtin constraints"
Right st -> M.put st
initialState :: forall v loc. (BuiltinAnnotation loc, Show loc, Ord loc, Var v) => Env -> SolveState v loc
initialState env =
let ((), finalState) = run env emptyState initializeState
in finalState
--------------------------------------------------------------------------------
-- Occurrence check and helpers
--------------------------------------------------------------------------------
-- | Occurrence check: report an error for any uvar that (transitively)
-- appears within its own @IsArr@ constraint, and default any
-- unconstrained uvar encountered along the way to kind @Type@.
-- Returns the (possibly defaulted) constraint map, or the accumulated
-- kind errors if any cycle was detected.
occCheck ::
  forall v loc.
  Var v =>
  ConstraintMap v loc ->
  Either (NonEmpty (KindError v loc)) (ConstraintMap v loc)
occCheck constraints0 =
  let -- Depth-first traversal over the uvars still to be checked.
      go ::
        [(UVar v loc)] ->
        M.State (OccCheckState v loc) ()
      go = \case
        [] -> pure ()
        u : us -> do
          isSolved u >>= \case
            -- already fully processed; skip it
            True -> go us
            False -> do
              markVisiting u >>= \case
                -- a cycle was found and reported by markVisiting;
                -- don't descend through this var again
                Cycle -> pure ()
                NoCycle -> do
                  st@OccCheckState {solvedConstraints} <- M.get
                  -- u came from the map's own keys, so a missing
                  -- entry would indicate a broken invariant
                  let handleNothing = error "impossible"
                      handleJust _canonK ecSize d = case descriptorConstraint d of
                        -- unconstrained variables default to kind Type
                        Nothing -> ([], U.Canonical ecSize d {descriptorConstraint = Just $ Solved.IsType Default})
                        Just v ->
                          let descendants = case v of
                                Solved.IsType _ -> []
                                Solved.IsAbility _ -> []
                                -- only arrow constraints have children to recurse into
                                Solved.IsArr _ a b -> [a, b]
                           in (descendants, U.Canonical ecSize d)
                  let (descendants, solvedConstraints') = U.alterF u handleNothing handleJust solvedConstraints
                  M.put st {solvedConstraints = solvedConstraints'}
                  -- visit children before unmarking, so any path that
                  -- leads back to u is recognized as a cycle
                  go descendants
                  unmarkVisiting u
              go us
      OccCheckState {solvedConstraints, kindErrors} =
        M.execState
          (go (U.keys constraints0))
          OccCheckState
            { visitingSet = Set.empty,
              visitingStack = [],
              solvedSet = Set.empty,
              solvedConstraints = constraints0,
              kindErrors = []
            }
   in case kindErrors of
        [] -> Right solvedConstraints
        e : es -> Left (e :| es)
-- | State threaded through the occurrence check ('occCheck').
data OccCheckState v loc = OccCheckState
  { -- uvars on the current DFS path (fast membership test)
    visitingSet :: Set (UVar v loc),
    -- uvars on the current DFS path, most recently visited first
    visitingStack :: [UVar v loc],
    -- uvars that have been completely processed
    solvedSet :: Set (UVar v loc),
    -- the constraint map being checked (and defaulted)
    solvedConstraints :: ConstraintMap v loc,
    -- errors accumulated so far, most recent first
    kindErrors :: [KindError v loc]
  }
-- | Mark @x@ as currently being visited by the occurrence-check
-- traversal. If @x@ is already in the visiting set we have found a
-- cycle: a @CycleDetected@ error is recorded and 'Cycle' is returned
-- so the caller stops descending; otherwise @x@ is pushed onto the
-- visiting structures and 'NoCycle' is returned.
markVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) CycleCheck
markVisiting x = do
  OccCheckState {visitingSet, visitingStack} <- M.get
  case Set.member x visitingSet of
    True -> do
      OccCheckState {solvedConstraints} <- M.get
      -- A cycle is only reachable through an @IsArr@ constraint's
      -- arguments, so its provenance supplies the error location;
      -- anything else is a broken invariant.
      let loc = case U.lookupCanon x solvedConstraints of
            Just (_, _, Descriptor {descriptorConstraint = Just (Solved.IsArr (Provenance _ loc) _ _)}, _) -> loc
            _ -> error "cycle without IsArr constraint"
      addError (CycleDetected loc x solvedConstraints)
      pure Cycle
    False -> do
      M.modify \st ->
        st
          { visitingSet = Set.insert x visitingSet,
            visitingStack = x : visitingStack
          }
      pure NoCycle
-- | Pop @x@ off the visiting structures and record it as fully
-- solved.
--
-- NOTE(review): @tail@ is partial and the stack's head is assumed to
-- be @x@ — presumably this is only called after a matching
-- 'markVisiting' that returned 'NoCycle'; confirm against callers.
unmarkVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) ()
unmarkVisiting x = M.modify \st ->
  st
    { visitingSet = Set.delete x (visitingSet st),
      visitingStack = tail (visitingStack st),
      solvedSet = Set.insert x (solvedSet st)
    }
-- | Record a kind error encountered during the occurrence check.
addError :: KindError v loc -> M.State (OccCheckState v loc) ()
addError err = M.modify (\state -> state {kindErrors = err : kindErrors state})
-- | Has this uvar already been fully processed by the occurrence
-- check?
isSolved :: Var v => UVar v loc -> M.State (OccCheckState v loc) Bool
isSolved x = do
  st <- M.get
  pure (Set.member x (solvedSet st))
-- | Result of 'markVisiting': whether adding a uvar to the current
-- traversal path closed a cycle.
data CycleCheck
  = Cycle
  | NoCycle
--------------------------------------------------------------------------------
-- Debug output helpers
--------------------------------------------------------------------------------
-- | Render a single unsolved constraint for debug output.
prettyConstraintD' :: Show loc => Var v => PrettyPrintEnv -> UnsolvedConstraint v loc -> P.Pretty P.ColorText
prettyConstraintD' ppe =
  P.wrap . \case
    Unsolved.IsType v p -> prettyUVar ppe v <> " ~ Type" <> prettyProv p
    Unsolved.IsAbility v p -> prettyUVar ppe v <> " ~ Ability" <> prettyProv p
    Unsolved.IsArr v p a b -> prettyUVar ppe v <> " ~ " <> prettyUVar ppe a <> " -> " <> prettyUVar ppe b <> prettyProv p
    Unsolved.Unify p a b -> prettyUVar ppe a <> " ~ " <> prettyUVar ppe b <> prettyProv p
  where
    -- show the provenance in brackets after the constraint
    prettyProv x =
      "[" <> P.string (show x) <> "]"
-- | Render a list of unsolved constraints, one per line.
prettyConstraints :: Show loc => Var v => PrettyPrintEnv -> [UnsolvedConstraint v loc] -> P.Pretty P.ColorText
prettyConstraints ppe cs = P.sep "\n" (fmap (prettyConstraintD' ppe) cs)
-- | Render a unification variable as @type :: var@ for debug output.
prettyUVar :: Var v => PrettyPrintEnv -> UVar v loc -> P.Pretty P.ColorText
prettyUVar ppe (UVar s t) = TP.pretty ppe t <> " :: " <> P.prettyVar s
-- | Like 'trace', but renders a pretty colour-text document.
tracePretty :: P.Pretty P.ColorText -> a -> a
tracePretty p x = trace (P.toAnsiUnbroken p) x

View File

@ -35,6 +35,18 @@ data Env = Env {prettyPrintEnv :: PrettyPrintEnv}
type ConstraintMap v loc = U.UFMap (UVar v loc) (Descriptor v loc)
-- | The @SolveState@ holds all kind constraints gathered for each
-- type. For example, after processing data and effect decls the
-- @typeMap@ will hold entries for every decl, and looking up the
-- corresponding @UVar@ in @constraints@ will return its kind.
--
-- The other fields, @unifVars@ and @newUnifVars@, are relevant when
-- interleaving constraint generation with solving. Constraint
-- generation needs to create fresh unification variables, so it needs
-- the set of bound unification variables from
-- @unifVars@. @newUnifVars@ holds the uvars that are candidates for
-- kind defaulting (see
-- 'Unison.KindInference.Solve.defaultUnconstrainedVars').
data SolveState v loc = SolveState
{ unifVars :: !(Set Symbol),
newUnifVars :: [UVar v loc],
@ -42,6 +54,7 @@ data SolveState v loc = SolveState
typeMap :: !(Map (T.Type v loc) (NonEmpty (UVar v loc)))
}
-- | Constraints associated with a unification variable
data Descriptor v loc = Descriptor
{ descriptorConstraint :: Maybe (Constraint (UVar v loc) v loc)
}
@ -57,6 +70,7 @@ newtype Solve v loc a = Solve {unSolve :: Env -> SolveState v loc -> (a, SolveSt
)
via M.ReaderT Env (M.State (SolveState v loc))
-- | Helper for interleaving constraint generation and solving
genStateL :: Lens' (SolveState v loc) (Gen.GenState v loc)
genStateL f st =
( \genState ->
@ -72,6 +86,7 @@ genStateL f st =
newVars = []
}
-- | Interleave constraint generation into constraint solving
runGen :: Var v => Gen v loc a -> Solve v loc a
runGen gena = do
st <- M.get
@ -85,15 +100,20 @@ runGen gena = do
M.modify \st -> st {newUnifVars = vs ++ newUnifVars st}
pure cs
-- | Add a unification variable to the constraint mapping with no
-- constraints. This is done on uvars created during constraint
-- generation to initialize the new uvars (see 'runGen').
addUnconstrainedVar :: Var v => UVar v loc -> Solve v loc ()
addUnconstrainedVar uvar = do
  st@SolveState {constraints} <- M.get
  let constraints' = U.insert uvar Descriptor {descriptorConstraint = Nothing} constraints
  M.put st {constraints = constraints'}
-- | Runner for the @Solve@ monad
run :: Env -> SolveState v loc -> Solve v loc a -> (a, SolveState v loc)
run env st0 solve = unSolve solve env st0
-- | Initial solve state
emptyState :: SolveState v loc
emptyState =
SolveState
@ -103,6 +123,7 @@ emptyState =
typeMap = M.empty
}
-- | Lookup the constraints associated with a unification variable
find :: Var v => UVar v loc -> Solve v loc (Maybe (Constraint (UVar v loc) v loc))
find k = do
st@SolveState {constraints} <- M.get

View File

@ -16,6 +16,7 @@ where
import Control.Lens ((%~))
import Control.Lens.Tuple (_1, _2, _3)
import Data.Foldable qualified as Foldable
import Data.Function (on)
import Data.List (find, intersperse, sortBy)
import Data.List.Extra (nubOrd)
@ -47,7 +48,7 @@ import Unison.Prelude
import Unison.PrettyPrintEnv qualified as PPE
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.Reference qualified as R
import Unison.Referent (Referent, toReference, pattern Ref)
import Unison.Referent (Referent, pattern Ref)
import Unison.Result (Note (..))
import Unison.Result qualified as Result
import Unison.Settings qualified as Settings
@ -625,12 +626,7 @@ renderTypeError e env src = case e of
foldr
sep
id
( sortBy
( comparing length <> compare
`on` (Text.splitOn "." . C.suggestionName)
)
suggestions
)
(sortBy (comparing length <> compare `on` (Name.segments . C.suggestionName)) suggestions)
([], [], [])
sep s@(C.Suggestion _ _ _ match) r =
case match of
@ -1186,19 +1182,16 @@ renderType env f t = renderType0 env f (0 :: Int) (cleanup t)
where
go = renderType0 env f
renderSuggestion ::
(IsString s, Semigroup s, Var v) => Env -> C.Suggestion v loc -> s
renderSuggestion :: (IsString s, Semigroup s, Var v) => Env -> C.Suggestion v loc -> s
renderSuggestion env sug =
renderTerm
env
( case C.suggestionReplacement sug of
Right ref -> Term.ref () (toReference ref)
Left v -> Term.var () v
)
renderTerm env term
<> " : "
<> renderType'
env
(C.suggestionType sug)
<> renderType' env (C.suggestionType sug)
where
term =
case C.suggestionReplacement sug of
C.ReplacementRef ref -> Term.fromReferent () ref
C.ReplacementVar v -> Term.var () v
spaces :: (IsString a, Monoid a) => (b -> a) -> [b] -> a
spaces = intercalateMap " "
@ -1490,20 +1483,50 @@ renderParseErrors s = \case
"",
excerpt
]
L.Opaque msg -> style ErrorSite msg
P.TrivialError errOffset unexpected expected ->
let (src, ranges) = case unexpected of
Just (P.Tokens (toList -> ts)) -> case ts of
[] -> (mempty, [])
_ ->
let rs = rangeForToken <$> ts
in (showSource s $ (\r -> (r, ErrorSite)) <$> rs, rs)
_ -> mempty
-- Same error that we just pattern matched on, but with a different error component (here Void) - we need one
-- with a ShowErrorComponent instance, which our error type doesn't have.
sameErr :: P.ParseError Parser.Input Void
sameErr = P.TrivialError errOffset unexpected expected
in [(fromString (P.parseErrorPretty sameErr) <> src, ranges)]
L.UnexpectedTokens msg ->
Pr.lines
[ "I got confused here:",
"",
excerpt,
"",
style ErrorSite msg
]
P.TrivialError _errOffset unexpected expected ->
let unexpectedTokens :: Maybe (Nel.NonEmpty (L.Token L.Lexeme))
unexpectedTokenStrs :: Set String
(unexpectedTokens, unexpectedTokenStrs) = case unexpected of
Just (P.Tokens ts) ->
Foldable.toList ts
& fmap (L.displayLexeme . L.payload)
& Set.fromList
& (Just ts,)
Just (P.Label ts) -> (mempty, Set.singleton $ Foldable.toList ts)
Just (P.EndOfInput) -> (mempty, Set.singleton "end of input")
Nothing -> (mempty, mempty)
expectedTokenStrs :: Set String
expectedTokenStrs =
expected & foldMap \case
(P.Tokens ts) ->
Foldable.toList ts
& fmap (L.displayLexeme . L.payload)
& Set.fromList
(P.Label ts) -> Set.singleton $ Foldable.toList ts
(P.EndOfInput) -> Set.singleton "end of input"
ranges = case unexpectedTokens of
Nothing -> []
Just ts -> rangeForToken <$> Foldable.toList ts
excerpt = showSource s ((\r -> (r, ErrorSite)) <$> ranges)
msg = L.formatTrivialError unexpectedTokenStrs expectedTokenStrs
in [ ( Pr.lines
[ "I got confused here:",
"",
excerpt,
"",
style ErrorSite msg
],
ranges
)
]
P.FancyError _sp fancyErrors ->
(go' <$> Set.toList fancyErrors)
where

View File

@ -234,8 +234,27 @@ enclose keep rec t@(Handle' h body)
lamb
| null evs = lam' a [fv] lbody
| otherwise = lam' a evs lbody
enclose keep rec t@(Match' s0 cs0) = Just $ match a s cs
where
a = ABT.annotation t
s = rec keep s0
cs = encloseCase a keep rec <$> cs0
enclose _ _ _ = Nothing
-- | Apply the enclosing recursion to a single match case: the
-- pattern-bound variables are stripped, removed from the
-- keep set for the recursive call, and then re-abstracted.
encloseCase ::
  (Var v, Monoid a) =>
  a ->
  Set v ->
  (Set v -> Term v a -> Term v a) ->
  MatchCase a (Term v a) ->
  MatchCase a (Term v a)
encloseCase a keep rec0 (MatchCase pats guard body) =
  -- the guard (when present) and the body are processed identically
  MatchCase pats (rec <$> guard) (rec body)
  where
    rec (ABT.AbsN' vs bd) =
      ABT.absChain' ((,) a <$> vs) $
        rec0 (keep `Set.difference` Set.fromList vs) bd
newtype Prefix v x = Pfx (Map v [v]) deriving (Show)
instance Functor (Prefix v) where

View File

@ -35,7 +35,7 @@ import Control.Monad.Catch (MonadCatch)
import Control.Monad.Primitive qualified as PA
import Control.Monad.Reader (ReaderT (..), ask, runReaderT)
import Control.Monad.State.Strict (State, execState, modify)
import Crypto.Error (CryptoError(..), CryptoFailable(..))
import Crypto.Error (CryptoError (..), CryptoFailable (..))
import Crypto.Hash qualified as Hash
import Crypto.MAC.HMAC qualified as HMAC
import Crypto.PubKey.Ed25519 qualified as Ed25519
@ -2937,10 +2937,12 @@ declareForeigns = do
in pure . Bytes.fromArray . hmac alg $ serializeValueLazy x
declareForeign Untracked "crypto.Ed25519.sign.impl" boxBoxBoxToEFBox
. mkForeign $ pure . signEd25519Wrapper
. mkForeign
$ pure . signEd25519Wrapper
declareForeign Untracked "crypto.Ed25519.verify.impl" boxBoxBoxToEFBool
. mkForeign $ pure . verifyEd25519Wrapper
. mkForeign
$ pure . verifyEd25519Wrapper
let catchAll :: (MonadCatch m, MonadIO m, NFData a) => m a -> m (Either Util.Text.Text a)
catchAll e = do
@ -3215,7 +3217,9 @@ declareForeigns = do
_ -> die "Text.patterns.notCharIn: non-character closure"
evaluate . TPat.cpattern . TPat.Char . TPat.Not $ TPat.CharSet cs
declareForeign Untracked "Pattern.many" boxDirect . mkForeign $
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Many p
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Many False p
declareForeign Untracked "Pattern.many.corrected" boxDirect . mkForeign $
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Many True p
declareForeign Untracked "Pattern.capture" boxDirect . mkForeign $
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Capture p
declareForeign Untracked "Pattern.captureAs" boxBoxDirect . mkForeign $
@ -3540,15 +3544,16 @@ hostPreference (Just host) = SYS.Host $ Util.Text.unpack host
signEd25519Wrapper ::
(Bytes.Bytes, Bytes.Bytes, Bytes.Bytes) -> Either Failure Bytes.Bytes
signEd25519Wrapper (secret0, public0, msg0) = case validated of
CryptoFailed err ->
Left (Failure Ty.cryptoFailureRef (errMsg err) unitValue)
CryptoPassed (secret, public) ->
Right . Bytes.fromArray $ Ed25519.sign secret public msg
CryptoFailed err ->
Left (Failure Ty.cryptoFailureRef (errMsg err) unitValue)
CryptoPassed (secret, public) ->
Right . Bytes.fromArray $ Ed25519.sign secret public msg
where
msg = Bytes.toArray msg0 :: ByteString
validated =
(,) <$> Ed25519.secretKey (Bytes.toArray secret0 :: ByteString)
<*> Ed25519.publicKey (Bytes.toArray public0 :: ByteString)
(,)
<$> Ed25519.secretKey (Bytes.toArray secret0 :: ByteString)
<*> Ed25519.publicKey (Bytes.toArray public0 :: ByteString)
errMsg CryptoError_PublicKeySizeInvalid =
"ed25519: Public key size invalid"
@ -3561,15 +3566,16 @@ signEd25519Wrapper (secret0, public0, msg0) = case validated of
verifyEd25519Wrapper ::
(Bytes.Bytes, Bytes.Bytes, Bytes.Bytes) -> Either Failure Bool
verifyEd25519Wrapper (public0, msg0, sig0) = case validated of
CryptoFailed err ->
Left $ Failure Ty.cryptoFailureRef (errMsg err) unitValue
CryptoPassed (public, sig) ->
Right $ Ed25519.verify public msg sig
CryptoFailed err ->
Left $ Failure Ty.cryptoFailureRef (errMsg err) unitValue
CryptoPassed (public, sig) ->
Right $ Ed25519.verify public msg sig
where
msg = Bytes.toArray msg0 :: ByteString
validated =
(,) <$> Ed25519.publicKey (Bytes.toArray public0 :: ByteString)
<*> Ed25519.signature (Bytes.toArray sig0 :: ByteString)
(,)
<$> Ed25519.publicKey (Bytes.toArray public0 :: ByteString)
<*> Ed25519.signature (Bytes.toArray sig0 :: ByteString)
errMsg CryptoError_PublicKeySizeInvalid =
"ed25519: Public key size invalid"

View File

@ -18,5 +18,8 @@ instance Exception RuntimeExn
-- | Throw a 'PE' runtime exception (in IO) built from a plain string.
die :: (HasCallStack) => String -> IO a
die = throwIO . PE callStack . P.lit . fromString

-- | Throw a 'PE' runtime exception (in IO) from an already-formatted
-- pretty document.
dieP :: HasCallStack => P.Pretty P.ColorText -> IO a
dieP = throwIO . PE callStack

-- | Like 'die', but usable from pure code via imprecise 'throw'.
exn :: (HasCallStack) => String -> a
exn = throw . PE callStack . P.lit . fromString

View File

@ -21,6 +21,7 @@ module Unison.Runtime.Interface
where
import Control.Concurrent.STM as STM
import Control.Exception (throwIO)
import Control.Monad
import Data.Binary.Get (runGetOrFail)
-- import Data.Bits (shiftL)
@ -44,10 +45,23 @@ import Data.Set as Set
)
import Data.Set qualified as Set
import Data.Text (isPrefixOf, unpack)
import GHC.IO.Exception (IOErrorType (NoSuchThing, OtherError, PermissionDenied), IOException (ioe_description, ioe_type))
import GHC.Stack (callStack)
import System.Directory
( XdgDirectory (XdgCache),
createDirectoryIfMissing,
getXdgDirectory,
)
import System.Exit (ExitCode (..))
import System.FilePath ((<.>), (</>))
import System.Process
( CreateProcess (..),
( CmdSpec (RawCommand, ShellCommand),
CreateProcess (..),
StdStream (..),
callProcess,
proc,
readCreateProcessWithExitCode,
shell,
waitForProcess,
withCreateProcess,
)
@ -349,6 +363,7 @@ performRehash rgrp0 ctx =
irs = remap $ intermedRemap ctx
f b r
| not b,
r `Map.notMember` rgrp0,
r <- Map.findWithDefault r r frs,
Just r <- Map.lookup r irs =
r
@ -433,18 +448,19 @@ decompileCtx crs ctx = decompile ib $ backReferenceTm crs fr ir dt
dt = decompTm ctx
nativeEval ::
FilePath ->
IORef EvalCtx ->
CodeLookup Symbol IO () ->
PrettyPrintEnv ->
Term Symbol ->
IO (Either Error ([Error], Term Symbol))
nativeEval ctxVar cl ppe tm = catchInternalErrors $ do
nativeEval executable ctxVar cl ppe tm = catchInternalErrors $ do
ctx <- readIORef ctxVar
(tyrs, tmrs) <- collectDeps cl tm
(ctx, codes) <- loadDeps cl ppe ctx tyrs tmrs
(ctx, tcodes, base) <- prepareEvaluation ppe tm ctx
writeIORef ctxVar ctx
nativeEvalInContext ppe ctx (codes ++ tcodes) base
nativeEvalInContext executable ppe ctx (codes ++ tcodes) base
interpEval ::
ActiveThreads ->
@ -465,19 +481,144 @@ interpEval activeThreads cleanupThreads ctxVar cl ppe tm =
evalInContext ppe ctx activeThreads initw
`UnliftIO.finally` cleanupThreads
-- | Run @cmd@ (feeding it empty stdin) and do nothing if it exits
-- successfully. If it exits non-zero, or cannot be started at all,
-- die with the message produced by @err@ from the command's spec and
-- the failure details.
ensureExists :: HasCallStack => CreateProcess -> (CmdSpec -> Either (Int, String, String) IOException -> Pretty ColorText) -> IO ()
ensureExists cmd err =
  ccall >>= \case
    Nothing -> pure ()
    Just failure -> dieP $ err (cmdspec cmd) failure
  where
    -- Nothing on success; Just (exit code, stdout, stderr) on failure
    call =
      readCreateProcessWithExitCode cmd "" >>= \case
        (ExitSuccess, _stdout, _stderr) -> pure Nothing
        (ExitFailure exitCode, stdout, stderr) -> pure (Just (Left (exitCode, stdout, stderr)))
    -- also capture IO exceptions (e.g. executable not found at all)
    ccall = call `UnliftIO.catch` \(e :: IOException) -> pure . Just $ Right e
-- | Verify that the native runtime executable can be invoked (by
-- asking it for @--help@), dying with a descriptive message otherwise.
ensureRuntimeExists :: HasCallStack => FilePath -> IO ()
ensureRuntimeExists executable = ensureExists (proc executable ["--help"]) runtimeErrMsg
-- | Verify that Racket's @raco@ tool can be invoked, dying with a
-- descriptive message otherwise.
ensureRacoExists :: HasCallStack => IO ()
ensureRacoExists = ensureExists cmd racoErrMsg
  where
    cmd = shell "raco help"
-- | Render a command spec the way it would be typed at a shell.
prettyCmdSpec :: CmdSpec -> Pretty ColorText
prettyCmdSpec = \case
  ShellCommand string -> fromString string
  System.Process.RawCommand filePath args ->
    P.sep " " (fromString filePath : Prelude.map fromString args)
-- | Render the failure detail from a subprocess call: either the IO
-- exception that prevented the call, or the non-zero exit code along
-- with any captured output.
prettyCallError :: Either (Int, String, String) IOException -> Pretty ColorText
prettyCallError = \case
  Right ex ->
    P.lines
      [ P.wrap . fromString $ "The error type was: '" ++ show (ioe_type ex) ++ "', and the message is:",
        "",
        P.indentN 2 (fromString (ioe_description ex))
      ]
  Left (errCode, stdout, stderr) ->
    -- NOTE: adjacent pretty literals are not separated automatically
    -- (hence the explicit space in " but there was no output."), so
    -- joining spaces must be written out: previously the code and the
    -- "and the output was:" continuation ran together.
    let prettyExitCode = "The exit code was " <> fromString (show errCode)
     in if null stdout && null stderr
          then P.wrap $ prettyExitCode <> " but there was no output."
          else
            P.lines
              [ P.wrap $ prettyExitCode <> " and the output was:",
                "",
                -- show whichever stream(s) had output, separated by a
                -- horizontal rule when both did
                P.indentN
                  2
                  if null stdout
                    then fromString stderr
                    else
                      if null stderr
                        then fromString stdout
                        else P.lines $ [fromString stdout, "", "---", "", fromString stderr]
              ]
-- https://hackage.haskell.org/package/process-1.6.18.0/docs/System-Process.html#t:CreateProcess
-- https://hackage.haskell.org/package/base-4.19.0.0/docs/GHC-IO-Exception.html#t:IOError
-- https://hackage.haskell.org/package/base-4.19.0.0/docs/GHC-IO-Exception.html#t:IOErrorType
-- | Explain a failure to invoke the Unison native runtime executable,
-- tailoring the message to the class of IO error where possible
-- (missing executable, permission denied, or anything else).
runtimeErrMsg :: CmdSpec -> Either (Int, String, String) IOException -> Pretty ColorText
runtimeErrMsg c error =
  case error of
    Right (ioe_type -> NoSuchThing) ->
      P.lines
        [ P.wrap "I couldn't find the Unison native runtime. I tried to start it with:",
          "",
          P.indentN 2 $ prettyCmdSpec c,
          "",
          P.wrap
            "If that doesn't look right, you can use the `--runtime-path` command line \
            \argument to specify the correct path for the executable."
        ]
    Right (ioe_type -> PermissionDenied) ->
      P.lines
        [ P.wrap
            "I got a 'Permission Denied' error when trying to start the \
            \Unison native runtime with:",
          "",
          P.indentN 2 $ prettyCmdSpec c,
          "",
          P.wrap
            -- fixed typo in the user-facing message ("permisssions")
            "Please check the permissions (e.g. check that the directory is accessible, \
            \and that the program is marked executable).",
          "",
          P.wrap
            "If it looks like I'm calling the wrong executable altogether, you can use the \
            \`--runtime-path` command line argument to specify the correct one."
        ]
    _ ->
      P.lines
        [ P.wrap
            "I got an error when starting the Unison native runtime using:",
          "",
          P.indentN 2 (prettyCmdSpec c),
          "",
          prettyCallError error
        ]
-- | Explain a failure to invoke Racket's @raco@ tool, distinguishing
-- an unexpected error while running it from it being missing or
-- uncallable entirely.
racoErrMsg :: CmdSpec -> Either (Int, String, String) IOException -> Pretty ColorText
racoErrMsg c = \case
  Right (ioe_type -> e@OtherError) ->
    P.lines
      [ P.wrap . fromString $
          "Sorry, I got an error of type '"
            ++ show e
            ++ "' when I ran `raco`, \
               \and I'm not sure what to do about it.",
        "",
        "For debugging purposes, the full command was:",
        "",
        P.indentN 2 (prettyCmdSpec c)
      ]
  -- any other failure is most likely Racket not being installed
  error ->
    P.lines
      [ P.wrap
          "I can't seem to call `raco`. Please ensure Racket \
          \is installed.",
        "",
        prettyCallError error,
        "",
        "See",
        "",
        P.indentN 2 "https://download.racket-lang.org/",
        "",
        "for how to install Racket manually."
      ]
nativeCompile ::
Text ->
FilePath ->
IORef EvalCtx ->
CodeLookup Symbol IO () ->
PrettyPrintEnv ->
Reference ->
FilePath ->
IO (Maybe Error)
nativeCompile _version ctxVar cl ppe base path = tryM $ do
nativeCompile executable ctxVar cl ppe base path = tryM $ do
ctx <- readIORef ctxVar
(tyrs, tmrs) <- collectRefDeps cl base
(_, codes) <- loadDeps cl ppe ctx tyrs tmrs
nativeCompileCodes codes base path
(ctx, codes) <- loadDeps cl ppe ctx tyrs tmrs
Just ibase <- pure $ baseToIntermed ctx base
nativeCompileCodes executable codes ibase path
interpCompile ::
Text ->
@ -617,7 +758,9 @@ prepareEvaluation ppe tm ctx = do
pure (backrefAdd rbkr ctx', rgrp, rmn)
where
(rmn0, frem, rgrp0, rbkr) = intermediateTerm ppe ctx tm
int b r = if b then r else toIntermed ctx r
int b r
| b || Map.member r rgrp0 = r
| otherwise = toIntermed ctx r
(ctx', rrefs, rgrp) =
performRehash
((fmap . overGroupLinks) int rgrp0)
@ -647,9 +790,9 @@ backReferenceTm ws frs irs dcm c i = do
bs <- Map.lookup r dcm
Map.lookup i bs
schemeProc :: [String] -> CreateProcess
schemeProc args =
(proc "native-compiler/bin/runner" args)
ucrProc :: FilePath -> [String] -> CreateProcess
ucrProc executable args =
(proc executable args)
{ std_in = CreatePipe,
std_out = Inherit,
std_err = Inherit
@ -667,12 +810,14 @@ schemeProc args =
-- taken over the input. This could probably be without a side
-- channel, but a side channel is probably better.
nativeEvalInContext ::
FilePath ->
PrettyPrintEnv ->
EvalCtx ->
[(Reference, SuperGroup Symbol)] ->
Reference ->
IO (Either Error ([Error], Term Symbol))
nativeEvalInContext _ ctx codes base = do
nativeEvalInContext executable _ ctx codes base = do
ensureRuntimeExists executable
let cc = ccache ctx
crs <- readTVarIO $ combRefs cc
let bytes = serializeValue . compileValue base $ codes
@ -696,15 +841,24 @@ nativeEvalInContext _ ctx codes base = do
-- decodeResult . deserializeValue =<< BS.hGetContents pout
callout _ _ _ _ =
pure . Left $ "withCreateProcess didn't provide handles"
withCreateProcess (schemeProc []) callout
p = ucrProc executable []
ucrError (e :: IOException) = pure $ Left (runtimeErrMsg (cmdspec p) (Right e))
withCreateProcess p callout
`UnliftIO.catch` ucrError
nativeCompileCodes ::
FilePath ->
[(Reference, SuperGroup Symbol)] ->
Reference ->
FilePath ->
IO ()
nativeCompileCodes codes base path = do
nativeCompileCodes executable codes base path = do
ensureRuntimeExists executable
ensureRacoExists
genDir <- getXdgDirectory XdgCache "unisonlanguage/racket-tmp"
createDirectoryIfMissing True genDir
let bytes = serializeValue . compileValue base $ codes
srcPath = genDir </> path <.> "rkt"
callout (Just pin) _ _ ph = do
BS.hPut pin . runPutS . putWord32be . fromIntegral $ BS.length bytes
BS.hPut pin bytes
@ -712,7 +866,17 @@ nativeCompileCodes codes base path = do
waitForProcess ph
pure ()
callout _ _ _ _ = fail "withCreateProcess didn't provide handles"
withCreateProcess (schemeProc ["-o", path]) callout
ucrError (e :: IOException) =
throwIO $ PE callStack (runtimeErrMsg (cmdspec p) (Right e))
racoError (e :: IOException) =
throwIO $ PE callStack (racoErrMsg (makeRacoCmd RawCommand) (Right e))
p = ucrProc executable ["-G", srcPath]
makeRacoCmd :: (FilePath -> [String] -> a) -> a
makeRacoCmd f = f "raco" ["exe", "-o", path, srcPath]
withCreateProcess p callout
`UnliftIO.catch` ucrError
makeRacoCmd callProcess
`UnliftIO.catch` racoError
evalInContext ::
PrettyPrintEnv ->
@ -872,7 +1036,11 @@ icon = "💔💥"
catchInternalErrors ::
IO (Either Error a) ->
IO (Either Error a)
catchInternalErrors sub = sub `UnliftIO.catch` \(CE _ e) -> pure $ Left e
catchInternalErrors sub = sub `UnliftIO.catch` hCE `UnliftIO.catch` hRE
where
hCE (CE _ e) = pure $ Left e
hRE (PE _ e) = pure $ Left e
hRE (BU _ _ _) = pure $ Left "impossible"
decodeStandalone ::
BL.ByteString ->
@ -917,14 +1085,14 @@ startRuntime sandboxed runtimeHost version = do
ioTestTypes = builtinIOTestTypes External
}
startNativeRuntime :: Text -> IO (Runtime Symbol)
startNativeRuntime version = do
startNativeRuntime :: Text -> FilePath -> IO (Runtime Symbol)
startNativeRuntime _version executable = do
ctxVar <- newIORef =<< baseContext False
pure $
Runtime
{ terminate = pure (),
evaluate = nativeEval ctxVar,
compileTo = nativeCompile version ctxVar,
evaluate = nativeEval executable ctxVar,
compileTo = nativeCompile executable ctxVar,
mainType = builtinMain External,
ioTestTypes = builtinIOTestTypes External
}
@ -934,10 +1102,14 @@ withRuntime sandboxed runtimeHost version action =
UnliftIO.bracket (liftIO $ startRuntime sandboxed runtimeHost version) (liftIO . terminate) action
tryM :: IO () -> IO (Maybe Error)
tryM = fmap (either (Just . extract) (const Nothing)) . try
tryM =
flip UnliftIO.catch hRE
. flip UnliftIO.catch hCE
. fmap (const Nothing)
where
extract (PE _ e) = e
extract (BU _ _ _) = "impossible"
hCE (CE _ e) = pure $ Just e
hRE (PE _ e) = pure $ Just e
hRE (BU _ _ _) = pure $ Just "impossible"
runStandalone :: StoredCache -> Word64 -> IO (Either (Pretty ColorText) ())
runStandalone sc init =

View File

@ -175,7 +175,10 @@ fieldNames env r name dd = do
[(_, typ)] -> Just typ
_ -> Nothing
let vars :: [v]
vars = [Var.freshenId (fromIntegral n) (Var.named "_") | n <- [0 .. Type.arity typ - 1]]
-- We add `n` to the end of the variable name as a quick fix to #4752, but we suspect there's a more
-- fundamental fix to be made somewhere in the term printer to automatically suffix a var name with its
-- freshened id if it would be ambiguous otherwise.
vars = [Var.freshenId (fromIntegral n) (Var.named ("_" <> Text.pack (show n))) | n <- [0 .. Type.arity typ - 1]]
hashes <- DD.hashFieldAccessors env (HQ.toVar name) vars r dd
let names =
[ (r, HQ.toText . PPE.termName env . Referent.Ref $ DerivedId r)

View File

@ -1,6 +1,7 @@
module Unison.Syntax.FileParser
( file
) where
( file,
)
where
import Control.Lens
import Control.Monad.Reader (asks, local)

View File

@ -187,9 +187,8 @@ pretty0 ::
AmbientContext ->
Term3 v PrintAnnotation ->
m (Pretty SyntaxText)
pretty0 a tm | precedence a == -2 && not (isBindingSoftHangable tm) = do
-- precedence = -2 means this is a top level binding, and we allow
-- use clause insertion here even when it otherwise wouldn't be
pretty0 a tm | isTopLevelPrecedence (precedence a) && not (isBindingSoftHangable tm) = do
-- we allow use clause insertion here even when it otherwise wouldn't be
-- (as long as the tm isn't soft hangable, if it gets soft hung then
-- adding use clauses beforehand will mess things up)
tmp <- pretty0 (a {imports = im, precedence = -1}) tm
@ -301,25 +300,24 @@ pretty0
`hangHandler` ph
]
Delay' x
| isLet x || p < 0 -> do
let (im', uses) = calcImports im x
let hang = if isSoftHangable x && null uses then PP.softHang else PP.hang
px <- pretty0 (ac 0 Block im' doc) x
pure . paren (p >= 3) $
fmt S.ControlKeyword "do" `hang` PP.lines (uses <> [px])
| Match' _ _ <- x -> do
px <- pretty0 (ac 0 Block im doc) x
let hang = if isSoftHangable x then PP.softHang else PP.hang
pure . paren (p >= 3) $
fmt S.ControlKeyword "do" `hang` px
| otherwise -> do
px <- pretty0 (ac 10 Normal im doc) x
pure . paren (p >= 11 || isBlock x && p >= 3) $
fmt S.DelayForceChar (l "'")
-- Add indentation below since we're opening parens with '(
-- This is in case the contents are a long function application
-- in which case the arguments should be indented.
<> PP.indentAfterNewline " " px
let (im0', uses0) = calcImports im x
let allowUses = isLet x || p < 0
let im' = if allowUses then im0' else im
let uses = if allowUses then uses0 else []
let soft = isSoftHangable x && null uses && p < 3
let hang = if soft then PP.softHang else PP.hang
px <- pretty0 (ac 0 Block im' doc) x
-- this makes sure we get proper indentation if `px` spills onto
-- multiple lines, since `do` introduces layout block
let indent = PP.Width (if soft then 2 else 0) + (if soft && p < 3 then 1 else 0)
pure . paren (p >= 3) $
fmt S.ControlKeyword "do" `hang` PP.lines (uses <> [PP.indentNAfterNewline indent px])
List' xs -> do
let listLink p = fmt (S.TypeReference Type.listRef) p
let comma = listLink ", " `PP.orElse` ("\n" <> listLink ", ")
@ -2171,3 +2169,7 @@ isLeaf (Constructor' {}) = True
isLeaf (Request' {}) = True
isLeaf (Ref' {}) = True
isLeaf _ = False
-- | Indicates this is the RHS of a top-level definition.
isTopLevelPrecedence :: Int -> Bool
isTopLevelPrecedence p = p == (-2)

View File

@ -1,10 +1,11 @@
{-# LANGUAGE OverloadedStrings #-}
module Unison.Syntax.TypeParser
( computationType
, valueType
, valueTypeLeaf
) where
( computationType,
valueType,
valueTypeLeaf,
)
where
import Control.Monad.Reader (asks)
import Data.Set qualified as Set

View File

@ -1,7 +1,5 @@
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
-- | This module is the primary interface to the Unison typechecker
-- module Unison.Typechecker (admissibleTypeAt, check, check', checkAdmissible', equals, locals, subtype, isSubtype, synthesize, synthesize', typeAt, wellTyped) where
@ -34,24 +32,20 @@ import Control.Monad.State
import Control.Monad.Writer
import Data.Foldable
import Data.Map qualified as Map
import Data.Sequence qualified as Seq
import Data.Sequence.NonEmpty qualified as NESeq (toSeq)
import Data.Set qualified as Set
import Data.Text qualified as Text
import Data.Tuple qualified as Tuple
import Unison.ABT qualified as ABT
import Unison.Blank qualified as B
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation)
import Unison.Name qualified as Name
import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.Referent (Referent)
import Unison.Result
( Result,
ResultT,
runResultT,
pattern Result,
)
import Unison.Result (Result, ResultT, runResultT, pattern Result)
import Unison.Result qualified as Result
import Unison.Syntax.Name qualified as Name (toText, unsafeParseText)
import Unison.Syntax.Name qualified as Name (unsafeParseText, unsafeParseVar)
import Unison.Term (Term)
import Unison.Term qualified as Term
import Unison.Type (Type)
@ -81,28 +75,27 @@ convertResult :: Context.Result v loc a -> Result (Notes v loc) a
convertResult = \case
Context.Success is a -> Result (Notes mempty mempty is) (Just a)
Context.TypeError es is -> Result (Notes mempty (NESeq.toSeq es) is) Nothing
Context.CompilerBug bug es is -> Result (Notes [bug] es is) Nothing
Context.CompilerBug bug es is -> Result (Notes (Seq.singleton bug) es is) Nothing
data NamedReference v loc = NamedReference
{ fqn :: Name,
{ fqn :: Name.Name,
fqnType :: Type v loc,
replacement :: Either v Referent
replacement :: Context.Replacement v
}
deriving (Show)
deriving stock (Show)
data Env v loc = Env
{ _ambientAbilities :: [Type v loc],
_typeLookup :: TL.TypeLookup v loc,
{ ambientAbilities :: [Type v loc],
typeLookup :: TL.TypeLookup v loc,
-- TDNR environment - maps short names like `+` to fully-qualified
-- lists of named references whose full name matches the short name
-- Example: `+` maps to [Nat.+, Float.+, Int.+]
--
-- This mapping is populated before typechecking with as few entries
-- as are needed to help resolve variables needing TDNR in the file.
_termsByShortname :: Map Name [NamedReference v loc]
termsByShortname :: Map Name.Name [NamedReference v loc]
}
makeLenses ''Env
deriving stock (Generic)
-- | Infer the type of a 'Unison.Term', using
-- a function to resolve the type of @Ref@ constructors
@ -120,8 +113,8 @@ synthesize ppe pmccSwitch env t =
Context.synthesizeClosed
ppe
pmccSwitch
(TypeVar.liftType <$> view ambientAbilities env)
(view typeLookup env)
(TypeVar.liftType <$> env.ambientAbilities)
env.typeLookup
(TypeVar.liftTerm t)
in Result.hoist (pure . runIdentity) $ fmap TypeVar.lowerType result
@ -188,16 +181,16 @@ synthesizeAndResolve ppe env = do
compilerBug :: Context.CompilerBug v loc -> Result (Notes v loc) ()
compilerBug bug = do
tell $ Notes [bug] mempty mempty
tell $ Notes (Seq.singleton bug) mempty mempty
Control.Monad.Fail.fail ""
typeError :: Context.ErrorNote v loc -> Result (Notes v loc) ()
typeError note = do
tell $ Notes mempty [note] mempty
tell $ Notes mempty (Seq.singleton note) mempty
Control.Monad.Fail.fail ""
btw :: (Monad f) => Context.InfoNote v loc -> ResultT (Notes v loc) f ()
btw note = tell $ Notes mempty mempty [note]
btw note = tell $ Notes mempty mempty (Seq.singleton note)
liftResult :: (Monad f) => Result (Notes v loc) a -> TDNR f v loc a
liftResult = lift . MaybeT . WriterT . pure . runIdentity . runResultT
@ -226,39 +219,35 @@ typeDirectedNameResolution ppe oldNotes oldType env = do
-- Resolve blanks in the notes and generate some resolutions
resolutions <-
liftResult . traverse (resolveNote tdnrEnv) . toList $
infos
oldNotes
infos oldNotes
case catMaybes resolutions of
[] -> pure oldType
rs ->
applySuggestions rs >>= \case
True -> do
synthesizeAndResolve ppe tdnrEnv
resolutions -> do
substituted <- traverse substSuggestion resolutions
case or substituted of
True -> synthesizeAndResolve ppe tdnrEnv
False -> do
-- The type hasn't changed
liftResult $ suggest rs
liftResult $ suggest resolutions
pure oldType
where
addTypedComponent :: Context.InfoNote v loc -> State (Env v loc) ()
addTypedComponent (Context.TopLevelComponent vtts) =
for_ vtts $ \(v, typ, _) ->
for_ (Name.suffixes . Name.unsafeParseText . Var.name $ Var.reset v) $ \suffix ->
termsByShortname
%= Map.insertWith
(<>)
(Name.toText suffix)
[NamedReference (Var.name v) typ (Left v)]
for_ vtts \(v, typ, _) ->
let name = Name.unsafeParseVar (Var.reset v)
in for_ (Name.suffixes name) \suffix ->
#termsByShortname %= Map.insertWith (<>) suffix [NamedReference name typ (Context.ReplacementVar v)]
addTypedComponent _ = pure ()
suggest :: [Resolution v loc] -> Result (Notes v loc) ()
suggest =
traverse_
( \(Resolution name inferredType loc v suggestions) ->
typeError $
Context.ErrorNote
(Context.UnknownTerm loc (suggestedVar v name) (dedupe suggestions) inferredType)
[]
)
traverse_ \(Resolution name inferredType loc v suggestions) ->
typeError $
Context.ErrorNote
{ cause = Context.UnknownTerm loc (suggestedVar v name) (dedupe suggestions) inferredType,
path = Seq.empty
}
guard x a = if x then Just a else Nothing
suggestedVar :: Var v => v -> Text -> v
@ -267,10 +256,10 @@ typeDirectedNameResolution ppe oldNotes oldType env = do
Var.MissingResult -> v
_ -> Var.named name
extractSubstitution :: [Context.Suggestion v loc] -> Maybe (Either v Referent)
extractSubstitution :: [Context.Suggestion v loc] -> Maybe (Context.Replacement v)
extractSubstitution suggestions =
let groupedByName :: [([Name.Name], Either v Referent)] =
map (\(a, b) -> (b, a))
let groupedByName :: [([Name.Name], Context.Replacement v)] =
map Tuple.swap
. Map.toList
. fmap Set.toList
. foldl'
@ -278,86 +267,84 @@ typeDirectedNameResolution ppe oldNotes oldType env = do
Map.insertWith
Set.union
suggestionReplacement
(Set.singleton (Name.unsafeParseText suggestionName))
(Set.singleton suggestionName)
b
)
Map.empty
$ filter Context.isExact suggestions
matches :: Set (Either v Referent) = Name.preferShallowLibDepth groupedByName
matches :: Set (Context.Replacement v) = Name.preferShallowLibDepth groupedByName
in case toList matches of
[x] -> Just x
_ -> Nothing
applySuggestions :: [Resolution v loc] -> TDNR f v loc Bool
applySuggestions = foldlM phi False
where
phi b a = do
didSub <- substSuggestion a
pure $! b || didSub
substSuggestion :: Resolution v loc -> TDNR f v loc Bool
substSuggestion
( Resolution
name
_
loc
v
(extractSubstitution -> Just replacement)
) =
do
modify (substBlank (Text.unpack name) loc solved)
lift . btw $ Context.Decision (suggestedVar v name) loc solved
pure True
where
solved = either (Term.var loc) (Term.fromReferent loc) replacement
substSuggestion (Resolution name _ loc v (extractSubstitution -> Just replacement)) = do
modify (substBlank name loc solved)
lift . btw $ Context.Decision (suggestedVar v name) loc solved
pure True
where
solved =
case replacement of
Context.ReplacementRef ref -> Term.fromReferent loc ref
Context.ReplacementVar var -> Term.var loc var
substSuggestion _ = pure False
-- Resolve a `Blank` to a term
substBlank :: String -> loc -> Term v loc -> Term v loc -> Term v loc
substBlank :: Text -> loc -> Term v loc -> Term v loc -> Term v loc
substBlank s a r = ABT.visitPure go
where
go t = guard (ABT.annotation t == a) $ ABT.visitPure resolve t
resolve (Term.Blank' (B.Recorded (B.Resolve loc name)))
| name == s =
Just (loc <$ r)
| name == Text.unpack s = Just (loc <$ r)
resolve _ = Nothing
-- Returns Nothing for irrelevant notes
-- Returns Nothing for irrelevant notes
resolveNote ::
Env v loc ->
Context.InfoNote v loc ->
Result (Notes v loc) (Maybe (Resolution v loc))
resolveNote env (Context.SolvedBlank (B.Resolve loc n) v it) =
fmap (Just . Resolution (Text.pack n) it loc v . join)
. traverse (resolve it)
. join
. maybeToList
. Map.lookup (Text.pack n)
$ view termsByShortname env
-- Solve the case where we have a placeholder for a missing result
-- at the end of a block. This is always an error.
resolveNote _ (Context.SolvedBlank (B.MissingResultPlaceholder loc) v it) =
pure . Just $ Resolution "_" it loc v []
resolveNote _ n = btw n >> pure Nothing
resolveNote env = \case
Context.SolvedBlank (B.Resolve loc str) v it -> do
let shortname = Name.unsafeParseText (Text.pack str)
matches = Map.findWithDefault [] shortname env.termsByShortname
suggestions <- wither (resolve it) matches
pure $
Just
Resolution
{ resolvedName = Text.pack str,
inferredType = it,
resolvedLoc = loc,
v,
suggestions
}
-- Solve the case where we have a placeholder for a missing result
-- at the end of a block. This is always an error.
Context.SolvedBlank (B.MissingResultPlaceholder loc) v it ->
pure . Just $ Resolution "_" it loc v []
note -> do
btw note
pure Nothing
dedupe :: [Context.Suggestion v loc] -> [Context.Suggestion v loc]
dedupe = uniqueBy Context.suggestionReplacement
dedupe =
uniqueBy Context.suggestionReplacement
resolve ::
Context.Type v loc ->
NamedReference v loc ->
Result (Notes v loc) [Context.Suggestion v loc]
Result (Notes v loc) (Maybe (Context.Suggestion v loc))
resolve inferredType (NamedReference fqn foundType replace) =
-- We found a name that matches. See if the type matches too.
case Context.isSubtype (TypeVar.liftType foundType) (Context.relax inferredType) of
Left bug -> const [] <$> compilerBug bug
Left bug -> Nothing <$ compilerBug bug
-- Suggest the import if the type matches.
Right b ->
pure
[ Context.Suggestion
fqn
(TypeVar.liftType foundType)
replace
(if b then Context.Exact else Context.WrongType)
]
pure . Just $
Context.Suggestion
fqn
(TypeVar.liftType foundType)
replace
(if b then Context.Exact else Context.WrongType)
-- | Check whether a term matches a type, using a
-- function to resolve the type of @Ref@ constructors

View File

@ -31,6 +31,7 @@ module Unison.Typechecker.Context
fitsScheme,
isRedundant,
Suggestion (..),
Replacement (..),
SuggestionMatch (..),
isExact,
typeErrors,
@ -103,6 +104,7 @@ import Unison.Typechecker.TypeLookup qualified as TL
import Unison.Typechecker.TypeVar qualified as TypeVar
import Unison.Var (Var)
import Unison.Var qualified as Var
import Unison.Name (Name)
type TypeVar v loc = TypeVar.TypeVar (B.Blank loc) v
@ -329,16 +331,21 @@ data SuggestionMatch = Exact | WrongType | WrongName
deriving (Ord, Eq, Show)
data Suggestion v loc = Suggestion
{ suggestionName :: Text,
{ suggestionName :: Name,
suggestionType :: Type v loc,
suggestionReplacement :: Either v Referent,
suggestionReplacement :: Replacement v,
suggestionMatch :: SuggestionMatch
}
deriving (Eq, Show)
deriving stock (Eq, Show)
isExact :: Suggestion v loc -> Bool
isExact Suggestion {..} = suggestionMatch == Exact
data Replacement v
= ReplacementRef Referent
| ReplacementVar v
deriving stock (Eq, Ord, Show)
data ErrorNote v loc = ErrorNote
{ cause :: Cause v loc,
path :: Seq (PathElement v loc)
@ -2442,6 +2449,19 @@ checkWanted want (Term.LetRecTop' isTop lr) t =
markThenRetractWanted (Var.named "let-rec-marker") $ do
e <- annotateLetRecBindings isTop lr
checkWanted want e t
checkWanted want e@(Term.Match' scrut cases) t = do
(scrutType, swant) <- synthesize scrut
want <- coalesceWanted swant want
cwant <- checkCases scrutType t cases
want <- coalesceWanted cwant want
ctx <- getContext
let matchType = apply ctx t
getPatternMatchCoverageCheckAndKindInferenceSwitch >>= \case
PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled ->
ensurePatternCoverage e matchType scrut scrutType cases
PatternMatchCoverageCheckAndKindInferenceSwitch'Disabled ->
pure ()
pure want
checkWanted want e t = do
(u, wnew) <- synthesize e
ctx <- getContext

View File

@ -12,7 +12,7 @@ data Pattern
| Or Pattern Pattern -- left-biased choice: tries second pattern only if first fails
| Capture Pattern -- capture all the text consumed by the inner pattern, discarding its subcaptures
| CaptureAs Text Pattern -- capture the given text, discarding its subcaptures, and name the capture
| Many Pattern -- zero or more repetitions (at least 1 can be written: Join [p, Many p])
| Many Bool Pattern -- zero or more repetitions (at least 1 can be written: Join [p, Many p]); boolean determines whether it's the correct version (True) or the original (False).
| Replicate Int Int Pattern -- m to n occurrences of a pattern, optional = 0-1
| Eof -- succeed if given the empty text, fail otherwise
| Literal Text -- succeed if input starts with the given text, advance by that text
@ -128,7 +128,7 @@ compile (CaptureAs t p) !err !success = go
success' _ rem acc0 _ = success (pushCapture t acc0) rem
compiled = compile p err' success'
go acc t = compiled acc t acc t
compile (Capture (Many (Char Any))) !_ !success = \acc t -> success (pushCapture t acc) Text.empty
compile (Capture (Many _ (Char Any))) !_ !success = \acc t -> success (pushCapture t acc) Text.empty
compile (Capture c) !err !success = go
where
err' _ _ acc0 t0 = err acc0 t0
@ -152,12 +152,13 @@ compile (Char cp) !err !success = go
go acc t = case Text.uncons t of
Just (ch, rem) | ok ch -> success acc rem
_ -> err acc t
compile (Many p) !_ !success = case p of
compile (Many correct p) !_ !success = case p of
Char Any -> (\acc _ -> success acc Text.empty)
Char cp -> walker (charPatternPred cp)
p -> go
where
go = compile p success success'
go | correct = try "Many" (compile p) success success'
| otherwise = compile p success success'
success' acc rem
| Text.size rem == 0 = success acc rem
| otherwise = go acc rem

View File

@ -114,12 +114,12 @@ test =
expect' (P.run (P.Char (P.CharSet "0123")) "3ab" == Just ([], "ab"))
expect' (P.run (P.Char (P.Not (P.CharSet "0123"))) "a3b" == Just ([], "3b"))
expect' (P.run (P.Capture (P.Char (P.Not (P.CharSet "0123")))) "a3b" == Just (["a"], "3b"))
expect' (P.run (P.Many (P.Char (P.CharSet "abcd"))) "babbababac123" == Just ([], "123"))
expect' (P.run (P.Capture (P.Many (P.Char (P.CharSet "abcd")))) "babbababac123" == Just (["babbababac"], "123"))
expect' (P.run (P.Capture (P.Many (P.Char (P.CharClass P.Number)))) "012345abc" == Just (["012345"], "abc"))
expect' (P.run (P.Join [P.Capture (P.Many (P.Char (P.CharClass P.Number))), P.Literal ",", P.Capture (P.Many (P.Char P.Any))]) "012345,abc" == Just (["012345", "abc"], ""))
expect' (P.run (P.Many True (P.Char (P.CharSet "abcd"))) "babbababac123" == Just ([], "123"))
expect' (P.run (P.Capture (P.Many True (P.Char (P.CharSet "abcd")))) "babbababac123" == Just (["babbababac"], "123"))
expect' (P.run (P.Capture (P.Many True (P.Char (P.CharClass P.Number)))) "012345abc" == Just (["012345"], "abc"))
expect' (P.run (P.Join [P.Capture (P.Many True (P.Char (P.CharClass P.Number))), P.Literal ",", P.Capture (P.Many True (P.Char P.Any))]) "012345,abc" == Just (["012345", "abc"], ""))
expect'
( P.run (P.Many (P.Join [P.Capture (P.Many (P.Char (P.CharClass P.Number))), P.Many (P.Char (P.CharClass P.Whitespace))])) "01 10 20 1123 292 110 10"
( P.run (P.Many True (P.Join [P.Capture (P.Many True (P.Char (P.CharClass P.Number))), P.Many True (P.Char (P.CharClass P.Whitespace))])) "01 10 20 1123 292 110 10"
== Just (["01", "10", "20", "1123", "292", "110", "10"], "")
)
expect' $

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
-- This file has been generated from package.yaml by hpack version 0.36.0.
--
-- see: https://github.com/sol/hpack
@ -101,7 +101,7 @@ library
Unison.KindInference.Constraint.Pretty
Unison.KindInference.Constraint.Provenance
Unison.KindInference.Constraint.Solved
Unison.KindInference.Constraint.StarProvenance
Unison.KindInference.Constraint.TypeProvenance
Unison.KindInference.Constraint.Unsolved
Unison.KindInference.Error
Unison.KindInference.Error.Pretty

View File

@ -1,88 +0,0 @@
#!racket/base
(require
(except-in racket false true unit any)
compiler/embed
unison/boot
unison/data
unison/data-info
unison/chunked-seq
unison/primops
unison/primops-generated
unison/builtin-generated)
(define (grab-bytes)
(let* ([size-bytes (read-bytes 4)]
[size (integer-bytes->integer size-bytes #f #t 0 4)])
(read-bytes size)))
(define (decode-input)
(let ([bs (grab-bytes)])
(match (builtin-Value.deserialize (bytes->chunked-bytes bs))
[(unison-data _ t (list q))
(= t unison-either-right:tag)
(apply
values
(unison-tuple->list (reify-value (unison-quote-val q))))]
[else
(raise "unexpected input")])))
(define (build-main-module main-def)
`(module unison-main racket/base
(require
unison/boot)
(provide main)
(define (main)
(handle ['ref-4n0fgs00] top-exn-handler
(,(termlink->name main-def))))))
(define (do-evaluate)
(let-values ([(code main-ref) (decode-input)])
(add-runtime-code 'unison-main code)
(handle ['ref-4n0fgs00] top-exn-handler
((termlink->proc main-ref))
(data 'unit 0))))
; stub implementation
(define (do-compile output) (void))
; (let-values ([(code main-ref) (decode-input)])
; (create-embedding-executable
; output
; #:modules '((#f unison-main))
; #:literal-expression '(begin (require unison-main) (main)))))
(define runtime-namespace
(let ([ns (variable-reference->namespace (#%variable-reference))])
(namespace-require ''#%kernel ns)
ns))
(define (chunked-list->list cl)
(vector->list (chunked-list->vector cl)))
(define (list->chunked-list l)
(vector->chunked-list (list->vector l)))
(define (join ls)
(cond
[(null? ls) '()]
[else (append (car ls) (join (cdr ls)))]))
(define compile (make-parameter #f))
(define (handle-command-line)
(command-line
#:program "runner"
#:once-any
[("-o" "--output")
file
"compile to <file>"
(compile file)]
#:args ()
(compile)))
(let ([out (handle-command-line)])
(if out
(do-compile out)
(do-evaluate)))

View File

@ -0,0 +1,115 @@
#lang racket/base
; This implements a standalone unison runtime, with options for
; generating compilable racket modules.
;
; For runtime, it relies on the support for unison dynamic code
; loading. It expects to be provided with a serialized list of term
; links and associated code. It then loads the code in the same manner
; as dynamic runtime execution, and evaluates a main definition.
;
; Since this is intended to be an implementation of evaluation for
; e.g. ucm, the input is expected to be complete. No protocol is
; implemented for negotiating with a host for additional needed
; definitions. The program has all the built in definitions, and
; everything else is expected to be provided in the initial input.
;
; In addition to this mode, it is possible to supply a command line
; argument `-G` with a file name. This will instead produce a racket
; file with the supplied definitions. This file should be suitable for
; compilation and distribution with the `raco` tool, so long as the
; supporting unison-on-racket libraries are known to the racket
; install.
(require
racket/pretty
(except-in racket false true unit any)
compiler/embed
unison/boot
unison/data
unison/data-info
unison/chunked-seq
unison/primops
unison/primops-generated
unison/builtin-generated)
; Gets bytes using the expected input format. The format is simple:
;
; - 4 bytes indicating how many bytes follow
; - the actual payload, with size matching the above
(define (grab-bytes)
(let* ([size-bytes (read-bytes 4)]
[size (integer-bytes->integer size-bytes #f #t 0 4)])
(read-bytes size)))
; Reads and decodes the input. First uses `grab-bytes` to read the
; payload, then uses unison functions to deserialize the `Value` that
; is expected.
;
; The `Value` is expected to be a pair of loadable code and which
; definition should be executed. In unison types, it is:
;
; ([(Link.Term, Code)], Link.Term)
(define (decode-input)
(let ([bs (grab-bytes)])
(match (builtin-Value.deserialize (bytes->chunked-bytes bs))
[(unison-data _ t (list q))
(= t ref-either-right:tag)
(apply
values
(unison-tuple->list (reify-value (unison-quote-val q))))]
[else
(raise "unexpected input")])))
; Implements the evaluation mode of operation. First decodes the
; input. Then uses the dynamic loading machinery to add the code to
; the runtime. Finally executes a specified main reference.
(define (do-evaluate)
(let-values ([(code main-ref) (decode-input)])
(add-runtime-code 'unison-main code)
(handle [ref-exception:typelink] top-exn-handler
((termlink->proc main-ref))
(data 'unit 0))))
; Uses racket pretty printing machinery to instead generate a file
; containing the given code, and which executes the main definition on
; loading. This file can then be built with `raco exe`.
(define (write-module srcf main-ref icode)
(call-with-output-file
srcf
(lambda (port)
(parameterize ([print-as-expression #t])
(display "#lang racket/base\n\n" port)
(for ([expr (build-intermediate-module main-ref icode)])
(pretty-print expr port 1)
(newline port))
(newline port)))
#:exists 'replace))
; Decodes input and writes a module to the specified file.
(define (do-generate srcf)
(define-values (icode main-ref) (decode-input))
(write-module srcf main-ref icode))
(define generate-to (make-parameter #f))
(define show-version (make-parameter #f))
(define (handle-command-line)
(command-line
#:program "unison-runtime"
#:once-any
["--version"
"display version"
(show-version #t)]
[("-G" "--generate-file")
file
"generate code to <file>"
(generate-to file)]))
(begin
(handle-command-line)
(cond
[(show-version) (displayln "unison-runtime version 0.0.11")]
[(generate-to) (do-generate (generate-to))]
[else (do-evaluate)]))

View File

@ -11,6 +11,7 @@
Float.*
Float.fromRepresentation
Float.toRepresentation
Float.ceiling
Int.+
Int.-
Int./
@ -21,28 +22,30 @@
Int.signum
)))
(require racket)
(require racket/fixnum)
(require racket/flonum)
(require racket/performance-hint)
(require unison/boot)
(require racket
racket/fixnum
racket/flonum
racket/performance-hint
unison/boot)
(begin-encourage-inline
(define-unison (Nat.+ m n) (+ m n))
(define-unison (Nat.+ m n) (clamp-natural (+ m n)))
(define-unison (Nat.drop m n) (max 0 (- m n)))
(define-unison (Nat.increment n) (add1 n))
(define-unison (Int.increment i) (add1 i))
(define-unison (Int.negate i) (- i))
(define-unison (Int.+ i j) (+ i j))
(define-unison (Int.- i j) (- i j))
(define-unison (Int./ i j) (quotient i j))
(define-unison (Nat.increment n) (clamp-natural (add1 n)))
(define-unison (Int.increment i) (clamp-integer (add1 i)))
(define-unison (Int.negate i) (if (> i nbit63) (- i) i))
(define-unison (Int.+ i j) (clamp-integer (+ i j)))
(define-unison (Int.- i j) (clamp-integer (- i j)))
(define-unison (Int./ i j) (floor (/ i j)))
(define-unison (Int.signum i) (sgn i))
(define-unison (Float.* x y) (fl* x y))
(define-unison (Nat.toFloat n) (->fl n))
(define-unison (Float.ceiling f)
(clamp-integer (fl->exact-integer (ceiling f))))
; If someone can suggest a better mechanism for these,
; that would be appreciated.
(define-unison (Float.toRepresentation fl)

View File

@ -12,7 +12,43 @@
; that arity appropriately.
#!racket/base
(provide
(all-from-out unison/data-info)
builtin-any:typelink
builtin-boolean:typelink
builtin-bytes:typelink
builtin-char:typelink
builtin-float:typelink
builtin-int:typelink
builtin-nat:typelink
builtin-text:typelink
builtin-code:typelink
builtin-mvar:typelink
builtin-pattern:typelink
builtin-promise:typelink
builtin-sequence:typelink
builtin-socket:typelink
builtin-tls:typelink
builtin-timespec:typelink
builtin-threadid:typelink
builtin-value:typelink
builtin-crypto.hashalgorithm:typelink
builtin-char.class:typelink
builtin-immutablearray:typelink
builtin-immutablebytearray:typelink
builtin-mutablearray:typelink
builtin-mutablebytearray:typelink
builtin-processhandle:typelink
builtin-ref.ticket:typelink
builtin-tls.cipher:typelink
builtin-tls.clientconfig:typelink
builtin-tls.privatekey:typelink
builtin-tls.serverconfig:typelink
builtin-tls.signedcert:typelink
builtin-tls.version:typelink
bytevector
bytes
control
define-unison
handle
@ -20,6 +56,13 @@
data
data-case
clamp-integer
clamp-natural
wrap-natural
bit64
bit63
nbit63
expand-sandbox
check-sandbox
set-sandbox
@ -35,6 +78,10 @@
declare-function-link
declare-code
exn:bug?
exn:bug->exception
exception->string
request
request-case
sum
@ -64,7 +111,7 @@
(require
(for-syntax
racket/set
(only-in racket partition))
(only-in racket partition flatten))
(rename-in
(except-in racket false true unit any)
[make-continuation-prompt-tag make-prompt])
@ -72,6 +119,7 @@
; (for (only (racket base) quasisyntax/loc) expand)
; (for-syntax (only-in unison/core syntax->list))
(only-in racket/control prompt0-at control0-at)
racket/performance-hint
unison/core
unison/data
unison/sandbox
@ -402,13 +450,11 @@
[(pure . xs) #t]
[_ #f]))
(define (mk-pure scrut ps)
(define (mk-pure ps)
(if (null? ps)
#`(pure-val #,scrut)
#'((unison-pure v) v)
(syntax-case (car ps) (pure)
[(pure (v) e ...)
#`(let ([v (unison-pure-val #,scrut)])
e ...)]
[(pure (v) e ...) #'((unison-pure v) e ...)]
[(pure vs e ...)
(raise-syntax-error
#f
@ -416,24 +462,19 @@
(car ps)
#'vs)])))
(define (mk-req scrut-stx)
(lambda (stx)
(syntax-case stx ()
[(t vs e ...)
(with-syntax ([scrut scrut-stx])
#'((t) (let-values
([vs (apply values (unison-request-fields scrut))])
e ...)))])))
(define (mk-req stx)
(syntax-case stx ()
[(t (v ...) e ...)
#'((t (list v ...)) e ...)]))
(define (mk-abil scrut-stx)
(lambda (stx)
(syntax-case stx ()
[(t sc ...)
(let ([sub (mk-req scrut-stx)])
(with-syntax
([(sc ...) (map sub (syntax->list #'(sc ...)))]
[scrut scrut-stx])
#'((t) (case (unison-request-tag scrut) sc ...))))])))
[(a sc ...)
#`((unison-request b t vs)
#:when (equal? a b)
(match* (t vs)
#,@(map mk-req (syntax->list #'(sc ...)))))])))
(syntax-case stx ()
[(request-case scrut c ...)
@ -445,127 +486,64 @@
"multiple pure cases in request-case"
stx)
(with-syntax
([pc (mk-pure #'scrut ps)]
([pc (mk-pure ps)]
[(ac ...) (map (mk-abil #'scrut) as)])
#'(cond
[(unison-pure? scrut) pc]
[else (case (unison-request-ability scrut) ac ...)]))))])))
#'(match scrut pc ac ...))))])))
; (define (describe-list n l)
; (let rec ([pre "["] [post "[]"] [cur l])
; (cond
; [(null? cur) post]
; [else
; (let* ([sx (describe-value-depth (- n 1) (car cur))]
; [sxs (rec ", " "]" (cdr cur))])
; (string-append pre sx sxs))])))
;
; (define (describe-ref r)
; (cond
; [(symbol? r) (symbol->string r)]
; [(data? r)
; (data-case r
; [0 (s) (string-append "##" s)]
; [1 (i)
; (data-case i
; [0 (bs ix)
; (let* ([bd (bytevector->base32-string b32h bs)]
; [td (istring-take 5 bd)]
; [sx (if (>= 0 ix)
; ""
; (string-append "." (number->string ix)))])
; (string-append "#" td sx))])])]))
;
; (define (describe-bytes bs)
; (let* ([s (bytevector->base32-string b32h bs)]
; [l (string-length s)]
; [sfx (if (<= l 10) "" "...")])
; (string-append "32x" (istring-take 10 s) sfx)))
;
; (define (describe-value-depth n x)
; (if (< n 0) "..."
; (cond
; [(sum? x)
; (let ([tt (number->string (sum-tag x))]
; [vs (describe-list n (sum-fields x))])
; (string-append "Sum " tt " " vs))]
; [(data? x)
; (let ([tt (number->string (data-tag x))]
; [rt (describe-ref (data-ref x))]
; [vs (describe-list n (data-fields x))])
; (string-append "Data " rt " " tt " " vs))]
; [(list? x) (describe-list n x)]
; [(number? x) (number->string x)]
; [(string? x) (string-append "\"" x "\"")]
; [(bytevector? x) (describe-bytes x)]
; [(procedure? x) (format "~a" x)]
; [else
; (format "describe-value: unimplemented case: ~a " x)])))
;
; (define (describe-value x) (describe-value-depth 20 x))
;
(define (decode-value x) '())
(define (reference->termlink rf)
(match rf
[(unison-data _ t (list nm))
#:when (= t unison-reference-builtin:tag)
#:when (= t ref-reference-builtin:tag)
(unison-termlink-builtin (chunked-string->string nm))]
[(unison-data _ t (list id))
#:when (= t unison-reference-derived:tag)
#:when (= t ref-reference-derived:tag)
(match id
[(unison-data _ t (list rf i))
#:when (= t unison-id-id:tag)
#:when (= t ref-id-id:tag)
(unison-termlink-derived rf i)])]))
(define (referent->termlink rn)
(match rn
[(unison-data _ t (list rf i))
#:when (= t unison-referent-con:tag)
#:when (= t ref-referent-con:tag)
(unison-termlink-con (reference->typelink rf) i)]
[(unison-data _ t (list rf))
#:when (= t unison-referent-def:tag)
#:when (= t ref-referent-def:tag)
(reference->termlink rf)]))
(define (reference->typelink rf)
(match rf
[(unison-data _ t (list nm))
#:when (= t unison-reference-builtin:tag)
#:when (= t ref-reference-builtin:tag)
(unison-typelink-builtin (chunked-string->string nm))]
[(unison-data _ t (list id))
#:when (= t unison-reference-derived:tag)
#:when (= t ref-reference-derived:tag)
(match id
[(unison-data _ t (list rf i))
#:when (= t unison-id-id:tag)
#:when (= t ref-id-id:tag)
(unison-typelink-derived rf i)])]))
(define (typelink->reference tl)
(match tl
[(unison-typelink-builtin nm)
(unison-reference-builtin (string->chunked-string nm))]
(ref-reference-builtin (string->chunked-string nm))]
[(unison-typelink-derived hs i)
(unison-reference-derived
(unison-id-id hs i))]))
(ref-reference-derived (ref-id-id hs i))]))
(define (termlink->referent tl)
(match tl
[(unison-termlink-builtin nm)
(unison-referent-def
(unison-reference-builtin nm))]
(ref-referent-def
(ref-reference-builtin nm))]
[(unison-termlink-derived rf i)
(unison-referent-def
(unison-reference-derived
(unison-id-id rf i)))]
(ref-referent-def
(ref-reference-derived
(ref-id-id rf i)))]
[(unison-termlink-con tyl i)
(unison-referent-con
(typelink->reference tyl)
i)]))
(define (list->unison-tuple l)
(foldr unison-tuple-pair unison-unit-unit l))
(define (unison-tuple . l) (list->unison-tuple l))
(ref-referent-con (typelink->reference tyl) i)]))
(define (unison-seq . l)
(vector->chunked-list (list->vector l)))
@ -574,20 +552,45 @@
; The in-unison definition was effectively just literal scheme code
; represented as a unison data type, with some names generated from
; codebase data.
;
; Note: the ref-4n0fgs00 stuff is probably not ultimately correct, but
; is how things work for now.
(define (top-exn-handler rq)
(request-case rq
[pure (x)
(match x
[(unison-data r 0 (list))
(eq? r unison-unit:link)
(eq? r ref-unit:typelink)
(display "")]
[else
(display (describe-value x))])]
[ref-4n0fgs00
[ref-exception:typelink
[0 (f)
(control 'ref-4n0fgs00 k
(control ref-exception:typelink k
(let ([disp (describe-value f)])
(raise (make-exn:bug "builtin.bug" disp))))]]))
(begin-encourage-inline
(define mask64 #xffffffffffffffff)
(define mask63 #x7fffffffffffffff)
(define bit63 #x8000000000000000)
(define bit64 #x10000000000000000)
(define nbit63 (- #x8000000000000000))
; Operation to maintain Int values to within a range from
; -2^63 to 2^63-1.
(define (clamp-integer i)
(if (fixnum? i) i
(let ([j (bitwise-and mask64 i)])
(if (< j bit63) j
(- j bit64)))))
; modular arithmetic appropriate for when a Nat operation can only
; overflow (be too large a positive number).
(define (clamp-natural n)
(if (fixnum? n) n
(modulo n bit64)))
; module arithmetic appropriate for when a Nat operation my either
; have too large or a negative result.
(define (wrap-natural n)
(if (and (fixnum? n) (exact-nonnegative-integer? n)) n
(modulo n bit64))))

View File

@ -504,7 +504,7 @@
new-len
(λ (chunk)
(chunk-copy! chunk 0 first-c 1)
(chunk-copy! chunk first-len last-c 0)))))]
(chunk-copy! chunk (sub1 first-len) last-c 0)))))]
[(= first-len 1)
(define-values [vt* first-c*] (vector-trie-pop-first vt))
(struct-copy
@ -516,7 +516,7 @@
(struct-copy
chunks cs
[length (sub1 len)]
[first-chunk (chunk-drop-first last-c)])])
[first-chunk (chunk-drop-first first-c)])])
(chunk-first first-c))]))
(define (chunked-seq-pop-last cs)
@ -573,13 +573,13 @@
[{(single-chunk chunk-a) (single-chunk chunk-b)}
(define len (+ (chunk-length chunk-a) (chunk-length chunk-b)))
;; see Note [chunks-length invariant]
(if (< len CHUNK-CAPACITY)
(if (<= len CHUNK-CAPACITY)
(single-chunk (chunk-append chunk-a chunk-b))
(chunks len chunk-a empty-vector-trie chunk-b))]
[{(single-chunk chunk) (chunks len first-c vt _)}
(cond
[(< (+ (chunk-length chunk) (chunk-length first-c)) CHUNK-CAPACITY)
[(<= (+ (chunk-length chunk) (chunk-length first-c)) CHUNK-CAPACITY)
(struct-copy
chunks cs-b
[length (+ (chunk-length chunk) len)]
@ -594,7 +594,7 @@
[{(chunks len _ vt last-c) (single-chunk chunk)}
(cond
[(< (+ (chunk-length last-c) (chunk-length chunk)) CHUNK-CAPACITY)
[(<= (+ (chunk-length last-c) (chunk-length chunk)) CHUNK-CAPACITY)
(struct-copy
chunks cs-a
[length (+ len (chunk-length chunk))]
@ -688,9 +688,10 @@
;; If `first-a` contains too many elements to fit in the next
;; partially-constructed chunk, we need to split it as well.
[(> first-a-len insert-i)
(chunk-copy! new-chunk 0 first-a split-i)
(define copy-len (- first-a-len insert-i))
(chunk-copy! new-chunk 0 first-a copy-len)
(transfer-chunk! #:done? #t)
(chunk-slice first-a 0 split-i)]
(chunk-slice first-a 0 copy-len)]
;; Otherwise, we can move the elements from the partially-
;; constructed chunk into the new first chunk.

View File

@ -18,6 +18,7 @@
(import (rnrs)
(rnrs records syntactic)
(unison data)
(unison data-info)
(unison core)
(unison chunked-seq)
(rename
@ -105,15 +106,36 @@
(or (exn:fail:contract:divide-by-zero? e)
(exn:fail:contract:non-fixnum-result? e)))
;; TODO Replace strings with proper type links once we have them
(define (try-eval thunk)
(with-handlers
([exn:break?
(lambda (e) (exception "ThreadKilledFailure" (string->chunked-string "thread killed") ()))]
[exn:io? (lambda (e) (exception "IOFailure" (exception->string e) ()))]
[exn:arith? (lambda (e) (exception "ArithmeticFailure" (exception->string e) ()))]
(lambda (e)
(exception
ref-threadkilledfailure:typelink
(string->chunked-string "thread killed")
ref-unit-unit))]
[exn:io?
(lambda (e)
(exception
ref-iofailure:typelink
(exception->string e) ref-unit-unit))]
[exn:arith?
(lambda (e)
(exception
ref-arithfailure:typelink
(exception->string e)
ref-unit-unit))]
[exn:bug? (lambda (e) (exn:bug->exception e))]
[exn:fail? (lambda (e) (exception "RuntimeFailure" (exception->string e) ()))]
[exn:fail?
(lambda (e)
(exception
ref-runtimefailure:typelink
(exception->string e)
ref-unit-unit))]
[(lambda (x) #t)
(lambda (e) (exception "MiscFailure" (string->chunked-string "unknown exception") e))])
(lambda (e)
(exception
ref-miscfailure:typelink
(exception->string e)
ref-unit-unit))])
(right (thunk)))))

View File

@ -19,11 +19,20 @@
(for-syntax raise-syntax-error)
exception->string
exn:bug
make-exn:bug
exn:bug?
exn:bug->exception
let-marks
ref-mark
chunked-string-foldMap-chunks
unison-tuple
list->unison-tuple
freeze-bytevector!
freeze-vector!
freeze-subvector
@ -63,6 +72,7 @@
build-path
path->string
match
match*
for/fold)
(string-copy! racket-string-copy!)
(bytes-append bytevector-append)
@ -74,6 +84,7 @@
(only-in racket/fixnum fl->fx fx- fxand fxlshift fxrshift fxior)
racket/unsafe/ops
unison/data
unison/data-info
unison/chunked-seq)
(define (fx1- n) (fx- n 1))
@ -177,12 +188,43 @@
[sfx (if (<= l 10) "" "...")])
(string-append "32x" (substring s 0 10) sfx)))
(define (describe-tuple x)
(define (format-tuple l)
(for/fold
([sep ")"]
[bits '()]
#:result (apply string-append (cons "(" bits)))
([e l])
(values ", " (list* (describe-value e) sep bits))))
(define (format-non-tuple l)
(for/fold
([result #f])
([e l])
(let ([de (describe-value e)])
(if (not result) de
(string-append "Cons (" de ") (" result ")")))))
(let rec ([acc '()] [tup x])
(match tup
[(unison-data r t (list x y))
#:when (eq? r ref-tuple:typelink)
(rec (cons x acc) y)]
[(unison-data r t (list))
#:when (eq? r ref-unit:typelink)
(format-tuple acc)]
[else
(format-non-tuple (cons tup acc))])))
(define (describe-value x)
(match x
[(unison-sum t fs)
(let ([tt (number->string t)]
[vs (describe-list-br fs)])
(string-append "Sum " tt " " vs))]
[(unison-data r t fs)
#:when (eq? r ref-tuple:typelink)
(describe-tuple x)]
[(unison-data r t fs)
(let ([tt (number->string t)]
[rt (describe-ref r)]
@ -251,48 +293,158 @@
[else sc]))]))
; universal-compares two lists of values lexicographically
(define (lexico-compare ls rs)
(define (lexico-compare ls rs cmp-ty)
(let rec ([cls ls] [crs rs])
(cond
[(and (null? cls) (null? crs)) '=]
[else
(comparisons
(universal-compare (car cls) (car crs))
(universal-compare (car cls) (car crs) cmp-ty)
(rec (cdr cls) (cdr crs)))])))
(define (cmp-num l r)
(define ((comparison e? l?) l r)
(cond
[(= l r) '=]
[(< l r) '<]
[(e? l r) '=]
[(l? l r) '<]
[else '>]))
(define (universal-compare l r)
(define compare-num (comparison = <))
(define compare-char (comparison char=? char<?))
(define compare-byte (comparison = <))
(define compare-bytes (comparison bytes=? bytes<?))
(define compare-string (comparison string=? string<?))
(define (compare-typelink ll rl)
(match ll
[(unison-typelink-builtin lnm)
(match rl
[(unison-typelink-builtin rnm) (compare-string lnm rnm)]
[(? unison-typelink-derived?) '<])]
[(unison-typelink-derived lh i)
(match rl
[(unison-typelink-derived rh j)
(comparisons
(compare-bytes lh rh)
(compare-num i j))]
[(? unison-typelink-builtin?) '>])]))
(define (compare-termlink ll rl)
(match ll
[(unison-termlink-builtin lnm)
(match rl
[(unison-termlink-builtin rnm)
(compare-string lnm rnm)]
[else '<])]
[(unison-termlink-derived lh i)
(match rl
[(unison-termlink-derived rh j)
(comparisons
(compare-bytes lh rh)
(compare-num i j))]
[(? unison-termlink-builtin?) '>]
[else '<])]
[(unison-termlink-con lr t)
(match rl
[(unison-termlink-con rr u)
(comparisons
(compare-typelink lr rr)
(compare-num t u))]
[else '>])]))
(define (value->category v)
(cond
[(equal? l r) '=]
[(and (number? l) (number? r)) (if (< l r) '< '>)]
[(and (chunked-list? l) (chunked-list? r)) (chunked-list-compare/recur l r universal-compare)]
[(procedure? v) 0]
[(unison-closure? v) 0]
[(number? v) 1]
[(char? v) 1]
[(boolean? v) 1]
[(unison-data? v) 1]
[(chunked-list? v) 3]
[(chunked-string? v) 3]
[(chunked-bytes? v) 3]
[(unison-termlink? v) 3]
[(unison-typelink? v) 3]
[(bytes? v) 5]))
(define (compare-data l r cmp-ty)
(match* (l r)
[((unison-data lr lt lfs) (unison-data rr rt rfs))
(compare-data-stuff lr lt lfs rr rt rfs cmp-ty)]))
(define (compare-data-stuff lr lt lfs rr rt rfs cmp-ty)
(define new-cmp-ty (or cmp-ty (eq? lr builtin-any:typelink)))
(comparisons
(if cmp-ty (compare-typelink lr rr) '=)
(compare-num lt rt)
(compare-num (length lfs) (length rfs))
(lexico-compare lfs rfs new-cmp-ty)))
; gives links to compare values as pseudo- or actual data types.
; This is how the interpreter works, so this is an attempt to obtain
; the same ordering.
(define (pseudo-data-link v)
(cond
[(boolean? v) builtin-boolean:typelink]
[(char? v) builtin-char:typelink]
[(flonum? v) builtin-float:typelink]
[(and (number? v) (negative? v)) builtin-int:typelink]
[(number? v) builtin-nat:typelink]
[(unison-data? v) (unison-data-ref v)]))
(define (compare-proc l r cmp-ty)
(define (unpack v)
(if (procedure? v)
(values (lookup-function-link v) '())
(values
(lookup-function-link (unison-closure-code v))
(unison-closure-env v))))
(define-values (lnl envl) (unpack l))
(define-values (lnr envr) (unpack r))
(comparisons
(compare-termlink lnl lnr)
(lexico-compare envl envr cmp-ty)))
(define (universal-compare l r [cmp-ty #f])
(define (u-proc? v)
(or (procedure? v) (unison-closure? v)))
(cond
[(eq? l r) '=] ; optimistic equality case
[(and (boolean? l) (boolean? r)) (if r '< '>)]
[(and (char? l) (char? r)) (if (char<? l r) '< '>)]
[(and (number? l) (number? r)) (compare-num l r)]
[(and (chunked-list? l) (chunked-list? r))
(chunked-list-compare/recur l r universal-compare)]
[(and (chunked-string? l) (chunked-string? r))
(chunked-string-compare/recur l r (lambda (a b) (if (char<? a b) '< '>)))]
(chunked-string-compare/recur l r compare-char)]
[(and (chunked-bytes? l) (chunked-bytes? r))
(chunked-bytes-compare/recur l r (lambda (a b) (if (< a b) '< '>)))]
[(and (bytes? l) (bytes? r))
(cond
[(bytes=? l r) '=]
[(bytes<? l r) '<]
[else '>])]
[(and (unison-data? l) (unison-data? r))
(let ([fls (unison-data-fields l)] [frs (unison-data-fields r)])
(comparisons
(cmp-num (unison-data-tag l) (unison-data-tag r))
(cmp-num (length fls) (length frs))
(lexico-compare fls frs)))]
(chunked-bytes-compare/recur l r compare-byte)]
[(and (unison-data? l) (unison-data? r)) (compare-data l r cmp-ty)]
[(and (bytes? r) (bytes? r)) (compare-bytes l r)]
[(and (u-proc? l) (u-proc? r)) (compare-proc l r)]
[(and (unison-termlink? l) (unison-termlink? r))
(compare-termlink l r)]
[(and (unison-typelink? l) (unison-typelink? r))
(compare-typelink l r)]
[(= 3 (value->category l) (value->category r))
(compare-typelink (pseudo-data-link l) (pseudo-data-link r))]
[(= (value->category l) (value->category r))
(raise
(make-exn:bug
"unsupported universal comparison of values"
(unison-tuple l r)))]
[else
(let ([dl (describe-value l)]
[dr (describe-value r)])
(raise
(format
"universal-compare: unimplemented\n~a\n\n~a"
dl dr)))]))
(compare-num (value->category l) (value->category r))]))
(define (list->unison-tuple l)
(foldr ref-tuple-pair ref-unit-unit l))
(define (unison-tuple . l) (list->unison-tuple l))
(define (chunked-string<? l r) (chunked-string=?/recur l r char<?))
@ -358,3 +510,30 @@
(begin
(vector-set! dst i (vector-ref src (+ off i)))
(next (fx1- i)))))))
(define (write-exn:bug ex port mode)
(when mode
(write-string "<exn:bug " port))
(let ([recur (case mode
[(#t) write]
[(#f) display]
[else (lambda (v port) (print v port mode))])])
(recur (chunked-string->string (exn:bug-msg ex)) port)
(if mode (write-string " " port) (newline port))
(write-string (describe-value (exn:bug-val ex)) port))
(when mode
(write-string ">")))
(struct exn:bug (msg val)
#:constructor-name make-exn:bug
#:methods gen:custom-write
[(define write-proc write-exn:bug)])
(define (exn:bug->exception b)
(exception
ref-runtimefailure:typelink
(exn:bug-msg b)
(exn:bug-val b)))

View File

@ -52,21 +52,49 @@
ord
failure
exception
exn:bug
make-exn:bug
exn:bug?
exn:bug->exception
unison-any:typelink
builtin-any:typelink
unison-any-any:tag
unison-any-any
unison-boolean:typelink
builtin-boolean:typelink
unison-boolean-true:tag
unison-boolean-false:tag
unison-boolean-true
unison-boolean-false
builtin-bytes:typelink
builtin-char:typelink
builtin-float:typelink
builtin-int:typelink
builtin-nat:typelink
builtin-text:typelink
builtin-code:typelink
builtin-mvar:typelink
builtin-pattern:typelink
builtin-promise:typelink
builtin-sequence:typelink
builtin-socket:typelink
builtin-tls:typelink
builtin-timespec:typelink
builtin-threadid:typelink
builtin-value:typelink
builtin-crypto.hashalgorithm:typelink
builtin-char.class:typelink
builtin-immutablearray:typelink
builtin-immutablebytearray:typelink
builtin-mutablearray:typelink
builtin-mutablebytearray:typelink
builtin-processhandle:typelink
builtin-ref.ticket:typelink
builtin-tls.cipher:typelink
builtin-tls.clientconfig:typelink
builtin-tls.privatekey:typelink
builtin-tls.serverconfig:typelink
builtin-tls.signedcert:typelink
builtin-tls.version:typelink
unison-tuple->list)
(require
@ -110,7 +138,8 @@
(struct unison-request
(ability tag fields)
#:constructor-name make-request)
#:constructor-name make-request
#:transparent)
; Structures for other unison builtins. Originally the plan was
; just to secretly use an in-unison data type representation.
@ -173,15 +202,43 @@
(struct unison-typelink ()
#:transparent
#:reflection-name 'typelink)
#:reflection-name 'typelink
#:property prop:equal+hash
(let ()
(define (equal-proc lnl lnr rec)
(match lnl
[(unison-typelink-builtin l)
(match lnr
[(unison-typelink-builtin r)
(equal? l r)]
[else #f])]
[(unison-typelink-derived hl i)
(match lnr
[(unison-typelink-derived hr j)
(and (equal? hl hr) (= i j))]
[else #f])]))
(define ((hash-proc init) ln rec)
(match ln
[(unison-typelink-builtin n)
(fxxor (fx*/wraparound (rec n) 53)
(fx*/wraparound init 17))]
[(unison-typelink-derived hl i)
(fxxor (fx*/wraparound (rec hl) 59)
(fx*/wraparound (rec i) 61)
(fx*/wraparound init 19))]))
(list equal-proc (hash-proc 3) (hash-proc 5))))
(struct unison-typelink-builtin unison-typelink
(name)
#:reflection-name 'typelink)
#:reflection-name 'typelink
#:transparent)
(struct unison-typelink-derived unison-typelink
(ref ix)
#:reflection-name 'typelink)
#:reflection-name 'typelink
#:transparent)
(struct unison-code (rep))
(struct unison-quote (val))
@ -275,18 +332,64 @@
(define (either-get either) (car (unison-sum-fields either)))
; a -> Any
(define unison-any:typelink (unison-typelink-builtin "Any"))
(define builtin-any:typelink (unison-typelink-builtin "Any"))
(define unison-any-any:tag 0)
(define (unison-any-any x)
(data unison-any:typelink unison-any-any:tag x))
(data builtin-any:typelink unison-any-any:tag x))
(define unison-boolean:typelink (unison-typelink-builtin "Boolean"))
(define builtin-boolean:typelink (unison-typelink-builtin "Boolean"))
(define unison-boolean-true:tag 1)
(define unison-boolean-false:tag 0)
(define unison-boolean-true
(data unison-boolean:typelink unison-boolean-true:tag))
(data builtin-boolean:typelink unison-boolean-true:tag))
(define unison-boolean-false
(data unison-boolean:typelink unison-boolean-false:tag))
(data builtin-boolean:typelink unison-boolean-false:tag))
(define builtin-bytes:typelink (unison-typelink-builtin "Bytes"))
(define builtin-char:typelink (unison-typelink-builtin "Char"))
(define builtin-code:typelink (unison-typelink-builtin "Code"))
(define builtin-float:typelink (unison-typelink-builtin "Float"))
(define builtin-int:typelink (unison-typelink-builtin "Int"))
(define builtin-mvar:typelink (unison-typelink-builtin "MVar"))
(define builtin-nat:typelink (unison-typelink-builtin "Nat"))
(define builtin-pattern:typelink (unison-typelink-builtin "Pattern"))
(define builtin-promise:typelink (unison-typelink-builtin "Promise"))
(define builtin-sequence:typelink (unison-typelink-builtin "Sequence"))
(define builtin-socket:typelink (unison-typelink-builtin "Socket"))
(define builtin-text:typelink (unison-typelink-builtin "Text"))
(define builtin-tls:typelink (unison-typelink-builtin "Tls"))
(define builtin-timespec:typelink (unison-typelink-builtin "TimeSpec"))
(define builtin-threadid:typelink (unison-typelink-builtin "ThreadId"))
(define builtin-value:typelink (unison-typelink-builtin "Value"))
(define builtin-crypto.hashalgorithm:typelink
(unison-typelink-builtin "crypto.HashAlgorithm"))
(define builtin-char.class:typelink
(unison-typelink-builtin "Char.Class"))
(define builtin-immutablearray:typelink
(unison-typelink-builtin "ImmutableArray"))
(define builtin-immutablebytearray:typelink
(unison-typelink-builtin "ImmutableByteArray"))
(define builtin-mutablearray:typelink
(unison-typelink-builtin "MutableArray"))
(define builtin-mutablebytearray:typelink
(unison-typelink-builtin "MutableArray"))
(define builtin-processhandle:typelink
(unison-typelink-builtin "ProcessHandle"))
(define builtin-ref.ticket:typelink
(unison-typelink-builtin "Ref.Ticket"))
(define builtin-tls.cipher:typelink
(unison-typelink-builtin "Tls.Cipher"))
(define builtin-tls.clientconfig:typelink
(unison-typelink-builtin "Tls.ClientConfig"))
(define builtin-tls.privatekey:typelink
(unison-typelink-builtin "Tls.PrivateKey"))
(define builtin-tls.serverconfig:typelink
(unison-typelink-builtin "Tls.ServerConfig"))
(define builtin-tls.signedcert:typelink
(unison-typelink-builtin "Tls.SignedCert"))
(define builtin-tls.version:typelink
(unison-typelink-builtin "Tls.Version"))
; Type -> Text -> Any -> Failure
(define (failure typeLink msg any)
@ -296,12 +399,6 @@
(define (exception typeLink msg a)
(failure typeLink msg (unison-any-any a)))
; TODO needs better pretty printing for when it isn't caught
(struct exn:bug (msg a)
#:constructor-name make-exn:bug)
(define (exn:bug->exception b) (exception "RuntimeFailure" (exn:bug-msg b) (exn:bug-a b)))
; A counter for internally numbering declared data, so that the
; entire reference doesn't need to be stored in every data record.
(define next-data-number 0)

View File

@ -26,5 +26,9 @@
(bytes->chunked-bytes (gzip-bytes (chunked-bytes->bytes bytes))))
(define (gzip.decompress bytes)
(with-handlers [[exn:fail? (lambda (e) (exception "Gzip data corrupted" (exception->string e) '()))] ]
(right (bytes->chunked-bytes (gunzip-bytes (chunked-bytes->bytes bytes))))))
(with-handlers
[[exn:fail? (lambda (e) (left (exception->string e)))]]
(right
(bytes->chunked-bytes
(gunzip-bytes
(chunked-bytes->bytes bytes))))))

View File

@ -0,0 +1,12 @@
#lang info
(define collection "unison")
(define deps
(list
"x509-lib"
"r6rs-lib"
"rackunit-lib"
"math-lib"
"srfi-lib"
))

View File

@ -43,87 +43,103 @@
; typeLink msg any
(define (Exception typeLink message payload)
(let* ([x7 (unison-any-any payload)]
[x8 (unison-failure-failure typeLink message x7)])
(unison-either-left x8)))
(let* ([a (unison-any-any payload)]
[msg (string->chunked-string message)]
[f (ref-failure-failure typeLink msg a)])
(ref-either-left f)))
(define-unison (isFileOpen.impl.v3 port)
(unison-either-right (not (port-closed? port))))
(ref-either-right (not (port-closed? port))))
(define-unison (ready.impl.v1 port)
(if (byte-ready? port)
(unison-either-right #t)
(ref-either-right #t)
(if (port-eof? port)
(Exception 'IO "EOF" port)
(unison-either-right #f))))
(Exception ref-iofailure:typelink "EOF" port)
(ref-either-right #f))))
(define-unison (getCurrentDirectory.impl.v3 unit)
(unison-either-right
(ref-either-right
(string->chunked-string (path->string (current-directory)))))
(define-unison (isSeekable.impl.v3 handle)
(unison-either-right
(ref-either-right
(port-has-set-port-position!? handle)))
(define-unison (handlePosition.impl.v3 handle)
(unison-either-right (port-position handle)))
(ref-either-right (port-position handle)))
(define-unison (seekHandle.impl.v3 handle mode amount)
(data-case mode
(0 ()
(set-port-position! handle amount)
(unison-either-right none))
(ref-either-right none))
(1 ()
(let ([current (port-position handle)])
(set-port-position! handle (+ current amount))
(unison-either-right none)))
(ref-either-right none)))
(2 ()
(Exception 'BadNews "SeekFromEnd not supported" 0))))
(Exception
ref-iofailure:typelink
"SeekFromEnd not supported"
0))))
(define-unison (getLine.impl.v1 handle)
(let* ([line (read-line handle)])
(if (eof-object? line)
(unison-either-right (string->chunked-string ""))
(unison-either-right (string->chunked-string line))
(ref-either-right (string->chunked-string ""))
(ref-either-right (string->chunked-string line))
)))
(define-unison (getChar.impl.v1 handle)
(let* ([char (read-char handle)])
(if (eof-object? char)
(Exception 'isEOFError "End of file reached")
(unison-either-right char))))
(Exception
ref-iofailure:typelink
"End of file reached"
ref-unit-unit)
(ref-either-right char))))
(define-unison (getSomeBytes.impl.v1 handle bytes)
(let* ([buffer (make-bytes bytes)]
[line (read-bytes-avail! buffer handle)])
(if (eof-object? line)
(unison-either-right (bytes->chunked-bytes #""))
(unison-either-right (bytes->chunked-bytes buffer))
(ref-either-right (bytes->chunked-bytes #""))
(ref-either-right (bytes->chunked-bytes buffer))
)))
(define-unison (getBuffering.impl.v3 handle)
(case (file-stream-buffer-mode handle)
[(none) (unison-either-right unison-buffermode-no-buffering)]
[(line) (unison-either-right
unison-buffermode-line-buffering)]
[(block) (unison-either-right
unison-buffermode-block-buffering)]
[(#f) (Exception 'IO "Unable to determine buffering mode of handle" '())]
[else (Exception 'IO "Unexpected response from file-stream-buffer-mode" '())]))
[(none) (ref-either-right ref-buffermode-no-buffering)]
[(line) (ref-either-right
ref-buffermode-line-buffering)]
[(block) (ref-either-right
ref-buffermode-block-buffering)]
[(#f) (Exception
ref-iofailure:typelink
"Unable to determine buffering mode of handle"
ref-unit-unit)]
[else (Exception
ref-iofailure:typelink
"Unexpected response from file-stream-buffer-mode"
ref-unit-unit)]))
(define-unison (setBuffering.impl.v3 handle mode)
(data-case mode
(0 ()
(file-stream-buffer-mode handle 'none)
(unison-either-right none))
(ref-either-right none))
(1 ()
(file-stream-buffer-mode handle 'line)
(unison-either-right none))
(ref-either-right none))
(2 ()
(file-stream-buffer-mode handle 'block)
(unison-either-right none))
(ref-either-right none))
(3 (size)
(Exception 'IO "Sized block buffering not supported" '()))))
(Exception
ref-iofailure:typelink
"Sized block buffering not supported"
ref-unit-unit))))
(define (with-buffer-mode port mode)
(file-stream-buffer-mode port mode)
@ -141,8 +157,11 @@
(define-unison (getEcho.impl.v1 handle)
(if (eq? handle stdin)
(unison-either-right (get-stdin-echo))
(Exception 'IO "getEcho only supported on stdin" '())))
(ref-either-right (get-stdin-echo))
(Exception
ref-iofailure:typelink
"getEcho only supported on stdin"
ref-unit-unit)))
(define-unison (setEcho.impl.v1 handle echo)
(if (eq? handle stdin)
@ -150,23 +169,29 @@
(if echo
(system "stty echo")
(system "stty -echo"))
(unison-either-right none))
(Exception 'IO "setEcho only supported on stdin" '())))
(ref-either-right none))
(Exception
ref-iofailure:typelink
"setEcho only supported on stdin"
ref-unit-unit)))
(define (get-stdin-echo)
(let ([current (with-output-to-string (lambda () (system "stty -a")))])
(string-contains? current " echo ")))
(define-unison (getArgs.impl.v1 unit)
(unison-either-right
(ref-either-right
(vector->chunked-list
(vector-map string->chunked-string (current-command-line-arguments)))))
(define-unison (getEnv.impl.v1 key)
(let ([value (environment-variables-ref (current-environment-variables) (string->bytes/utf-8 (chunked-string->string key)))])
(if (false? value)
(Exception 'IO "environmental variable not found" key)
(unison-either-right
(Exception
ref-iofailure:typelink
"environmental variable not found"
key)
(ref-either-right
(string->chunked-string (bytes->string/utf-8 value))))))
;; From https://github.com/sorawee/shlex/blob/5de06500e8c831cfc8dffb99d57a76decc02c569/main.rkt (MIT License)

View File

@ -44,18 +44,28 @@
(define (getFileSize.impl.v3 path)
(with-handlers
[[exn:fail:filesystem? (lambda (e) (exception "IOFailure" (exception->string e) '()))]]
[[exn:fail:filesystem?
(lambda (e)
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]]
(right (file-size (chunked-string->string path)))))
(define (getFileTimestamp.impl.v3 path)
(with-handlers
[[exn:fail:filesystem? (lambda (e) (exception "IOFailure" (exception->string e) '()))]]
[[exn:fail:filesystem?
(lambda (e)
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]]
(right (file-or-directory-modify-seconds (chunked-string->string path)))))
; in haskell, it's not just file but also directory
(define-unison (fileExists.impl.v3 path)
(let ([path-string (chunked-string->string path)])
(unison-either-right
(ref-either-right
(or
(file-exists? path-string)
(directory-exists? path-string)))))
@ -69,10 +79,10 @@
(define-unison (setCurrentDirectory.impl.v3 path)
(current-directory (chunked-string->string path))
(unison-either-right none))
(ref-either-right none))
(define-unison (createTempDirectory.impl.v3 prefix)
(unison-either-right
(ref-either-right
(string->chunked-string
(path->string
(make-temporary-directory*
@ -81,31 +91,31 @@
(define-unison (createDirectory.impl.v3 file)
(make-directory (chunked-string->string file))
(unison-either-right none))
(ref-either-right none))
(define-unison (removeDirectory.impl.v3 file)
(delete-directory/files (chunked-string->string file))
(unison-either-right none))
(ref-either-right none))
(define-unison (isDirectory.impl.v3 path)
(unison-either-right
(ref-either-right
(directory-exists? (chunked-string->string path))))
(define-unison (renameDirectory.impl.v3 old new)
(rename-file-or-directory (chunked-string->string old)
(chunked-string->string new))
(unison-either-right none))
(ref-either-right none))
(define-unison (renameFile.impl.v3 old new)
(rename-file-or-directory (chunked-string->string old)
(chunked-string->string new))
(unison-either-right none))
(ref-either-right none))
(define-unison (systemTime.impl.v3 unit)
(unison-either-right (current-seconds)))
(ref-either-right (current-seconds)))
(define-unison (systemTimeMicroseconds.impl.v3 unit)
(unison-either-right (inexact->exact (* 1000 (current-inexact-milliseconds)))))
(ref-either-right (inexact->exact (* 1000 (current-inexact-milliseconds)))))
(define (threadCPUTime.v1)
(right (current-process-milliseconds (current-thread))))

View File

@ -1,8 +1,14 @@
#lang racket/base
(require math/base
rnrs/arithmetic/fixnums-6
(only-in unison/boot data-case define-unison))
racket/performance-hint
rnrs/arithmetic/bitwise-6
(only-in unison/boot
clamp-integer
clamp-natural
data-case
define-unison
nbit63))
(provide
builtin-Float.exp
@ -73,8 +79,8 @@
(define-unison (builtin-Float.min n m) (min n m))
(define-unison (builtin-Float.tan n) (tan n))
(define-unison (builtin-Float.tanh n) (tanh n))
(define-unison (builtin-Int.* n m) (* n m))
(define-unison (builtin-Int.pow n m) (expt n m))
(define-unison (builtin-Int.* n m) (clamp-integer (* n m)))
(define-unison (builtin-Int.pow n m) (clamp-integer (expt n m)))
(define-unison (builtin-Int.trailingZeros n) (TZRO n))
(define-unison (builtin-Nat.trailingZeros n) (TZRO n))
(define-unison (builtin-Nat.popCount n) (POPC n))
@ -85,19 +91,19 @@
(define ACOS acos)
(define ACSH acosh)
(define ADDF +)
(define ADDI +)
(define (ADDI i j) (clamp-integer (+ i j)))
(define SUBF -)
(define SUBI -)
(define (SUBI i j) (clamp-integer (- i j)))
(define (SGNI n) (if (< n 0) -1 (if (> n 0) +1 0)))
(define MAXF max)
(define MINF min)
(define MULF *)
(define MULI *)
(define NEGI -)
(define (MULI i j) (clamp-integer (* i j)))
(define (NEGI i) (if (> i nbit63) (- i) i))
(define NTOF exact->inexact)
(define POWF expt)
(define POWI expt)
(define POWN expt)
(define (POWI i j) (clamp-integer (expt i j)))
(define (POWN i j) (clamp-natural (expt i j)))
(define ASIN asin)
(define ASNH asinh)
(define ATAN atan)
@ -106,7 +112,10 @@
(define CEIL ceiling)
(define FLOR floor)
(define COSF cos)
(define TRNF truncate)
(define (TRNF f)
(cond
[(or (= f +inf.0) (= f -inf.0) (eqv? f +nan.0) (eqv? f +nan.f)) 0]
[else (clamp-integer (inexact->exact (truncate f)))]))
(define RNDF round)
(define SQRT sqrt)
(define TANF tan)
@ -115,19 +124,17 @@
(define SINH sinh)
(define COSH cosh)
(define DIVF /)
(define DIVI /)
(define (DIVI i j) (floor (/ i j)))
(define ITOF exact->inexact)
(define (EQLF a b) (if (= a b) 1 0))
(define (LEQF a b) (if (<= a b) 1 0))
(define (EQLI a b) (if (= a b) 1 0))
(define (POPC n)
(if (< n 0)
(+ 65 (fxbit-count n))
(fxbit-count n)))
(modulo (bitwise-bit-count n) 65))
(define (TZRO n)
(let ([bit (fxfirst-bit-set n)])
(let ([bit (bitwise-first-bit-set n)])
(if (eq? -1 bit)
64
bit)))

View File

@ -41,6 +41,7 @@
[replicate (-> pattern? exact-nonnegative-integer? exact-nonnegative-integer? pattern?)]
;; Only valid pattern? in the functions below is p:char
[char-class-and (-> pattern? pattern? pattern?)]
[char-class-or (-> pattern? pattern? pattern?)]
[char-class-not (-> pattern? pattern?)]))
;; -----------------------------------------------------------------------------
@ -269,7 +270,7 @@
(define-values [cstr* captures*]
(for/fold ([cstr cstr]
[captures captures]
#:result (ok cstr captures))
#:result (values cstr captures))
([i (in-range min-count)])
#:break (not cstr)
(pat-m cstr captures)))
@ -285,12 +286,11 @@
;; -----------------------------------------------------------------------------
(define (char-class-and cc1 cc2)
(make-pattern
(p:char
(λ (c) (match (cons (pattern-pat cc1) (pattern-pat cc2))
[(cons (p:char 'any) (p:char p)) (p c)]
[(cons (p:char p) (p:char 'any)) (p c)]
[(cons (p:char p1) (p:char p2)) (and (p1 c) (p2 c))])))))
(match* ((pattern-pat cc1) (pattern-pat cc2))
[((p:char 'any) _) cc2]
[(_ (p:char 'any)) cc1]
[((p:char p) (p:char q))
(make-pattern (p:char (λ (c) (and (p c) (q c)))))]))
(define (char-class-not cc)
(make-pattern
@ -298,3 +298,10 @@
(λ (c) (match (pattern-pat cc)
[(p:char 'any) #f]
[(p:char p) (not (p c))])))))
(define (char-class-or cc1 cc2)
(match* ((pattern-pat cc1) (pattern-pat cc2))
[((p:char 'any) _) cc1]
[(_ (p:char 'any)) cc2]
[((p:char p) (p:char q))
(make-pattern (p:char (λ (c) (or (p c) (q c)))))]))

View File

@ -49,6 +49,7 @@
termlink->name
add-runtime-code
build-intermediate-module
build-runtime-module
termlink->proc)
@ -82,38 +83,38 @@
(define (decode-term tm)
(match tm
[(unison-data _ t (list tms))
#:when (= t unison-schemeterm-sexpr:tag)
#:when (= t ref-schemeterm-sexpr:tag)
(map decode-term (chunked-list->list tms))]
[(unison-data _ t (list as h tms))
#:when (= t unison-schemeterm-handle:tag)
#:when (= t ref-schemeterm-handle:tag)
`(handle
,(map
(lambda (tx) `(quote ,(text->ident tx)))
(lambda (tx) (text->linkname tx))
(chunked-list->list as))
,(text->ident h)
,@(map decode-term (chunked-list->list tms)))]
[(unison-data _ t (list hd sc cs))
#:when (= t unison-schemeterm-cases:tag)
#:when (= t ref-schemeterm-cases:tag)
(assemble-cases
(text->ident hd)
(decode-term sc)
(map decode-term (chunked-list->list cs)))]
[(unison-data _ t (list hd bs bd))
#:when (= t unison-schemeterm-binds:tag)
#:when (= t ref-schemeterm-binds:tag)
`(,(text->ident hd)
,(map decode-binding (chunked-list->list bs))
,(decode-term bd))]
[(unison-data _ t (list tx))
#:when (= t unison-schemeterm-ident:tag)
#:when (= t ref-schemeterm-ident:tag)
(text->ident tx)]
[(unison-data _ t (list tx))
#:when (= t unison-schemeterm-string:tag)
#:when (= t ref-schemeterm-string:tag)
(chunked-string->string tx)]
[(unison-data _ t (list tx))
#:when (= t unison-schemeterm-symbol:tag)
#:when (= t ref-schemeterm-symbol:tag)
`(quote ,(text->ident tx))]
[(unison-data _ t (list ns))
#:when (= t unison-schemeterm-bytevec:tag)
#:when (= t ref-schemeterm-bytevec:tag)
(list->bytes (chunked-list->list ns))]
[else
(raise (format "decode-term: unimplemented case: ~a" tm))]))
@ -130,13 +131,13 @@
(define (decode-syntax dfn)
(match dfn
[(unison-data _ t (list nm vs bd))
#:when (= t unison-schemedefn-define:tag)
#:when (= t ref-schemedefn-define:tag)
(let ([head (map text->ident
(cons nm (chunked-list->list vs)))]
[body (decode-term bd)])
(list 'define-unison head body))]
[(unison-data _ t (list nm bd))
#:when (= t unison-schemedefn-alias:tag)
#:when (= t ref-schemedefn-alias:tag)
(list 'define (text->ident nm) (decode-term bd))]
[else
(raise (format "decode-syntax: unimplemented case: ~a" dfn))]))
@ -148,6 +149,10 @@
[(equal? (substring st 0 2) "#\\") (string-ref st 2)]
[else #f]))
(define (text->linkname tx)
(let* ([st (chunked-string->string tx)])
(string->symbol (string-append st ":typelink"))))
(define (text->ident tx)
(let* ([st (chunked-string->string tx)]
[n (string->number st)]
@ -162,10 +167,10 @@
(define (decode-ref rf)
(match rf
[(unison-data r t (list name))
#:when (= t unison-reference-builtin:tag)
#:when (= t ref-reference-builtin:tag)
(sum 0 (chunked-string->string name))]
[(unison-data r t (list id))
#:when (= t unison-reference-derived:tag)
#:when (= t ref-reference-derived:tag)
(data-case id
[0 (bs i) (sum 1 bs i)])]))
@ -195,7 +200,7 @@
[(_)
#`(lambda (gr)
(data-case (group-ref-ident gr)
[#,unison-schemeterm-ident:tag (name) name]
[#,ref-schemeterm-ident:tag (name) name]
[else
(raise
(format
@ -237,10 +242,10 @@
(define (termlink->reference rn)
(match rn
[(unison-termlink-builtin name)
(unison-reference-builtin
(ref-reference-builtin
(string->chunked-string name))]
[(unison-termlink-derived bs i)
(unison-reference-derived (unison-id-id bs i))]
(ref-reference-derived (ref-id-id bs i))]
[else (raise "termlink->reference: con case")]))
(define (group-reference gr)
@ -255,19 +260,19 @@
(define runtime-module-map (make-hash))
(define (reflect-derived bs i)
(data unison-reference:link unison-reference-derived:tag
(data unison-id:link unison-id-id:tag bs i)))
(data ref-reference:typelink ref-reference-derived:tag
(data ref-id:typelink ref-id-id:tag bs i)))
(define (function->groupref f)
(match (lookup-function-link f)
[(unison-termlink-derived h i)
(unison-groupref-group
(unison-reference-derived
(unison-id-id h i))
(ref-groupref-group
(ref-reference-derived
(ref-id-id h i))
0)]
[(unison-termlink-builtin name)
(unison-groupref-group
(unison-reference-builtin (string->chunked-string name))
(ref-groupref-group
(ref-reference-builtin (string->chunked-string name))
0)]
[else (raise "function->groupref: con case")]))
@ -275,19 +280,19 @@
(match vl
[(unison-data _ t (list l))
(cond
[(= t unison-vlit-bytes:tag) l]
[(= t unison-vlit-char:tag) l]
[(= t unison-vlit-bytearray:tag) l]
[(= t unison-vlit-text:tag) l]
[(= t unison-vlit-termlink:tag) (referent->termlink l)]
[(= t unison-vlit-typelink:tag) (reference->typelink l)]
[(= t unison-vlit-float:tag) l]
[(= t unison-vlit-pos:tag) l]
[(= t unison-vlit-neg:tag) (- l)]
[(= t unison-vlit-quote:tag) (unison-quote l)]
[(= t unison-vlit-code:tag) (unison-code l)]
[(= t unison-vlit-array:tag) (vector-map reify-value l)]
[(= t unison-vlit-seq:tag)
[(= t ref-vlit-bytes:tag) l]
[(= t ref-vlit-char:tag) l]
[(= t ref-vlit-bytearray:tag) l]
[(= t ref-vlit-text:tag) l]
[(= t ref-vlit-termlink:tag) (referent->termlink l)]
[(= t ref-vlit-typelink:tag) (reference->typelink l)]
[(= t ref-vlit-float:tag) l]
[(= t ref-vlit-pos:tag) l]
[(= t ref-vlit-neg:tag) (- l)]
[(= t ref-vlit-quote:tag) (unison-quote l)]
[(= t ref-vlit-code:tag) (unison-code l)]
[(= t ref-vlit-array:tag) (vector-map reify-value l)]
[(= t ref-vlit-seq:tag)
; TODO: better map over chunked list
(vector->chunked-list
(vector-map reify-value (chunked-list->vector l)))]
@ -297,19 +302,19 @@
(define (reify-value v)
(match v
[(unison-data _ t (list rf rt bs0))
#:when (= t unison-value-data:tag)
#:when (= t ref-value-data:tag)
(let ([bs (map reify-value (chunked-list->list bs0))])
(make-data (reference->typelink rf) rt bs))]
[(unison-data _ t (list gr bs0))
#:when (= t unison-value-partial:tag)
#:when (= t ref-value-partial:tag)
(let ([bs (map reify-value (chunked-list->list bs0))]
[proc (resolve-proc gr)])
(apply proc bs))]
[(unison-data _ t (list vl))
#:when (= t unison-value-vlit:tag)
#:when (= t ref-value-vlit:tag)
(reify-vlit vl)]
[(unison-data _ t (list bs0 k))
#:when (= t unison-value-cont:tag)
#:when (= t ref-value-cont:tag)
(raise "reify-value: unimplemented cont case")]
[(unison-data r t fs)
(raise "reify-value: unimplemented data case")]
@ -318,72 +323,76 @@
(define (reflect-typelink tl)
(match tl
[(unison-typelink-builtin name) (unison-reference-builtin name)]
[(unison-typelink-builtin name)
(ref-reference-builtin
(string->chunked-string name))]
[(unison-typelink-derived h i)
(unison-reference-derived (unison-id-id h i))]))
(ref-reference-derived (ref-id-id h i))]))
(define (reflect-termlink tl)
(match tl
[(unison-termlink-con r i)
(unison-referent-con (reflect-typelink r) i)]
(ref-referent-con (reflect-typelink r) i)]
[(unison-termlink-builtin name)
(unison-referent-def (unison-reference-builtin name))]
(ref-referent-def
(ref-reference-builtin
(string->chunked-string name)))]
[(unison-termlink-derived h i)
(unison-referent-def
(unison-reference-derived
(unison-id-id h i)))]))
(ref-referent-def
(ref-reference-derived
(ref-id-id h i)))]))
(define (number-reference n)
(cond
[(exact-nonnegative-integer? n)
(unison-reference-builtin (string->chunked-string "Nat"))]
(ref-reference-builtin (string->chunked-string "Nat"))]
[(exact-integer? n)
(unison-reference-builtin (string->chunked-string "Int"))]
(ref-reference-builtin (string->chunked-string "Int"))]
[else
(unison-reference-builtin (string->chunked-string "Float"))]))
(ref-reference-builtin (string->chunked-string "Float"))]))
(define (reflect-value v)
(match v
[(? exact-nonnegative-integer?)
(unison-value-vlit (unison-vlit-pos v))]
(ref-value-vlit (ref-vlit-pos v))]
[(? exact-integer?)
(unison-value-vlit (unison-vlit-neg (- v)))]
(ref-value-vlit (ref-vlit-neg (- v)))]
[(? inexact-real?)
(unison-value-vlit (unison-vlit-float v))]
(ref-value-vlit (ref-vlit-float v))]
[(? char?)
(unison-value-vlit (unison-vlit-char v))]
(ref-value-vlit (ref-vlit-char v))]
[(? chunked-bytes?)
(unison-value-vlit (unison-vlit-bytes v))]
(ref-value-vlit (ref-vlit-bytes v))]
[(? bytes?)
(unison-value-vlit (unison-vlit-bytearray v))]
(ref-value-vlit (ref-vlit-bytearray v))]
[(? vector?)
(unison-value-vlit
(unison-vlit-array
(ref-value-vlit
(ref-vlit-array
(vector-map reflect-value v)))]
[(? chunked-string?)
(unison-value-vlit (unison-vlit-text v))]
(ref-value-vlit (ref-vlit-text v))]
; TODO: better map over chunked lists
[(? chunked-list?)
(unison-value-vlit
(unison-vlit-seq
(ref-value-vlit
(ref-vlit-seq
(list->chunked-list
(map reflect-value (chunked-list->list v)))))]
[(? unison-termlink?)
(unison-value-vlit (unison-vlit-termlink (reflect-termlink v)))]
(ref-value-vlit (ref-vlit-termlink (reflect-termlink v)))]
[(? unison-typelink?)
(unison-value-vlit (unison-vlit-typelink (reflect-typelink v)))]
[(unison-code sg) (unison-value-vlit (unison-vlit-code sg))]
[(unison-quote q) (unison-value-vlit (unison-vlit-quote q))]
(ref-value-vlit (ref-vlit-typelink (reflect-typelink v)))]
[(unison-code sg) (ref-value-vlit (ref-vlit-code sg))]
[(unison-quote q) (ref-value-vlit (ref-vlit-quote q))]
[(unison-closure f as)
(unison-value-partial
(ref-value-partial
(function->groupref f)
(list->chunked-list (map reflect-value as)))]
[(? procedure?)
(unison-value-partial
(ref-value-partial
(function->groupref v)
empty-chunked-list)]
[(unison-data rf t fs)
(unison-value-data
(ref-value-data
(reflect-typelink rf)
t
(list->chunked-list (map reflect-value fs)))]))
@ -419,8 +428,8 @@
#:result
(if (null? unkn)
(unison-either-right (list->chunked-list sdbx))
(unison-either-left (list->chunked-list unkn))))
(ref-either-right (list->chunked-list sdbx))
(ref-either-left (list->chunked-list unkn))))
([r (in-chunked-list (value-term-dependencies v))])
@ -461,12 +470,15 @@
[0 (snd nil)
(values fst snd)])]))
(define (gen-typelinks code)
(define (typelink-deps code)
(group-type-dependencies
(list->chunked-list
(map unison-code-rep code))))
(define (typelink-defns-code links)
(map decode-syntax
(chunked-list->list
(gen-typelink-defns
(list->chunked-list
(map unison-code-rep code))))))
(chunked-list->list
(gen-typelink-defns links))))
(define (gen-code args)
(let-values ([(tl co) (splat-upair args)])
@ -558,22 +570,56 @@
(parameterize ([current-namespace runtime-namespace])
(dynamic-require `(quote ,mname) sym)))]))
; Straight-line module builder given intermediate definitions.
; This expects to receive a list of termlink, code pairs, and
; generates a scheme module that contains the corresponding
; definitions.
(define (build-intermediate-module primary dfns0)
(let* ([udefs (chunked-list->list dfns0)]
[pname (termlink->name primary)]
[tmlinks (map ufst udefs)]
[codes (map usnd udefs)]
[tylinks (typelink-deps codes)]
[sdefs (flatten (map gen-code udefs))])
`((require unison/boot
unison/data-info
unison/primops
unison/primops-generated
unison/builtin-generated
unison/simple-wrappers
unison/compound-wrappers)
,@(typelink-defns-code tylinks)
,@sdefs
(handle [ref-exception:typelink] top-exn-handler
(,pname #f)))))
(define (build-runtime-module mname tylinks tmlinks defs)
(let ([names (map termlink->name tmlinks)])
`(module ,mname racket/base
(require unison/boot
unison/data-info
unison/primops
unison/primops-generated
unison/builtin-generated
unison/simple-wrappers
unison/compound-wrappers)
(define (provided-tylink r)
(string->symbol
(chunked-string->string
(ref-typelink-name r))))
(define tynames (map provided-tylink (chunked-list->list tylinks)))
(define tmnames (map termlink->name tmlinks))
`(module ,mname racket/base
(require unison/boot
unison/data
unison/data-info
unison/primops
unison/primops-generated
unison/builtin-generated
unison/simple-wrappers
unison/compound-wrappers)
(provide ,@names)
(provide
,@tynames
,@tmnames)
,@tylinks
,@(typelink-defns-code tylinks)
,@defs)))
,@defs))
(define (add-runtime-module mname tylinks tmlinks defs)
(eval (build-runtime-module mname tylinks tmlinks defs)
@ -595,28 +641,27 @@
[codes (map usnd udefs)]
[refs (map termlink->reference tmlinks)]
[depss (map code-dependencies codes)]
[tylinks (gen-typelinks codes)]
[tylinks (typelink-deps codes)]
[deps (flatten depss)]
[fdeps (filter need-dependency? deps)]
[rdeps (remove* refs fdeps)])
(cond
[(null? fdeps) #f]
[(null? fdeps) empty-chunked-list]
[(null? rdeps)
(let ([ndefs (map gen-code udefs)] [sdefs (flatten (map gen-code udefs))]
(let ([ndefs (map gen-code udefs)]
[sdefs (flatten (map gen-code udefs))]
[mname (or mname0 (generate-module-name tmlinks))])
(expand-sandbox tmlinks (map-links depss))
(register-code udefs)
(add-module-associations tmlinks mname)
(add-runtime-module mname tylinks tmlinks sdefs)
#f)]
[else (list->chunked-list rdeps)]))]
[else #f])))
empty-chunked-list)]
[else
(list->chunked-list
(map reference->termlink rdeps))]))]
[else empty-chunked-list])))
(define (unison-POp-CACH dfns0)
(let ([result (add-runtime-code #f dfns0)])
(if result
(sum 1 result)
(sum 0 '()))))
(define (unison-POp-CACH dfns0) (add-runtime-code #f dfns0))
(define (unison-POp-LOAD v0)
(let* ([val (unison-quote-val v0)]
@ -625,14 +670,16 @@
[fdeps (filter need-dependency? (chunked-list->list deps))])
(if (null? fdeps)
(sum 1 (reify-value val))
(sum 0 (list->chunked-list fdeps)))))
(sum 0
(list->chunked-list
(map reference->termlink fdeps))))))
(define (unison-POp-LKUP tl) (lookup-code tl))
(define-unison (builtin-Code.lookup tl)
(match (lookup-code tl)
[(unison-sum 0 (list)) unison-optional-none]
[(unison-sum 1 (list co)) (unison-optional-some co)]))
[(unison-sum 0 (list)) ref-optional-none]
[(unison-sum 1 (list co)) (ref-optional-some co)]))
(define-unison (builtin-validateSandboxed ok v)
(let ([l (sandbox-scheme-value (chunked-list->list ok) v)])

View File

@ -40,6 +40,8 @@
builtin-Float.fromRepresentation:termlink
builtin-Float.toRepresentation
builtin-Float.toRepresentation:termlink
builtin-Float.ceiling
builtin-Float.ceiling:termlink
builtin-Float.exp
builtin-Float.exp:termlink
builtin-Float.log
@ -139,6 +141,9 @@
builtin-IO.randomBytes
builtin-IO.randomBytes:termlink
builtin-Scope.bytearrayOf
builtin-Scope.bytearrayOf:termlink
builtin-Universal.==
builtin-Universal.==:termlink
builtin-Universal.>
@ -153,6 +158,9 @@
builtin-Universal.compare:termlink
builtin-Universal.murmurHash:termlink
builtin-unsafe.coerceAbilities
builtin-unsafe.coerceAbilities:termlink
builtin-List.splitLeft
builtin-List.splitLeft:termlink
builtin-List.splitRight
@ -173,6 +181,8 @@
builtin-TermLink.fromReferent:termlink
builtin-TermLink.toReferent
builtin-TermLink.toReferent:termlink
builtin-TypeLink.toReference
builtin-TypeLink.toReference:termlink
unison-FOp-internal.dataTag
unison-FOp-Char.toText
@ -238,6 +248,8 @@
builtin-Char.Class.is:termlink
builtin-Pattern.captureAs
builtin-Pattern.captureAs:termlink
builtin-Pattern.many.corrected
builtin-Pattern.many.corrected:termlink
builtin-Pattern.isMatch
builtin-Pattern.isMatch:termlink
builtin-IO.fileExists.impl.v3
@ -573,25 +585,42 @@
(only (racket)
car
cdr
exact-integer?
exact-nonnegative-integer?
foldl
integer-length
bytes->string/utf-8
string->bytes/utf-8
exn:fail:contract?
file-stream-buffer-mode
with-handlers
match
modulo
quotient
regexp-match-positions
sequence-ref
vector-copy!
bytes-copy!)
bytes-copy!
sub1
add1)
(car icar) (cdr icdr))
(only (racket string)
string-contains?
string-replace)
(unison arithmetic)
(unison bytevector)
(unison core)
(only (unison boot)
define-unison
referent->termlink
termlink->referent)
termlink->referent
typelink->reference
clamp-integer
clamp-natural
wrap-natural
bit64
bit63
nbit63)
(unison data)
(unison data-info)
(unison math)
@ -614,6 +643,7 @@
(define-builtin-link Float.*)
(define-builtin-link Float.fromRepresentation)
(define-builtin-link Float.toRepresentation)
(define-builtin-link Float.ceiling)
(define-builtin-link Float.exp)
(define-builtin-link Float.log)
(define-builtin-link Float.max)
@ -676,6 +706,7 @@
(define-builtin-link Code.toGroup)
(define-builtin-link TermLink.fromReferent)
(define-builtin-link TermLink.toReferent)
(define-builtin-link TypeLink.toReference)
(define-builtin-link IO.seekHandle.impl.v3)
(define-builtin-link IO.getLine.impl.v1)
(define-builtin-link IO.getSomeBytes.impl.v1)
@ -711,8 +742,11 @@
(define-builtin-link Universal.compare)
(define-builtin-link Universal.murmurHash)
(define-builtin-link Pattern.captureAs)
(define-builtin-link Pattern.many.corrected)
(define-builtin-link Pattern.isMatch)
(define-builtin-link Char.Class.is)
(define-builtin-link Scope.bytearrayOf)
(define-builtin-link unsafe.coerceAbilities)
(begin-encourage-inline
(define-unison (builtin-Value.toBuiltin v) (unison-quote v))
@ -725,6 +759,8 @@
(referent->termlink rf))
(define-unison (builtin-TermLink.toReferent tl)
(termlink->referent tl))
(define-unison (builtin-TypeLink.toReference tl)
(typelink->reference tl))
(define-unison (builtin-murmurHashBytes bs)
(murmurhash-bytes (chunked-bytes->bytes bs)))
@ -733,13 +769,13 @@
(define-unison (builtin-List.splitLeft n s)
(match (unison-POp-SPLL n s)
[(unison-sum 0 fs) unison-seqview-empty]
[(unison-sum 1 (list l r)) (unison-seqview-elem l r)]))
[(unison-sum 0 fs) ref-seqview-empty]
[(unison-sum 1 (list l r)) (ref-seqview-elem l r)]))
(define-unison (builtin-List.splitRight n s)
(match (unison-POp-SPLR n s)
[(unison-sum 0 fs) unison-seqview-empty]
[(unison-sum 1 (list l r)) (unison-seqview-elem l r)]))
[(unison-sum 0 fs) ref-seqview-empty]
[(unison-sum 1 (list l r)) (ref-seqview-elem l r)]))
(define-unison (builtin-Float.> x y) (fl> x y))
(define-unison (builtin-Float.< x y) (fl< x y))
@ -788,6 +824,9 @@
(case (universal-compare x y)
[(>) 1] [(<) -1] [else 0]))
(define-unison (builtin-Scope.bytearrayOf i n)
(make-bytevector n i))
(define (hash-string hs)
(string-append "#" (bytevector->base32-string b32h hs)))
@ -826,29 +865,33 @@
(define-unison (builtin-Pattern.captureAs c p)
(capture-as c p))
(define-unison (builtin-Pattern.many.corrected p) (many p))
(define-unison (builtin-Pattern.isMatch p s)
(pattern-match? p s))
(define-unison (builtin-unsafe.coerceAbilities f) f)
(define (unison-POp-UPKB bs)
(build-chunked-list
(chunked-bytes-length bs)
(lambda (i) (chunked-bytes-ref bs i))))
(define unison-POp-ADDI +)
(define unison-POp-MULI *)
(define unison-POp-MODI mod)
(define (unison-POp-ADDI i j) (clamp-integer (+ i j)))
(define (unison-POp-MULI i j) (clamp-integer (* i j)))
(define (unison-POp-MODI i j) (clamp-integer (modulo i j)))
(define (unison-POp-LEQI a b) (bool (<= a b)))
(define unison-POp-POWN expt)
(define (unison-POp-POWN m n) (clamp-natural (expt m n)))
(define unison-POp-LOGF log)
(define (reify-exn thunk)
(guard
(e [else
(sum 0 '() (exception->string e) e)])
(sum 0 '() (exception->string e) ref-unit-unit)])
(thunk)))
; Core implemented primops, upon which primops-in-unison can be built.
(define (unison-POp-ADDN m n) (fx+ m n))
(define (unison-POp-ADDN m n) (clamp-natural (+ m n)))
(define (unison-POp-ANDN m n) (bitwise-and m n))
(define unison-POp-BLDS
(lambda args-list
@ -857,40 +900,47 @@
(define (unison-POp-CATT l r) (chunked-string-append l r))
(define (unison-POp-CATB l r) (chunked-bytes-append l r))
(define (unison-POp-CMPU l r) (ord (universal-compare l r)))
(define (unison-POp-COMN n) (fxnot n))
(define (unison-POp-COMN n) (wrap-natural (bitwise-not n)))
(define (unison-POp-CONS x xs) (chunked-list-add-first xs x))
(define (unison-POp-DECI n) (fx1- n))
(define (unison-POp-INCI n) (fx+ n 1))
(define (unison-POp-DECN n) (- n 1))
(define (unison-POp-INCN n) (+ n 1))
(define (unison-POp-DIVN m n) (fxdiv m n))
(define (unison-POp-DECI n) (clamp-integer (sub1 n)))
(define (unison-POp-INCI n) (clamp-integer (add1 n)))
(define (unison-POp-DECN n) (wrap-natural (sub1 n)))
(define (unison-POp-INCN n) (clamp-natural (add1 n)))
(define (unison-POp-DIVN m n) (quotient m n))
(define (unison-POp-DRPB n bs) (chunked-bytes-drop bs n))
(define (unison-POp-DRPS n l) (chunked-list-drop l n))
(define (unison-POp-DRPT n t) (chunked-string-drop t n))
(define (unison-POp-EQLN m n) (bool (fx=? m n)))
(define (unison-POp-EQLN m n) (bool (= m n)))
(define (unison-POp-EQLT s t) (bool (equal? s t)))
(define (unison-POp-LEQT s t) (bool (chunked-string<? s t)))
(define (unison-POp-EQLU x y) (bool (universal=? x y)))
(define (unison-POp-EROR fnm x) ;; TODO raise the correct failure, use display
(define (unison-POp-EROR fnm x)
(let-values ([(p g) (open-string-output-port)])
(put-string p (chunked-string->string fnm))
(put-string p ": ")
(display (describe-value x) p)
(raise (make-exn:bug fnm x))))
(define (unison-POp-FTOT f) (string->chunked-string (number->string f)))
(define (unison-POp-FTOT f)
(define base (number->string f))
(define dotted
(if (string-contains? base ".")
base
(string-replace base "e" ".0e")))
(string->chunked-string
(string-replace dotted "+" "")))
(define (unison-POp-IDXB n bs)
(guard (x [else none])
(some (chunked-bytes-ref bs n))))
(define (unison-POp-IDXS n l)
(guard (x [else none])
(some (chunked-list-ref l n))))
(define (unison-POp-IORN m n) (fxior m n))
(define (unison-POp-IORN m n) (bitwise-ior m n))
(define (unison-POp-ITOT n)
(string->chunked-string (number->string n)))
(define (unison-POp-LEQN m n) (bool (fx<=? m n)))
(define (unison-POp-LZRO m) (- 64 (fxlength m)))
(define (unison-POp-MULN m n) (* m n))
(define (unison-POp-MODN m n) (fxmod m n))
(define (unison-POp-LZRO m) (- 64 (integer-length m)))
(define (unison-POp-MULN m n) (clamp-natural (* m n)))
(define (unison-POp-MODN m n) (modulo m n))
(define (unison-POp-NTOT n) (string->chunked-string (number->string n)))
(define (unison-POp-PAKB l)
(build-chunked-bytes
@ -900,24 +950,26 @@
(build-chunked-string
(chunked-list-length l)
(lambda (i) (chunked-list-ref l i))))
(define (unison-POp-SHLI i k) (fxarithmetic-shift-left i k))
(define (unison-POp-SHLN n k) (fxarithmetic-shift-left n k))
(define (unison-POp-SHRI i k) (fxarithmetic-shift-right i k))
(define (unison-POp-SHRN n k) (fxarithmetic-shift-right n k))
(define (unison-POp-SHLI i k)
(clamp-integer (bitwise-arithmetic-shift-left i k)))
(define (unison-POp-SHLN n k)
(clamp-natural (bitwise-arithmetic-shift-left n k)))
(define (unison-POp-SHRI i k) (bitwise-arithmetic-shift-right i k))
(define (unison-POp-SHRN n k) (bitwise-arithmetic-shift-right n k))
(define (unison-POp-SIZS l) (chunked-list-length l))
(define (unison-POp-SIZT t) (chunked-string-length t))
(define (unison-POp-SIZB b) (chunked-bytes-length b))
(define (unison-POp-SNOC xs x) (chunked-list-add-last xs x))
(define (unison-POp-SUBN m n) (fx- m n))
(define (unison-POp-SUBI m n) (- m n))
(define (unison-POp-SUBN m n) (clamp-integer (- m n)))
(define (unison-POp-SUBI m n) (clamp-integer (- m n)))
(define (unison-POp-TAKS n s) (chunked-list-take s n))
(define (unison-POp-TAKT n t) (chunked-string-take t n))
(define (unison-POp-TAKB n t) (chunked-bytes-take t n))
(define (->optional v)
(if v
(unison-optional-some v)
unison-optional-none))
(ref-optional-some v)
ref-optional-none))
(define-unison (builtin-Text.indexOf n h)
(->optional (chunked-string-index-of h n)))
@ -946,10 +998,14 @@
(newline))
(define (unison-POp-TTON s)
(let ([mn (string->number (chunked-string->string s))])
(if (and (fixnum? mn) (>= mn 0)) (some mn) none)))
(if (and (exact-nonnegative-integer? mn) (< mn bit64))
(some mn)
none)))
(define (unison-POp-TTOI s)
(let ([mn (string->number (chunked-string->string s))])
(if (fixnum? mn) (some mn) none)))
(if (and (exact-integer? mn) (>= mn nbit63) (< mn bit63))
(some mn)
none)))
(define (unison-POp-TTOF s)
(let ([mn (string->number (chunked-string->string s))])
(if mn (some mn) none)))
@ -994,7 +1050,7 @@
;; TODO flatten operation on Bytes is a no-op for now (and possibly ever)
(define (unison-POp-FLTB b) b)
(define (unison-POp-XORN m n) (fxxor m n))
(define (unison-POp-XORN m n) (bitwise-xor m n))
(define (unison-POp-VALU c) (decode-value c))
(define (unison-FOp-ImmutableByteArray.read16be bs n)
@ -1062,8 +1118,15 @@
;; TODO should we convert Bytes -> Text directly without the intermediate conversions?
(define (unison-FOp-Text.fromUtf8.impl.v3 b)
(with-handlers
([exn:fail:contract? ; TODO proper typeLink
(lambda (e) (exception "MiscFailure" (exception->string e) ()))])
([exn:fail:contract?
(lambda (e)
(exception
ref-iofailure:typelink
(string->chunked-string
(string-append
"Invalid UTF-8 stream: "
(describe-value b)))
(exception->string e)))])
(right (string->chunked-string (bytes->string/utf-8 (chunked-bytes->bytes b))))))
;; TODO should we convert Text -> Bytes directly without the intermediate conversions?
@ -1071,7 +1134,7 @@
(bytes->chunked-bytes (string->bytes/utf-8 (chunked-string->string s))))
(define-unison (builtin-IO.isFileEOF.impl.v3 p)
(unison-either-right (port-eof? p)))
(ref-either-right (port-eof? p)))
(define (unison-FOp-IO.closeFile.impl.v3 h)
(if (input-port? h)
@ -1145,7 +1208,7 @@
(define (unison-FOp-Char.Class.printable) printable)
(define (unison-FOp-Char.Class.mark) mark)
(define (unison-FOp-Char.Class.separator) separator)
(define (unison-FOp-Char.Class.or p1 p2) (unison-FOp-Pattern.or p1 p2))
(define (unison-FOp-Char.Class.or p1 p2) (char-class-or p1 p2))
(define (unison-FOp-Char.Class.range a z)
(unison-FOp-Text.patterns.charRange a z))
(define (unison-FOp-Char.Class.anyOf cs) (unison-FOp-Text.patterns.charIn cs))
@ -1294,10 +1357,12 @@
(define (unison-FOp-Promise.read promise) (promise-read promise))
(define (unison-FOp-Promise.tryRead promise) (promise-try-read promise))
(define (unison-FOp-Promise.write promise a) (promise-write promise a)))
(declare-builtin-link builtin-Float.*)
(declare-builtin-link builtin-Float.fromRepresentation)
(declare-builtin-link builtin-Float.toRepresentation)
(declare-builtin-link builtin-Float.ceiling)
(declare-builtin-link builtin-Float.exp)
(declare-builtin-link builtin-Float.log)
(declare-builtin-link builtin-Float.max)
@ -1360,6 +1425,7 @@
(declare-builtin-link builtin-Code.toGroup)
(declare-builtin-link builtin-TermLink.fromReferent)
(declare-builtin-link builtin-TermLink.toReferent)
(declare-builtin-link builtin-TypeLink.toReference)
(declare-builtin-link builtin-IO.seekHandle.impl.v3)
(declare-builtin-link builtin-IO.getLine.impl.v1)
(declare-builtin-link builtin-IO.getSomeBytes.impl.v1)
@ -1394,5 +1460,8 @@
(declare-builtin-link builtin-Universal.<=)
(declare-builtin-link builtin-Universal.compare)
(declare-builtin-link builtin-Pattern.isMatch)
(declare-builtin-link builtin-Scope.bytearrayOf)
(declare-builtin-link builtin-Char.Class.is)
(declare-builtin-link builtin-Pattern.many.corrected)
(declare-builtin-link builtin-unsafe.coerceAbilities)
)

View File

@ -4,6 +4,7 @@
racket/match
racket/tcp
unison/data
unison/data-info
unison/chunked-seq
unison/core)
@ -26,9 +27,25 @@
(define (handle-errors fn)
(with-handlers
[[exn:fail:network? (lambda (e) (exception "IOFailure" (exception->string e) '()))]
[exn:fail:contract? (lambda (e) (exception "InvalidArguments" (exception->string e) '()))]
[(lambda _ #t) (lambda (e) (exception "MiscFailure" (chunked-string->string (format "Unknown exception ~a" (exn->string e))) e))] ]
[[exn:fail:network?
(lambda (e)
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]
[exn:fail:contract?
(lambda (e)
(exception
ref-miscfailure:typelink
(exception->string e)
ref-unit-unit))]
[(lambda _ #t)
(lambda (e)
(exception
ref-miscfailure:typelink
(chunked-string->string
(format "Unknown exception ~a" (exn->string e)))
ref-unit-unit))]]
(fn)))
(define (closeSocket.impl.v3 socket)
@ -49,15 +66,20 @@
(define (socketSend.impl.v3 socket data) ; socket bytes -> ()
(if (not (socket-pair? socket))
(exception "InvalidArguments" "Cannot send on a server socket" '())
(exception
ref-iofailure:typelink
(string->chunked-string "Cannot send on a server socket")
ref-unit-unit)
(begin
(write-bytes (chunked-bytes->bytes data) (socket-pair-output socket))
(flush-output (socket-pair-output socket))
(right none)))); )
(right none))))
(define (socketReceive.impl.v3 socket amt) ; socket int -> bytes
(if (not (socket-pair? socket))
(exception "InvalidArguments" "Cannot receive on a server socket")
(exception
ref-iofailure:typelink
(string->chunked-string "Cannot receive on a server socket"))
(handle-errors
(lambda ()
(begin
@ -82,9 +104,24 @@
(chunked-string->string port))])])
(with-handlers
[[exn:fail:network? (lambda (e) (exception "IOFailure" (exception->string e) '()))]
[exn:fail:contract? (lambda (e) (exception "InvalidArguments" (exception->string e) '()))]
[(lambda _ #t) (lambda (e) (exception "MiscFailure" (string->chunked-string "Unknown exception") e))] ]
[[exn:fail:network?
(lambda (e)
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]
[exn:fail:contract?
(lambda (e)
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]
[(lambda _ #t)
(lambda (e)
(exception
ref-miscfailure:typelink
(string->chunked-string "Unknown exception")
ref-unit-unit))] ]
(let ([listener (tcp-listen (string->number port ) 4 #f (if (equal? 0 hostname) #f hostname))])
(right listener))))))
@ -99,7 +136,10 @@
(define (socketAccept.impl.v3 listener)
(if (socket-pair? listener)
(exception "InvalidArguments" (string->chunked-string "Cannot accept on a non-server socket"))
(exception
ref-iofailure:typelink
(string->chunked-string "Cannot accept on a non-server socket")
ref-unit-unit)
(begin
(let-values ([(input output) (tcp-accept listener)])
(right (socket-pair input output))))))

View File

@ -6,6 +6,7 @@
(only-in racket empty?)
compatibility/mlist
unison/data
unison/data-info
unison/chunked-seq
unison/core
unison/tcp
@ -61,7 +62,10 @@
(let ([certs (read-pem-certificates (open-input-bytes (chunked-bytes->bytes bytes)))])
(if (= 1 (length certs))
(right bytes)
(exception "Wrong number of certs" (string->chunked-string "nope") certs)))) ; TODO passing certs is wrong, should either be converted to chunked-list or removed
(exception
ref-tlsfailure:typelink
(string->chunked-string "nope")
bytes))))
; We don't actually "decode" certificates, we just validate them
(define (encodeCert bytes) bytes)
@ -111,16 +115,39 @@
(define (handle-errors fn)
(with-handlers
[[exn:fail:network? (lambda (e) (exception "IOFailure" (exception->string e) '()))]
[[exn:fail:network?
(lambda (e)
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]
[exn:fail:contract?
(lambda (e) (exception "InvalidArguments" (exception->string e) '()))]
(lambda (e)
(exception
ref-miscfailure:typelink
(exception->string e)
ref-unit-unit))]
[(lambda err
(string-contains? (exn->string err) "not valid for hostname"))
(lambda (e) (exception "IOFailure" (string->chunked-string "NameMismatch") '()))]
(lambda (e)
(exception
ref-tlsfailure:typelink
(string->chunked-string "NameMismatch")
ref-unit-unit))]
[(lambda err
(string-contains? (exn->string err) "certificate verify failed"))
(lambda (e) (exception "IOFailure" (string->chunked-string "certificate verify failed") '()))]
[(lambda _ #t) (lambda (e) (exception "MiscFailure" (string->chunked-string (format "Unknown exception ~a" (exn->string e))) e))]]
(lambda (e)
(exception
ref-tlsfailure:typelink
(string->chunked-string "certificate verify failed")
ref-unit-unit))]
[(lambda _ #t)
(lambda (e)
(exception
ref-miscfailure:typelink
(string->chunked-string
(format "Unknown exception ~a" (exn->string e)))
ref-unit-unit))]]
(fn)))
(define (newClient.impl.v3 config socket)

View File

@ -719,7 +719,7 @@
(next-leaf!)
(vector-copy! new-leaf leaf-split-i leaf 0 leaf-split-i))]
[else
(vector-copy! new-leaf leaf-i leaf first-leaf-start leaf-split-i)])))]
(vector-copy! new-leaf leaf-i leaf first-leaf-start last-leaf-end)])))]
[else
(make-node
(λ (new-node)

View File

@ -1,6 +1,7 @@
; Zlib
#lang racket/base
(require unison/data
unison/data-info
unison/core
(only-in unison/chunked-seq
bytes->chunked-bytes
@ -105,5 +106,14 @@
(bytes->chunked-bytes (zlib-deflate-bytes (chunked-bytes->bytes bytes))))
(define (zlib.decompress bytes)
(with-handlers [[exn:fail? (lambda (e) (exception "Zlib data corrupted" (exception->string e) '()))] ]
(right (bytes->chunked-bytes (zlib-inflate-bytes (chunked-bytes->bytes bytes))))))
(with-handlers
[[exn:fail?
(lambda (e)
(exception
ref-miscfailure:typelink
(exception->string e)
'()))]]
(right
(bytes->chunked-bytes
(zlib-inflate-bytes
(chunked-bytes->bytes bytes))))))

View File

@ -2,23 +2,23 @@
set -e
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
if [[ "$1" = "--status" ]]; then
gh workflow view release --repo unisonweb/unison
gh workflow view release --repo unisonweb/homebrew-unison
fi
prev_tag="$(gh release view --json tagName -t '{{printf .tagName}}')"
usage() {
echo "NOTE: must be run from the root of the project."
echo "Usage: $0 VERSION [TARGET]"
echo "VERSION: The version you're releasing, e.g. M4a"
echo "TARGET: The revision to make the release from, defaults to 'origin/trunk'"
prev_version="${prev_tag#release/}"
prefix="${prev_version%.*}"
next_version="${prefix}.$(( ${prev_version##*.} + 1 ))"
echo "usage: $0 <version> [ref]"
echo ""
echo "E.g."
echo "$0 M4a"
echo "version: The new version number"
echo "ref: The Git revision to make the release from, defaults to 'origin/trunk'"
echo ""
echo "I think the latest release is: $(git tag --list 'release/*' | grep -v M | sort -rV | head -n 1 | sed 's/release\///')"
echo "Try: $0 $next_version"
}
if [[ -z "$1" ]] ; then
@ -26,7 +26,7 @@ if [[ -z "$1" ]] ; then
exit 1
fi
if ! command -V "gh" >/dev/null 2>&1; then
if ! command -V gh >/dev/null 2>&1; then
echo "Required command \`gh\` not found, find installation instructions here: https://cli.github.com/manual/installation"
exit 1
fi
@ -38,19 +38,23 @@ if ! [[ "$1" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]] ; then
fi
version="${1}"
prev_version=$("${script_dir}/previous-tag.sh" "$version")
target=${2:-origin/trunk}
tag="release/${version}"
tag="release/$version"
echo "Creating release in unison-local-ui..."
gh release create "release/${version}" --repo unisonweb/unison-local-ui --target main --generate-notes --notes-start-tag "release/${prev_version}"
echo "Creating release in unison-local-ui."
gh release create "release/${version}" \
--repo unisonweb/unison-local-ui \
--target main \
--generate-notes --notes-start-tag "$prev_tag"
echo "Kicking off release workflow in unisonweb/unison"
# Make sure our origin/trunk ref is up to date, since that's usually what gets tagged.
git fetch origin trunk
git tag "${tag}" "${target}"
git push origin "${tag}"
gh workflow run release --repo unisonweb/unison --field "version=${version}"
gh workflow run release --repo unisonweb/unison \
--ref "${tag}" \
--field "version=${version}"
echo "Kicking off Homebrew update task"
gh workflow run release --repo unisonweb/homebrew-unison --field "version=${version}"

View File

@ -1,50 +0,0 @@
#!/bin/bash
# E.g.
# ./previous-tag.sh M4 -> M3
# ./previous-tag.sh M4a -> M4
# ./previous-tag.sh M4b -> M4a
awk_exe="awk"
# if gawk exists, use that:
if command -V "gawk" >/dev/null 2>&1; then
awk_exe=gawk
fi
if ! ("$awk_exe" --version | grep GNU) >/dev/null 2>&1; then
echo "GNU awk is required, install with \`brew install gawk\`"
exit 1
fi
input_version="$1"
if ! [[ "$input_version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]] ; then
echo "Version tag must be of the form 'x.y.z' where x, y, and z are nonnegative integers. e.g."
echo "$0 0.5.11"
exit 1
fi
if [[ "$input_version" == "0.5.11" ]]; then
echo "M5j"
else
IFS='.' read -r -a version_parts <<< "$input_version"
major=${version_parts[0]}
minor=${version_parts[1]}
patch=${version_parts[2]}
if [[ "$patch" -gt 0 ]]; then
patch=$((patch - 1))
echo "$major.$minor.$patch"
elif [[ "$minor" -gt 0 ]]; then
minor=$((minor - 1))
tag=$(git tag --list "release/$major.$minor.*" | sort -r | head -n 1)
echo "${tag#release/}"
elif [[ "$major" -gt 0 ]]; then
major=$((major - 1))
tag=$(git tag --list "release/$major.*" | sort -r | head -n 1)
echo "${tag#release/}"
else
echo "Idk what to do with $input_version".
fi
fi

View File

@ -8,68 +8,70 @@ build:
interleaved-output: false
packages:
- codebase2/codebase
- codebase2/codebase-sqlite
- codebase2/codebase-sqlite-hashing-v2
- codebase2/codebase-sync
- codebase2/core
- codebase2/util-serialization
- codebase2/util-term
- lib/orphans/network-uri-orphans-sqlite
- lib/orphans/unison-core-orphans-sqlite
- lib/orphans/unison-hash-orphans-aeson
- lib/orphans/unison-hash-orphans-sqlite
- lib/orphans/uuid-orphans-sqlite
- lib/unison-hash
- lib/unison-hashing
- lib/unison-prelude
- lib/unison-pretty-printer
- lib/unison-sqlite
- lib/unison-util-base32hex
- lib/unison-util-bytes
- lib/unison-util-cache
- lib/unison-util-file-embed
- lib/unison-util-nametree
- lib/unison-util-relation
- lib/unison-util-rope
- parser-typechecker
- unison-cli
- unison-core
- unison-hashing-v2
- unison-share-api
- unison-share-projects-api
- unison-syntax
- yaks/easytest
- codebase2/codebase
- codebase2/codebase-sqlite
- codebase2/codebase-sqlite-hashing-v2
- codebase2/codebase-sync
- codebase2/core
- codebase2/util-serialization
- codebase2/util-term
- lib/orphans/network-uri-orphans-sqlite
- lib/orphans/unison-core-orphans-sqlite
- lib/orphans/unison-hash-orphans-aeson
- lib/orphans/unison-hash-orphans-sqlite
- lib/orphans/uuid-orphans-sqlite
- lib/unison-hash
- lib/unison-hashing
- lib/unison-prelude
- lib/unison-pretty-printer
- lib/unison-sqlite
- lib/unison-util-base32hex
- lib/unison-util-bytes
- lib/unison-util-cache
- lib/unison-util-file-embed
- lib/unison-util-nametree
- lib/unison-util-relation
- lib/unison-util-rope
- parser-typechecker
- unison-cli
- unison-cli-integration
- unison-cli-main
- unison-core
- unison-hashing-v2
- unison-share-api
- unison-share-projects-api
- unison-syntax
- yaks/easytest
resolver: lts-20.26
extra-deps:
# broken version in snapshot
- github: unisonweb/configurator
commit: e47e9e9fe1f576f8c835183b9def52d73c01327a
# This custom Haskeline alters ANSI rendering on Windows.
# If changing the haskeline dependency, please ensure color renders properly in a
# Windows terminal.
# https://github.com/judah/haskeline/pull/126
- github: unisonweb/haskeline
commit: 9275eea7982dabbf47be2ba078ced669ae7ef3d5
# broken version in snapshot
- github: unisonweb/configurator
commit: e47e9e9fe1f576f8c835183b9def52d73c01327a
# This custom Haskeline alters ANSI rendering on Windows.
# If changing the haskeline dependency, please ensure color renders properly in a
# Windows terminal.
# https://github.com/judah/haskeline/pull/126
- github: unisonweb/haskeline
commit: 9275eea7982dabbf47be2ba078ced669ae7ef3d5
# not in stackage
- fuzzyfind-3.0.1
- guid-0.1.0@sha256:a7c975be473f6f142d5cc1b39bc807a99043d20b1bb0873fdfe7a3ce84d2faf1,1078
- lock-file-0.7.0.0@sha256:3ad84b5e454145e1d928063b56abb96db24a99a21b493989520e58fa0ab37b00,4484
- monad-validate-1.2.0.0@sha256:9850f408431098b28806dd464b6825a88a0b56c84f380d7fe0454c1df9d6f881,3505
- recover-rtti-0.4.2@sha256:c179a303921126d8d782264e14f386c96e54a270df74be002e4c4ec3c8c7aebd,4529
- lsp-2.2.0.0@sha256:82fbf4b69d94d8d22543be71f89986b3e90050032d671fb3de3f8253ea1e5b6f,3550
- lsp-types-2.0.2.0@sha256:a9a51c3cea0726d91fe63fa0670935ee720f7b31bc3f3b33b2483fc538152677,29421
- row-types-1.0.1.2@sha256:4d4c7cb95d06a32b28ba977852d52a26b4c1f695ef083a6fd874ab6d79933b64,3071
- network-udp-0.0.0
# not in stackage
- fuzzyfind-3.0.1
- guid-0.1.0@sha256:a7c975be473f6f142d5cc1b39bc807a99043d20b1bb0873fdfe7a3ce84d2faf1,1078
- lock-file-0.7.0.0@sha256:3ad84b5e454145e1d928063b56abb96db24a99a21b493989520e58fa0ab37b00,4484
- monad-validate-1.2.0.0@sha256:9850f408431098b28806dd464b6825a88a0b56c84f380d7fe0454c1df9d6f881,3505
- recover-rtti-0.4.2@sha256:c179a303921126d8d782264e14f386c96e54a270df74be002e4c4ec3c8c7aebd,4529
- lsp-2.2.0.0@sha256:82fbf4b69d94d8d22543be71f89986b3e90050032d671fb3de3f8253ea1e5b6f,3550
- lsp-types-2.0.2.0@sha256:a9a51c3cea0726d91fe63fa0670935ee720f7b31bc3f3b33b2483fc538152677,29421
- row-types-1.0.1.2@sha256:4d4c7cb95d06a32b28ba977852d52a26b4c1f695ef083a6fd874ab6d79933b64,3071
- network-udp-0.0.0
ghc-options:
# All packages
"$locals": -Wall -Werror -Wno-name-shadowing -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info #-freverse-errors
# All packages
"$locals": -Wall -Werror -Wno-name-shadowing -Wno-missing-pattern-synonym-signatures -fprint-expanded-synonyms -fwrite-ide-info #-freverse-errors
# See https://github.com/haskell/haskell-language-server/issues/208
"$everything": -haddock
# See https://github.com/haskell/haskell-language-server/issues/208
"$everything": -haddock
statistics: -fsimpl-tick-factor=10000 # statistics fails on GHC 9 without this, https://github.com/haskell/statistics/issues/173
statistics: -fsimpl-tick-factor=10000 # statistics fails on GHC 9 without this, https://github.com/haskell/statistics/issues/173

View File

@ -12,7 +12,7 @@ import System.Process (readProcessWithExitCode)
import Text.Printf
integrationTestsDir :: FilePath
integrationTestsDir = "unison-cli" </> "integration-tests" </> "IntegrationTests"
integrationTestsDir = "unison-cli-integration" </> "integration-tests" </> "IntegrationTests"
uFile :: FilePath
uFile = integrationTestsDir </> "print.u"

View File

@ -40,5 +40,5 @@ main = do
```ucm
.> add
.> compile main ./unison-cli/integration-tests/IntegrationTests/main
.> compile main ./unison-cli-integration/integration-tests/IntegrationTests/main
```

View File

@ -29,6 +29,8 @@ main = do
```ucm
Loading changes detected in scratch.u.
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
@ -36,7 +38,7 @@ main = do
⍟ These new definitions are ok to `add`:
structural ability Break
unique type MyBool
type MyBool
main : '{IO, Exception} ()
resume : Request {g, Break} x -> x
@ -47,10 +49,10 @@ main = do
⍟ I've added these definitions:
structural ability Break
unique type MyBool
type MyBool
main : '{IO, Exception} ()
resume : Request {g, Break} x -> x
.> compile main ./unison-cli/integration-tests/IntegrationTests/main
.> compile main ./unison-cli-integration/integration-tests/IntegrationTests/main
```

View File

@ -0,0 +1,68 @@
name: unison-cli-integration
github: unisonweb/unison
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
flags:
optimized:
manual: true
default: false
ghc-options: -Wall
executables:
cli-integration-tests:
when:
- condition: false
other-modules: Paths_unison_cli_integration
source-dirs: integration-tests
main: Suite.hs
ghc-options: -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0
dependencies:
- base
- code-page
- filepath
- directory
- easytest
- process
- shellmet
- time
build-tools:
- unison-cli-main:unison
when:
- condition: flag(optimized)
ghc-options: -O2 -funbox-strict-fields
default-extensions:
- ApplicativeDo
- BangPatterns
- BlockArguments
- DeriveAnyClass
- DeriveFunctor
- DeriveFoldable
- DeriveTraversable
- DeriveGeneric
- DerivingStrategies
- DerivingVia
- DoAndIfThenElse
- DuplicateRecordFields
- FlexibleContexts
- FlexibleInstances
- GADTs
- GeneralizedNewtypeDeriving
- ImportQualifiedPost
- InstanceSigs
- KindSignatures
- LambdaCase
- MultiParamTypeClasses
- MultiWayIf
- NamedFieldPuns
- NumericUnderscores
- OverloadedLabels
- OverloadedStrings
- PatternSynonyms
- RankNTypes
- ScopedTypeVariables
- TupleSections
- TypeApplications
- ViewPatterns

View File

@ -0,0 +1,75 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
--
-- see: https://github.com/sol/hpack
name: unison-cli-integration
version: 0.0.0
homepage: https://github.com/unisonweb/unison#readme
bug-reports: https://github.com/unisonweb/unison/issues
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
build-type: Simple
source-repository head
type: git
location: https://github.com/unisonweb/unison
flag optimized
manual: True
default: False
executable cli-integration-tests
main-is: Suite.hs
other-modules:
IntegrationTests.ArgumentParsing
hs-source-dirs:
integration-tests
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0
build-tool-depends:
unison-cli-main:unison
build-depends:
base
, code-page
, directory
, easytest
, filepath
, process
, shellmet
, time
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields

19
unison-cli-main/LICENSE Normal file
View File

@ -0,0 +1,19 @@
Copyright (c) 2021, Unison Computing, public benefit corp and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,63 @@
name: unison-cli-main
github: unisonweb/unison
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
flags:
optimized:
manual: true
default: false
ghc-options: -Wall
executables:
unison:
when:
- condition: false
other-modules: Paths_unison_cli_main
source-dirs: unison
main: Main.hs
ghc-options: -threaded -rtsopts "-with-rtsopts=-I0 -N -qn4 -qg1 -T" -optP-Wno-nonportable-include-path
dependencies:
- base
- shellmet
- template-haskell
- text
- unison-cli
when:
- condition: flag(optimized)
ghc-options: -O2 -funbox-strict-fields
default-extensions:
- ApplicativeDo
- BangPatterns
- BlockArguments
- DeriveAnyClass
- DeriveFunctor
- DeriveFoldable
- DeriveTraversable
- DeriveGeneric
- DerivingStrategies
- DerivingVia
- DoAndIfThenElse
- DuplicateRecordFields
- FlexibleContexts
- FlexibleInstances
- GADTs
- GeneralizedNewtypeDeriving
- ImportQualifiedPost
- InstanceSigs
- KindSignatures
- LambdaCase
- MultiParamTypeClasses
- MultiWayIf
- NamedFieldPuns
- NumericUnderscores
- OverloadedLabels
- OverloadedStrings
- PatternSynonyms
- RankNTypes
- ScopedTypeVariables
- TupleSections
- TypeApplications
- ViewPatterns

View File

@ -0,0 +1,72 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.36.0.
--
-- see: https://github.com/sol/hpack
name: unison-cli-main
version: 0.0.0
homepage: https://github.com/unisonweb/unison#readme
bug-reports: https://github.com/unisonweb/unison/issues
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
license: MIT
license-file: LICENSE
build-type: Simple
source-repository head
type: git
location: https://github.com/unisonweb/unison
flag optimized
manual: True
default: False
executable unison
main-is: Main.hs
other-modules:
Version
hs-source-dirs:
unison
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall -threaded -rtsopts "-with-rtsopts=-I0 -N -qn4 -qg1 -T" -optP-Wno-nonportable-include-path
build-depends:
base
, shellmet
, template-haskell
, text
, unison-cli
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields

View File

@ -0,0 +1,15 @@
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-partial-type-signatures #-}
module Main (main) where
import Unison.Main qualified
import Version (version)
main :: IO ()
main = Unison.Main.main version

View File

@ -1,7 +1,7 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Version where
module Version (version) where
import Data.Bifunctor
import Data.Text
@ -9,6 +9,10 @@ import Data.Text qualified as Text
import Language.Haskell.TH (Exp (TupE), runIO)
import Language.Haskell.TH.Syntax (Exp (LitE), Lit (StringL))
import Shellmet
import Unison.Version (CommitDate, GitRef, Version (Version))
version :: Version
version = Version gitDescribeWithDate gitDescribe
-- | A formatted descriptor of when and against which commit this unison executable was built
-- E.g. latest-149-g5cef8f851 (built on 2021-10-04)
@ -16,13 +20,9 @@ import Shellmet
gitDescribeWithDate :: Text
gitDescribeWithDate =
let formatDate d = " (built on " <> d <> ")"
(gitRef, date) = gitDescribe
(gitRef, date) = Version.gitDescribe
in gitRef <> formatDate date
type CommitDate = Text
type GitRef = Text
-- | Uses Template Haskell to embed a git descriptor of the commit
-- which was used to build the executable.
-- E.g. latest-149-g5cef8f851 (built on 2021-10-04)

View File

@ -19,6 +19,7 @@ dependencies:
- base
- bytes
- bytestring
- code-page
- concurrent-output
- configurator
- containers >= 0.6.3
@ -55,6 +56,7 @@ dependencies:
- uri-encode
- nonempty-containers
- open-browser
- optparse-applicative >= 0.16.1.0
- pretty-simple
- process
- random >= 1.2.0
@ -65,7 +67,10 @@ dependencies:
- semigroups
- servant
- servant-client
- shellmet
- stm
- template-haskell
- temporary
- text
- text-builder
- text-rope
@ -99,13 +104,27 @@ dependencies:
- witch
- witherable
internal-libraries:
unison-cli-lib:
source-dirs: src
when:
- condition: "!os(windows)"
dependencies: unix
- condition: false
other-modules: Paths_unison_cli
library:
source-dirs: src
source-dirs: unison
when:
- condition: '!os(windows)'
dependencies: unix
- condition: false
other-modules: Paths_unison_cli
dependencies:
- code-page
- optparse-applicative >= 0.16.1.0
- shellmet
- template-haskell
- temporary
- unison-cli-lib
tests:
cli-tests:
@ -118,26 +137,11 @@ tests:
- here
- shellmet
- temporary
- unison-cli
- unison-cli-lib
main: Main.hs
source-dirs: tests
executables:
unison:
when:
- condition: false
other-modules: Paths_unison_cli
source-dirs: unison
main: Main.hs
ghc-options: -threaded -rtsopts "-with-rtsopts=-I0 -N -qn4 -qg1 -T" -optP-Wno-nonportable-include-path
dependencies:
- code-page
- optparse-applicative >= 0.16.1.0
- shellmet
- template-haskell
- temporary
- unison-cli
transcripts:
when:
- condition: false
@ -150,25 +154,9 @@ executables:
- easytest
- process
- shellmet
- unison-cli
- unison-cli-lib
- silently
cli-integration-tests:
when:
- condition: false
other-modules: Paths_unison_cli
source-dirs: integration-tests
main: Suite.hs
ghc-options: -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0
dependencies:
- code-page
- easytest
- process
- shellmet
- time
build-tools:
- unison-cli:unison
when:
- condition: flag(optimized)
ghc-options: -O2 -funbox-strict-fields

View File

@ -61,14 +61,15 @@ import Unison.Sqlite qualified as Sqlite
import Witch (unsafeFrom)
branchRelativePathToAbsolute :: BranchRelativePath -> Cli Path.Absolute
branchRelativePathToAbsolute brp = resolveBranchRelativePath brp <&> \case
BranchRelativePath.ResolvedLoosePath p -> p
BranchRelativePath.ResolvedBranchRelative projectBranch mRel ->
let projectBranchIds = getIds projectBranch
handleRel = case mRel of
Nothing -> id
Just rel -> flip Path.resolve rel
in handleRel (projectBranchPath projectBranchIds)
branchRelativePathToAbsolute brp =
resolveBranchRelativePath brp <&> \case
BranchRelativePath.ResolvedLoosePath p -> p
BranchRelativePath.ResolvedBranchRelative projectBranch mRel ->
let projectBranchIds = getIds projectBranch
handleRel = case mRel of
Nothing -> id
Just rel -> flip Path.resolve rel
in handleRel (projectBranchPath projectBranchIds)
where
getIds = \case
ProjectAndBranch project branch -> ProjectAndBranch (view #projectId project) (view #branchId branch)
@ -91,7 +92,6 @@ resolveBranchRelativePath = \case
toThese = \case
Left branchName -> That branchName
Right (projectName, branchName) -> These projectName branchName
-- | Get the current project that a user is on.
getCurrentProject :: Cli (Maybe Sqlite.Project)

View File

@ -47,9 +47,9 @@ typecheckTerm codebase tm = do
typeLookup <- Codebase.typeLookupForDependencies codebase (UF.dependencies file)
let typecheckingEnv =
Typechecker.Env
{ _ambientAbilities = [],
_typeLookup = typeLookup,
_termsByShortname = Map.empty
{ ambientAbilities = [],
typeLookup,
termsByShortname = Map.empty
}
pure $ fmap extract $ FileParsers.synthesizeFile typecheckingEnv file
where

View File

@ -1,15 +1,11 @@
{-# HLINT ignore "Use tuple-section" #-}
{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-}
module Unison.Codebase.Editor.HandleInput
( loop,
)
where
module Unison.Codebase.Editor.HandleInput (loop) where
-- TODO: Don't import backend
import Control.Error.Util qualified as ErrorUtil
import Control.Exception (catch)
import Control.Lens hiding (from)
import Control.Monad.Reader (ask)
import Control.Monad.State (StateT)
@ -20,7 +16,6 @@ import Data.List.Extra (nubOrd)
import Data.List.NonEmpty (NonEmpty)
import Data.List.NonEmpty qualified as Nel
import Data.Map qualified as Map
import Data.Sequence qualified as Seq
import Data.Set qualified as Set
import Data.Set.NonEmpty (NESet)
import Data.Set.NonEmpty qualified as NESet
@ -28,10 +23,6 @@ import Data.Text qualified as Text
import Data.These (These (..))
import Data.Time (UTCTime)
import Data.Tuple.Extra (uncurry3)
import System.Directory (XdgDirectory (..), createDirectoryIfMissing, doesFileExist, getXdgDirectory)
import System.Exit (ExitCode (..))
import System.FilePath ((</>))
import System.Process (callProcess, readCreateProcessWithExitCode, shell)
import Text.Megaparsec qualified as Megaparsec
import U.Codebase.Branch.Diff qualified as V2Branch.Diff
import U.Codebase.Causal qualified as V2Causal
@ -50,7 +41,6 @@ import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Cli.ProjectUtils qualified as ProjectUtils
import Unison.Cli.TypeCheck (typecheckTerm)
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch (..), Branch0 (..))
@ -92,7 +82,7 @@ import Unison.Codebase.Editor.HandleInput.ReleaseDraft (handleReleaseDraft)
import Unison.Codebase.Editor.HandleInput.Run (handleRun)
import Unison.Codebase.Editor.HandleInput.RuntimeUtils qualified as RuntimeUtils
import Unison.Codebase.Editor.HandleInput.ShowDefinition (showDefinitions)
import Unison.Codebase.Editor.HandleInput.TermResolution (resolveCon, resolveMainRef, resolveTermRef)
import Unison.Codebase.Editor.HandleInput.TermResolution (resolveMainRef)
import Unison.Codebase.Editor.HandleInput.Tests qualified as Tests
import Unison.Codebase.Editor.HandleInput.UI (openUI)
import Unison.Codebase.Editor.HandleInput.Update (doSlurpAdds, handleUpdate)
@ -103,7 +93,6 @@ import Unison.Codebase.Editor.Input qualified as Input
import Unison.Codebase.Editor.Output
import Unison.Codebase.Editor.Output qualified as Output
import Unison.Codebase.Editor.Output.DumpNamespace qualified as Output.DN
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace (..))
import Unison.Codebase.Editor.RemoteRepo qualified as RemoteRepo
import Unison.Codebase.Editor.Slurp qualified as Slurp
import Unison.Codebase.Editor.SlurpResult qualified as SlurpResult
@ -119,13 +108,11 @@ import Unison.Codebase.Path.Parse qualified as Path
import Unison.Codebase.Runtime qualified as Runtime
import Unison.Codebase.ShortCausalHash qualified as SCH
import Unison.Codebase.SqliteCodebase.Conversions qualified as Conversions
import Unison.Codebase.SyncMode qualified as SyncMode
import Unison.Codebase.TermEdit (TermEdit (..))
import Unison.Codebase.TermEdit qualified as TermEdit
import Unison.Codebase.TermEdit.Typing qualified as TermEdit
import Unison.Codebase.TypeEdit (TypeEdit)
import Unison.Codebase.TypeEdit qualified as TypeEdit
import Unison.Codebase.Verbosity qualified as Verbosity
import Unison.CommandLine.BranchRelativePath (BranchRelativePath)
import Unison.CommandLine.Completion qualified as Completion
import Unison.CommandLine.DisplayValues qualified as DisplayValues
@ -138,7 +125,6 @@ import Unison.Hash qualified as Hash
import Unison.HashQualified qualified as HQ
import Unison.HashQualified' qualified as HQ'
import Unison.HashQualified' qualified as HashQualified
import Unison.JitInfo qualified as JitInfo
import Unison.LabeledDependency (LabeledDependency)
import Unison.LabeledDependency qualified as LD
import Unison.LabeledDependency qualified as LabeledDependency
@ -158,13 +144,12 @@ import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.PrettyPrintEnvDecl qualified as PPE hiding (biasTo, empty)
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.PrettyPrintEnvDecl.Names qualified as PPED
import Unison.Project (ProjectAndBranch (..), ProjectBranchNameOrLatestRelease (..))
import Unison.Project (ProjectAndBranch (..))
import Unison.Project.Util (projectContextFromPath)
import Unison.Reference (Reference, TermReference)
import Unison.Reference qualified as Reference
import Unison.Referent (Referent)
import Unison.Referent qualified as Referent
import Unison.Result qualified as Result
import Unison.Runtime.IOSource qualified as IOSource
import Unison.Server.Backend (ShallowListEntry (..))
import Unison.Server.Backend qualified as Backend
@ -179,19 +164,17 @@ import Unison.Share.Codeserver qualified as Codeserver
import Unison.ShortHash qualified as SH
import Unison.Sqlite qualified as Sqlite
import Unison.Symbol (Symbol)
import Unison.Syntax.HashQualified qualified as HQ (parseText, parseTextWith, toText, unsafeParseText)
import Unison.Syntax.HashQualified qualified as HQ (parseTextWith, toText)
import Unison.Syntax.Lexer qualified as L
import Unison.Syntax.Lexer qualified as Lexer
import Unison.Syntax.Name qualified as Name (toText, toVar, unsafeParseVar)
import Unison.Syntax.NameSegment qualified as NameSegment (toEscapedText)
import Unison.Syntax.Parser qualified as Parser
import Unison.Syntax.TermPrinter qualified as TP
import Unison.Term (Term)
import Unison.Term qualified as Term
import Unison.Type (Type)
import Unison.Type qualified as Type
import Unison.Type.Names qualified as Type
import Unison.Typechecker qualified as Typechecker
import Unison.UnisonFile (TypecheckedUnisonFile)
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Names qualified as UF
@ -209,7 +192,6 @@ import Unison.Var (Var)
import Unison.Var qualified as Var
import Unison.WatchKind qualified as WK
import UnliftIO.Directory qualified as Directory
import Witch (unsafeFrom)
------------------------------------------------------------------------------------------------------------------------
-- Main loop
@ -976,12 +958,9 @@ loop e = do
when (not updated) (Cli.respond $ NothingToPatch patchPath scopePath')
ExecuteI main args -> handleRun False main args
MakeStandaloneI output main -> doCompile False output main
CompileSchemeI output main -> doCompileScheme output main
ExecuteSchemeI main args -> doRunAsScheme main args
GenSchemeLibsI mdir ->
doGenerateSchemeBoot True Nothing mdir
FetchSchemeCompilerI name branch ->
doFetchCompiler name branch
CompileSchemeI output main ->
doCompile True (Text.unpack output) main
ExecuteSchemeI main args -> handleRun True main args
IOTestI main -> Tests.handleIOTest main
IOTestAllI -> Tests.handleAllIOTests
-- UpdateBuiltinsI -> do
@ -1332,11 +1311,6 @@ inputDescription input =
ExecuteSchemeI nm args ->
pure $ "run.native " <> Text.unwords (nm : fmap Text.pack args)
CompileSchemeI fi nm -> pure ("compile.native " <> HQ.toText nm <> " " <> fi)
GenSchemeLibsI mdir ->
pure $
"compile.native.genlibs" <> Text.pack (maybe "" (" " ++) mdir)
FetchSchemeCompilerI name branch ->
pure ("compile.native.fetch" <> Text.pack name <> " " <> Text.pack branch)
CreateAuthorI id name -> pure ("create.author " <> NameSegment.toEscapedText id <> " " <> name)
RemoveTermReplacementI src p0 -> do
p <- opatch p0
@ -1918,167 +1892,6 @@ searchBranchScored names0 score queries =
pair qn =
(\score -> (Just score, result)) <$> score qn (Name.toText name)
compilerPath :: Path.Path'
compilerPath = Path.Path' {Path.unPath' = Left abs}
where
segs = ["unison", "internal"]
rootPath = Path.Path {Path.toSeq = Seq.fromList segs}
abs = Path.Absolute {Path.unabsolute = rootPath}
doFetchCompiler :: String -> String -> Cli ()
doFetchCompiler username branch =
doPullRemoteBranch sourceTarget SyncMode.Complete Input.PullWithoutHistory Verbosity.Silent
where
-- fetching info
prj =
These
(unsafeFrom @Text $ "@" <> Text.pack username <> "/internal")
(ProjectBranchNameOrLatestRelease'Name . unsafeFrom @Text $ Text.pack branch)
sourceTarget =
PullSourceTarget2
(ReadShare'ProjectBranch prj)
(This compilerPath)
ensureCompilerExists :: Cli ()
ensureCompilerExists =
Cli.branchExistsAtPath' compilerPath
>>= flip unless (doFetchCompiler "unison" JitInfo.currentRelease)
getCacheDir :: Cli String
getCacheDir = liftIO $ getXdgDirectory XdgCache "unisonlanguage"
getSchemeGenLibDir :: Cli String
getSchemeGenLibDir =
Cli.getConfig "SchemeLibs.Generated" >>= \case
Just dir -> pure dir
Nothing -> (</> "scheme-libs") <$> getCacheDir
getSchemeStaticLibDir :: Cli String
getSchemeStaticLibDir =
Cli.getConfig "SchemeLibs.Static" >>= \case
Just dir -> pure dir
Nothing ->
liftIO $
getXdgDirectory XdgData ("unisonlanguage" </> "scheme-libs")
doGenerateSchemeBoot ::
Bool -> Maybe PPE.PrettyPrintEnv -> Maybe String -> Cli ()
doGenerateSchemeBoot force mppe mdir = do
ppe <- maybe (PPED.suffixifiedPPE <$> Cli.currentPrettyPrintEnvDecl) pure mppe
dir <- maybe getSchemeGenLibDir pure mdir
let bootf = dir </> "unison" </> "boot-generated.ss"
swrapf = dir </> "unison" </> "simple-wrappers.ss"
binf = dir </> "unison" </> "builtin-generated.ss"
cwrapf = dir </> "unison" </> "compound-wrappers.ss"
dinfof = dir </> "unison" </> "data-info.ss"
dirTm = Term.text a (Text.pack dir)
liftIO $ createDirectoryIfMissing True dir
saveData <- Term.ref a <$> resolveTermRef sdName
saveBase <- Term.ref a <$> resolveTermRef sbName
saveWrap <- Term.ref a <$> resolveTermRef swName
gen ppe saveData dinfof dirTm dinfoName
gen ppe saveBase bootf dirTm bootName
gen ppe saveWrap swrapf dirTm simpleWrapName
gen ppe saveBase binf dirTm builtinName
gen ppe saveWrap cwrapf dirTm compoundWrapName
where
a = External
sbName = HQ.unsafeParseText ".unison.internal.compiler.scheme.saveBaseFile"
swName = HQ.unsafeParseText ".unison.internal.compiler.scheme.saveWrapperFile"
sdName = HQ.unsafeParseText ".unison.internal.compiler.scheme.saveDataInfoFile"
dinfoName = HQ.unsafeParseText ".unison.internal.compiler.scheme.dataInfos"
bootName = HQ.unsafeParseText ".unison.internal.compiler.scheme.bootSpec"
builtinName = HQ.unsafeParseText ".unison.internal.compiler.scheme.builtinSpec"
simpleWrapName =
HQ.unsafeParseText ".unison.internal.compiler.scheme.simpleWrapperSpec"
compoundWrapName =
HQ.unsafeParseText ".unison.internal.compiler.scheme.compoundWrapperSpec"
gen ppe save file dir nm =
liftIO (doesFileExist file) >>= \b -> when (not b || force) do
spec <- Term.ref a <$> resolveTermRef nm
let make = Term.apps' save [dir, spec]
typecheckAndEval ppe make
typecheckAndEval :: PPE.PrettyPrintEnv -> Term Symbol Ann -> Cli ()
typecheckAndEval ppe tm = do
Cli.Env {codebase, runtime} <- ask
let mty = Runtime.mainType runtime
Cli.runTransaction (typecheckTerm codebase (Term.delay a tm)) >>= \case
-- Type checking succeeded
Result.Result _ (Just ty)
| Typechecker.fitsScheme ty mty ->
() <$ RuntimeUtils.evalUnisonTerm False ppe False tm
| otherwise ->
Cli.returnEarly $ BadMainFunction "run" rendered ty ppe [mty]
Result.Result notes Nothing -> do
currentPath <- Cli.getCurrentPath
let tes = [err | Result.TypeError err <- toList notes]
Cli.returnEarly (TypeErrors currentPath rendered ppe tes)
where
a = External
rendered = Text.pack (P.toPlainUnbroken $ TP.pretty ppe tm)
ensureSchemeExists :: Cli ()
ensureSchemeExists =
liftIO callScheme >>= \case
True -> pure ()
False -> Cli.returnEarly (PrintMessage msg)
where
msg =
P.lines
[ "I can't seem to call racket. See",
"",
P.indentN
2
"https://download.racket-lang.org/",
"",
"for how to install Racket."
]
cmd = "racket -l- raco help"
callScheme =
readCreateProcessWithExitCode (shell cmd) "" >>= \case
(ExitSuccess, _, _) -> pure True
(ExitFailure _, _, _) -> pure False
racketOpts :: FilePath -> FilePath -> [String] -> [String]
racketOpts gendir statdir args = "-y" : libs ++ args
where
includes = [gendir, statdir </> "racket"]
libs = concatMap (\dir -> ["-S", dir]) includes
runScheme :: String -> [String] -> Cli ()
runScheme file args = do
ensureSchemeExists
gendir <- getSchemeGenLibDir
statdir <- getSchemeStaticLibDir
let cmd = "racket"
opts = racketOpts gendir statdir (file : args)
success <-
liftIO $
(True <$ callProcess cmd opts)
`catch` \(_ :: IOException) -> pure False
unless success $
Cli.returnEarly (PrintMessage "Scheme evaluation failed.")
buildScheme :: Text -> String -> Cli ()
buildScheme main file = do
ensureSchemeExists
statDir <- getSchemeStaticLibDir
genDir <- getSchemeGenLibDir
buildRacket genDir statDir main file
buildRacket :: String -> String -> Text -> String -> Cli ()
buildRacket genDir statDir main file =
let args = ["-l", "raco", "--", "exe", "-o", Text.unpack main, file]
opts = racketOpts genDir statDir args
in void . liftIO $
catch
(True <$ callProcess "racket" opts)
(\(_ :: IOException) -> pure False)
doCompile :: Bool -> String -> HQ.HashQualified Name -> Cli ()
doCompile native output main = do
Cli.Env {codebase, runtime, nativeRuntime} <- ask
@ -2096,43 +1909,6 @@ doCompile native output main = do
)
(Cli.returnEarly . EvaluationFailure)
doRunAsScheme :: Text -> [String] -> Cli ()
doRunAsScheme main0 args = case HQ.parseText main0 of
Just main -> do
fullpath <- generateSchemeFile True main0 main
runScheme fullpath args
Nothing -> Cli.respond $ BadName main0
doCompileScheme :: Text -> HQ.HashQualified Name -> Cli ()
doCompileScheme out main =
generateSchemeFile True out main >>= buildScheme out
generateSchemeFile :: Bool -> Text -> HQ.HashQualified Name -> Cli String
generateSchemeFile exec out main = do
(comp, ppe) <- resolveMainRef main
ensureCompilerExists
doGenerateSchemeBoot False (Just ppe) Nothing
cacheDir <- getCacheDir
liftIO $ createDirectoryIfMissing True (cacheDir </> "scheme-tmp")
let scratch = Text.unpack out ++ ".scm"
fullpath = cacheDir </> "scheme-tmp" </> scratch
output = Text.pack fullpath
sscm <- Term.ref a <$> resolveTermRef saveNm
fprf <- resolveCon filePathNm
let toCmp = Term.termLink a (Referent.Ref comp)
outTm = Term.text a output
fpc = Term.constructor a fprf
fp = Term.app a fpc outTm
tm :: Term Symbol Ann
tm = Term.apps' sscm [Term.boolean a exec, toCmp, fp]
typecheckAndEval ppe tm
pure fullpath
where
a = External
saveNm = HQ.unsafeParseText ".unison.internal.compiler.saveScheme"
filePathNm = HQ.unsafeParseText "FilePath.FilePath"
delete ::
Input ->
DeleteOutput ->

View File

@ -155,9 +155,9 @@ synthesizeForce tl typeOfFunc = do
ref = Reference.DerivedId (Reference.Id (Hash.fromByteString "deadbeef") 0)
env =
Typechecker.Env
{ Typechecker._ambientAbilities = [DD.exceptionType External, Type.builtinIO External],
Typechecker._typeLookup = mempty {TypeLookup.typeOfTerms = Map.singleton ref typeOfFunc} <> tl,
Typechecker._termsByShortname = Map.empty
{ ambientAbilities = [DD.exceptionType External, Type.builtinIO External],
typeLookup = mempty {TypeLookup.typeOfTerms = Map.singleton ref typeOfFunc} <> tl,
termsByShortname = Map.empty
}
case Result.runResultT
( Typechecker.synthesize

View File

@ -3,8 +3,9 @@ module Unison.Codebase.Editor.HandleInput.Upgrade
)
where
import Control.Lens (ix, over, (^.))
import Control.Lens ((^.))
import Control.Monad.Reader (ask)
import Data.Char qualified as Char
import Data.List qualified as List
import Data.List.NonEmpty (pattern (:|))
import Data.Map.Strict qualified as Map
@ -18,7 +19,6 @@ import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.ProjectUtils qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch0)
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Codebase.Editor.HandleInput.Branch qualified as HandleInput.Branch
@ -35,6 +35,7 @@ import Unison.Codebase.Editor.HandleInput.Update2
)
import Unison.Codebase.Editor.Output qualified as Output
import Unison.Codebase.Path qualified as Path
import Unison.HashQualified' qualified as HQ'
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment)
@ -58,11 +59,10 @@ import Unison.Util.Relation (Relation)
import Unison.Util.Relation qualified as Relation
import Unison.Util.Set qualified as Set
import Witch (unsafeFrom)
import qualified Data.Char as Char
handleUpgrade :: NameSegment -> NameSegment -> Cli ()
handleUpgrade oldDepName newDepName = do
when (oldDepName == newDepName) do
handleUpgrade oldName newName = do
when (oldName == newName) do
Cli.returnEarlyWithoutOutput
Cli.Env {codebase, writeSource} <- ask
@ -70,21 +70,29 @@ handleUpgrade oldDepName newDepName = do
(projectAndBranch, _path) <- Cli.expectCurrentProjectBranch
let projectId = projectAndBranch ^. #project . #projectId
let projectPath = Cli.projectBranchPath (ProjectAndBranch projectId (projectAndBranch ^. #branch . #branchId))
let oldDepPath = Path.resolve projectPath (Path.Relative (Path.fromList [NameSegment.libSegment, oldDepName]))
let newDepPath = Path.resolve projectPath (Path.Relative (Path.fromList [NameSegment.libSegment, newDepName]))
let oldPath = Path.resolve projectPath (Path.Relative (Path.fromList [NameSegment.libSegment, oldName]))
let newPath = Path.resolve projectPath (Path.Relative (Path.fromList [NameSegment.libSegment, newName]))
currentV1Branch <- Cli.getBranch0At projectPath
let currentV1BranchWithoutOldDep = deleteLibdep oldDepName currentV1Branch
oldDep <- Cli.expectBranch0AtPath' oldDepPath
let oldDepWithoutDeps = deleteLibdeps oldDep
let oldTransitiveDeps = fromMaybe Branch.empty0 $ fmap Branch.head $ Map.lookup NameSegment.libSegment (oldDep ^. Branch.children)
currentNamespace <- Cli.getBranch0At projectPath
let currentNamespaceSansOld = Branch.deleteLibdep oldName currentNamespace
let currentDeepTermsSansOld = Branch.deepTerms currentNamespaceSansOld
let currentDeepTypesSansOld = Branch.deepTypes currentNamespaceSansOld
let currentLocalNames = Branch.toNames (Branch.deleteLibdeps currentNamespace)
let currentLocalConstructorNames = forwardCtorNames currentLocalNames
let currentDeepNamesSansOld = Branch.toNames currentNamespaceSansOld
newDep <- Cli.expectBranch0AtPath' newDepPath
let newDepWithoutDeps = deleteLibdeps newDep
oldNamespace <- Cli.expectBranch0AtPath' oldPath
let oldLocalNamespace = Branch.deleteLibdeps oldNamespace
let oldLocalTerms = Branch.deepTerms oldLocalNamespace
let oldLocalTypes = Branch.deepTypes oldLocalNamespace
let oldNamespaceMinusLocal = maybe Branch.empty0 Branch.head (Map.lookup NameSegment.libSegment (oldNamespace ^. Branch.children))
let oldDeepMinusLocalTerms = Branch.deepTerms oldNamespaceMinusLocal
let oldDeepMinusLocalTypes = Branch.deepTypes oldNamespaceMinusLocal
let namesExcludingLibdeps = Branch.toNames (deleteLibdeps currentV1Branch)
let constructorNamesExcludingLibdeps = forwardCtorNames namesExcludingLibdeps
let namesExcludingOldDep = Branch.toNames currentV1BranchWithoutOldDep
newNamespace <- Cli.expectBranch0AtPath' newPath
let newLocalNamespace = Branch.deleteLibdeps newNamespace
let newLocalTerms = Branch.deepTerms newLocalNamespace
let newLocalTypes = Branch.deepTypes newLocalNamespace
-- High-level idea: we are trying to perform substitution in every term that depends on something in `old` with the
-- corresponding thing in `new`, by first rendering the user's code with a particular pretty-print environment, then
@ -113,75 +121,44 @@ handleUpgrade oldDepName newDepName = do
--
-- mything#mything2 = #newfoo + 10
let filterUnchangedTerms :: Relation Referent Name -> Set TermReference
filterUnchangedTerms oldTerms =
let phi ref oldNames = case Referent.toTermReference ref of
Nothing -> Set.empty
Just termRef ->
let newNames = Relation.lookupDom ref newTerms
in case newNames `Set.disjoint` oldNames of
True -> Set.singleton termRef
False -> Set.empty
in Map.foldMapWithKey phi $
Relation.domain oldTerms
where
newTerms = Branch.deepTerms newDepWithoutDeps
let filterUnchangedTypes :: Relation TypeReference Name -> Set TypeReference
filterUnchangedTypes oldTypes =
let phi typeRef oldNames =
let newNames = Relation.lookupDom typeRef newTypes
in case newNames `Set.disjoint` oldNames of
True -> Set.singleton typeRef
False -> Set.empty
in Map.foldMapWithKey phi $
Relation.domain oldTypes
where
newTypes = Branch.deepTypes newDepWithoutDeps
let filterTransitiveTerms :: Relation Referent Name -> Set TermReference
filterTransitiveTerms oldTerms =
Relation.dom oldTerms
& Set.mapMaybe \referent -> do
ref <- Referent.toTermReference referent
guard (not $ Relation.memberDom referent (Branch.deepTerms currentV1BranchWithoutOldDep))
pure ref
let filterTransitiveTypes :: Relation TypeReference Name -> Set TypeReference
filterTransitiveTypes oldTypes =
Relation.dom oldTypes
& Set.filter \typ -> not (Relation.memberDom typ (Branch.deepTypes currentV1BranchWithoutOldDep))
(unisonFile, printPPE) <-
Cli.runTransactionWithRollback \abort -> do
dependents <-
getNamespaceDependentsOf
namesExcludingLibdeps
( filterUnchangedTerms (Branch.deepTerms oldDepWithoutDeps)
<> filterUnchangedTypes (Branch.deepTypes oldDepWithoutDeps)
<> filterTransitiveTerms (Branch.deepTerms oldTransitiveDeps)
<> filterTransitiveTypes (Branch.deepTypes oldTransitiveDeps)
currentLocalNames
( Set.unions
[ keepOldLocalTermsNotInNew oldLocalTerms newLocalTerms,
keepOldLocalTypesNotInNew oldLocalTypes newLocalTypes,
keepOldDeepTermsStillInUse oldDeepMinusLocalTerms currentDeepTermsSansOld,
keepOldDeepTypesStillInUse oldDeepMinusLocalTypes currentDeepTypesSansOld
]
)
unisonFile <- do
addDefinitionsToUnisonFile
abort
codebase
(findCtorNames Output.UOUUpgrade namesExcludingLibdeps constructorNamesExcludingLibdeps)
(findCtorNames Output.UOUUpgrade currentLocalNames currentLocalConstructorNames)
dependents
UnisonFile.emptyUnisonFile
hashLength <- Codebase.hashLength
pure
( unisonFile,
makeOldDepPPE oldDepName newDepName namesExcludingOldDep oldDep oldDepWithoutDeps newDepWithoutDeps
`PPED.addFallback` makeComplicatedPPE hashLength namesExcludingOldDep mempty dependents
makeOldDepPPE
oldName
newName
currentDeepNamesSansOld
(Branch.toNames oldNamespace)
(Branch.toNames oldLocalNamespace)
(Branch.toNames newLocalNamespace)
`PPED.addFallback` makeComplicatedPPE hashLength currentDeepNamesSansOld mempty dependents
)
parsingEnv <- makeParsingEnv projectPath namesExcludingOldDep
parsingEnv <- makeParsingEnv projectPath currentDeepNamesSansOld
typecheckedUnisonFile <-
prettyParseTypecheck unisonFile printPPE parsingEnv & onLeftM \prettyUnisonFile -> do
-- Small race condition: since picking a branch name and creating the branch happen in different
-- transactions, creating could fail.
temporaryBranchName <- Cli.runTransaction (findTemporaryBranchName projectId oldDepName newDepName)
temporaryBranchName <- Cli.runTransaction (findTemporaryBranchName projectId oldName newName)
temporaryBranchId <-
HandleInput.Branch.doCreateBranch
(HandleInput.Branch.CreateFrom'Branch projectAndBranch)
@ -189,13 +166,13 @@ handleUpgrade oldDepName newDepName = do
temporaryBranchName
textualDescriptionOfUpgrade
let temporaryBranchPath = Path.unabsolute (Cli.projectBranchPath (ProjectAndBranch projectId temporaryBranchId))
Cli.stepAt textualDescriptionOfUpgrade (temporaryBranchPath, \_ -> currentV1BranchWithoutOldDep)
Cli.stepAt textualDescriptionOfUpgrade (temporaryBranchPath, \_ -> currentNamespaceSansOld)
scratchFilePath <-
Cli.getLatestFile <&> \case
Nothing -> "scratch.u"
Just (file, _) -> file
liftIO $ writeSource (Text.pack scratchFilePath) (Text.pack $ Pretty.toPlain 80 prettyUnisonFile)
Cli.respond (Output.UpgradeFailure scratchFilePath oldDepName newDepName)
Cli.respond (Output.UpgradeFailure scratchFilePath oldName newName)
Cli.returnEarlyWithoutOutput
branchUpdates <-
@ -203,64 +180,91 @@ handleUpgrade oldDepName newDepName = do
Codebase.addDefsToCodebase codebase typecheckedUnisonFile
typecheckedUnisonFileToBranchUpdates
abort
(findCtorNamesMaybe Output.UOUUpgrade namesExcludingLibdeps constructorNamesExcludingLibdeps Nothing)
(findCtorNamesMaybe Output.UOUUpgrade currentLocalNames currentLocalConstructorNames Nothing)
typecheckedUnisonFile
Cli.stepAt
textualDescriptionOfUpgrade
( Path.unabsolute projectPath,
deleteLibdep oldDepName . Branch.batchUpdates branchUpdates
Branch.deleteLibdep oldName . Branch.batchUpdates branchUpdates
)
Cli.respond (Output.UpgradeSuccess oldDepName newDepName)
Cli.respond (Output.UpgradeSuccess oldName newName)
where
textualDescriptionOfUpgrade :: Text
textualDescriptionOfUpgrade =
Text.unwords ["upgrade", NameSegment.toEscapedText oldDepName, NameSegment.toEscapedText newDepName]
Text.unwords ["upgrade", NameSegment.toEscapedText oldName, NameSegment.toEscapedText newName]
keepOldLocalTermsNotInNew :: Relation Referent Name -> Relation Referent Name -> Set TermReference
keepOldLocalTermsNotInNew oldLocalTerms newLocalTerms =
f oldLocalTerms `Set.difference` f newLocalTerms
where
f :: Relation Referent Name -> Set TermReference
f =
Set.mapMaybe Referent.toTermReference . Relation.dom
keepOldLocalTypesNotInNew :: Relation TypeReference Name -> Relation TypeReference Name -> Set TypeReference
keepOldLocalTypesNotInNew oldLocalTypes newLocalTypes =
Relation.dom oldLocalTypes `Set.difference` Relation.dom newLocalTypes
keepOldDeepTermsStillInUse :: Relation Referent Name -> Relation Referent Name -> Set TermReference
keepOldDeepTermsStillInUse oldDeepMinusLocalTerms currentDeepTermsSansOld =
Relation.dom oldDeepMinusLocalTerms & Set.mapMaybe \referent -> do
ref <- Referent.toTermReference referent
guard (not (Relation.memberDom referent currentDeepTermsSansOld))
pure ref
keepOldDeepTypesStillInUse :: Relation TypeReference Name -> Relation TypeReference Name -> Set TypeReference
keepOldDeepTypesStillInUse oldDeepMinusLocalTypes currentDeepTypesSansOld =
Relation.dom oldDeepMinusLocalTypes
& Set.filter \typ -> not (Relation.memberDom typ currentDeepTypesSansOld)
makeOldDepPPE ::
NameSegment ->
NameSegment ->
Names ->
Branch0 m ->
Branch0 m ->
Branch0 m ->
Names ->
Names ->
Names ->
PrettyPrintEnvDecl
makeOldDepPPE oldDepName newDepName namesExcludingOldDep oldDep oldDepWithoutDeps newDepWithoutDeps =
makeOldDepPPE oldName newName currentDeepNamesSansOld oldDeepNames oldLocalNames newLocalNames =
let makePPE suffixifier =
PPE.PrettyPrintEnv
( \ref ->
let oldDirectNames = Relation.lookupDom ref (Branch.deepTerms oldDepWithoutDeps)
newDirectRefsForOldDirectNames =
Relation.range (Branch.deepTerms newDepWithoutDeps) `Map.restrictKeys` oldDirectNames
in case ( Set.null oldDirectNames,
Map.null newDirectRefsForOldDirectNames,
Set.member ref (Branch.deepReferents oldDep),
Relation.memberRan ref (Names.terms namesExcludingOldDep)
) of
(False, False, _, _) -> PPE.makeTermNames fakeNames suffixifier ref
(_, _, True, False) -> PPE.makeTermNames prefixedOldNames PPE.dontSuffixify ref
_ -> []
)
( \ref ->
let oldDirectNames = Relation.lookupDom ref (Branch.deepTypes oldDepWithoutDeps)
newDirectRefsForOldDirectNames =
Relation.range (Branch.deepTypes newDepWithoutDeps) `Map.restrictKeys` oldDirectNames
in case ( Set.null oldDirectNames,
Map.null newDirectRefsForOldDirectNames,
Set.member ref (Branch.deepTypeReferences oldDep),
Relation.memberRan ref (Names.types namesExcludingOldDep)
) of
(False, False, _, _) -> PPE.makeTypeNames fakeNames suffixifier ref
(_, _, True, False) -> PPE.makeTypeNames prefixedOldNames PPE.dontSuffixify ref
_ -> []
)
PPE.PrettyPrintEnv termToNames typeToNames
where
termToNames :: Referent -> [(HQ'.HashQualified Name, HQ'.HashQualified Name)]
termToNames ref
| inNewNamespace = []
| hasNewLocalTermsForOldLocalNames = PPE.makeTermNames fakeLocalNames suffixifier ref
| onlyInOldNamespace = PPE.makeTermNames fullOldDeepNames PPE.dontSuffixify ref
| otherwise = []
where
inNewNamespace = Relation.memberRan ref (Names.terms newLocalNames)
hasNewLocalTermsForOldLocalNames =
not (Map.null (Relation.domain (Names.terms newLocalNames) `Map.restrictKeys` theOldLocalNames))
theOldLocalNames = Relation.lookupRan ref (Names.terms oldLocalNames)
onlyInOldNamespace = inOldNamespace && not inCurrentNamespaceSansOld
inOldNamespace = Relation.memberRan ref (Names.terms oldDeepNames)
inCurrentNamespaceSansOld = Relation.memberRan ref (Names.terms currentDeepNamesSansOld)
typeToNames :: TypeReference -> [(HQ'.HashQualified Name, HQ'.HashQualified Name)]
typeToNames ref
| inNewNamespace = []
| hasNewLocalTypesForOldLocalNames = PPE.makeTypeNames fakeLocalNames suffixifier ref
| onlyInOldNamespace = PPE.makeTypeNames fullOldDeepNames PPE.dontSuffixify ref
| otherwise = []
where
inNewNamespace = Relation.memberRan ref (Names.types newLocalNames)
hasNewLocalTypesForOldLocalNames =
not (Map.null (Relation.domain (Names.types newLocalNames) `Map.restrictKeys` theOldLocalNames))
theOldLocalNames = Relation.lookupRan ref (Names.types oldLocalNames)
onlyInOldNamespace = inOldNamespace && not inCurrentNamespaceSansOld
inOldNamespace = Relation.memberRan ref (Names.types oldDeepNames)
inCurrentNamespaceSansOld = Relation.memberRan ref (Names.types currentDeepNamesSansOld)
in PrettyPrintEnvDecl
{ unsuffixifiedPPE = makePPE PPE.dontSuffixify,
suffixifiedPPE = makePPE (PPE.suffixifyByHash namesExcludingOldDep)
suffixifiedPPE = makePPE (PPE.suffixifyByHash currentDeepNamesSansOld)
}
where
oldNames = Branch.toNames oldDep
prefixedOldNames = PPE.namer (Names.prefix0 (Name.fromReverseSegments (oldDepName :| [NameSegment.libSegment])) oldNames)
fakeNames = PPE.namer (Names.prefix0 (Name.fromReverseSegments (newDepName :| [NameSegment.libSegment])) oldNames)
-- "full" means "with lib.old.* prefix"
fullOldDeepNames = PPE.namer (Names.prefix0 (Name.fromReverseSegments (oldName :| [NameSegment.libSegment])) oldDeepNames)
fakeLocalNames = PPE.namer (Names.prefix0 (Name.fromReverseSegments (newName :| [NameSegment.libSegment])) oldLocalNames)
-- @findTemporaryBranchName projectId oldDepName newDepName@ finds some unused branch name in @projectId@ with a name
-- like "upgrade-<oldDepName>-to-<newDepName>".
@ -291,11 +295,3 @@ findTemporaryBranchName projectId oldDepName newDepName = do
<> Text.filter Char.isAlpha (NameSegment.toEscapedText newDepName)
pure (fromJust (List.find (\name -> not (Set.member name allBranchNames)) allCandidates))
deleteLibdep :: NameSegment -> Branch0 m -> Branch0 m
deleteLibdep dep =
over (Branch.children . ix NameSegment.libSegment . Branch.head_ . Branch.children) (Map.delete dep)
deleteLibdeps :: Branch0 m -> Branch0 m
deleteLibdeps =
over Branch.children (Map.delete NameSegment.libSegment)

View File

@ -185,10 +185,6 @@ data Input
ExecuteSchemeI Text [String]
| -- compile to a scheme file
CompileSchemeI Text (HQ.HashQualified Name)
| -- generate scheme libraries, optional target directory
GenSchemeLibsI (Maybe String)
| -- fetch scheme compiler from a given username and branch
FetchSchemeCompilerI String String
| TestI TestInput
| CreateAuthorI NameSegment {- identifier -} Text {- name -}
| -- Display provided definitions.

View File

@ -196,11 +196,12 @@ withTranscriptRunner ::
(UnliftIO.MonadUnliftIO m) =>
Verbosity ->
UCMVersion ->
FilePath ->
Maybe FilePath ->
(TranscriptRunner -> m r) ->
m r
withTranscriptRunner verbosity ucmVersion configFile action = do
withRuntimes \runtime sbRuntime nRuntime -> withConfig \config -> do
withTranscriptRunner verbosity ucmVersion nrtp configFile action = do
withRuntimes nrtp \runtime sbRuntime nRuntime -> withConfig \config -> do
action \transcriptName transcriptSrc (codebaseDir, codebase) -> do
Server.startServer (Backend.BackendEnv {Backend.useNamesIndex = False}) Server.defaultCodebaseServerOpts runtime codebase \baseUrl -> do
let parsed = parse transcriptName transcriptSrc
@ -209,12 +210,12 @@ withTranscriptRunner verbosity ucmVersion configFile action = do
pure $ join @(Either TranscriptError) result
where
withRuntimes ::
(Runtime.Runtime Symbol -> Runtime.Runtime Symbol -> Runtime.Runtime Symbol -> m a) -> m a
withRuntimes action =
FilePath -> (Runtime.Runtime Symbol -> Runtime.Runtime Symbol -> Runtime.Runtime Symbol -> m a) -> m a
withRuntimes nrtp action =
RTI.withRuntime False RTI.Persistent ucmVersion \runtime -> do
RTI.withRuntime True RTI.Persistent ucmVersion \sbRuntime -> do
action runtime sbRuntime
=<< liftIO (RTI.startNativeRuntime ucmVersion)
=<< liftIO (RTI.startNativeRuntime ucmVersion nrtp)
withConfig :: forall a. ((Maybe Config -> m a) -> m a)
withConfig action = do
case configFile of

View File

@ -52,7 +52,6 @@ import Unison.CommandLine.FZFResolvers qualified as Resolvers
import Unison.CommandLine.InputPattern (ArgumentType (..), InputPattern (InputPattern), IsOptional (..), unionSuggestions)
import Unison.CommandLine.InputPattern qualified as I
import Unison.HashQualified qualified as HQ
import Unison.JitInfo qualified as JitInfo
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment)
@ -2508,7 +2507,7 @@ runScheme =
InputPattern
"run.native"
[]
I.Visible
I.Hidden
[("definition to run", Required, exactDefinitionTermQueryArg), ("arguments", ZeroPlus, noCompletionsArg)]
( P.wrapColumn2
[ ( makeExample runScheme ["main", "args"],
@ -2525,7 +2524,7 @@ compileScheme =
InputPattern
"compile.native"
[]
I.Visible
I.Hidden
[("definition to compile", Required, exactDefinitionTermQueryArg), ("output file", Required, filePathArg)]
( P.wrapColumn2
[ ( makeExample compileScheme ["main", "file"],
@ -2540,73 +2539,6 @@ compileScheme =
Input.CompileSchemeI (Text.pack file) <$> parseHashQualifiedName main
_ -> Left $ showPatternHelp compileScheme
schemeLibgen :: InputPattern
schemeLibgen =
InputPattern
"compile.native.genlibs"
[]
I.Visible
[("target directory", Optional, filePathArg)]
( P.wrapColumn2
[ ( makeExample schemeLibgen ["[targetDir]"],
"Generates libraries necessary for scheme compilation.\n\n\
\There is no need to run this before"
<> P.group (makeExample compileScheme [])
<> "as\
\ the latter will check if the libraries are missing and\
\ auto-generate them. However, this will generate the\
\ libraries even if their files already exist, so if the\
\ compiler has been upgraded, this can be used to ensure\
\ the generated libraries are up to date."
)
]
)
\case
[] -> pure $ Input.GenSchemeLibsI Nothing
[dir] -> pure . Input.GenSchemeLibsI $ Just dir
_ -> Left $ showPatternHelp schemeLibgen
fetchScheme :: InputPattern
fetchScheme =
InputPattern
"compile.native.fetch"
[]
I.Visible
[("name", Optional, noCompletionsArg), ("branch", Optional, noCompletionsArg)]
( P.wrapColumn2
[ ( makeExample fetchScheme [],
P.lines . fmap P.wrap $
[ "Fetches the unison library for compiling to scheme.",
"This is done automatically when"
<> P.group (makeExample compileScheme [])
<> "is run if the library is not already in the\
\ standard location (unison.internal). However,\
\ this command will force a pull even if the\
\ library already exists.",
"You can also specify a user and branch name to pull\
\ from in order to use an alternate version of the\
\ unison compiler (for development purposes, for\
\ example).",
"The default user is `unison`. The default branch\
\ for the `unison` user is a specified latest\
\ version of the compiler for stability. The\
\ default branch for other uses is `main`. The\
\ command fetches code from a project:",
P.indentN 2 ("@user/internal/branch")
]
)
]
)
\case
[] -> pure (Input.FetchSchemeCompilerI "unison" JitInfo.currentRelease)
[name] -> pure (Input.FetchSchemeCompilerI name branch)
where
branch
| name == "unison" = JitInfo.currentRelease
| otherwise = "main"
[name, branch] -> pure (Input.FetchSchemeCompilerI name branch)
_ -> Left $ showPatternHelp fetchScheme
createAuthor :: InputPattern
createAuthor =
InputPattern
@ -3049,7 +2981,6 @@ validInputs =
edit,
editNamespace,
execute,
fetchScheme,
find,
findAll,
findGlobal,
@ -3104,7 +3035,6 @@ validInputs =
resetRoot,
runScheme,
saveExecuteResult,
schemeLibgen,
squashMerge,
test,
testAll,

View File

@ -2757,7 +2757,7 @@ renderEditConflicts ppe Patch {..} = do
then "deprecated and also replaced with"
else "replaced with"
)
`P.hang` P.lines replacements
`P.hang` P.lines replacements
formatTermEdits ::
(Reference.TermReference, Set TermEdit.TermEdit) ->
Numbered Pretty
@ -2772,7 +2772,7 @@ renderEditConflicts ppe Patch {..} = do
then "deprecated and also replaced with"
else "replaced with"
)
`P.hang` P.lines replacements
`P.hang` P.lines replacements
formatConflict ::
Either
(Reference, Set TypeEdit.TypeEdit)

View File

@ -1,4 +0,0 @@
module Unison.JitInfo (currentRelease) where
currentRelease :: String
currentRelease = "releases/0.0.10"

View File

@ -43,7 +43,7 @@ import Unison.Referent qualified as Referent
import Unison.Runtime.IOSource qualified as IOSource
import Unison.Syntax.DeclPrinter qualified as DeclPrinter
import Unison.Syntax.HashQualified' qualified as HQ' (toText)
import Unison.Syntax.Name qualified as Name (parseText, nameP, toText)
import Unison.Syntax.Name qualified as Name (nameP, parseText, toText)
import Unison.Syntax.TypePrinter qualified as TypePrinter
import Unison.Util.Monoid qualified as Monoid
import Unison.Util.Pretty qualified as Pretty
@ -57,15 +57,16 @@ completionHandler m respond =
(range, prefix) <- VFS.completionPrefix (m ^. params . textDocument . uri) (m ^. params . position)
ppe <- PPED.suffixifiedPPE <$> lift currentPPED
codebaseCompletions <- lift getCodebaseCompletions
Config {maxCompletions} <- lift getConfig
-- Config {maxCompletions} <- lift getConfig
let defMatches = matchCompletions codebaseCompletions prefix
let (isIncomplete, defCompletions) =
defMatches
& nubOrdOn (\(p, _name, ref) -> (p, ref))
& fmap (over _1 Path.toText)
& case maxCompletions of
Nothing -> (False,)
Just n -> takeCompletions n
& (False,)
-- case maxCompletions of
-- Nothing -> (False,)
-- Just n -> takeCompletions n
let defCompletionItems =
defCompletions
& mapMaybe \(path, fqn, dep) ->
@ -75,12 +76,13 @@ completionHandler m respond =
let itemDefaults = Nothing
pure . CompletionList isIncomplete itemDefaults $ defCompletionItems
where
-- Takes at most the specified number of completions, but also indicates with a boolean
-- whether there were more completions remaining so we can pass that along to the client.
takeCompletions :: Int -> [a] -> (Bool, [a])
takeCompletions 0 xs = (not $ null xs, [])
takeCompletions _ [] = (False, [])
takeCompletions n (x : xs) = second (x :) $ takeCompletions (pred n) xs
-- Takes at most the specified number of completions, but also indicates with a boolean
-- whether there were more completions remaining so we can pass that along to the client.
-- takeCompletions :: Int -> [a] -> (Bool, [a])
-- takeCompletions 0 xs = (not $ null xs, [])
-- takeCompletions _ [] = (False, [])
-- takeCompletions n (x : xs) = second (x :) $ takeCompletions (pred n) xs
mkDefCompletionItem :: Uri -> Range -> Name -> Name -> Text -> Text -> LabeledDependency -> CompletionItem
mkDefCompletionItem fileUri range relativeName fullyQualifiedName path suffixified dep =

View File

@ -341,10 +341,10 @@ analyseNotes fileUri ppe src notes = do
Context.Suggestion {suggestionName, suggestionType, suggestionMatch} <- sortOn nameResolutionSuggestionPriority suggestions
let prettyType = TypePrinter.prettyStr Nothing ppe suggestionType
let ranges = (diags ^.. folded . range)
let rca = rangedCodeAction ("Use " <> suggestionName <> " : " <> Text.pack prettyType) diags ranges
let rca = rangedCodeAction ("Use " <> Name.toText suggestionName <> " : " <> Text.pack prettyType) diags ranges
pure $
rca
& includeEdits fileUri suggestionName ranges
& includeEdits fileUri (Name.toText suggestionName) ranges
& codeAction . isPreferred ?~ (suggestionMatch == Context.Exact)
nameResolutionSuggestionPriority (Context.Suggestion {suggestionMatch, suggestionName}) = case suggestionMatch of

View File

@ -84,7 +84,10 @@ identifierSplitAtPosition uri pos = do
pure (Text.takeWhileEnd isIdentifierChar before, Text.takeWhile isIdentifierChar after)
where
isIdentifierChar c =
Lexer.wordyIdChar c || Lexer.symbolyIdChar c
-- Manually exclude '!' and apostrophe, since those are usually just forces and
-- delays, which shouldn't be replaced by auto-complete.
(c /= '!' && c /= '\'')
&& (c == '.' || Lexer.wordyIdChar c || Lexer.symbolyIdChar c)
-- | Returns the prefix of the symbol at the provided location, and the range that prefix
-- spans.

View File

@ -75,13 +75,28 @@ import Unison.Util.Monoid (foldMapM)
-- | The maximum number of downloader threads, during a pull.
maxSimultaneousPullDownloaders :: Int
maxSimultaneousPullDownloaders = 5
maxSimultaneousPullDownloaders = unsafePerformIO $ do
lookupEnv "UNISON_PULL_WORKERS" <&> \case
Just n -> read n
Nothing -> 5
{-# NOINLINE maxSimultaneousPullDownloaders #-}
-- | The maximum number of push workers at a time. Each push worker reads from the database and uploads entities.
-- Share currently parallelizes on it's own in the backend, and any more than one push worker
-- just results in serialization conflicts which slow things down.
maxSimultaneousPushWorkers :: Int
maxSimultaneousPushWorkers = 1
maxSimultaneousPushWorkers = unsafePerformIO $ do
lookupEnv "UNISON_PUSH_WORKERS" <&> \case
Just n -> read n
Nothing -> 1
{-# NOINLINE maxSimultaneousPushWorkers #-}
syncChunkSize :: Int
syncChunkSize = unsafePerformIO $ do
lookupEnv "UNISON_SYNC_CHUNK_SIZE" <&> \case
Just n -> read n
Nothing -> 50
{-# NOINLINE syncChunkSize #-}
------------------------------------------------------------------------------------------------------------------------
-- Push
@ -606,7 +621,7 @@ completeTempEntities httpClient unisonShareUrl connect repoInfo downloadedCallba
dispatchWorkMode = do
hashes <- readTVar hashesVar
check (not (Set.null hashes))
let (hashes1, hashes2) = Set.splitAt 50 hashes
let (hashes1, hashes2) = Set.splitAt syncChunkSize hashes
modifyTVar' uninsertedHashesVar (Set.union hashes1)
writeTVar hashesVar hashes2
pure (DispatcherForkWorker (NESet.unsafeFromSet hashes1))
@ -820,7 +835,7 @@ uploadEntities unisonShareUrl repoInfo hashes0 uploadedCallback = do
dispatchWorkMode = do
hashes <- readTVar hashesVar
when (Set.null hashes) retry
let (hashes1, hashes2) = Set.splitAt 50 hashes
let (hashes1, hashes2) = Set.splitAt syncChunkSize hashes
modifyTVar' dedupeVar (Set.union hashes1)
writeTVar hashesVar hashes2
pure (UploadDispatcherForkWorkerWhenAvailable (NESet.unsafeFromSet hashes1))

View File

@ -66,7 +66,7 @@ runTranscript :: Codebase -> Transcript -> IO TranscriptOutput
runTranscript (Codebase codebasePath fmt) transcript = do
let err e = fail $ "Parse error: \n" <> show e
cbInit = case fmt of CodebaseFormat2 -> SC.init
TR.withTranscriptRunner Verbosity.Silent "Unison.Test.Ucm.runTranscript Invalid Version String" configFile $ \runner -> do
TR.withTranscriptRunner Verbosity.Silent "Unison.Test.Ucm.runTranscript Invalid Version String" rtp configFile $ \runner -> do
result <- Codebase.Init.withOpenCodebase cbInit "transcript" codebasePath SC.DoLock SC.DontMigrate \codebase -> do
Codebase.runTransaction codebase (Codebase.installUcmDependencies codebase)
let transcriptSrc = stripMargin . Text.pack $ unTranscript transcript
@ -78,6 +78,9 @@ runTranscript (Codebase codebasePath fmt) transcript = do
Right x -> pure x
where
configFile = Nothing
-- Note: this needs to be properly configured if these tests ever
-- need to do native compiles. But I suspect they won't.
rtp = "native-compiler/bin"
lowLevel :: Codebase -> (Codebase.Codebase IO Symbol Ann -> IO a) -> IO a
lowLevel (Codebase root fmt) action = do

View File

@ -1,4 +1,5 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{- This module kicks off the Transcript Tests.
It doesn't do the transcript parsing itself.
@ -10,11 +11,13 @@ import Data.Text qualified as Text
import Data.Text.IO qualified as Text
import EasyTest
import System.Directory
import System.Environment (getArgs)
import System.Environment (getArgs, getExecutablePath)
import System.FilePath
( replaceExtension,
splitFileName,
takeDirectory,
takeExtensions,
(<.>),
(</>),
)
import System.IO.CodePage (withCP65001)
@ -27,17 +30,24 @@ import Unison.Prelude
import UnliftIO.STM qualified as STM
data TestConfig = TestConfig
{ matchPrefix :: Maybe String
{ matchPrefix :: Maybe String,
runtimePath :: FilePath
}
deriving (Show)
type TestBuilder = FilePath -> [String] -> String -> Test ()
type TestBuilder = FilePath -> FilePath -> [String] -> String -> Test ()
testBuilder ::
Bool -> ((FilePath, Text) -> IO ()) -> FilePath -> [String] -> String -> Test ()
testBuilder expectFailure recordFailure dir prelude transcript = scope transcript $ do
Bool ->
((FilePath, Text) -> IO ()) ->
FilePath ->
FilePath ->
[String] ->
String ->
Test ()
testBuilder expectFailure recordFailure runtimePath dir prelude transcript = scope transcript $ do
outputs <- io . withTemporaryUcmCodebase SC.init Verbosity.Silent "transcript" SC.DoLock $ \(codebasePath, codebase) -> do
withTranscriptRunner Verbosity.Silent "TODO: pass version here" Nothing \runTranscript -> do
withTranscriptRunner Verbosity.Silent "TODO: pass version here" runtimePath Nothing \runTranscript -> do
for files \filePath -> do
transcriptSrc <- readUtf8 filePath
out <- silence $ runTranscript filePath transcriptSrc (codebasePath, codebase)
@ -73,7 +83,7 @@ outputFileForTranscript filePath =
replaceExtension filePath ".output.md"
buildTests :: TestConfig -> TestBuilder -> FilePath -> Test ()
buildTests config testBuilder dir = do
buildTests TestConfig {..} testBuilder dir = do
io
. putStrLn
. unlines
@ -88,7 +98,7 @@ buildTests config testBuilder dir = do
& filter (\f -> takeExtensions f == ".md")
& partition ((isPrefixOf "_") . snd . splitFileName)
-- if there is a matchPrefix set, filter non-prelude files by that prefix - or return True
& second (filter (\f -> maybe True (`isPrefixOf` f) (matchPrefix config)))
& second (filter (\f -> maybe True (`isPrefixOf` f) matchPrefix))
case length transcripts of
0 -> pure ()
@ -96,7 +106,7 @@ buildTests config testBuilder dir = do
-- if you don't give it any tests, this keeps it going
-- till the end so we can search all transcripts for
-- prefix matches.
_ -> tests (testBuilder dir prelude <$> transcripts)
_ -> tests (testBuilder runtimePath dir prelude <$> transcripts)
-- Transcripts that exit successfully get cleaned-up by the transcript parser.
-- Any remaining folders matching "transcript-.*" are output directories
@ -139,14 +149,21 @@ test config = do
Text.putStrLn msg
cleanup
handleArgs :: [String] -> TestConfig
handleArgs args =
let matchPrefix = case args of
[prefix] -> Just prefix
_ -> Nothing
in TestConfig matchPrefix
handleArgs :: TestConfig -> [String] -> TestConfig
handleArgs acc ("--runtime-path" : p : rest) =
handleArgs (acc {runtimePath = p}) rest
handleArgs acc [prefix] = acc {matchPrefix = Just prefix}
handleArgs acc _ = acc
defaultConfig :: IO TestConfig
defaultConfig = TestConfig Nothing <$> defaultRTP
where
defaultRTP = do
ucm <- getExecutablePath
pure (takeDirectory ucm </> "runtime" </> "unison-runtime" <.> exeExtension)
main :: IO ()
main = withCP65001 do
testConfig <- handleArgs <$> getArgs
dcfg <- defaultConfig
testConfig <- handleArgs dcfg <$> getArgs
run (test testConfig)

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
cabal-version: 2.0
-- This file has been generated from package.yaml by hpack version 0.35.2.
-- This file has been generated from package.yaml by hpack version 0.36.0.
--
-- see: https://github.com/sol/hpack
@ -22,6 +22,147 @@ flag optimized
default: False
library
exposed-modules:
ArgParse
Stats
System.Path
Unison.Main
Unison.Version
hs-source-dirs:
unison
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall
build-depends:
IntervalMap
, ListLike
, aeson >=2.0.0.0
, aeson-pretty
, ansi-terminal
, async
, base
, bytes
, bytestring
, co-log-core
, code-page
, concurrent-output
, configurator
, containers >=0.6.3
, cryptonite
, directory
, either
, errors
, exceptions
, extra
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
, http-client >=0.7.6
, http-client-tls
, http-types
, jwt
, ki
, lens
, lock-file
, lsp >=2.2.0.0
, lsp-types >=2.0.2.0
, megaparsec
, memory
, mtl
, network
, network-simple
, network-udp
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
, random-shuffle
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet
, stm
, template-haskell
, temporary
, text
, text-builder
, text-rope
, these
, these-lens
, time
, transformers
, unison-cli-lib
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2
, unison-core
, unison-core1
, unison-hash
, unison-parser-typechecker
, unison-prelude
, unison-pretty-printer
, unison-share-api
, unison-share-projects-api
, unison-sqlite
, unison-syntax
, unison-util-base32hex
, unison-util-nametree
, unison-util-relation
, unliftio
, unordered-containers
, uri-encode
, uuid
, vector
, wai
, warp
, witch
, witherable
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields
library unison-cli-lib
exposed-modules:
Compat
Unison.Auth.CredentialFile
@ -107,7 +248,6 @@ library
Unison.CommandLine.OutputMessages
Unison.CommandLine.Types
Unison.CommandLine.Welcome
Unison.JitInfo
Unison.LSP
Unison.LSP.CancelRequest
Unison.LSP.CodeAction
@ -180,6 +320,7 @@ library
, bytes
, bytestring
, co-log-core
, code-page
, concurrent-output
, configurator
, containers >=0.6.3
@ -214,6 +355,7 @@ library
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
@ -224,7 +366,10 @@ library
, semigroups
, servant
, servant-client
, shellmet
, stm
, template-haskell
, temporary
, text
, text-builder
, text-rope
@ -264,143 +409,6 @@ library
build-depends:
unix
executable cli-integration-tests
main-is: Suite.hs
other-modules:
IntegrationTests.ArgumentParsing
hs-source-dirs:
integration-tests
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0
build-tools:
unison
build-depends:
IntervalMap
, ListLike
, aeson >=2.0.0.0
, aeson-pretty
, ansi-terminal
, async
, base
, bytes
, bytestring
, co-log-core
, code-page
, concurrent-output
, configurator
, containers >=0.6.3
, cryptonite
, directory
, easytest
, either
, errors
, exceptions
, extra
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
, http-client >=0.7.6
, http-client-tls
, http-types
, jwt
, ki
, lens
, lock-file
, lsp >=2.2.0.0
, lsp-types >=2.0.2.0
, megaparsec
, memory
, mtl
, network
, network-simple
, network-udp
, network-uri
, nonempty-containers
, open-browser
, pretty-simple
, process
, random >=1.2.0
, random-shuffle
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet
, stm
, text
, text-builder
, text-rope
, these
, these-lens
, time
, transformers
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2
, unison-core
, unison-core1
, unison-hash
, unison-parser-typechecker
, unison-prelude
, unison-pretty-printer
, unison-share-api
, unison-share-projects-api
, unison-sqlite
, unison-syntax
, unison-util-base32hex
, unison-util-nametree
, unison-util-relation
, unliftio
, unordered-containers
, uri-encode
, uuid
, vector
, wai
, warp
, witch
, witherable
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields
executable transcripts
main-is: Transcripts.hs
hs-source-dirs:
@ -486,6 +494,7 @@ executable transcripts
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
@ -499,145 +508,6 @@ executable transcripts
, shellmet
, silently
, stm
, text
, text-builder
, text-rope
, these
, these-lens
, time
, transformers
, unison-cli
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2
, unison-core
, unison-core1
, unison-hash
, unison-parser-typechecker
, unison-prelude
, unison-pretty-printer
, unison-share-api
, unison-share-projects-api
, unison-sqlite
, unison-syntax
, unison-util-base32hex
, unison-util-nametree
, unison-util-relation
, unliftio
, unordered-containers
, uri-encode
, uuid
, vector
, wai
, warp
, witch
, witherable
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields
executable unison
main-is: Main.hs
other-modules:
ArgParse
Stats
System.Path
Version
hs-source-dirs:
unison
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall -threaded -rtsopts "-with-rtsopts=-I0 -N -qn4 -qg1 -T" -optP-Wno-nonportable-include-path
build-depends:
IntervalMap
, ListLike
, aeson >=2.0.0.0
, aeson-pretty
, ansi-terminal
, async
, base
, bytes
, bytestring
, co-log-core
, code-page
, concurrent-output
, configurator
, containers >=0.6.3
, cryptonite
, directory
, either
, errors
, exceptions
, extra
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
, http-client >=0.7.6
, http-client-tls
, http-types
, jwt
, ki
, lens
, lock-file
, lsp >=2.2.0.0
, lsp-types >=2.0.2.0
, megaparsec
, memory
, mtl
, network
, network-simple
, network-udp
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
, random-shuffle
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet
, stm
, template-haskell
, temporary
, text
@ -647,7 +517,7 @@ executable unison
, these-lens
, time
, transformers
, unison-cli
, unison-cli-lib
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2
@ -771,6 +641,7 @@ test-suite cli-tests
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
@ -783,6 +654,7 @@ test-suite cli-tests
, servant-client
, shellmet
, stm
, template-haskell
, temporary
, text
, text-builder
@ -791,7 +663,7 @@ test-suite cli-tests
, these-lens
, time
, transformers
, unison-cli
, unison-cli-lib
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2

View File

@ -114,6 +114,7 @@ data Command
data GlobalOptions = GlobalOptions
{ codebasePathOption :: Maybe CodebasePathOption,
exitOption :: ShouldExit,
nativeRuntimePath :: Maybe FilePath,
lspFormattingConfig :: LspFormattingConfig
}
deriving (Show, Eq)
@ -256,10 +257,11 @@ globalOptionsParser = do
-- ApplicativeDo
codebasePathOption <- codebasePathParser <|> codebaseCreateParser
exitOption <- exitParser
nativeRuntimePath <- nativeRuntimePathFlag
lspFormattingConfig <- lspFormattingParser
pure
GlobalOptions {codebasePathOption, exitOption, lspFormattingConfig}
GlobalOptions {codebasePathOption, exitOption, nativeRuntimePath, lspFormattingConfig}
codebasePathParser :: Parser (Maybe CodebasePathOption)
codebasePathParser = do
@ -446,6 +448,14 @@ readAbsolutePath = do
<> show rel
<> " was relative. Try adding a `.` prefix, e.g. `.path.to.project`"
nativeRuntimePathFlag :: Parser (Maybe FilePath)
nativeRuntimePathFlag =
optional . strOption $
long "runtime-path"
<> metavar "DIR"
<> help "Path to native runtime files"
<> noGlobal
readPath' :: ReadM Path.Path'
readPath' = do
strPath <- OptParse.str

View File

@ -6,7 +6,7 @@
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-partial-type-signatures #-}
module Main
module Unison.Main
( main,
)
where
@ -40,10 +40,21 @@ import Ki qualified
import Network.HTTP.Client qualified as HTTP
import Network.HTTP.Client.TLS qualified as HTTP
import Stats (recordRtsStats)
import System.Directory (canonicalizePath, getCurrentDirectory, removeDirectoryRecursive)
import System.Environment (getProgName, withArgs)
import System.Directory
( canonicalizePath,
exeExtension,
getCurrentDirectory,
removeDirectoryRecursive,
)
import System.Environment (getExecutablePath, getProgName, withArgs)
import System.Exit qualified as Exit
import System.FilePath qualified as FP
import System.FilePath
( replaceExtension,
takeDirectory,
takeExtension,
(<.>),
(</>),
)
import System.IO (stderr)
import System.IO.CodePage (withCP65001)
import System.IO.Error (catchIOError)
@ -78,15 +89,22 @@ import Unison.Server.Backend qualified as Backend
import Unison.Server.CodebaseServer qualified as Server
import Unison.Symbol (Symbol)
import Unison.Util.Pretty qualified as P
import Unison.Version (Version)
import Unison.Version qualified as Version
import UnliftIO qualified
import UnliftIO.Directory (getHomeDirectory)
import Version qualified
type Runtimes =
(RTI.Runtime Symbol, RTI.Runtime Symbol, RTI.Runtime Symbol)
main :: IO ()
main = do
fixNativeRuntimePath :: Maybe FilePath -> IO FilePath
fixNativeRuntimePath override = do
ucm <- getExecutablePath
let ucr = takeDirectory ucm </> "runtime" </> "unison-runtime" <.> exeExtension
pure $ maybe ucr id override
main :: Version -> IO ()
main version = do
-- Replace the default exception handler with one complains loudly, because we shouldn't have any uncaught exceptions.
-- Sometimes `show` and `displayException` are different strings; in this case, we want to show them both, so this
-- issue is easier to debug.
@ -114,16 +132,17 @@ main = do
withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do
interruptHandler <- defaultInterruptHandler
withInterruptHandler interruptHandler $ do
void $ Ki.fork scope initHTTPClient
void $ Ki.fork scope (initHTTPClient version)
progName <- getProgName
-- hSetBuffering stdout NoBuffering -- cool
(renderUsageInfo, globalOptions, command) <- parseCLIArgs progName (Text.unpack Version.gitDescribeWithDate)
(renderUsageInfo, globalOptions, command) <- parseCLIArgs progName (Text.unpack (Version.gitDescribeWithDate version))
nrtp <- fixNativeRuntimePath (nativeRuntimePath globalOptions)
let GlobalOptions {codebasePathOption = mCodePathOption, exitOption, lspFormattingConfig} = globalOptions
withConfig mCodePathOption \config -> do
currentDir <- getCurrentDirectory
case command of
PrintVersion ->
Text.putStrLn $ Text.pack progName <> " version: " <> Version.gitDescribeWithDate
Text.putStrLn $ Text.pack progName <> " version: " <> Version.gitDescribeWithDate version
Init -> do
exitError
( P.lines
@ -138,7 +157,7 @@ main = do
)
Run (RunFromSymbol mainName) args -> do
getCodebaseOrExit mCodePathOption (SC.MigrateAutomatically SC.Backup SC.Vacuum) \(_, _, theCodebase) -> do
RTI.withRuntime False RTI.OneOff Version.gitDescribeWithDate \runtime -> do
RTI.withRuntime False RTI.OneOff (Version.gitDescribeWithDate version) \runtime -> do
withArgs args (execute theCodebase runtime mainName) >>= \case
Left err -> exitError err
Right () -> pure ()
@ -150,13 +169,14 @@ main = do
Left _ -> exitError "I couldn't find that file or it is for some reason unreadable."
Right contents -> do
getCodebaseOrExit mCodePathOption (SC.MigrateAutomatically SC.Backup SC.Vacuum) \(initRes, _, theCodebase) -> do
withRuntimes RTI.OneOff \(rt, sbrt, nrt) -> do
withRuntimes nrtp RTI.OneOff \(rt, sbrt, nrt) -> do
let fileEvent = Input.UnisonFileChanged (Text.pack file) contents
let noOpRootNotifier _ = pure ()
let noOpPathNotifier _ = pure ()
let serverUrl = Nothing
let startPath = Nothing
launch
version
currentDir
config
rt
@ -176,13 +196,14 @@ main = do
Left _ -> exitError "I had trouble reading this input."
Right contents -> do
getCodebaseOrExit mCodePathOption (SC.MigrateAutomatically SC.Backup SC.Vacuum) \(initRes, _, theCodebase) -> do
withRuntimes RTI.OneOff \(rt, sbrt, nrt) -> do
withRuntimes nrtp RTI.OneOff \(rt, sbrt, nrt) -> do
let fileEvent = Input.UnisonFileChanged (Text.pack "<standard input>") contents
let noOpRootNotifier _ = pure ()
let noOpPathNotifier _ = pure ()
let serverUrl = Nothing
let startPath = Nothing
launch
version
currentDir
config
rt
@ -226,7 +247,7 @@ main = do
Left err -> exitError err
Right () -> pure ()
where
vmatch = v == Version.gitDescribeWithDate
vmatch = v == Version.gitDescribeWithDate version
ws s = P.wrap (P.text s)
ifile
| 'c' : 'u' : '.' : rest <- reverse file = reverse rest
@ -242,7 +263,7 @@ main = do
P.indentN 4 $ P.text v,
"",
"Your version",
P.indentN 4 $ P.text Version.gitDescribeWithDate,
P.indentN 4 $ P.text $ Version.gitDescribeWithDate version,
"",
P.wrap $
"The program was compiled from hash "
@ -261,13 +282,13 @@ main = do
\that matches your version of Unison."
]
Transcript shouldFork shouldSaveCodebase mrtsStatsFp transcriptFiles -> do
let action = runTranscripts Verbosity.Verbose renderUsageInfo shouldFork shouldSaveCodebase mCodePathOption transcriptFiles
let action = runTranscripts version Verbosity.Verbose renderUsageInfo shouldFork shouldSaveCodebase mCodePathOption nrtp transcriptFiles
case mrtsStatsFp of
Nothing -> action
Just fp -> recordRtsStats fp action
Launch isHeadless codebaseServerOpts mayStartingPath shouldWatchFiles -> do
getCodebaseOrExit mCodePathOption (SC.MigrateAfterPrompt SC.Backup SC.Vacuum) \(initRes, _, theCodebase) -> do
withRuntimes RTI.Persistent \(runtime, sbRuntime, nRuntime) -> do
withRuntimes nrtp RTI.Persistent \(runtime, sbRuntime, nRuntime) -> do
startingPath <- case isHeadless of
WithCLI -> do
-- If the user didn't provide a starting path on the command line, put them in the most recent
@ -316,6 +337,7 @@ main = do
PT.putPrettyLn $ P.string "Now starting the Unison Codebase Manager (UCM)..."
launch
version
currentDir
config
runtime
@ -332,12 +354,13 @@ main = do
Exit -> do Exit.exitSuccess
where
-- (runtime, sandboxed runtime)
withRuntimes :: RTI.RuntimeHost -> (Runtimes -> IO a) -> IO a
withRuntimes mode action =
RTI.withRuntime False mode Version.gitDescribeWithDate \runtime -> do
RTI.withRuntime True mode Version.gitDescribeWithDate \sbRuntime ->
withRuntimes :: FilePath -> RTI.RuntimeHost -> (Runtimes -> IO a) -> IO a
withRuntimes nrtp mode action =
RTI.withRuntime False mode (Version.gitDescribeWithDate version) \runtime -> do
RTI.withRuntime True mode (Version.gitDescribeWithDate version) \sbRuntime ->
action . (runtime,sbRuntime,)
=<< RTI.startNativeRuntime Version.gitDescribeWithDate
-- startNativeRuntime saves the path to `unison-runtime`
=<< RTI.startNativeRuntime (Version.gitDescribeWithDate version) nrtp
withConfig :: Maybe CodebasePathOption -> (Config -> IO a) -> IO a
withConfig mCodePathOption action = do
UnliftIO.bracket
@ -352,9 +375,9 @@ main = do
-- | Set user agent and configure TLS on global http client.
-- Note that the authorized http client is distinct from the global http client.
initHTTPClient :: IO ()
initHTTPClient = do
let (ucmVersion, _date) = Version.gitDescribe
initHTTPClient :: Version -> IO ()
initHTTPClient version = do
let (ucmVersion, _date) = Version.gitDescribe version
let userAgent = Text.encodeUtf8 $ "UCM/" <> ucmVersion
let addUserAgent req = do
pure $ req {HTTP.requestHeaders = ("User-Agent", userAgent) : HTTP.requestHeaders req}
@ -386,21 +409,23 @@ prepareTranscriptDir verbosity shouldFork mCodePathOption shouldSaveCodebase = d
pure tmp
runTranscripts' ::
Version ->
String ->
Maybe FilePath ->
FilePath ->
FilePath ->
NonEmpty MarkdownFile ->
IO Bool
runTranscripts' progName mcodepath transcriptDir markdownFiles = do
runTranscripts' version progName mcodepath nativeRtp transcriptDir markdownFiles = do
currentDir <- getCurrentDirectory
configFilePath <- getConfigFilePath mcodepath
-- We don't need to create a codebase through `getCodebaseOrExit` as we've already done so previously.
and <$> getCodebaseOrExit (Just (DontCreateCodebaseWhenMissing transcriptDir)) (SC.MigrateAutomatically SC.Backup SC.Vacuum) \(_, codebasePath, theCodebase) -> do
TR.withTranscriptRunner Verbosity.Verbose Version.gitDescribeWithDate (Just configFilePath) $ \runTranscript -> do
TR.withTranscriptRunner Verbosity.Verbose (Version.gitDescribeWithDate version) nativeRtp (Just configFilePath) $ \runTranscript -> do
for markdownFiles $ \(MarkdownFile fileName) -> do
transcriptSrc <- readUtf8 fileName
result <- runTranscript fileName transcriptSrc (codebasePath, theCodebase)
let outputFile = FP.replaceExtension (currentDir FP.</> fileName) ".output.md"
let outputFile = replaceExtension (currentDir </> fileName) ".output.md"
(output, succeeded) <- case result of
Left err -> case err of
TR.TranscriptParseError err -> do
@ -439,14 +464,16 @@ runTranscripts' progName mcodepath transcriptDir markdownFiles = do
pure succeeded
runTranscripts ::
Version ->
Verbosity.Verbosity ->
UsageRenderer ->
ShouldForkCodebase ->
ShouldSaveCodebase ->
Maybe CodebasePathOption ->
FilePath ->
NonEmpty String ->
IO ()
runTranscripts verbosity renderUsageInfo shouldFork shouldSaveTempCodebase mCodePathOption args = do
runTranscripts version verbosity renderUsageInfo shouldFork shouldSaveTempCodebase mCodePathOption nativeRtp args = do
markdownFiles <- case traverse (first (pure @[]) . markdownFile) args of
Failure invalidArgs -> do
PT.putPrettyLn $
@ -464,7 +491,7 @@ runTranscripts verbosity renderUsageInfo shouldFork shouldSaveTempCodebase mCode
progName <- getProgName
transcriptDir <- prepareTranscriptDir verbosity shouldFork mCodePathOption shouldSaveTempCodebase
completed <-
runTranscripts' progName (Just transcriptDir) transcriptDir markdownFiles
runTranscripts' version progName (Just transcriptDir) nativeRtp transcriptDir markdownFiles
case shouldSaveTempCodebase of
DontSaveCodebase -> removeDirectoryRecursive transcriptDir
SaveCodebase _ ->
@ -489,6 +516,7 @@ defaultInitialPath :: Path.Absolute
defaultInitialPath = Path.absoluteEmpty
launch ::
Version ->
FilePath ->
Config ->
Rt.Runtime Symbol ->
@ -503,12 +531,12 @@ launch ::
(Path.Absolute -> STM ()) ->
CommandLine.ShouldWatchFiles ->
IO ()
launch dir config runtime sbRuntime nRuntime codebase inputs serverBaseUrl mayStartingPath initResult notifyRootChange notifyPathChange shouldWatchFiles = do
launch version dir config runtime sbRuntime nRuntime codebase inputs serverBaseUrl mayStartingPath initResult notifyRootChange notifyPathChange shouldWatchFiles = do
showWelcomeHint <- Codebase.runTransaction codebase Queries.doProjectsExist
let isNewCodebase = case initResult of
CreatedCodebase -> NewlyCreatedCodebase
OpenedCodebase -> PreviouslyCreatedCodebase
(ucmVersion, _date) = Version.gitDescribe
(ucmVersion, _date) = Version.gitDescribe version
welcome = Welcome.welcome isNewCodebase ucmVersion showWelcomeHint
in CommandLine.main
dir
@ -529,16 +557,16 @@ launch dir config runtime sbRuntime nRuntime codebase inputs serverBaseUrl maySt
newtype MarkdownFile = MarkdownFile FilePath
markdownFile :: FilePath -> Validation FilePath MarkdownFile
markdownFile md = case FP.takeExtension md of
markdownFile md = case takeExtension md of
".md" -> Success $ MarkdownFile md
".markdown" -> Success $ MarkdownFile md
_ -> Failure md
isDotU :: String -> Bool
isDotU file = FP.takeExtension file == ".u"
isDotU file = takeExtension file == ".u"
getConfigFilePath :: Maybe FilePath -> IO FilePath
getConfigFilePath mcodepath = (FP.</> ".unisonConfig") <$> Codebase.getCodebaseDir mcodepath
getConfigFilePath mcodepath = (</> ".unisonConfig") <$> Codebase.getCodebaseDir mcodepath
getCodebaseOrExit :: Maybe CodebasePathOption -> SC.MigrationStrategy -> ((InitResult, CodebasePath, Codebase IO Symbol Ann) -> IO r) -> IO r
getCodebaseOrExit codebasePathOption migrationStrategy action = do

Some files were not shown because too many files have changed in this diff Show More