⅄ trunk → topic/merge4

Mitchell Rosen 2024-04-04 14:40:54 -04:00
commit 2ff7d77820
80 changed files with 2424 additions and 1793 deletions

View File

@ -1,42 +0,0 @@
# Reference:
# https://docs.github.com/en/actions/creating-actions/creating-a-composite-action
name: "Install Stack"
description: "Install stack for Linux, macOS, and Windows"
inputs:
stack-version:
description: "The version of stack to install, e.g. 2.9.1"
required: true
default: "2.9.1"
runs:
using: "composite"
steps:
- name: install stack
shell: bash
working-directory: ${{ runner.temp }}
run: |
if [[ ${{runner.os}} = 'Windows' ]]; then
stack_os="windows"
elif [[ ${{runner.os}} = 'macOS' ]]; then
stack_os="osx"
elif [[ ${{runner.os}} = 'Linux' ]]; then
stack_os="linux"
else
echo "Unsupported OS: ${{runner.os}}"
exit 1
fi
if [[ ${{runner.arch}} = 'X64' ]]; then
stack_arch="x86_64"
elif [[ ${{runner.arch}} = 'ARM64' ]]; then
stack_arch="aarch64"
else
echo "Unsupported architecture: ${{runner.arch}}"
exit 1
fi
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v${{inputs.stack-version}}/stack-${{inputs.stack-version}}-${stack_os}-${stack_arch}.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
echo "stack_path=$PWD/stack-"* >> $GITHUB_ENV

View File

@ -1,100 +0,0 @@
name: restore stack cache
description: restore ~/.stack and .stack-work caches on Linux, macOS, and Windows
inputs:
cache-prefix:
description: The cache prefix to use for `~/.stack`, e.g. "release" or "ci"
required: true
work-cache-prefix:
description: The cache prefix to use for `**/.stack-work`, defaults to the same as `cache-prefix`
required: false
stack-yaml-dir:
description: The directory to search for `stack.yaml`
required: false
default: '.'
lookup-only:
description: If true, only checks if cache entry exists and skips download.
required: false
default: 'false'
outputs:
cache-hit:
description: Whether the .stack cache was restored with an exact match
value: ${{ steps.cache-stack-unix.outputs.cache-hit || steps.cache-stack-windows.outputs.cache-hit }}
work-cache-hit:
description: Whether the .stack-work cache was restored with an exact match
value: ${{ steps.cache-stack-work.outputs.cache-hit }}
runs:
using: composite
steps:
- name: set default work cache prefix
shell: bash
run: |
if [ -z "${{inputs.work-cache-prefix}}" ]; then
echo "work-cache-prefix=${{inputs.cache-prefix}}" >> "$GITHUB_ENV"
else
echo "work-cache-prefix=${{inputs.work-cache-prefix}}" >> "$GITHUB_ENV"
fi
# The number towards the beginning of the cache keys allows you to manually avoid using a previous cache.
# GitHub will automatically delete caches that haven't been accessed in 7 days, but there is no way to
# purge one manually.
- id: stackage-resolver
name: record stackage resolver
shell: bash
# https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files
# looks for `resolver: nightly-yyyy-mm-dd` or `resolver: lts-xx.yy` in `stack.yaml` and splits it into
# `nightly` or `lts-xx`. the whole resolver string is put into $resolver as a backup cache key
# ${{ env.resolver_short }}
# ${{ env.resolver }}
run: |
grep resolver ${{inputs.stack-yaml-dir}}/stack.yaml | awk '{ x="resolver_short="; if (split($2,a,"-") > 2) print x a[1]; else {split($2,b,"."); print x b[1]}}' >> "$GITHUB_ENV"
grep resolver ${{inputs.stack-yaml-dir}}/stack.yaml | awk '{print "resolver="$2}' >> "$GITHUB_ENV"
- name: restore ~/.stack (non-Windows)
uses: actions/cache/restore@v4
id: cache-stack-unix
if: runner.os != 'Windows'
with:
lookup-only: ${{inputs.lookup-only}}
path: ~/.stack
key:
${{inputs.cache-prefix}}-stack-${{runner.os}}_${{env.resolver}}-${{hashFiles('**/stack.yaml', '**/package.yaml')}}
restore-keys:
${{inputs.cache-prefix}}-stack-${{runner.os}}_${{env.resolver}}-
- name: restore ~/.stack (Windows)
uses: actions/cache/restore@v4
id: cache-stack-windows
if: runner.os == 'Windows'
with:
lookup-only: ${{inputs.lookup-only}}
path: |
C:\Users\runneradmin\AppData\Roaming\stack
C:\Users\runneradmin\AppData\Local\Programs\stack
key:
${{inputs.cache-prefix}}-stack-${{runner.os}}_${{env.resolver}}-${{hashFiles('**/stack.yaml', '**/package.yaml')}}
restore-keys:
${{inputs.cache-prefix}}-stack-${{runner.os}}_${{env.resolver}}-
- name: restore .stack-work
uses: actions/cache/restore@v4
id: cache-stack-work
with:
lookup-only: ${{inputs.lookup-only}}
path: |
**/.stack-work
key:
${{env.work-cache-prefix}}-stack-work-${{runner.os}}_${{env.resolver}}-${{hashFiles('**/stack.yaml', '**/package.yaml')}}-${{hashFiles('**/*.hs')}}
restore-keys: |
${{env.work-cache-prefix}}-stack-work-${{runner.os}}_${{env.resolver}}-${{hashFiles('**/stack.yaml', '**/package.yaml')}}-
${{env.work-cache-prefix}}-stack-work-${{runner.os}}_${{env.resolver}}-
${{env.work-cache-prefix}}-stack-work-${{runner.os}}_
# We added this step out of necessity, but we don't exactly remember why.
# We hope to remove it someday.
- name: remove ~/.stack/setup-exe-cache on macOS
if: runner.os == 'macOS'
shell: bash
run: rm -rf ~/.stack/setup-exe-cache

View File

@ -1,73 +0,0 @@
name: save stack cache
description: save ~/.stack and .stack-work caches on Linux, macOS, and Windows
inputs:
cache-prefix:
description: The cache prefix to use for `~/.stack`, e.g. "release" or "ci"
required: true
work-cache-prefix:
description: The cache prefix to use for `**/.stack-work`, defaults to the same as `cache-prefix`
required: false
stack-yaml-dir:
description: The directory to search for `stack.yaml`
required: false
default: '.'
runs:
using: composite
steps:
- name: set default work cache prefix
shell: bash
run: |
if [ -z "${{inputs.work-cache-prefix}}" ]; then
echo "work-cache-prefix=${{inputs.cache-prefix}}" >> "$GITHUB_ENV"
else
echo "work-cache-prefix=${{inputs.work-cache-prefix}}" >> "$GITHUB_ENV"
fi
- name: check stack caches
id: check-stack
uses: ./.github/workflows/actions/restore-stack-cache
with:
cache-prefix: ${{inputs.cache-prefix}}
work-cache-prefix: ${{env.work-cache-prefix}}
stack-yaml-dir: ${{inputs.stack-yaml-dir}}
lookup-only: true
# The number towards the beginning of the cache keys allows you to manually avoid using a previous cache.
# GitHub will automatically delete caches that haven't been accessed in 7 days, but there is no way to
# purge one manually.
- name: record stackage resolver
shell: bash
# https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files
# looks for `resolver: nightly-yyyy-mm-dd` or `resolver: lts-xx.yy` in `stack.yaml` and splits it into
# `nightly` or `lts-xx`. the whole resolver string is put into $resolver as a backup cache key
# ${{ env.resolver_short }}
# ${{ env.resolver }}
run: |
grep resolver ${{inputs.stack-yaml-dir}}/stack.yaml | awk '{ x="resolver_short="; if (split($2,a,"-") > 2) print x a[1]; else {split($2,b,"."); print x b[1]}}' >> "$GITHUB_ENV"
grep resolver ${{inputs.stack-yaml-dir}}/stack.yaml | awk '{print "resolver="$2}' >> "$GITHUB_ENV"
- name: save ~/.stack (non-Windows)
if: runner.os != 'Windows' && steps.check-stack.outputs.cache-hit != 'true'
uses: actions/cache/save@v4
with:
path: ~/.stack
key: ${{inputs.cache-prefix}}-stack-${{runner.os}}_${{env.resolver}}-${{hashFiles('**/stack.yaml', '**/package.yaml')}}
- name: save ~/.stack (Windows)
if: runner.os == 'Windows' && steps.check-stack.outputs.cache-hit != 'true'
uses: actions/cache/save@v4
with:
path: |
C:\Users\runneradmin\AppData\Roaming\stack
C:\Users\runneradmin\AppData\Local\Programs\stack
key: ${{inputs.cache-prefix}}-stack-${{runner.os}}_${{env.resolver}}-${{hashFiles('**/stack.yaml', '**/package.yaml')}}
- name: save .stack-work
if: steps.check-stack.outputs.work-cache-hit != 'true'
uses: actions/cache/save@v4
with:
path: |
**/.stack-work
key: ${{env.work-cache-prefix}}-stack-work-${{runner.os}}_${{env.resolver}}-${{hashFiles('**/stack.yaml', '**/package.yaml')}}-${{hashFiles('**/*.hs')}}

View File

@ -1,95 +0,0 @@
on:
workflow_call:
inputs:
ref:
description: Git ref to check out for this build, e.g. `trunk` or `release/0.5.19`
type: string
required: true
stack-cache-prefix:
description: The Stack cache prefix to use for builds
type: string
default: release
defaults:
run:
shell: bash
jobs:
build-ucm:
name: bundle ucm+ui ${{matrix.os}}
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, macos-12, windows-2019]
steps:
- uses: actions/checkout@v4
with:
ref: ${{inputs.ref}}
- name: restore stack caches
uses: ./.github/workflows/actions/restore-stack-cache
with:
cache-prefix: release
- name: install stack
uses: ./.github/workflows/actions/install-stack
- name: build
run: |
# unison-cli embeds version numbers using TH
# so it needs to be forced to rebuild to ensure those are updated.
stack clean unison-cli
# Windows will crash on build intermittently because the filesystem
# sucks at managing concurrent file access;
# Just keep retrying on these failures.
tries=5
for (( i = 0; i < $tries; i++ )); do
stack --no-terminal build --flag unison-parser-typechecker:optimized && break;
done
- name: save stack caches
uses: ./.github/workflows/actions/save-stack-cache
with:
cache-prefix: release
- name: set up environment
run: |
if [[ ${{runner.os}} = 'Windows' ]]; then
artifact_os="windows"
elif [[ ${{runner.os}} = 'macOS' ]]; then
artifact_os="osx"
elif [[ ${{runner.os}} = 'Linux' ]]; then
artifact_os="linux"
else
echo "Unexpected OS: ${{runner.os}}"
exit 1
fi
echo "artifact_os=$artifact_os" >> $GITHUB_ENV
- name: fetch latest Unison Local UI and package with ucm
run: |
mkdir /tmp/ucm
cp -v $(stack exec -- which unison) /tmp/ucm/ucm
curl -L -o /tmp/unisonLocal.zip \
https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip
mkdir /tmp/ucm/ui
unzip -d /tmp/ucm/ui /tmp/unisonLocal.zip
if [[ ${{runner.os}} = 'Windows' ]]; then
artifact_archive=ucm-${{env.artifact_os}}.zip
7z a -r -tzip ${artifact_archive} /tmp/ucm/*
else
artifact_archive=ucm-${{env.artifact_os}}.tar.gz
tar -c -z -f ${artifact_archive} -C /tmp/ucm .
fi
echo "artifact_archive=${artifact_archive}" >> $GITHUB_ENV
- name: upload artifact
uses: actions/upload-artifact@v2
with:
if-no-files-found: error
name: build-${{env.artifact_os}}
path: ${{env.artifact_archive}}

.github/workflows/bundle-ucm.yaml (new file, 261 lines)
View File

@ -0,0 +1,261 @@
name: bundle ucm
# build optimized ucm
# package racket lib
# build/dist unison-runtime
on:
workflow_call:
inputs:
ref:
description: Git ref to check out for this build, e.g. `trunk` or `release/0.5.19`
type: string
required: true
env:
racket_version: "8.7"
defaults:
run:
shell: bash
jobs:
build-ucm:
name: build ucm
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, macos-12, windows-2019]
runs-on: ${{matrix.os}}
steps:
- uses: actions/checkout@v4
with:
ref: ${{inputs.ref}}
- name: restore stack caches
uses: unisonweb/actions/stack/cache/restore@main
with:
cache-prefix: release
- name: install stack
uses: unisonweb/actions/stack/install@main
- name: build
run: |
# unison-cli-main embeds version numbers using TH
# so it needs to be forced to rebuild to ensure those are updated.
stack clean unison-cli-main
mkdir ucm-bin
# Windows will crash on build intermittently because the filesystem
# sucks at managing concurrent file access;
# Just keep retrying on these failures.
tries=5
for (( i = 0; i < $tries; i++ )); do
stack build :unison \
--flag unison-parser-typechecker:optimized \
--local-bin-path ucm-bin \
--copy-bins \
&& break;
done
if [[ ${{runner.os}} = 'Windows' ]]; then
ucm=$(stack exec where unison)
else
ucm=$(stack exec which unison)
fi
echo ucm="$ucm" >> $GITHUB_ENV
ls -l $ucm
- name: save stack caches
uses: unisonweb/actions/stack/cache/save@main
with:
cache-prefix: release
- name: upload ucm
uses: actions/upload-artifact@v4
with:
name: unison-${{matrix.os}}
path: ${{ env.ucm }}
if-no-files-found: error
package-racket-lib:
strategy:
matrix:
os: [ubuntu-20.04]
needs: build-ucm
name: package racket lib
runs-on: ${{matrix.os}}
steps:
- name: set up environment
run: echo "ucm=${{ runner.temp }}/unison" >> $GITHUB_ENV
- name: download racket `unison` source
uses: actions/checkout@v4
with:
ref: ${{inputs.ref}}
- name: download ucm artifact
uses: actions/download-artifact@v4
with:
name: unison-${{matrix.os}}
path: ${{ runner.temp }}
- name: generate source
run: |
chmod +x ${{ env.ucm }}
${{ env.ucm }} transcript unison-src/transcripts-manual/gen-racket-libs.md
- uses: Bogdanp/setup-racket@v1.11
with:
architecture: "x64"
distribution: "full"
variant: "CS"
version: ${{env.racket_version}}
- name: create racket lib
run: |
raco pkg create scheme-libs/racket/unison
ls -l scheme-libs/racket/unison.zip{,.CHECKSUM}
- name: upload racket lib
uses: actions/upload-artifact@v4
with:
name: racket-lib
path: |
scheme-libs/racket/unison.zip
scheme-libs/racket/unison.zip.CHECKSUM
if-no-files-found: error
build-dist-unison-runtime:
needs: package-racket-lib
name: build unison-runtime
strategy:
fail-fast: false
matrix:
os:
- ubuntu-20.04
- macos-12
- windows-2019
runs-on: ${{matrix.os}}
steps:
- uses: actions/checkout@v4
with:
ref: ${{inputs.ref}}
- name: download racket lib
uses: actions/download-artifact@v4
with:
name: racket-lib
path: scheme-libs/racket/
- name: Cache Racket dependencies
id: cache-racket-deps
uses: actions/cache@v4
with:
path: |
~/.cache/racket
~/.local/share/racket
~/Library/Racket/${{env.racket_version}}
# This isn't right because unison.zip is going to include different dates each time.
# Maybe we can unpack it and hash the contents.
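# A hedged sketch of that idea (untested): unpack the zip and hash its contents, e.g.
#   unzip -o scheme-libs/racket/unison.zip -d "$RUNNER_TEMP/unison-lib"
#   find "$RUNNER_TEMP/unison-lib" -type f -print0 | sort -z | xargs -0 sha256sum | sha256sum
# then write that digest to $GITHUB_ENV and reference it in the key below.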
key: ${{ runner.os }}-racket-${{env.racket_version}}-${{hashFiles('scheme-libs/racket/unison.zip')}}
- uses: Bogdanp/setup-racket@v1.11
with:
architecture: "x64"
distribution: "full"
variant: "CS"
version: ${{env.racket_version}}
- uses: awalsh128/cache-apt-pkgs-action@latest
if: runner.os == 'Linux'
with:
packages: libb2-dev
version: 1.0 # cache key version afaik
- name: install unison racket lib
if: steps.cache-racket-deps.outputs.cache-hit != 'true'
run: raco pkg install --auto scheme-libs/racket/unison.zip
- name: build unison-runtime
run: |
raco exe --embed-dlls --orig-exe scheme-libs/racket/unison-runtime.rkt
mkdir runtime
if [[ ${{runner.os}} = 'Windows' ]]; then exe=".exe"; else exe=""; fi
raco distribute runtime scheme-libs/racket/unison-runtime$exe
ls -l runtime/
- name: upload unison-runtime
uses: actions/upload-artifact@v4
with:
name: unison-runtime-${{matrix.os}}
path: runtime/
if-no-files-found: error
bundle:
name: bundle ucm, jit, and ui
needs: [build-ucm, package-racket-lib, build-dist-unison-runtime]
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, macos-12, windows-2019]
steps:
- name: set up environment
run: |
staging_dir="${RUNNER_TEMP//\\//}/ucm-staging"
artifact_os="$(echo $RUNNER_OS | tr '[:upper:]' '[:lower:]')"
echo "staging_dir=$staging_dir" >> $GITHUB_ENV
echo "artifact_os=$artifact_os" >> $GITHUB_ENV
- name: download ucm
uses: actions/download-artifact@v4
with:
name: unison-${{matrix.os}}
path: ${{env.staging_dir}}/unison/
- name: restore permissions on ucm
run: chmod +x ${{env.staging_dir}}/unison/unison
- name: download racket lib
uses: actions/download-artifact@v4
with:
name: racket-lib
path: ${{env.staging_dir}}/racket/
- name: download unison-runtime
uses: actions/download-artifact@v4
with:
name: unison-runtime-${{matrix.os}}
path: ${{env.staging_dir}}/runtime
- name: restore permissions on unison-runtime
# here we have the `if:` not because of the missing .exe on Windows,
# nor the lack of need to chmod, but because /runtime/bin/ probably doesn't exist
# due to differences in `raco distribute` on Windows vs macOS and Linux.
if: runner.os != 'Windows'
run: chmod +x ${{env.staging_dir}}/runtime/bin/unison-runtime
- name: download latest unison-local-ui
run: |
curl -L -o /tmp/unisonLocal.zip \
https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip
unzip -d ${{env.staging_dir}}/ui /tmp/unisonLocal.zip
- name: create startup script (non-Windows)
if: runner.os != 'Windows'
uses: 1arp/create-a-file-action@0.4.4
with:
path: ${{env.staging_dir}}
file: ucm
content: |
#!/bin/bash
$(dirname "$0")/unison/unison --runtime-path $(dirname "$0")/runtime/bin/unison-runtime "$@"
- name: create startup script (Windows)
if: runner.os == 'Windows'
uses: 1arp/create-a-file-action@0.4.4
with:
path: ${{env.staging_dir}}
file: ucm.cmd
content: |
@echo off
"%~dp0unison\unison.exe" --runtime-path "%~dp0runtime\unison-runtime.exe" %*
- name: package everything together
run: |
if [[ ${{runner.os}} = 'Windows' ]]; then
artifact_archive=ucm-${{env.artifact_os}}.zip
7z a -r -tzip ${artifact_archive} ${{env.staging_dir}}/*
else
chmod +x ${{env.staging_dir}}/ucm
artifact_archive=ucm-${{env.artifact_os}}.tar.gz
tar -c -z -f ${artifact_archive} -C ${{env.staging_dir}} .
fi
echo "artifact_archive=${artifact_archive}" >> $GITHUB_ENV
- name: upload artifact
uses: actions/upload-artifact@v4
with:
name: bundle-${{env.artifact_os}}
path: ${{env.artifact_archive}}
if-no-files-found: error

View File

@ -24,15 +24,6 @@ Some cached directories:
`jit_generator_os: ubuntu-20.04`
- afaik, the jit sources are generated in a platform-independent way, so we just choose one platform to generate them on.
`*-cache-key-version`: increment one of these to invalidate its corresponding cache, though you shouldn't have to:
- `ucm-binaries`
- `unison-src-test-results`
- `stack`
- `stack-work`
- `base-codebase`
- `jit-src`
- `jit-dist`
### Cached directories:
One reason for this change is to reduce the CI time for commits that only change docs, YAML, or other uninteresting things.

View File

@ -21,7 +21,7 @@ env:
ormolu_version: "0.5.2.0"
racket_version: "8.7"
ucm_local_bin: "ucm-local-bin"
jit_version: "@unison/internal/releases/0.0.12"
jit_version: "@unison/internal/releases/0.0.13"
jit_src_scheme: "unison-jit-src/scheme-libs/racket"
jit_dist: "unison-jit-dist"
jit_generator_os: ubuntu-20.04
@ -30,15 +30,6 @@ env:
# refers to all tests that depend on **/unison-src/**
unison_src_test_results: "unison-src-test-results"
# cache key versions, increment to invalidate one, though you aren't expected to have to.
ucm-binaries-cache-key-version: 1
unison-src-test-results-cache-key-version: 1
stack-cache-key-version: 1
stack-work-cache-key-version: 1
base-codebase-cache-key-version: 1
jit-src-cache-key-version: 1
jit-dist-cache-key-version: 1
jobs:
ormolu:
runs-on: ubuntu-20.04
@ -59,7 +50,7 @@ jobs:
mode: inplace
pattern: ${{ steps.changed-files.outputs.all_changed_files }}
- name: apply formatting changes
uses: stefanzweifel/git-auto-commit-action@v4
uses: stefanzweifel/git-auto-commit-action@v5
# Only try to commit formatting changes if we're running within the repo containing the PR,
# and not on a protected branch.
# The job doesn't have permission to push back to contributor forks on contributor PRs.
@ -105,25 +96,25 @@ jobs:
uses: actions/cache@v4
with:
path: ${{env.ucm_local_bin}}
key: ucm-${{env.ucm-binaries-cache-key-version}}_${{ matrix.os }}-${{ hashFiles('**/stack.yaml', '**/package.yaml', '**/*.hs')}}
key: ucm-${{ matrix.os }}-${{ hashFiles('**/stack.yaml', '**/package.yaml', '**/*.hs')}}
- name: cache unison-src test results
id: cache-unison-src-test-results
uses: actions/cache@v4
with:
path: ${{env.unison_src_test_results}}
key: unison-src-test-results-${{env.unison-src-test-results-cache-key-version}}_${{ matrix.os }}-${{ hashFiles('**/ci.yaml', '**/stack.yaml', '**/package.yaml', '**/*.hs')}}-${{ hashFiles('**/unison-src/**') }}
key: unison-src-test-results-${{ matrix.os }}-${{ hashFiles('**/ci.yaml', '**/stack.yaml', '**/package.yaml', '**/*.hs')}}-${{ hashFiles('**/unison-src/**') }}
- name: restore stack caches
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
id: restore-stack-caches
uses: ./.github/workflows/actions/restore-stack-cache
uses: unisonweb/actions/stack/cache/restore@main
with:
cache-prefix: ci${{env.stack-cache-key-version}}
cache-prefix: ci
- name: install stack
if: steps.cache-ucm-binaries.outputs.cache-hit != 'true'
uses: ./.github/workflows/actions/install-stack
uses: unisonweb/actions/stack/install@main
# One of the transcripts fails if the user's git name hasn't been set.
@ -227,7 +218,7 @@ jobs:
with:
path: ${{ env.base-codebase }}
# key = base transcript contents + sqlite schema version
key: base.unison-${{env.base-codebase-cache-key-version}}_${{hashFiles('**/unison-src/builtin-tests/base.md', '**/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs')}}.
key: base.unison-${{hashFiles('**/unison-src/builtin-tests/base.md', '**/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs')}}.
- name: create base.md codebase
if: steps.cache-base-codebase.outputs.cache-hit != 'true'
@ -261,9 +252,9 @@ jobs:
!cancelled()
&& steps.restore-stack-caches.outputs.cache-hit != 'true'
&& steps.cache-ucm-binaries.outputs.cache-hit != 'true'
uses: ./.github/workflows/actions/save-stack-cache
uses: unisonweb/actions/stack/cache/save@main
with:
cache-prefix: ci${{env.stack-cache-key-version}}
cache-prefix: ci
generate-jit-source:
if: always() && needs.build-ucm.result == 'success'
@ -277,13 +268,13 @@ jobs:
echo "ucm=${{ runner.temp }}/unison" >> $GITHUB_ENV
- uses: actions/cache@v4
name: cache jit source
if: runner.os == 'Linux'
with:
path: ${{ env.jit_src_scheme }}
key: jit_src_scheme-${{env.jit-src-cache-key-version}}.racket_${{env.racket_version}}.jit_${{env.jit_version}}
key: jit_src_scheme-racket_${{env.racket_version}}.jit_${{env.jit_version}}-${{hashFiles('**/scheme-libs/**')}}
- name: check source exists
id: jit_src_exists
if: steps.cache-jit-source.outputs.cache-hit != 'true'
run: |
files=(data-info boot-generated simple-wrappers builtin-generated compound-wrappers)
all_exist=true
@ -396,7 +387,7 @@ jobs:
uses: actions/cache@v4
with:
path: ${{ env.jit_dist }}
key: jit_dist-${{env.jit-dist-cache-key-version}}.racket_${{ env.racket_version }}.jit_${{ env.jit_version }}
key: jit_dist-racket_${{ env.racket_version }}.jit_${{ env.jit_version }}
- name: Cache Racket dependencies
if: steps.restore-jit-binaries.outputs.cache-hit != 'true'
@ -460,7 +451,7 @@ jobs:
uses: actions/cache/restore@v4
with:
path: ${{ env.base-codebase}}
key: base.unison-${{env.base-codebase-cache-key-version}}_${{hashFiles('**/unison-src/builtin-tests/base.md', '**/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs')}}.
key: base.unison-${{hashFiles('**/unison-src/builtin-tests/base.md', '**/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs')}}.
- name: jit integration test ${{ matrix.os }}
if: runner.os != 'Windows' && steps.restore-jit-binaries.outputs.cache-hit != 'true'

View File

@ -20,20 +20,20 @@ jobs:
path: unison
- name: restore stack caches
uses: ./unison/.github/workflows/actions/restore-stack-cache
uses: unisonweb/actions/stack/cache/restore@main
with:
cache-prefix: haddocks
stack-yaml-dir: unison
- name: install stack
uses: ./unison/.github/workflows/actions/install-stack
uses: unisonweb/actions/stack/install@main
- name: build with haddocks
working-directory: unison
run: stack build --fast --haddock
- name: save stack caches
uses: ./unison/.github/workflows/actions/save-stack-cache
uses: unisonweb/actions/stack/cache/save@main
with:
cache-prefix: haddocks
stack-yaml-dir: unison
@ -42,11 +42,18 @@ jobs:
- name: Checkout haddocks branch
uses: actions/checkout@v4
with:
ref: 'haddocks'
path: 'haddocks'
ref: haddocks
path: haddocks
# Needed for `git commit` below
- name: set git user info
working-directory: unison
run: |
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
- name: Copy haddocks
working-directory: 'unison'
working-directory: unison
run: |
docs_root="$(stack path --local-doc-root)"
# Erase any stale files

View File

@ -1,18 +1,25 @@
name: "pre-release"
name: pre-release
run-name: pre-release ${{github.ref_name}}
defaults:
run:
shell: bash
on:
# run on each merge to `trunk`
workflow_run:
workflows: ["CI"]
branches: [trunk]
types:
- completed
# run manually
workflow_dispatch:
jobs:
build-ucm:
uses: ./.github/workflows/build-optimized-ucm.yaml
bundle-ucm:
name: build and bundle ucm
uses: ./.github/workflows/bundle-ucm.yaml
with:
ref: ${{ github.ref }}
@ -20,23 +27,26 @@ jobs:
name: create release
runs-on: ubuntu-20.04
needs:
- build-ucm
- bundle-ucm
steps:
- name: make download dir
run: mkdir /tmp/ucm
- name: "download artifacts"
uses: actions/download-artifact@v2
uses: actions/download-artifact@v4
with:
path: /tmp/ucm
- name: derive release tag
run: echo "ref_name=$(echo ${{ github.ref_name }} | awk -F'/' '{print $NF}')" >> $GITHUB_ENV
- uses: "marvinpinto/action-automatic-releases@latest"
with:
repo_token: "${{ secrets.GITHUB_TOKEN }}"
automatic_release_tag: "trunk-build"
automatic_release_tag: ${{ env.ref_name }}-build
prerelease: true
title: "Development Build"
title: Development Build (${{ env.ref_name }})
files: |
/tmp/ucm/**/*.tar.gz
/tmp/ucm/**/*.zip
/tmp/ucm/**/ucm-*.tar.gz
/tmp/ucm/**/ucm-*.zip

View File

@ -1,6 +1,6 @@
name: "release"
name: release
run-name: "release ${{inputs.version}}"
run-name: release ${{inputs.version}}
defaults:
run:
@ -13,30 +13,26 @@ on:
description: Release version; e.g. `0.5.19`. We'll create tag `release/${version}`.
required: true
type: string
target:
description: Git ref to use for this release; defaults to `trunk`.
required: true
default: trunk
type: string
jobs:
build-ucm:
uses: ./.github/workflows/build-optimized-ucm.yaml
bundle-ucm:
name: build and bundle ucm
uses: ./.github/workflows/bundle-ucm.yaml
with:
ref: release/${{inputs.version}}
ref: ${{github.ref}}
release:
name: create release
runs-on: ubuntu-20.04
needs:
- build-ucm
- bundle-ucm
steps:
- name: make download dir
run: mkdir /tmp/ucm
- name: "download artifacts"
uses: actions/download-artifact@v2
uses: actions/download-artifact@v4
with:
path: /tmp/ucm
@ -44,8 +40,6 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
version="${{inputs.version}}"
target="${{inputs.target}}"
prev_tag="$( \
gh release view \
--repo unisonweb/unison \
@ -54,13 +48,12 @@ jobs:
if [ -z "$prev_tag" ]; then echo "No previous release found"; exit 1; fi
echo "Creating a release from these artifacts:"
ls -R /tmp/ucm
ls -R /tmp/ucm/**/ucm-*.{zip,tar.gz}
gh release create "release/${version}" \
gh release create "release/${{inputs.version}}" \
--repo unisonweb/unison \
--target "${target}" \
--target "${{github.ref}}" \
--generate-notes \
--notes-start-tag "${prev_tag}" \
\
/tmp/ucm/**/*.{zip,tar.gz}
/tmp/ucm/**/ucm-*.{zip,tar.gz}

View File

@ -15,53 +15,13 @@ jobs:
- macOS-12
steps:
- uses: actions/checkout@v4
- id: stackage-resolver
name: record stackage resolver
# https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files
# looks for `resolver: nightly-yyyy-mm-dd` or `resolver: lts-xx.yy` in `stack.yaml` and splits it into
# `nightly` or `lts-xx`. the whole resolver string is put into resolver_long as a backup cache key
# ${{ steps.stackage-resolver.outputs.resolver_short }}
# ${{ steps.stackage-resolver.outputs.resolver_long }}
run: |
grep resolver stack.yaml | awk '{ x="resolver_short="; if (split($2,a,"-") > 2) print x a[1]; else {split($2,b,"."); print x b[1]}}' >> "$GITHUB_OUTPUT"
grep resolver stack.yaml | awk '{print "resolver_long="$2}' >> "$GITHUB_OUTPUT"
# Cache ~/.stack, keyed by the contents of 'stack.yaml'.
- uses: actions/cache@v3
name: cache ~/.stack (unix)
if: runner.os != 'Windows'
- uses: unisonweb/actions/stack/cache/restore@main
with:
path: ~/.stack
key: stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-${{github.sha}}
# Fall-back to use the most recent cache for the stack.yaml, or failing that the OS
restore-keys: |
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-
stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}.
stack-1_${{matrix.os}}-
# Cache each local package's ~/.stack-work for fast incremental builds in CI.
- uses: actions/cache@v3
name: cache .stack-work
with:
path: |
**/.stack-work
# Main cache key: commit hash. This should always result in a cache miss...
# So when loading a cache we'll always fall back to the restore-keys,
# which should load the most recent cache via a prefix search on the most
# recent branch cache.
# Then it will save a new cache at this commit sha, which should be used by
# the next build on this branch.
key: stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-${{hashFiles('**/stack.yaml')}}-${{github.sha}}
restore-keys: |
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-
stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}.
stack-work-4_${{matrix.os}}-
# take cache from the ci job, read-only
cache-prefix: ci
- name: install stack
uses: ./.github/workflows/actions/install-stack
uses: unisonweb/actions/stack/install@main
# One of the transcripts fails if the user's git name hasn't been set.
- name: set git user info
@ -69,14 +29,14 @@ jobs:
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
- name: build
run: stack --no-terminal build --fast --no-run-tests --test
run: stack build --fast --no-run-tests --test
- name: round-trip-tests
run: |
stack --no-terminal exec unison transcript unison-src/transcripts-round-trip/main.md
stack --no-terminal exec unison transcript unison-src/transcripts-manual/rewrites.md
stack exec unison transcript unison-src/transcripts-round-trip/main.md
stack exec unison transcript unison-src/transcripts-manual/rewrites.md
- name: transcripts
run: stack --no-terminal exec transcripts
run: stack exec transcripts
- name: save transcript changes
uses: stefanzweifel/git-auto-commit-action@v4
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: rerun transcripts (reminder to rerun CI!)

View File

@ -1,9 +1,15 @@
## Some things I wish I'd known about Github Actions
You can't have an `env:` key defined in terms of another `env` key, but
You can't have an `env:` key defined in terms of another `env` key, but you can use `$GITHUB_ENV` to get around this (see the sketch after these notes).
You can't define a `matrix` at the top level, it has to be defined within a `job`'s `strategy`.
`runs-on:` doesn't allow `env` for some reason.
Strings don't need quotes, unless you need to force something to be a string.
A `@ref` is always needed on a remote action.
Windows doesn't seem to honor the `default: run: shell:` setting, so you need to set the `shell:` on `run:` manually?
Don't hesitate to do a lot with `run:` blocks aka bash scripts — at least bash is mature and well documented.
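As a sketch of the `$GITHUB_ENV` note above (the workflow trigger, job, and variable names here are invented for illustration):

```yaml
on: workflow_dispatch
env:
  version: "0.5.19"
jobs:
  demo:
    runs-on: ubuntu-20.04
    steps:
      # env: can't say `archive: ucm-${{ env.version }}.tar.gz` directly,
      # but a step can compute the value and append it to $GITHUB_ENV for later steps.
      - name: derive archive name
        shell: bash
        run: echo "archive=ucm-${{ env.version }}.tar.gz" >> "$GITHUB_ENV"
      - name: use the derived value
        shell: bash
        run: echo "building ${{ env.archive }}"
```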
@ -20,6 +26,20 @@ e.g.
It's not clear to me when to use `$GITHUB_OUTPUT` vs `$GITHUB_ENV`, but I have been favoring `$GITHUB_ENV` because it requires fewer characters to access.
However, it seems a little wrong.
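For comparison, a hypothetical pair of steps writing the same value both ways (the step id `pick` and the variable `flavor` are made up):

```yaml
steps:
  - id: pick
    shell: bash
    run: |
      echo "flavor=release" >> "$GITHUB_OUTPUT"   # read back as ${{ steps.pick.outputs.flavor }}
      echo "flavor=release" >> "$GITHUB_ENV"      # read back as ${{ env.flavor }}
  - name: consume both
    shell: bash
    run: echo "output=${{ steps.pick.outputs.flavor }} env=${{ env.flavor }}"
```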
### `if:`
Although the type rules don't totally make sense in Github Actions, `if:` takes a Boolean.
Therefore, I think the String interpolation in `if: ${{runner.os}} != 'Windows'` causes the whole expression to become a String, which is coerced to `true`, when you definitely didn't mean `if: true`. So don't use `${{}}` here.
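A small sketch of the two forms (step names invented; the second form is the trap described above):

```yaml
- name: skipped on Windows, as intended
  if: runner.os != 'Windows'
  run: echo "evaluated as a Boolean expression"
- name: runs everywhere, probably by accident
  # interpolation happens first, leaving a non-empty string that is treated as true
  if: ${{ runner.os }} != 'Windows'
  run: echo "oops"
```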
### Job names
Job names will automatically get `(${{matrix.os}})` if you don't use `${{matrix.os}}` somewhere in the name.
### Windows
The whole thing with `.exe` is a mess. Unix commands typically drop and add `.exe` correctly as needed, but Github Actions (e.g. `actions/upload-artifact`?) don't.
### Cache
When using the `cache` action, getting a cache hit on the primary key means you won't update the cache with any changes.
@ -29,6 +49,15 @@ Similarly, `save-always: true` only if a key hit means there will be nothing new
Backup restore keys: "Is there a prior run that would be worth starting out from? With the caveat that any irrelevant garbage it includes will be saved into this run too."
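A sketch of that key/restore-keys pattern, with key shapes loosely modeled on the ones used in this repo (the `ci-` prefix is illustrative):

```yaml
- uses: actions/cache@v4
  with:
    path: ~/.stack
    # an exact hit on `key` restores the cache, but nothing new will be saved afterwards
    key: ci-stack-${{ runner.os }}-${{ hashFiles('**/stack.yaml', '**/package.yaml') }}
    # prefix-matched fallbacks: "a prior run worth starting out from"
    restore-keys: |
      ci-stack-${{ runner.os }}-
      ci-stack-
```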
### Upload Artifact
I suspect on Windows it can't support paths that select a drive in a Unix-y way,
like `/c/asdf` or `/d/asdf`. It's got to be `C:/asdf` or `C:\asdf` etc.
Upload will complain if any
Upload and Download plugin versions have to match.
### Reusability
Github supports splitting off "reusable workflows" (`jobs` that can be imported into another workflow), and "composite actions" (multi-step `steps` that can be imported into another `job`).
@ -37,6 +66,10 @@ Github supports splitting off "reusable workflows" (`jobs` that can be imported
Needs to have `shell:` specified on every `run:`
#### Reusable workflows
These have to be in `.github/workflows`, you can't organize them deeper, or elsewhere.
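For instance, a minimal composite action sketch (path and contents are hypothetical), showing the explicit `shell:` that every `run:` step needs:

```yaml
# .github/actions/hello/action.yaml (hypothetical; composite actions can live outside .github/workflows)
name: hello
description: minimal composite action sketch
runs:
  using: composite
  steps:
    - name: say hello
      shell: bash   # composite actions require an explicit shell on every run step
      run: echo "hello from a composite action"
```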
### Reference
Default Environment Variables:

View File

@ -21,18 +21,24 @@ cradle:
- path: "codebase2/util-term/./"
component: "unison-util-term:lib"
- path: "lib/orphans/network-uri-orphans-sqlite/src"
component: "network-uri-orphans-sqlite:lib"
- path: "lib/orphans/unison-core-orphans-sqlite/src"
component: "unison-core-orphans-sqlite:lib"
- path: "lib/unison-hash/src"
component: "unison-hash:lib"
- path: "lib/orphans/unison-hash-orphans-aeson/src"
component: "unison-hash-orphans-aeson:lib"
- path: "lib/orphans/unison-hash-orphans-sqlite/src"
component: "unison-hash-orphans-sqlite:lib"
- path: "lib/orphans/uuid-orphans-sqlite/src"
component: "uuid-orphans-sqlite:lib"
- path: "lib/unison-hash/src"
component: "unison-hash:lib"
- path: "lib/unison-hashing/src"
component: "unison-hashing:lib"
@ -87,42 +93,36 @@ cradle:
- path: "lib/unison-util-rope/src"
component: "unison-util-rope:lib"
- path: "lib/orphans/uuid-orphans-sqlite/src"
component: "uuid-orphans-sqlite:lib"
- path: "parser-typechecker/src"
component: "unison-parser-typechecker:lib"
- path: "parser-typechecker/tests"
component: "unison-parser-typechecker:test:parser-typechecker-tests"
- path: "unison-cli/src"
- path: "unison-cli/unison"
component: "unison-cli:lib"
- path: "unison-cli/integration-tests/Suite.hs"
component: "unison-cli:exe:cli-integration-tests"
- path: "unison-cli/integration-tests/IntegrationTests/ArgumentParsing.hs"
component: "unison-cli:exe:cli-integration-tests"
- path: "unison-cli/src"
component: "unison-cli:lib:unison-cli-lib"
- path: "unison-cli/transcripts/Transcripts.hs"
component: "unison-cli:exe:transcripts"
- path: "unison-cli/unison/Main.hs"
component: "unison-cli:exe:unison"
- path: "unison-cli/unison/ArgParse.hs"
component: "unison-cli:exe:unison"
- path: "unison-cli/unison/System/Path.hs"
component: "unison-cli:exe:unison"
- path: "unison-cli/unison/Version.hs"
component: "unison-cli:exe:unison"
- path: "unison-cli/tests"
component: "unison-cli:test:cli-tests"
- path: "unison-cli-integration/integration-tests/Suite.hs"
component: "unison-cli-integration:exe:cli-integration-tests"
- path: "unison-cli-integration/integration-tests/IntegrationTests/ArgumentParsing.hs"
component: "unison-cli-integration:exe:cli-integration-tests"
- path: "unison-cli-main/unison/Main.hs"
component: "unison-cli-main:exe:unison"
- path: "unison-cli-main/unison/Version.hs"
component: "unison-cli-main:exe:unison"
- path: "unison-core/src"
component: "unison-core1:lib"

View File

@ -506,6 +506,7 @@ builtinsSrc =
B "Text.patterns.notCharIn" $ list char --> pat text,
-- Pattern.many : Pattern a -> Pattern a
B "Pattern.many" $ forall1 "a" (\a -> pat a --> pat a),
B "Pattern.many.corrected" $ forall1 "a" (\a -> pat a --> pat a),
B "Pattern.replicate" $ forall1 "a" (\a -> nat --> nat --> pat a --> pat a),
B "Pattern.capture" $ forall1 "a" (\a -> pat a --> pat a),
B "Pattern.captureAs" $ forall1 "a" (\a -> a --> pat a --> pat a),

View File

@ -8,7 +8,7 @@
-- decl to discover constraints on the decl vars. These constraints
-- are then given to a constraint solver that determines a unique kind
-- for each type variable. Unconstrained variables are defaulted to
-- kind * (just like Haskell 98). This is done by 'inferDecls'.
-- kind Type (just like Haskell 98). This is done by 'inferDecls'.
--
-- Afterwards, the 'SolveState' holds the kinds of all decls and we
-- can check that type annotations in terms that may mention the

View File

@ -8,15 +8,15 @@ where
import Control.Lens (Traversal, Traversal')
import Unison.KindInference.Constraint.Provenance (Provenance)
import Unison.KindInference.Constraint.Provenance qualified as Provenance
import Unison.KindInference.Constraint.StarProvenance (StarProvenance)
import Unison.KindInference.Constraint.StarProvenance qualified as SP
import Unison.KindInference.Constraint.TypeProvenance (TypeProvenance)
import Unison.KindInference.Constraint.TypeProvenance qualified as TP
-- | Solved constraints
--
-- These constraints are associated with unification variables during
-- kind inference.
data Constraint uv v loc
= IsType (StarProvenance v loc)
= IsType (TypeProvenance v loc)
| IsAbility (Provenance v loc)
| IsArr (Provenance v loc) uv uv
deriving stock (Show, Eq, Ord)
@ -28,7 +28,7 @@ prov ::
(Provenance v loc)
(Provenance v loc')
prov f = \case
IsType x -> IsType <$> SP.prov f x
IsType x -> IsType <$> TP.prov f x
IsAbility x -> IsAbility <$> f x
IsArr l a b -> (\x -> IsArr x a b) <$> f l
{-# INLINE prov #-}

View File

@ -1,5 +1,5 @@
module Unison.KindInference.Constraint.StarProvenance
( StarProvenance (..),
module Unison.KindInference.Constraint.TypeProvenance
( TypeProvenance (..),
prov,
)
where
@ -11,15 +11,15 @@ import Unison.KindInference.Constraint.Provenance (Provenance)
-- in constraint generation (in which case it will have a
-- @Provenance@) and also in the solver through kind-defaulting on
-- unconstrained unification variables.
data StarProvenance v loc
data TypeProvenance v loc
= NotDefault (Provenance v loc)
| Default
deriving stock (Show, Eq, Ord)
prov ::
Traversal
(StarProvenance v loc)
(StarProvenance v loc')
(TypeProvenance v loc)
(TypeProvenance v loc')
(Provenance v loc)
(Provenance v loc')
prov f = \case

View File

@ -1,6 +1,6 @@
module Unison.KindInference.Constraint.Unsolved
( Constraint (..),
starProv,
typeProv,
prov,
loc,
)
@ -14,29 +14,29 @@ import Unison.KindInference.Constraint.Provenance qualified as Provenance
--
-- These are produced during constraint generation and given as input
-- to the constraint solver.
data Constraint uv v loc starProv
data Constraint uv v loc typeProv
= -- | An IsType constraint may arise from generation or from the
-- solver. During generation the provenance is always a real
-- source code location, but the solver defaults unconstrained
-- kind vars to Star.
IsType uv (starProv v loc)
IsType uv (typeProv v loc)
| IsArr uv (Provenance v loc) uv uv
| IsAbility uv (Provenance v loc)
| Unify (Provenance v loc) uv uv
deriving stock (Show, Eq, Ord)
starProv ::
typeProv ::
Traversal
(Constraint uv v loc prov)
(Constraint uv v loc prov')
(prov v loc)
(prov' v loc)
starProv f = \case
typeProv f = \case
IsType x l -> IsType x <$> f l
IsAbility x l -> pure (IsAbility x l)
IsArr s l a b -> pure (IsArr s l a b)
Unify l a b -> pure (Unify l a b)
{-# INLINE starProv #-}
{-# INLINE typeProv #-}
prov ::
Lens

View File

@ -1,3 +1,5 @@
-- | Handles generating kind constraints to be fed to the kind
-- constraint solver (found in "Unison.KindInference.Solve").
module Unison.KindInference.Generate
( typeConstraints,
termConstraints,
@ -28,40 +30,16 @@ import Unison.Term qualified as Term
import Unison.Type qualified as Type
import Unison.Var (Type (User), Var (typed), freshIn)
data ConstraintTree v loc
= Node [ConstraintTree v loc]
| Constraint (GeneratedConstraint v loc) (ConstraintTree v loc)
| ParentConstraint (GeneratedConstraint v loc) (ConstraintTree v loc)
| StrictOrder (ConstraintTree v loc) (ConstraintTree v loc)
newtype TreeWalk = TreeWalk (forall a. ([a] -> [a]) -> [([a] -> [a], [a] -> [a])] -> [a] -> [a])
--------------------------------------------------------------------------------
-- Constraints arising from Types
--------------------------------------------------------------------------------
bottomUp :: TreeWalk
bottomUp = TreeWalk \down pairs0 -> foldr (\(d, u) b -> d . u . b) id pairs0 . down
flatten :: TreeWalk -> ConstraintTree v loc -> [GeneratedConstraint v loc]
flatten (TreeWalk f) = ($ []) . flattenTop
where
flattenTop :: ConstraintTree v loc -> [GeneratedConstraint v loc] -> [GeneratedConstraint v loc]
flattenTop t0 =
f id [flattenRec id t0]
flattenRec ::
([GeneratedConstraint v loc] -> [GeneratedConstraint v loc]) ->
ConstraintTree v loc ->
([GeneratedConstraint v loc] -> [GeneratedConstraint v loc], [GeneratedConstraint v loc] -> [GeneratedConstraint v loc])
flattenRec down = \case
Node cts ->
let pairs = map (flattenRec id) cts
in (f down pairs, id)
Constraint c ct -> flattenRec (down . (c :)) ct
ParentConstraint c ct ->
let (down', up) = flattenRec down ct
in (down', up . (c :))
StrictOrder a b ->
let as = flattenTop a
bs = flattenTop b
in (f down [(as . bs, id)], id)
-- | Generate kind constraints arising from a given type. The given
-- @UVar@ is constrained to have the kind of the given type.
typeConstraints :: (Var v, Ord loc) => UVar v loc -> Type.Type v loc -> Gen v loc [GeneratedConstraint v loc]
typeConstraints resultVar typ =
flatten bottomUp <$> typeConstraintTree resultVar typ
typeConstraintTree :: (Var v, Ord loc) => UVar v loc -> Type.Type v loc -> Gen v loc (ConstraintTree v loc)
typeConstraintTree resultVar term@ABT.Term {annotation, out} = do
@ -130,11 +108,6 @@ typeConstraintTree resultVar term@ABT.Term {annotation, out} = do
effConstraints <- typeConstraintTree effKind eff
pure $ ParentConstraint (IsAbility effKind (Provenance EffectsList $ ABT.annotation eff)) effConstraints
-- | Generate kind constraints arising from a given type. The given
-- @UVar@ is constrained to have the kind of the given type.
typeConstraints :: (Var v, Ord loc) => UVar v loc -> Type.Type v loc -> Gen v loc [GeneratedConstraint v loc]
typeConstraints resultVar typ =
flatten bottomUp <$> typeConstraintTree resultVar typ
handleIntroOuter :: Var v => v -> loc -> (GeneratedConstraint v loc -> Gen v loc r) -> Gen v loc r
handleIntroOuter v loc k = do
@ -146,6 +119,29 @@ handleIntroOuter v loc k = do
Just a -> pure a
k (Unify (Provenance ScopeReference loc) new orig)
--------------------------------------------------------------------------------
-- Constraints arising from Type annotations
--------------------------------------------------------------------------------
-- | Check that all annotations in a term are well-kinded
termConstraints :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc [GeneratedConstraint v loc]
termConstraints x = flatten bottomUp <$> termConstraintTree x
termConstraintTree :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc (ConstraintTree v loc)
termConstraintTree = fmap Node . dfAnns processAnn cons nil . hackyStripAnns
where
processAnn :: loc -> Type.Type v loc -> Gen v loc [ConstraintTree v loc] -> Gen v loc [ConstraintTree v loc]
processAnn ann typ mrest = do
instantiateType typ \typ gcs -> do
typKind <- freshVar typ
annConstraints <- ParentConstraint (IsType typKind (Provenance TypeAnnotation ann)) <$> typeConstraintTree typKind typ
let annConstraints' = foldr Constraint annConstraints gcs
rest <- mrest
pure (annConstraints' : rest)
cons mlhs mrhs = (++) <$> mlhs <*> mrhs
nil = pure []
-- | Helper for @termConstraints@ that instantiates the outermost
-- foralls and keeps the type in scope (in the type map) while
-- checking lexically nested type annotations.
@ -165,24 +161,6 @@ instantiateType type0 k =
t -> k t (reverse acc)
in go [] type0
-- | Check that all annotations in a term are well-kinded
termConstraints :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc [GeneratedConstraint v loc]
termConstraints x = flatten bottomUp <$> termConstraintTree x
termConstraintTree :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc (ConstraintTree v loc)
termConstraintTree = fmap Node . dfAnns processAnn cons nil . hackyStripAnns
where
processAnn :: loc -> Type.Type v loc -> Gen v loc [ConstraintTree v loc] -> Gen v loc [ConstraintTree v loc]
processAnn ann typ mrest = do
instantiateType typ \typ gcs -> do
typKind <- freshVar typ
annConstraints <- ParentConstraint (IsType typKind (Provenance TypeAnnotation ann)) <$> typeConstraintTree typKind typ
let annConstraints' = foldr Constraint annConstraints gcs
rest <- mrest
pure (annConstraints' : rest)
cons mlhs mrhs = (++) <$> mlhs <*> mrhs
nil = pure []
-- | Process type annotations depth-first. Allows processing
-- annotations with lexical scoping.
dfAnns :: (loc -> Type.Type v loc -> b -> b) -> (b -> b -> b) -> b -> Term.Term v loc -> b
@ -222,6 +200,10 @@ hackyStripAnns =
Term.Ann trm _typ -> trm
t -> ABT.tm ann t
--------------------------------------------------------------------------------
-- Constraints arising from Decls
--------------------------------------------------------------------------------
-- | Generate kind constraints for a mutually recursive component of
-- decls
declComponentConstraints ::
@ -345,6 +327,12 @@ withInstantiatedConstructorType declType tyParams0 constructorType0 k =
pure (Unify (Provenance DeclDefinition (ABT.annotation typ)) x tp)
in goForall constructorType0
--------------------------------------------------------------------------------
-- Constraints on builtins
--------------------------------------------------------------------------------
-- | Constraints on language builtins, used to initialize the kind
-- inference state ('Unison.KindInference.Solve.initialState')
builtinConstraints :: forall v loc. (Ord loc, BuiltinAnnotation loc, Var v) => Gen v loc [GeneratedConstraint v loc]
builtinConstraints = flatten bottomUp <$> builtinConstraintTree
@ -417,6 +405,11 @@ builtinConstraintTree =
kindVar <- pushType (t builtinAnnotation)
foldr Constraint (Node []) <$> constrainToKind (Provenance Builtin builtinAnnotation) kindVar k
--------------------------------------------------------------------------------
-- Helpers for constructing constraints
--------------------------------------------------------------------------------
-- | Constrain a @UVar@ to the provided @Kind@
constrainToKind :: (Var v) => Provenance v loc -> UVar v loc -> Kind -> Gen v loc [GeneratedConstraint v loc]
constrainToKind prov resultVar0 = fmap ($ []) . go resultVar0
where
@ -438,7 +431,52 @@ data Kind = Type | Ability | Kind :-> Kind
infixr 9 :->
-- | Convert the 'Unison.Kind' annotation type to our internal 'Kind'
fromUnisonKind :: Unison.Kind -> Kind
fromUnisonKind = \case
Unison.Star -> Type
Unison.Arrow a b -> fromUnisonKind a :-> fromUnisonKind b
--------------------------------------------------------------------------------
-- Constraint ordering
--------------------------------------------------------------------------------
-- | The order in which constraints are generated has a great impact
-- on the error observed. To separate the concerns of constraint
-- generation and constraint ordering, the constraints are generated as
-- a constraint tree, and the flattening of this tree determines the
-- generated constraint order.
data ConstraintTree v loc
= Node [ConstraintTree v loc]
| Constraint (GeneratedConstraint v loc) (ConstraintTree v loc)
| ParentConstraint (GeneratedConstraint v loc) (ConstraintTree v loc)
| StrictOrder (ConstraintTree v loc) (ConstraintTree v loc)
newtype TreeWalk = TreeWalk (forall a. ([a] -> [a]) -> [([a] -> [a], [a] -> [a])] -> [a] -> [a])
bottomUp :: TreeWalk
bottomUp = TreeWalk \down pairs0 -> foldr (\(d, u) b -> d . u . b) id pairs0 . down
flatten :: TreeWalk -> ConstraintTree v loc -> [GeneratedConstraint v loc]
flatten (TreeWalk f) = ($ []) . flattenTop
where
flattenTop :: ConstraintTree v loc -> [GeneratedConstraint v loc] -> [GeneratedConstraint v loc]
flattenTop t0 =
f id [flattenRec id t0]
flattenRec ::
([GeneratedConstraint v loc] -> [GeneratedConstraint v loc]) ->
ConstraintTree v loc ->
([GeneratedConstraint v loc] -> [GeneratedConstraint v loc], [GeneratedConstraint v loc] -> [GeneratedConstraint v loc])
flattenRec down = \case
Node cts ->
let pairs = map (flattenRec id) cts
in (f down pairs, id)
Constraint c ct -> flattenRec (down . (c :)) ct
ParentConstraint c ct ->
let (down', up) = flattenRec down ct
in (down', up . (c :))
StrictOrder a b ->
let as = flattenTop a
bs = flattenTop b
in (f down [(as . bs, id)], id)

View File

@ -25,8 +25,10 @@ import Unison.Symbol
import Unison.Type qualified as T
import Unison.Var
-- | A generated constraint
type GeneratedConstraint v loc = Constraint (UVar v loc) v loc Provenance
-- | The @Gen@ monad state
data GenState v loc = GenState
{ unifVars :: !(Set Symbol),
typeMap :: !(Map (T.Type v loc) (NonEmpty (UVar v loc))),
@ -45,6 +47,7 @@ newtype Gen v loc a = Gen
)
via State (GenState v loc)
-- | @Gen@ monad runner
run :: Gen v loc a -> GenState v loc -> (a, GenState v loc)
run (Gen ma) st0 = ma st0
@ -71,11 +74,13 @@ pushType t = do
modify \st -> st {typeMap = newTypeMap}
pure var
-- | Lookup the @UVar@ associated with a @Type@
lookupType :: Var v => T.Type v loc -> Gen v loc (Maybe (UVar v loc))
lookupType t = do
GenState {typeMap} <- get
pure (NonEmpty.head <$> Map.lookup t typeMap)
-- | Remove a @Type@ from the context
popType :: Var v => T.Type v loc -> Gen v loc ()
popType t = do
modify \st -> st {typeMap = del (typeMap st)}
@ -88,6 +93,7 @@ popType t = do
x : xs -> Just (x :| xs)
in Map.alter f t m
-- | Helper to run an action with the given @Type@ in the context
scopedType :: Var v => T.Type v loc -> (UVar v loc -> Gen v loc r) -> Gen v loc r
scopedType t m = do
s <- pushType t

View File

@ -1,3 +1,5 @@
-- | Handles solving kind constraints generated by
-- "Unison.KindInference.Generate".
module Unison.KindInference.Solve
( step,
verify,
@ -19,7 +21,7 @@ import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation)
import Unison.Debug (DebugFlag (KindInference), shouldDebug)
import Unison.KindInference.Constraint.Provenance (Provenance (..))
import Unison.KindInference.Constraint.Solved qualified as Solved
import Unison.KindInference.Constraint.StarProvenance (StarProvenance (..))
import Unison.KindInference.Constraint.TypeProvenance (TypeProvenance (..))
import Unison.KindInference.Constraint.Unsolved qualified as Unsolved
import Unison.KindInference.Error (ConstraintConflict (..), KindError (..), improveError)
import Unison.KindInference.Generate (builtinConstraints)
@ -43,10 +45,16 @@ import Unison.Syntax.TypePrinter qualified as TP
import Unison.Util.Pretty qualified as P
import Unison.Var (Var)
type UnsolvedConstraint v loc = Unsolved.Constraint (UVar v loc) v loc StarProvenance
-- | Like 'GeneratedConstraint' but the provenance of @IsType@
-- constraints may be due to kind defaulting. (See 'defaultUnconstrainedVars')
type UnsolvedConstraint v loc = Unsolved.Constraint (UVar v loc) v loc TypeProvenance
-- | We feed both @UnsolvedConstraint@ and @GeneratedConstraint@ to
-- our constraint solver, so it is useful to convert
-- @GeneratedConstraint@ into @UnsolvedConstraint@ to avoid code
-- duplication.
_Generated :: forall v loc. Prism' (UnsolvedConstraint v loc) (GeneratedConstraint v loc)
_Generated = prism' (Unsolved.starProv %~ NotDefault) \case
_Generated = prism' (Unsolved.typeProv %~ NotDefault) \case
Unsolved.IsType s l -> case l of
Default -> Nothing
NotDefault l -> Just (Unsolved.IsType s l)
@ -54,8 +62,9 @@ _Generated = prism' (Unsolved.starProv %~ NotDefault) \case
Unsolved.IsArr s l a b -> Just (Unsolved.IsArr s l a b)
Unsolved.Unify l a b -> Just (Unsolved.Unify l a b)
-- | Apply some generated constraints to a solve state, returning a
-- kind error if detected or a new solve state.
-- | This is the primary function in the exposed API. @step@ applies
-- some generated constraints to a solve state, returning a kind error
-- if detected or a new solve state.
step ::
(Var v, Ord loc, Show loc) =>
Env ->
@ -79,7 +88,7 @@ step e st cs =
Left e -> Left e
Right () -> Right finalState
-- | Default any unconstrained vars to *
-- | Default any unconstrained vars to @Type@
defaultUnconstrainedVars :: Var v => SolveState v loc -> SolveState v loc
defaultUnconstrainedVars st =
let newConstraints = foldl' phi (constraints st) (newUnifVars st)
@ -90,124 +99,12 @@ defaultUnconstrainedVars st =
Just _ -> U.Canonical ecSize d
in st {constraints = newConstraints, newUnifVars = []}
prettyConstraintD' :: Show loc => Var v => PrettyPrintEnv -> UnsolvedConstraint v loc -> P.Pretty P.ColorText
prettyConstraintD' ppe =
P.wrap . \case
Unsolved.IsType v p -> prettyUVar ppe v <> " ~ Type" <> prettyProv p
Unsolved.IsAbility v p -> prettyUVar ppe v <> " ~ Ability" <> prettyProv p
Unsolved.IsArr v p a b -> prettyUVar ppe v <> " ~ " <> prettyUVar ppe a <> " -> " <> prettyUVar ppe b <> prettyProv p
Unsolved.Unify p a b -> prettyUVar ppe a <> " ~ " <> prettyUVar ppe b <> prettyProv p
where
prettyProv x =
"[" <> P.string (show x) <> "]"
prettyConstraints :: Show loc => Var v => PrettyPrintEnv -> [UnsolvedConstraint v loc] -> P.Pretty P.ColorText
prettyConstraints ppe = P.sep "\n" . map (prettyConstraintD' ppe)
prettyUVar :: Var v => PrettyPrintEnv -> UVar v loc -> P.Pretty P.ColorText
prettyUVar ppe (UVar s t) = TP.pretty ppe t <> " :: " <> P.prettyVar s
tracePretty :: P.Pretty P.ColorText -> a -> a
tracePretty p = trace (P.toAnsiUnbroken p)
data OccCheckState v loc = OccCheckState
{ visitingSet :: Set (UVar v loc),
visitingStack :: [UVar v loc],
solvedSet :: Set (UVar v loc),
solvedConstraints :: ConstraintMap v loc,
kindErrors :: [KindError v loc]
}
markVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) CycleCheck
markVisiting x = do
OccCheckState {visitingSet, visitingStack} <- M.get
case Set.member x visitingSet of
True -> do
OccCheckState {solvedConstraints} <- M.get
let loc = case U.lookupCanon x solvedConstraints of
Just (_, _, Descriptor {descriptorConstraint = Just (Solved.IsArr (Provenance _ loc) _ _)}, _) -> loc
_ -> error "cycle without IsArr constraint"
addError (CycleDetected loc x solvedConstraints)
pure Cycle
False -> do
M.modify \st ->
st
{ visitingSet = Set.insert x visitingSet,
visitingStack = x : visitingStack
}
pure NoCycle
unmarkVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) ()
unmarkVisiting x = M.modify \st ->
st
{ visitingSet = Set.delete x (visitingSet st),
visitingStack = tail (visitingStack st),
solvedSet = Set.insert x (solvedSet st)
}
addError :: KindError v loc -> M.State (OccCheckState v loc) ()
addError ke = M.modify \st -> st {kindErrors = ke : kindErrors st}
isSolved :: Var v => UVar v loc -> M.State (OccCheckState v loc) Bool
isSolved x = do
OccCheckState {solvedSet} <- M.get
pure $ Set.member x solvedSet
data CycleCheck
= Cycle
| NoCycle
-- | Occurrence check and report any errors
occCheck ::
forall v loc.
Var v =>
ConstraintMap v loc ->
Either (NonEmpty (KindError v loc)) (ConstraintMap v loc)
occCheck constraints0 =
let go ::
[(UVar v loc)] ->
M.State (OccCheckState v loc) ()
go = \case
[] -> pure ()
u : us -> do
isSolved u >>= \case
True -> go us
False -> do
markVisiting u >>= \case
Cycle -> pure ()
NoCycle -> do
st@OccCheckState {solvedConstraints} <- M.get
let handleNothing = error "impossible"
handleJust _canonK ecSize d = case descriptorConstraint d of
Nothing -> ([], U.Canonical ecSize d {descriptorConstraint = Just $ Solved.IsType Default})
Just v ->
let descendants = case v of
Solved.IsType _ -> []
Solved.IsAbility _ -> []
Solved.IsArr _ a b -> [a, b]
in (descendants, U.Canonical ecSize d)
let (descendants, solvedConstraints') = U.alterF u handleNothing handleJust solvedConstraints
M.put st {solvedConstraints = solvedConstraints'}
go descendants
unmarkVisiting u
go us
OccCheckState {solvedConstraints, kindErrors} =
M.execState
(go (U.keys constraints0))
OccCheckState
{ visitingSet = Set.empty,
visitingStack = [],
solvedSet = Set.empty,
solvedConstraints = constraints0,
kindErrors = []
}
in case kindErrors of
[] -> Right solvedConstraints
e : es -> Left (e :| es)
-- | loop through the constraints, eliminating constraints until we
-- have some set that cannot be reduced
-- | Loop through the constraints, eliminating constraints until we
-- have some set that cannot be reduced. There isn't any strong reason
-- to avoid halting at the first error; we don't have constraints that
-- fail now but might succeed given more information. The idea of
-- looping was to resolve as much as possible so that the error
-- message can be as complete as possible.
reduce ::
forall v loc.
(Show loc, Var v, Ord loc) =>
@ -224,36 +121,50 @@ reduce cs0 = dbg "reduce" cs0 (go False [])
Right () -> error "impossible"
c : cs ->
addConstraint c >>= \case
-- If an error occurs then push it back onto the unsolved
-- stack
Left _ -> go b (c : acc) cs
-- Signal that we solved something on this pass (by passing
-- @True@) and continue
Right () -> go True acc cs
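-- An aside, not part of this diff: a standalone sketch of the fixed-point
-- strategy described in the comment above @reduce@. Attempt every
-- remaining constraint, keep the failures for retrying, and stop once a
-- full pass makes no progress. The toy @step@ stands in for
-- @addConstraint@; @Nothing@ means the constraint was discharged.
saturate :: (c -> Maybe c) -> [c] -> [c]
saturate step = loop
  where
    loop cs =
      let stuck = [c' | c <- cs, Just c' <- [step c]]
       in if length stuck == length cs
            then stuck -- no progress on this pass; report whatever is left
            else loop stuck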
-- | tracing helper
dbg ::
forall a.
-- | A hanging prefix or header
P.Pretty P.ColorText ->
-- | The constraints to print
[GeneratedConstraint v loc] ->
([GeneratedConstraint v loc] -> Solve v loc a) ->
Solve v loc a
dbg hdr cs f =
case shouldDebug KindInference of
True -> do
ppe <- asks prettyPrintEnv
tracePretty (P.hang (P.bold hdr) (prettyConstraints ppe (map (review _Generated) cs))) (f cs)
False -> f cs
dbg = traceApp \ppe cs -> prettyConstraints ppe (map (review _Generated) cs)
-- | Like @dbg@, but for a single constraint
dbgSingle ::
forall a.
P.Pretty P.ColorText ->
GeneratedConstraint v loc ->
(GeneratedConstraint v loc -> Solve v loc a) ->
Solve v loc a
dbgSingle hdr c f =
dbgSingle = traceApp \ppe c -> prettyConstraintD' ppe (review _Generated c)
-- | A helper for @dbg*@
traceApp ::
forall a b.
(PrettyPrintEnv -> a -> P.Pretty P.ColorText) ->
P.Pretty P.ColorText ->
a ->
(a -> Solve v loc b) ->
Solve v loc b
traceApp prettyA hdr a ab =
case shouldDebug KindInference of
False -> ab a
True -> do
ppe <- asks prettyPrintEnv
tracePretty (P.hang (P.bold hdr) (prettyConstraintD' ppe (review _Generated c))) (f c)
False -> f c
tracePretty (P.hang (P.bold hdr) (prettyA ppe a)) (ab a)
-- | Add a single constraint, returning an error if there is a
-- contradictory constraint
-- contradictory constraint.
addConstraint ::
forall v loc.
Ord loc =>
@ -284,6 +195,9 @@ addConstraint constraint = do
processPostAction . fmap concat =<< runExceptT ((traverse (ExceptT . addConstraint') (x : xs)))
processPostAction =<< addConstraint' (review _Generated constraint)
-- | Decompose the unsolved constraint into implied constraints,
-- returning a constraint conflict if the constraint cannot be
-- satisfied.
addConstraint' ::
forall v loc.
Ord loc =>
@ -291,11 +205,21 @@ addConstraint' ::
UnsolvedConstraint v loc ->
Solve v loc (Either (ConstraintConflict v loc) [UnsolvedConstraint v loc])
addConstraint' = \case
-- @IsAbility@ and @IsType@ constraints are very straightforward:
-- they are satisfied if the same constraint already exists or if no
-- constraint exists.
Unsolved.IsAbility s p0 -> do
handleConstraint s (Solved.IsAbility p0) \case
Solved.IsAbility _ -> Just (Solved.IsAbility p0, [])
_ -> Nothing
Unsolved.IsType s p0 -> do
handleConstraint s (Solved.IsType p0) \case
Solved.IsType _ -> Just (Solved.IsType p0, [])
_ -> Nothing
Unsolved.IsArr s p0 a b -> do
-- If an @IsArr@ constraint is already present then we need to unify
-- the left- and right-hand sides of the new constraint with those of
-- the existing one, so we return those unifications as implied
-- constraints.
handleConstraint s (Solved.IsArr p0 a b) \case
Solved.IsArr _p1 c d ->
let implied =
@ -305,18 +229,23 @@ addConstraint' = \case
prov = p0
in Just (Solved.IsArr prov a b, implied)
_ -> Nothing
Unsolved.IsType s p0 -> do
handleConstraint s (Solved.IsType p0) \case
Solved.IsType _ -> Just (Solved.IsType p0, [])
_ -> Nothing
Unsolved.Unify l a b -> Right <$> union l a b
where
-- | A helper for solving the various @Is*@ constraints. In each case
-- we want to look up any existing constraint on the constrained
-- variable. If none exists then we simply add the new constraint,
-- as it can't conflict with anything. If there is an existing
-- constraint we defer to the passed-in function.
handleConstraint ::
-- | The variable mentioned in the input constraint
UVar v loc ->
-- | The new constraint
Solved.Constraint (UVar v loc) v loc ->
-- | How to handle an existing constraint
( Solved.Constraint (UVar v loc) v loc ->
Maybe (Solved.Constraint (UVar v loc) v loc, [UnsolvedConstraint v loc])
) ->
-- | An error or a list of implied constraints
Solve v loc (Either (ConstraintConflict v loc) [UnsolvedConstraint v loc])
handleConstraint s solvedConstraint phi = do
st@SolveState {constraints} <- M.get
@ -384,6 +313,16 @@ verify st =
Left e -> Left e
Right m -> Right st {constraints = m}
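-- An aside, not from this module: a toy version of the merge rule that
-- @addConstraint'@ and @handleConstraint@ implement. Merging two arrow
-- constraints succeeds and implies unifications between their arguments,
-- merging an identical @IsType@/@IsAbility@ constraint is a no-op, and
-- anything else is a conflict. Toy @K@ constraints over @Int@ uvars stand
-- in for the real types.
data K = KType | KAbility | KArr Int Int
  deriving (Eq, Show)

mergeK :: K -> K -> Either (K, K) [(Int, Int)]
mergeK (KArr a b) (KArr c d) = Right [(a, c), (b, d)] -- implied unifications
mergeK KType KType = Right []
mergeK KAbility KAbility = Right []
mergeK new old = Left (new, old) -- constraint conflict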
--------------------------------------------------------------------------------
-- @SolveState@ initialization
--------------------------------------------------------------------------------
initialState :: forall v loc. (BuiltinAnnotation loc, Show loc, Ord loc, Var v) => Env -> SolveState v loc
initialState env =
let ((), finalState) = run env emptyState initializeState
in finalState
initializeState :: forall v loc. (BuiltinAnnotation loc, Ord loc, Show loc, Var v) => Solve v loc ()
initializeState = assertGen do
builtinConstraints
@ -399,10 +338,129 @@ assertGen gen = do
st <- step env st cs
verify st
case comp of
Left _ -> error "[assertGen]: constraint failure in among builtin constraints"
Left _ -> error "[assertGen]: constraint failure in builtin constraints"
Right st -> M.put st
initialState :: forall v loc. (BuiltinAnnotation loc, Show loc, Ord loc, Var v) => Env -> SolveState v loc
initialState env =
let ((), finalState) = run env emptyState initializeState
in finalState
--------------------------------------------------------------------------------
-- Occurrence check and helpers
--------------------------------------------------------------------------------
-- | occurrence check and report any errors
occCheck ::
forall v loc.
Var v =>
ConstraintMap v loc ->
Either (NonEmpty (KindError v loc)) (ConstraintMap v loc)
occCheck constraints0 =
let go ::
[(UVar v loc)] ->
M.State (OccCheckState v loc) ()
go = \case
[] -> pure ()
u : us -> do
isSolved u >>= \case
True -> go us
False -> do
markVisiting u >>= \case
Cycle -> pure ()
NoCycle -> do
st@OccCheckState {solvedConstraints} <- M.get
let handleNothing = error "impossible"
handleJust _canonK ecSize d = case descriptorConstraint d of
Nothing -> ([], U.Canonical ecSize d {descriptorConstraint = Just $ Solved.IsType Default})
Just v ->
let descendants = case v of
Solved.IsType _ -> []
Solved.IsAbility _ -> []
Solved.IsArr _ a b -> [a, b]
in (descendants, U.Canonical ecSize d)
let (descendants, solvedConstraints') = U.alterF u handleNothing handleJust solvedConstraints
M.put st {solvedConstraints = solvedConstraints'}
go descendants
unmarkVisiting u
go us
OccCheckState {solvedConstraints, kindErrors} =
M.execState
(go (U.keys constraints0))
OccCheckState
{ visitingSet = Set.empty,
visitingStack = [],
solvedSet = Set.empty,
solvedConstraints = constraints0,
kindErrors = []
}
in case kindErrors of
[] -> Right solvedConstraints
e : es -> Left (e :| es)
data OccCheckState v loc = OccCheckState
{ visitingSet :: Set (UVar v loc),
visitingStack :: [UVar v loc],
solvedSet :: Set (UVar v loc),
solvedConstraints :: ConstraintMap v loc,
kindErrors :: [KindError v loc]
}
markVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) CycleCheck
markVisiting x = do
OccCheckState {visitingSet, visitingStack} <- M.get
case Set.member x visitingSet of
True -> do
OccCheckState {solvedConstraints} <- M.get
let loc = case U.lookupCanon x solvedConstraints of
Just (_, _, Descriptor {descriptorConstraint = Just (Solved.IsArr (Provenance _ loc) _ _)}, _) -> loc
_ -> error "cycle without IsArr constraint"
addError (CycleDetected loc x solvedConstraints)
pure Cycle
False -> do
M.modify \st ->
st
{ visitingSet = Set.insert x visitingSet,
visitingStack = x : visitingStack
}
pure NoCycle
unmarkVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) ()
unmarkVisiting x = M.modify \st ->
st
{ visitingSet = Set.delete x (visitingSet st),
visitingStack = tail (visitingStack st),
solvedSet = Set.insert x (solvedSet st)
}
addError :: KindError v loc -> M.State (OccCheckState v loc) ()
addError ke = M.modify \st -> st {kindErrors = ke : kindErrors st}
isSolved :: Var v => UVar v loc -> M.State (OccCheckState v loc) Bool
isSolved x = do
OccCheckState {solvedSet} <- M.get
pure $ Set.member x solvedSet
data CycleCheck
= Cycle
| NoCycle
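-- An aside, not part of this diff: a standalone sketch of the
-- visiting-set technique @occCheck@ uses, over a toy @Int@ graph given as
-- an association list rather than the @UVar@/@ConstraintMap@ machinery
-- above. Walk each node, mark it as visiting, recurse into its
-- descendants, and record a cycle whenever a node is reached while it is
-- still marked as visiting.
data OccSketch = OccSketch
  { sketchVisiting :: Set Int,
    sketchSolved :: Set Int,
    sketchCycles :: [Int]
  }

occSketch :: [(Int, [Int])] -> [Int]
occSketch graph =
  sketchCycles (M.execState (mapM_ go (map fst graph)) (OccSketch Set.empty Set.empty []))
  where
    go u = do
      OccSketch vis slv cyc <- M.get
      case (Set.member u slv, Set.member u vis) of
        (True, _) -> pure ()
        (_, True) -> M.put (OccSketch vis slv (u : cyc)) -- cycle detected
        _ -> do
          M.put (OccSketch (Set.insert u vis) slv cyc)
          mapM_ go (concat (lookup u graph))
          M.modify \st ->
            st
              { sketchVisiting = Set.delete u (sketchVisiting st),
                sketchSolved = Set.insert u (sketchSolved st)
              }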
--------------------------------------------------------------------------------
-- Debug output helpers
--------------------------------------------------------------------------------
prettyConstraintD' :: Show loc => Var v => PrettyPrintEnv -> UnsolvedConstraint v loc -> P.Pretty P.ColorText
prettyConstraintD' ppe =
P.wrap . \case
Unsolved.IsType v p -> prettyUVar ppe v <> " ~ Type" <> prettyProv p
Unsolved.IsAbility v p -> prettyUVar ppe v <> " ~ Ability" <> prettyProv p
Unsolved.IsArr v p a b -> prettyUVar ppe v <> " ~ " <> prettyUVar ppe a <> " -> " <> prettyUVar ppe b <> prettyProv p
Unsolved.Unify p a b -> prettyUVar ppe a <> " ~ " <> prettyUVar ppe b <> prettyProv p
where
prettyProv x =
"[" <> P.string (show x) <> "]"
prettyConstraints :: Show loc => Var v => PrettyPrintEnv -> [UnsolvedConstraint v loc] -> P.Pretty P.ColorText
prettyConstraints ppe = P.sep "\n" . map (prettyConstraintD' ppe)
prettyUVar :: Var v => PrettyPrintEnv -> UVar v loc -> P.Pretty P.ColorText
prettyUVar ppe (UVar s t) = TP.pretty ppe t <> " :: " <> P.prettyVar s
tracePretty :: P.Pretty P.ColorText -> a -> a
tracePretty p = trace (P.toAnsiUnbroken p)
View File
@ -35,6 +35,18 @@ data Env = Env {prettyPrintEnv :: PrettyPrintEnv}
type ConstraintMap v loc = U.UFMap (UVar v loc) (Descriptor v loc)
-- | The @SolveState@ holds all kind constraints gathered for each
-- type. For example, after processing data and effect decls the
-- @typeMap@ will hold entries for every decl, and looking up the
-- corresponding @UVar@ in @constraints@ will return its kind.
--
-- The other fields, @unifVars@ and @newUnifVars@, are relevant when
-- interleaving constraint generation with solving. Constraint
-- generation needs to create fresh unification variables, so it needs
-- the set of bound unification variables from
-- @unifVars@. @newUnifVars@ holds the uvars that are candidates for
-- kind defaulting (see
-- 'Unison.KindInference.Solve.defaultUnconstrainedVars').
data SolveState v loc = SolveState
{ unifVars :: !(Set Symbol),
newUnifVars :: [UVar v loc],
@ -42,6 +54,7 @@ data SolveState v loc = SolveState
typeMap :: !(Map (T.Type v loc) (NonEmpty (UVar v loc)))
}
-- | Constraints associated with a unification variable
data Descriptor v loc = Descriptor
{ descriptorConstraint :: Maybe (Constraint (UVar v loc) v loc)
}
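-- A hedged sketch, not part of this diff: how the pieces above are meant
-- to fit together. A type's uvars are recorded in @typeMap@, and @find@
-- (defined further below) reads a uvar's solved constraint out of
-- @constraints@. Here @lookupUVar@ is a hypothetical stand-in for however
-- a caller resolves a type to one of its uvars.
kindOf ::
  Var v =>
  (T.Type v loc -> Solve v loc (Maybe (UVar v loc))) ->
  T.Type v loc ->
  Solve v loc (Maybe (Constraint (UVar v loc) v loc))
kindOf lookupUVar typ = do
  res <- lookupUVar typ
  case res of
    Nothing -> pure Nothing
    Just uvar -> find uvar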
@ -57,6 +70,7 @@ newtype Solve v loc a = Solve {unSolve :: Env -> SolveState v loc -> (a, SolveSt
)
via M.ReaderT Env (M.State (SolveState v loc))
-- | Helper for interleaving constraint generation and solving
genStateL :: Lens' (SolveState v loc) (Gen.GenState v loc)
genStateL f st =
( \genState ->
@ -72,6 +86,7 @@ genStateL f st =
newVars = []
}
-- | Interleave constraint generation into constraint solving
runGen :: Var v => Gen v loc a -> Solve v loc a
runGen gena = do
st <- M.get
@ -85,15 +100,20 @@ runGen gena = do
M.modify \st -> st {newUnifVars = vs ++ newUnifVars st}
pure cs
-- | Add a unification variable to the constraint mapping with no
-- constraints. This is done on uvars created during constraint
-- generation to initialize the new uvars (see 'runGen').
addUnconstrainedVar :: Var v => UVar v loc -> Solve v loc ()
addUnconstrainedVar uvar = do
st@SolveState {constraints} <- M.get
let constraints' = U.insert uvar Descriptor {descriptorConstraint = Nothing} constraints
M.put st {constraints = constraints'}
-- | Runner for the @Solve@ monad
run :: Env -> SolveState v loc -> Solve v loc a -> (a, SolveState v loc)
run e st action = unSolve action e st
-- | Initial solve state
emptyState :: SolveState v loc
emptyState =
SolveState
@ -103,6 +123,7 @@ emptyState =
typeMap = M.empty
}
-- | Lookup the constraints associated with a unification variable
find :: Var v => UVar v loc -> Solve v loc (Maybe (Constraint (UVar v loc) v loc))
find k = do
st@SolveState {constraints} <- M.get
View File
@ -3103,7 +3103,9 @@ declareForeigns = do
_ -> die "Text.patterns.notCharIn: non-character closure"
evaluate . TPat.cpattern . TPat.Char . TPat.Not $ TPat.CharSet cs
declareForeign Untracked "Pattern.many" boxDirect . mkForeign $
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Many p
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Many False p
declareForeign Untracked "Pattern.many.corrected" boxDirect . mkForeign $
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Many True p
declareForeign Untracked "Pattern.capture" boxDirect . mkForeign $
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Capture p
declareForeign Untracked "Pattern.captureAs" boxBoxDirect . mkForeign $
View File
@ -363,6 +363,7 @@ performRehash rgrp0 ctx =
irs = remap $ intermedRemap ctx
f b r
| not b,
r `Map.notMember` rgrp0,
r <- Map.findWithDefault r r frs,
Just r <- Map.lookup r irs =
r
@ -757,7 +758,9 @@ prepareEvaluation ppe tm ctx = do
pure (backrefAdd rbkr ctx', rgrp, rmn)
where
(rmn0, frem, rgrp0, rbkr) = intermediateTerm ppe ctx tm
int b r = if b then r else toIntermed ctx r
int b r
| b || Map.member r rgrp0 = r
| otherwise = toIntermed ctx r
(ctx', rrefs, rgrp) =
performRehash
((fmap . overGroupLinks) int rgrp0)
View File
@ -12,7 +12,7 @@ data Pattern
| Or Pattern Pattern -- left-biased choice: tries second pattern only if first fails
| Capture Pattern -- capture all the text consumed by the inner pattern, discarding its subcaptures
| CaptureAs Text Pattern -- capture the given text, discarding its subcaptures, and name the capture
| Many Pattern -- zero or more repetitions (at least 1 can be written: Join [p, Many p])
| Many Bool Pattern -- zero or more repetitions (at least 1 can be written: Join [p, Many True p]); the Bool selects the corrected semantics (True) or the original behavior (False).
| Replicate Int Int Pattern -- m to n occurrences of a pattern, optional = 0-1
| Eof -- succeed if given the empty text, fail otherwise
| Literal Text -- succeed if input starts with the given text, advance by that text
@ -128,7 +128,7 @@ compile (CaptureAs t p) !err !success = go
success' _ rem acc0 _ = success (pushCapture t acc0) rem
compiled = compile p err' success'
go acc t = compiled acc t acc t
compile (Capture (Many (Char Any))) !_ !success = \acc t -> success (pushCapture t acc) Text.empty
compile (Capture (Many _ (Char Any))) !_ !success = \acc t -> success (pushCapture t acc) Text.empty
compile (Capture c) !err !success = go
where
err' _ _ acc0 t0 = err acc0 t0
@ -152,12 +152,13 @@ compile (Char cp) !err !success = go
go acc t = case Text.uncons t of
Just (ch, rem) | ok ch -> success acc rem
_ -> err acc t
compile (Many p) !_ !success = case p of
compile (Many correct p) !_ !success = case p of
Char Any -> (\acc _ -> success acc Text.empty)
Char cp -> walker (charPatternPred cp)
p -> go
where
go = try "Many" (compile p) success success'
go | correct = try "Many" (compile p) success success'
| otherwise = compile p success success'
success' acc rem
| Text.size rem == 0 = success acc rem
| otherwise = go acc rem
View File
@ -114,12 +114,12 @@ test =
expect' (P.run (P.Char (P.CharSet "0123")) "3ab" == Just ([], "ab"))
expect' (P.run (P.Char (P.Not (P.CharSet "0123"))) "a3b" == Just ([], "3b"))
expect' (P.run (P.Capture (P.Char (P.Not (P.CharSet "0123")))) "a3b" == Just (["a"], "3b"))
expect' (P.run (P.Many (P.Char (P.CharSet "abcd"))) "babbababac123" == Just ([], "123"))
expect' (P.run (P.Capture (P.Many (P.Char (P.CharSet "abcd")))) "babbababac123" == Just (["babbababac"], "123"))
expect' (P.run (P.Capture (P.Many (P.Char (P.CharClass P.Number)))) "012345abc" == Just (["012345"], "abc"))
expect' (P.run (P.Join [P.Capture (P.Many (P.Char (P.CharClass P.Number))), P.Literal ",", P.Capture (P.Many (P.Char P.Any))]) "012345,abc" == Just (["012345", "abc"], ""))
expect' (P.run (P.Many True (P.Char (P.CharSet "abcd"))) "babbababac123" == Just ([], "123"))
expect' (P.run (P.Capture (P.Many True (P.Char (P.CharSet "abcd")))) "babbababac123" == Just (["babbababac"], "123"))
expect' (P.run (P.Capture (P.Many True (P.Char (P.CharClass P.Number)))) "012345abc" == Just (["012345"], "abc"))
expect' (P.run (P.Join [P.Capture (P.Many True (P.Char (P.CharClass P.Number))), P.Literal ",", P.Capture (P.Many True (P.Char P.Any))]) "012345,abc" == Just (["012345", "abc"], ""))
expect'
( P.run (P.Many (P.Join [P.Capture (P.Many (P.Char (P.CharClass P.Number))), P.Many (P.Char (P.CharClass P.Whitespace))])) "01 10 20 1123 292 110 10"
( P.run (P.Many True (P.Join [P.Capture (P.Many True (P.Char (P.CharClass P.Number))), P.Many True (P.Char (P.CharClass P.Whitespace))])) "01 10 20 1123 292 110 10"
== Just (["01", "10", "20", "1123", "292", "110", "10"], "")
)
expect' $
View File
@ -100,7 +100,7 @@ library
Unison.KindInference.Constraint.Pretty
Unison.KindInference.Constraint.Provenance
Unison.KindInference.Constraint.Solved
Unison.KindInference.Constraint.StarProvenance
Unison.KindInference.Constraint.TypeProvenance
Unison.KindInference.Constraint.Unsolved
Unison.KindInference.Error
Unison.KindInference.Error.Pretty
View File
@ -54,7 +54,7 @@
(let ([bs (grab-bytes)])
(match (builtin-Value.deserialize (bytes->chunked-bytes bs))
[(unison-data _ t (list q))
(= t unison-either-right:tag)
(= t ref-either-right:tag)
(apply
values
(unison-tuple->list (reify-value (unison-quote-val q))))]
@ -67,7 +67,7 @@
(define (do-evaluate)
(let-values ([(code main-ref) (decode-input)])
(add-runtime-code 'unison-main code)
(handle [unison-exception:typelink] top-exn-handler
(handle [ref-exception:typelink] top-exn-handler
((termlink->proc main-ref))
(data 'unit 0))))
View File
@ -13,38 +13,39 @@
#!racket/base
(provide
(all-from-out unison/data-info)
unison-any:typelink
unison-boolean:typelink
unison-bytes:typelink
unison-char:typelink
unison-float:typelink
unison-int:typelink
unison-nat:typelink
unison-text:typelink
unison-code:typelink
unison-mvar:typelink
unison-pattern:typelink
unison-promise:typelink
unison-sequence:typelink
unison-socket:typelink
unison-tls:typelink
unison-timespec:typelink
unison-threadid:typelink
builtin-any:typelink
builtin-boolean:typelink
builtin-bytes:typelink
builtin-char:typelink
builtin-float:typelink
builtin-int:typelink
builtin-nat:typelink
builtin-text:typelink
builtin-code:typelink
builtin-mvar:typelink
builtin-pattern:typelink
builtin-promise:typelink
builtin-sequence:typelink
builtin-socket:typelink
builtin-tls:typelink
builtin-timespec:typelink
builtin-threadid:typelink
builtin-value:typelink
unison-crypto.hashalgorithm:typelink
unison-char.class:typelink
unison-immutablearray:typelink
unison-immutablebytearray:typelink
unison-mutablearray:typelink
unison-mutablebytearray:typelink
unison-processhandle:typelink
unison-ref.ticket:typelink
unison-tls.cipher:typelink
unison-tls.clientconfig:typelink
unison-tls.privatekey:typelink
unison-tls.serverconfig:typelink
unison-tls.signedcert:typelink
unison-tls.version:typelink
builtin-crypto.hashalgorithm:typelink
builtin-char.class:typelink
builtin-immutablearray:typelink
builtin-immutablebytearray:typelink
builtin-mutablearray:typelink
builtin-mutablebytearray:typelink
builtin-processhandle:typelink
builtin-ref.ticket:typelink
builtin-tls.cipher:typelink
builtin-tls.clientconfig:typelink
builtin-tls.privatekey:typelink
builtin-tls.serverconfig:typelink
builtin-tls.signedcert:typelink
builtin-tls.version:typelink
bytevector
bytes
@ -495,62 +496,54 @@
(define (reference->termlink rf)
(match rf
[(unison-data _ t (list nm))
#:when (= t unison-reference-builtin:tag)
#:when (= t ref-reference-builtin:tag)
(unison-termlink-builtin (chunked-string->string nm))]
[(unison-data _ t (list id))
#:when (= t unison-reference-derived:tag)
#:when (= t ref-reference-derived:tag)
(match id
[(unison-data _ t (list rf i))
#:when (= t unison-id-id:tag)
#:when (= t ref-id-id:tag)
(unison-termlink-derived rf i)])]))
(define (referent->termlink rn)
(match rn
[(unison-data _ t (list rf i))
#:when (= t unison-referent-con:tag)
#:when (= t ref-referent-con:tag)
(unison-termlink-con (reference->typelink rf) i)]
[(unison-data _ t (list rf))
#:when (= t unison-referent-def:tag)
#:when (= t ref-referent-def:tag)
(reference->termlink rf)]))
(define (reference->typelink rf)
(match rf
[(unison-data _ t (list nm))
#:when (= t unison-reference-builtin:tag)
#:when (= t ref-reference-builtin:tag)
(unison-typelink-builtin (chunked-string->string nm))]
[(unison-data _ t (list id))
#:when (= t unison-reference-derived:tag)
#:when (= t ref-reference-derived:tag)
(match id
[(unison-data _ t (list rf i))
#:when (= t unison-id-id:tag)
#:when (= t ref-id-id:tag)
(unison-typelink-derived rf i)])]))
(define (typelink->reference tl)
(match tl
[(unison-typelink-builtin nm)
(unison-reference-builtin (string->chunked-string nm))]
(ref-reference-builtin (string->chunked-string nm))]
[(unison-typelink-derived hs i)
(unison-reference-derived
(unison-id-id hs i))]))
(ref-reference-derived (ref-id-id hs i))]))
(define (termlink->referent tl)
(match tl
[(unison-termlink-builtin nm)
(unison-referent-def
(unison-reference-builtin nm))]
(ref-referent-def
(ref-reference-builtin nm))]
[(unison-termlink-derived rf i)
(unison-referent-def
(unison-reference-derived
(unison-id-id rf i)))]
(ref-referent-def
(ref-reference-derived
(ref-id-id rf i)))]
[(unison-termlink-con tyl i)
(unison-referent-con
(typelink->reference tyl)
i)]))
(define (list->unison-tuple l)
(foldr unison-tuple-pair unison-unit-unit l))
(define (unison-tuple . l) (list->unison-tuple l))
(ref-referent-con (typelink->reference tyl) i)]))
(define (unison-seq . l)
(vector->chunked-list (list->vector l)))
@ -564,13 +557,13 @@
[pure (x)
(match x
[(unison-data r 0 (list))
(eq? r unison-unit:typelink)
(eq? r ref-unit:typelink)
(display "")]
[else
(display (describe-value x))])]
[unison-exception:typelink
[ref-exception:typelink
[0 (f)
(control unison-exception:typelink k
(control ref-exception:typelink k
(let ([disp (describe-value f)])
(raise (make-exn:bug "builtin.bug" disp))))]]))
View File
@ -106,37 +106,36 @@
(or (exn:fail:contract:divide-by-zero? e)
(exn:fail:contract:non-fixnum-result? e)))
;; TODO Replace strings with proper type links once we have them
(define (try-eval thunk)
(with-handlers
([exn:break?
(lambda (e)
(exception
unison-threadkilledfailure:typelink
ref-threadkilledfailure:typelink
(string->chunked-string "thread killed")
()))]
ref-unit-unit))]
[exn:io?
(lambda (e)
(exception
unison-iofailure:typelink
(exception->string e) ()))]
ref-iofailure:typelink
(exception->string e) ref-unit-unit))]
[exn:arith?
(lambda (e)
(exception
unison-arithfailure:typelink
ref-arithfailure:typelink
(exception->string e)
()))]
ref-unit-unit))]
[exn:bug? (lambda (e) (exn:bug->exception e))]
[exn:fail?
(lambda (e)
(exception
unison-runtimefailure:typelink
ref-runtimefailure:typelink
(exception->string e)
()))]
ref-unit-unit))]
[(lambda (x) #t)
(lambda (e)
(exception
unison-miscfailure:typelink
(string->chunked-string "unknown exception")
e))])
ref-miscfailure:typelink
(exception->string e)
ref-unit-unit))])
View File

@ -30,6 +30,9 @@
chunked-string-foldMap-chunks
unison-tuple
list->unison-tuple
freeze-bytevector!
freeze-vector!
freeze-subvector
@ -69,6 +72,7 @@
build-path
path->string
match
match*
for/fold)
(string-copy! racket-string-copy!)
(bytes-append bytevector-append)
@ -184,12 +188,43 @@
[sfx (if (<= l 10) "" "...")])
(string-append "32x" (substring s 0 10) sfx)))
(define (describe-tuple x)
(define (format-tuple l)
(for/fold
([sep ")"]
[bits '()]
#:result (apply string-append (cons "(" bits)))
([e l])
(values ", " (list* (describe-value e) sep bits))))
(define (format-non-tuple l)
(for/fold
([result #f])
([e l])
(let ([de (describe-value e)])
(if (not result) de
(string-append "Cons (" de ") (" result ")")))))
(let rec ([acc '()] [tup x])
(match tup
[(unison-data r t (list x y))
#:when (eq? r ref-tuple:typelink)
(rec (cons x acc) y)]
[(unison-data r t (list))
#:when (eq? r ref-unit:typelink)
(format-tuple acc)]
[else
(format-non-tuple (cons tup acc))])))
(define (describe-value x)
(match x
[(unison-sum t fs)
(let ([tt (number->string t)]
[vs (describe-list-br fs)])
(string-append "Sum " tt " " vs))]
[(unison-data r t fs)
#:when (eq? r ref-tuple:typelink)
(describe-tuple x)]
[(unison-data r t fs)
(let ([tt (number->string t)]
[rt (describe-ref r)]
@ -258,62 +293,165 @@
[else sc]))]))
; universal-compares two lists of values lexicographically
(define (lexico-compare ls rs)
(define (lexico-compare ls rs cmp-ty)
(let rec ([cls ls] [crs rs])
(cond
[(and (null? cls) (null? crs)) '=]
[else
(comparisons
(universal-compare (car cls) (car crs))
(universal-compare (car cls) (car crs) cmp-ty)
(rec (cdr cls) (cdr crs)))])))
(define (cmp-num l r)
(define ((comparison e? l?) l r)
(cond
[(= l r) '=]
[(< l r) '<]
[(e? l r) '=]
[(l? l r) '<]
[else '>]))
(define (compare-char a b)
(cond
[(char=? a b) '=]
[(char<? a b) '<]
[else '>]))
(define compare-num (comparison = <))
(define compare-char (comparison char=? char<?))
(define compare-byte (comparison = <))
(define compare-bytes (comparison bytes=? bytes<?))
(define compare-string (comparison string=? string<?))
(define (compare-byte a b)
(cond
[(= a b) '=]
[(< a b) '<]
[else '>]))
(define (compare-typelink ll rl)
(match ll
[(unison-typelink-builtin lnm)
(match rl
[(unison-typelink-builtin rnm) (compare-string lnm rnm)]
[(? unison-typelink-derived?) '<])]
[(unison-typelink-derived lh i)
(match rl
[(unison-typelink-derived rh j)
(comparisons
(compare-bytes lh rh)
(compare-num i j))]
[(? unison-typelink-builtin?) '>])]))
(define (universal-compare l r)
(define (compare-termlink ll rl)
(match ll
[(unison-termlink-builtin lnm)
(match rl
[(unison-termlink-builtin rnm)
(compare-string lnm rnm)]
[else '<])]
[(unison-termlink-derived lh i)
(match rl
[(unison-termlink-derived rh j)
(comparisons
(compare-bytes lh rh)
(compare-num i j))]
[(? unison-termlink-builtin?) '>]
[else '<])]
[(unison-termlink-con lr t)
(match rl
[(unison-termlink-con rr u)
(comparisons
(compare-typelink lr rr)
(compare-num t u))]
[else '>])]))
(define (value->category v)
(cond
[(equal? l r) '=]
[(and (number? l) (number? r)) (if (< l r) '< '>)]
[(and (char? l) (char? r)) (if (char<? l r) '< '>)]
[(procedure? v) 0]
[(unison-closure? v) 0]
[(number? v) 1]
[(char? v) 1]
[(boolean? v) 1]
[(unison-data? v) 1]
[(chunked-list? v) 3]
[(chunked-string? v) 3]
[(chunked-bytes? v) 3]
[(unison-termlink? v) 3]
[(unison-typelink? v) 3]
[(bytes? v) 5]))
(define (compare-data l r cmp-ty)
(match* (l r)
[((unison-data lr lt lfs) (unison-data rr rt rfs))
(compare-data-stuff lr lt lfs rr rt rfs cmp-ty)]))
(define (compare-data-stuff lr lt lfs rr rt rfs cmp-ty)
(define new-cmp-ty (or cmp-ty (eq? lr builtin-any:typelink)))
(comparisons
(if cmp-ty (compare-typelink lr rr) '=)
(compare-num lt rt)
(compare-num (length lfs) (length rfs))
(lexico-compare lfs rfs new-cmp-ty)))
; gives links to compare values as pseudo- or actual data types.
; This is how the interpreter works, so this is an attempt to obtain
; the same ordering.
(define (pseudo-data-link v)
(cond
[(boolean? v) builtin-boolean:typelink]
[(char? v) builtin-char:typelink]
[(flonum? v) builtin-float:typelink]
[(and (number? v) (negative? v)) builtin-int:typelink]
[(number? v) builtin-nat:typelink]
[(unison-data? v) (unison-data-ref v)]))
(define (compare-proc l r cmp-ty)
(define (unpack v)
(if (procedure? v)
(values (lookup-function-link v) '())
(values
(lookup-function-link (unison-closure-code v))
(unison-closure-env v))))
(define-values (lnl envl) (unpack l))
(define-values (lnr envr) (unpack r))
(comparisons
(compare-termlink lnl lnr)
(lexico-compare envl envr cmp-ty)))
(define (compare-timespec l r)
(comparisons
(compare-num (unison-timespec-sec l) (unison-timespec-sec r))
(compare-num (unison-timespec-nsec l) (unison-timespec-nsec r))))
(define (universal-compare l r [cmp-ty #f])
(define (u-proc? v)
(or (procedure? v) (unison-closure? v)))
(cond
[(eq? l r) '=] ; optimistic equality case
[(and (boolean? l) (boolean? r)) (if r '< '>)]
[(and (chunked-list? l) (chunked-list? r)) (chunked-list-compare/recur l r universal-compare)]
[(and (char? l) (char? r)) (if (char<? l r) '< '>)]
[(and (number? l) (number? r)) (compare-num l r)]
[(and (chunked-list? l) (chunked-list? r))
(chunked-list-compare/recur l r universal-compare)]
[(and (chunked-string? l) (chunked-string? r))
(chunked-string-compare/recur l r compare-char)]
[(and (chunked-bytes? l) (chunked-bytes? r))
(chunked-bytes-compare/recur l r compare-byte)]
[(and (bytes? l) (bytes? r))
(cond
[(bytes=? l r) '=]
[(bytes<? l r) '<]
[else '>])]
[(and (unison-data? l) (unison-data? r))
(let ([fls (unison-data-fields l)] [frs (unison-data-fields r)])
(comparisons
(cmp-num (unison-data-tag l) (unison-data-tag r))
(cmp-num (length fls) (length frs))
(lexico-compare fls frs)))]
[else
(let ([dl (describe-value l)]
[dr (describe-value r)])
[(and (unison-data? l) (unison-data? r)) (compare-data l r cmp-ty)]
[(and (bytes? l) (bytes? r)) (compare-bytes l r)]
[(and (u-proc? l) (u-proc? r)) (compare-proc l r cmp-ty)]
[(and (unison-termlink? l) (unison-termlink? r))
(compare-termlink l r)]
[(and (unison-typelink? l) (unison-typelink? r))
(compare-typelink l r)]
[(and (unison-timespec? l) (unison-timespec? r))
(compare-timespec l r)]
[(= 3 (value->category l) (value->category r))
(compare-typelink (pseudo-data-link l) (pseudo-data-link r))]
[(= (value->category l) (value->category r))
(raise
(format
"universal-compare: unimplemented\n~a\n\n~a"
dl dr)))]))
(make-exn:bug
"unsupported universal comparison of values"
(unison-tuple l r)))]
[else
(compare-num (value->category l) (value->category r))]))
(define (list->unison-tuple l)
(foldr ref-tuple-pair ref-unit-unit l))
(define (unison-tuple . l) (list->unison-tuple l))
(define (chunked-string<? l r) (chunked-string=?/recur l r char<?))
@ -380,11 +518,29 @@
(vector-set! dst i (vector-ref src (+ off i)))
(next (fx1- i)))))))
; TODO needs better pretty printing for when it isn't caught
(struct exn:bug (msg a)
#:constructor-name make-exn:bug)
(define (write-exn:bug ex port mode)
(when mode
(write-string "<exn:bug " port))
(let ([recur (case mode
[(#t) write]
[(#f) display]
[else (lambda (v port) (print v port mode))])])
(recur (chunked-string->string (exn:bug-msg ex)) port)
(if mode (write-string " " port) (newline port))
(write-string (describe-value (exn:bug-val ex)) port))
(when mode
(write-string ">")))
(struct exn:bug (msg val)
#:constructor-name make-exn:bug
#:methods gen:custom-write
[(define write-proc write-exn:bug)])
(define (exn:bug->exception b)
(exception
unison-runtimefailure:typelink
ref-runtimefailure:typelink
(exn:bug-msg b)
(exn:bug-a b)))
(exn:bug-val b)))
View File
@ -25,6 +25,7 @@
(struct-out unison-typelink-derived)
(struct-out unison-code)
(struct-out unison-quote)
(struct-out unison-timespec)
define-builtin-link
declare-builtin-link
@ -53,47 +54,47 @@
failure
exception
unison-any:typelink
builtin-any:typelink
unison-any-any:tag
unison-any-any
unison-boolean:typelink
builtin-boolean:typelink
unison-boolean-true:tag
unison-boolean-false:tag
unison-boolean-true
unison-boolean-false
unison-bytes:typelink
unison-char:typelink
unison-float:typelink
unison-int:typelink
unison-nat:typelink
unison-text:typelink
unison-code:typelink
unison-mvar:typelink
unison-pattern:typelink
unison-promise:typelink
unison-sequence:typelink
unison-socket:typelink
unison-tls:typelink
unison-timespec:typelink
unison-threadid:typelink
; unison-value:typelink
builtin-bytes:typelink
builtin-char:typelink
builtin-float:typelink
builtin-int:typelink
builtin-nat:typelink
builtin-text:typelink
builtin-code:typelink
builtin-mvar:typelink
builtin-pattern:typelink
builtin-promise:typelink
builtin-sequence:typelink
builtin-socket:typelink
builtin-tls:typelink
builtin-timespec:typelink
builtin-threadid:typelink
builtin-value:typelink
unison-crypto.hashalgorithm:typelink
unison-char.class:typelink
unison-immutablearray:typelink
unison-immutablebytearray:typelink
unison-mutablearray:typelink
unison-mutablebytearray:typelink
unison-processhandle:typelink
unison-ref.ticket:typelink
unison-tls.cipher:typelink
unison-tls.clientconfig:typelink
unison-tls.privatekey:typelink
unison-tls.serverconfig:typelink
unison-tls.signedcert:typelink
unison-tls.version:typelink
builtin-crypto.hashalgorithm:typelink
builtin-char.class:typelink
builtin-immutablearray:typelink
builtin-immutablebytearray:typelink
builtin-mutablearray:typelink
builtin-mutablebytearray:typelink
builtin-processhandle:typelink
builtin-ref.ticket:typelink
builtin-tls.cipher:typelink
builtin-tls.clientconfig:typelink
builtin-tls.privatekey:typelink
builtin-tls.serverconfig:typelink
builtin-tls.signedcert:typelink
builtin-tls.version:typelink
unison-tuple->list)
@ -253,6 +254,26 @@
(apply (unison-closure-code clo)
(append (unison-closure-env clo) rest))]))
(struct unison-timespec (sec nsec)
#:transparent
#:property prop:equal+hash
(let ()
(define (equal-proc tml tmr rec)
(match tml
[(unison-timespec sl nsl)
(match tmr
[(unison-timespec sr nsr)
(and (= sl sr) (= nsl nsr))])]))
(define ((hash-proc init) tm rec)
(match tm
[(unison-timespec s ns)
(fxxor (fx*/wraparound (rec s) 67)
(fx*/wraparound (rec ns) 71)
(fx*/wraparound init 73))]))
(list equal-proc (hash-proc 3) (hash-proc 5))))
(define-syntax (define-builtin-link stx)
(syntax-case stx ()
[(_ name)
@ -332,63 +353,63 @@
(define (either-get either) (car (unison-sum-fields either)))
; a -> Any
(define unison-any:typelink (unison-typelink-builtin "Any"))
(define builtin-any:typelink (unison-typelink-builtin "Any"))
(define unison-any-any:tag 0)
(define (unison-any-any x)
(data unison-any:typelink unison-any-any:tag x))
(data builtin-any:typelink unison-any-any:tag x))
(define unison-boolean:typelink (unison-typelink-builtin "Boolean"))
(define builtin-boolean:typelink (unison-typelink-builtin "Boolean"))
(define unison-boolean-true:tag 1)
(define unison-boolean-false:tag 0)
(define unison-boolean-true
(data unison-boolean:typelink unison-boolean-true:tag))
(data builtin-boolean:typelink unison-boolean-true:tag))
(define unison-boolean-false
(data unison-boolean:typelink unison-boolean-false:tag))
(data builtin-boolean:typelink unison-boolean-false:tag))
(define unison-bytes:typelink (unison-typelink-builtin "Bytes"))
(define unison-char:typelink (unison-typelink-builtin "Char"))
(define unison-code:typelink (unison-typelink-builtin "Code"))
(define unison-float:typelink (unison-typelink-builtin "Float"))
(define unison-int:typelink (unison-typelink-builtin "Int"))
(define unison-mvar:typelink (unison-typelink-builtin "MVar"))
(define unison-nat:typelink (unison-typelink-builtin "Nat"))
(define unison-pattern:typelink (unison-typelink-builtin "Pattern"))
(define unison-promise:typelink (unison-typelink-builtin "Promise"))
(define unison-sequence:typelink (unison-typelink-builtin "Sequence"))
(define unison-socket:typelink (unison-typelink-builtin "Socket"))
(define unison-text:typelink (unison-typelink-builtin "Text"))
(define unison-tls:typelink (unison-typelink-builtin "Tls"))
(define unison-timespec:typelink (unison-typelink-builtin "TimeSpec"))
(define unison-threadid:typelink (unison-typelink-builtin "ThreadId"))
; (define unison-value:typelink (unison-typelink-builtin "Value"))
(define builtin-bytes:typelink (unison-typelink-builtin "Bytes"))
(define builtin-char:typelink (unison-typelink-builtin "Char"))
(define builtin-code:typelink (unison-typelink-builtin "Code"))
(define builtin-float:typelink (unison-typelink-builtin "Float"))
(define builtin-int:typelink (unison-typelink-builtin "Int"))
(define builtin-mvar:typelink (unison-typelink-builtin "MVar"))
(define builtin-nat:typelink (unison-typelink-builtin "Nat"))
(define builtin-pattern:typelink (unison-typelink-builtin "Pattern"))
(define builtin-promise:typelink (unison-typelink-builtin "Promise"))
(define builtin-sequence:typelink (unison-typelink-builtin "Sequence"))
(define builtin-socket:typelink (unison-typelink-builtin "Socket"))
(define builtin-text:typelink (unison-typelink-builtin "Text"))
(define builtin-tls:typelink (unison-typelink-builtin "Tls"))
(define builtin-timespec:typelink (unison-typelink-builtin "TimeSpec"))
(define builtin-threadid:typelink (unison-typelink-builtin "ThreadId"))
(define builtin-value:typelink (unison-typelink-builtin "Value"))
(define unison-crypto.hashalgorithm:typelink
(define builtin-crypto.hashalgorithm:typelink
(unison-typelink-builtin "crypto.HashAlgorithm"))
(define unison-char.class:typelink
(define builtin-char.class:typelink
(unison-typelink-builtin "Char.Class"))
(define unison-immutablearray:typelink
(define builtin-immutablearray:typelink
(unison-typelink-builtin "ImmutableArray"))
(define unison-immutablebytearray:typelink
(define builtin-immutablebytearray:typelink
(unison-typelink-builtin "ImmutableByteArray"))
(define unison-mutablearray:typelink
(define builtin-mutablearray:typelink
(unison-typelink-builtin "MutableArray"))
(define unison-mutablebytearray:typelink
(define builtin-mutablebytearray:typelink
(unison-typelink-builtin "MutableArray"))
(define unison-processhandle:typelink
(define builtin-processhandle:typelink
(unison-typelink-builtin "ProcessHandle"))
(define unison-ref.ticket:typelink
(define builtin-ref.ticket:typelink
(unison-typelink-builtin "Ref.Ticket"))
(define unison-tls.cipher:typelink
(define builtin-tls.cipher:typelink
(unison-typelink-builtin "Tls.Cipher"))
(define unison-tls.clientconfig:typelink
(define builtin-tls.clientconfig:typelink
(unison-typelink-builtin "Tls.ClientConfig"))
(define unison-tls.privatekey:typelink
(define builtin-tls.privatekey:typelink
(unison-typelink-builtin "Tls.PrivateKey"))
(define unison-tls.serverconfig:typelink
(define builtin-tls.serverconfig:typelink
(unison-typelink-builtin "Tls.ServerConfig"))
(define unison-tls.signedcert:typelink
(define builtin-tls.signedcert:typelink
(unison-typelink-builtin "Tls.SignedCert"))
(define unison-tls.version:typelink
(define builtin-tls.version:typelink
(unison-typelink-builtin "Tls.Version"))
; Type -> Text -> Any -> Failure
View File
@ -43,87 +43,103 @@
; typeLink msg any
(define (Exception typeLink message payload)
(let* ([x7 (unison-any-any payload)]
[x8 (unison-failure-failure typeLink message x7)])
(unison-either-left x8)))
(let* ([a (unison-any-any payload)]
[msg (string->chunked-string message)]
[f (ref-failure-failure typeLink msg a)])
(ref-either-left f)))
(define-unison (isFileOpen.impl.v3 port)
(unison-either-right (not (port-closed? port))))
(ref-either-right (not (port-closed? port))))
(define-unison (ready.impl.v1 port)
(if (byte-ready? port)
(unison-either-right #t)
(ref-either-right #t)
(if (port-eof? port)
(Exception unison-iofailure:typelink "EOF" port)
(unison-either-right #f))))
(Exception ref-iofailure:typelink "EOF" port)
(ref-either-right #f))))
(define-unison (getCurrentDirectory.impl.v3 unit)
(unison-either-right
(ref-either-right
(string->chunked-string (path->string (current-directory)))))
(define-unison (isSeekable.impl.v3 handle)
(unison-either-right
(ref-either-right
(port-has-set-port-position!? handle)))
(define-unison (handlePosition.impl.v3 handle)
(unison-either-right (port-position handle)))
(ref-either-right (port-position handle)))
(define-unison (seekHandle.impl.v3 handle mode amount)
(data-case mode
(0 ()
(set-port-position! handle amount)
(unison-either-right none))
(ref-either-right none))
(1 ()
(let ([current (port-position handle)])
(set-port-position! handle (+ current amount))
(unison-either-right none)))
(ref-either-right none)))
(2 ()
(Exception unison-iofailure:typelink "SeekFromEnd not supported" 0))))
(Exception
ref-iofailure:typelink
"SeekFromEnd not supported"
0))))
(define-unison (getLine.impl.v1 handle)
(let* ([line (read-line handle)])
(if (eof-object? line)
(unison-either-right (string->chunked-string ""))
(unison-either-right (string->chunked-string line))
(ref-either-right (string->chunked-string ""))
(ref-either-right (string->chunked-string line))
)))
(define-unison (getChar.impl.v1 handle)
(let* ([char (read-char handle)])
(if (eof-object? char)
(Exception unison-iofailure:typelink "End of file reached")
(unison-either-right char))))
(Exception
ref-iofailure:typelink
"End of file reached"
ref-unit-unit)
(ref-either-right char))))
(define-unison (getSomeBytes.impl.v1 handle bytes)
(let* ([buffer (make-bytes bytes)]
[line (read-bytes-avail! buffer handle)])
(if (eof-object? line)
(unison-either-right (bytes->chunked-bytes #""))
(unison-either-right (bytes->chunked-bytes buffer))
(ref-either-right (bytes->chunked-bytes #""))
(ref-either-right (bytes->chunked-bytes buffer))
)))
(define-unison (getBuffering.impl.v3 handle)
(case (file-stream-buffer-mode handle)
[(none) (unison-either-right unison-buffermode-no-buffering)]
[(line) (unison-either-right
unison-buffermode-line-buffering)]
[(block) (unison-either-right
unison-buffermode-block-buffering)]
[(#f) (Exception unison-iofailure:typelink "Unable to determine buffering mode of handle" '())]
[else (Exception unison-iofailure:typelink "Unexpected response from file-stream-buffer-mode" '())]))
[(none) (ref-either-right ref-buffermode-no-buffering)]
[(line) (ref-either-right
ref-buffermode-line-buffering)]
[(block) (ref-either-right
ref-buffermode-block-buffering)]
[(#f) (Exception
ref-iofailure:typelink
"Unable to determine buffering mode of handle"
ref-unit-unit)]
[else (Exception
ref-iofailure:typelink
"Unexpected response from file-stream-buffer-mode"
ref-unit-unit)]))
(define-unison (setBuffering.impl.v3 handle mode)
(data-case mode
(0 ()
(file-stream-buffer-mode handle 'none)
(unison-either-right none))
(ref-either-right none))
(1 ()
(file-stream-buffer-mode handle 'line)
(unison-either-right none))
(ref-either-right none))
(2 ()
(file-stream-buffer-mode handle 'block)
(unison-either-right none))
(ref-either-right none))
(3 (size)
(Exception unison-iofailure:typelink "Sized block buffering not supported" '()))))
(Exception
ref-iofailure:typelink
"Sized block buffering not supported"
ref-unit-unit))))
(define (with-buffer-mode port mode)
(file-stream-buffer-mode port mode)
@ -141,8 +157,11 @@
(define-unison (getEcho.impl.v1 handle)
(if (eq? handle stdin)
(unison-either-right (get-stdin-echo))
(Exception unison-iofailure:typelink "getEcho only supported on stdin" '())))
(ref-either-right (get-stdin-echo))
(Exception
ref-iofailure:typelink
"getEcho only supported on stdin"
ref-unit-unit)))
(define-unison (setEcho.impl.v1 handle echo)
(if (eq? handle stdin)
@ -150,23 +169,29 @@
(if echo
(system "stty echo")
(system "stty -echo"))
(unison-either-right none))
(Exception unison-iofailure:typelink "setEcho only supported on stdin" '())))
(ref-either-right none))
(Exception
ref-iofailure:typelink
"setEcho only supported on stdin"
ref-unit-unit)))
(define (get-stdin-echo)
(let ([current (with-output-to-string (lambda () (system "stty -a")))])
(string-contains? current " echo ")))
(define-unison (getArgs.impl.v1 unit)
(unison-either-right
(ref-either-right
(vector->chunked-list
(vector-map string->chunked-string (current-command-line-arguments)))))
(define-unison (getEnv.impl.v1 key)
(let ([value (environment-variables-ref (current-environment-variables) (string->bytes/utf-8 (chunked-string->string key)))])
(if (false? value)
(Exception unison-iofailure:typelink "environmental variable not found" key)
(unison-either-right
(Exception
ref-iofailure:typelink
"environmental variable not found"
key)
(ref-either-right
(string->chunked-string (bytes->string/utf-8 value))))))
;; From https://github.com/sorawee/shlex/blob/5de06500e8c831cfc8dffb99d57a76decc02c569/main.rkt (MIT License)
View File
@ -46,20 +46,26 @@
(with-handlers
[[exn:fail:filesystem?
(lambda (e)
(exception unison-iofailure:typelink (exception->string e) '()))]]
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]]
(right (file-size (chunked-string->string path)))))
(define (getFileTimestamp.impl.v3 path)
(with-handlers
[[exn:fail:filesystem?
(lambda (e)
(exception unison-iofailure:typelink (exception->string e) '()))]]
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]]
(right (file-or-directory-modify-seconds (chunked-string->string path)))))
; as in the Haskell implementation, this checks directories as well as files
(define-unison (fileExists.impl.v3 path)
(let ([path-string (chunked-string->string path)])
(unison-either-right
(ref-either-right
(or
(file-exists? path-string)
(directory-exists? path-string)))))
@ -73,10 +79,10 @@
(define-unison (setCurrentDirectory.impl.v3 path)
(current-directory (chunked-string->string path))
(unison-either-right none))
(ref-either-right none))
(define-unison (createTempDirectory.impl.v3 prefix)
(unison-either-right
(ref-either-right
(string->chunked-string
(path->string
(make-temporary-directory*
@ -85,44 +91,65 @@
(define-unison (createDirectory.impl.v3 file)
(make-directory (chunked-string->string file))
(unison-either-right none))
(ref-either-right none))
(define-unison (removeDirectory.impl.v3 file)
(delete-directory/files (chunked-string->string file))
(unison-either-right none))
(ref-either-right none))
(define-unison (isDirectory.impl.v3 path)
(unison-either-right
(ref-either-right
(directory-exists? (chunked-string->string path))))
(define-unison (renameDirectory.impl.v3 old new)
(rename-file-or-directory (chunked-string->string old)
(chunked-string->string new))
(unison-either-right none))
(ref-either-right none))
(define-unison (renameFile.impl.v3 old new)
(rename-file-or-directory (chunked-string->string old)
(chunked-string->string new))
(unison-either-right none))
(ref-either-right none))
(define-unison (systemTime.impl.v3 unit)
(unison-either-right (current-seconds)))
(ref-either-right (current-seconds)))
(define-unison (systemTimeMicroseconds.impl.v3 unit)
(unison-either-right (inexact->exact (* 1000 (current-inexact-milliseconds)))))
(ref-either-right (inexact->exact (* 1000 (current-inexact-milliseconds)))))
(define (threadCPUTime.v1)
(right (current-process-milliseconds (current-thread))))
(right
(integer->time
(current-process-milliseconds (current-thread)))))
(define (processCPUTime.v1)
(right (current-process-milliseconds 'process)))
(right
(integer->time
(current-process-milliseconds #f))))
(define (realtime.v1)
(right (current-inexact-milliseconds)))
(right
(float->time
(current-inexact-milliseconds))))
(define (monotonic.v1)
(right (current-inexact-monotonic-milliseconds)))
(right
(float->time
(current-inexact-monotonic-milliseconds))))
(define (integer->time msecs)
(unison-timespec
(truncate (/ msecs 1000))
(* (modulo msecs 1000) 1000000)))
(define (float->time msecs)
(unison-timespec
(trunc (/ msecs 1000))
(trunc (* (flmod msecs 1000.0) 1000000))))
;
(define (flt f) (fl->exact-integer (fltruncate f)))
(define (trunc f) (inexact->exact (truncate f)))
(define (sec.v1 ts) (flt (/ ts 1000)))
(define sec.v1 unison-timespec-sec)
(define (nsec.v1 ts) (flt (* (flmod ts 1000.0) 1000000)))
(define nsec.v1 unison-timespec-nsec)
View File
@ -83,10 +83,10 @@
(define (decode-term tm)
(match tm
[(unison-data _ t (list tms))
#:when (= t unison-schemeterm-sexpr:tag)
#:when (= t ref-schemeterm-sexpr:tag)
(map decode-term (chunked-list->list tms))]
[(unison-data _ t (list as h tms))
#:when (= t unison-schemeterm-handle:tag)
#:when (= t ref-schemeterm-handle:tag)
`(handle
,(map
(lambda (tx) (text->linkname tx))
@ -94,27 +94,27 @@
,(text->ident h)
,@(map decode-term (chunked-list->list tms)))]
[(unison-data _ t (list hd sc cs))
#:when (= t unison-schemeterm-cases:tag)
#:when (= t ref-schemeterm-cases:tag)
(assemble-cases
(text->ident hd)
(decode-term sc)
(map decode-term (chunked-list->list cs)))]
[(unison-data _ t (list hd bs bd))
#:when (= t unison-schemeterm-binds:tag)
#:when (= t ref-schemeterm-binds:tag)
`(,(text->ident hd)
,(map decode-binding (chunked-list->list bs))
,(decode-term bd))]
[(unison-data _ t (list tx))
#:when (= t unison-schemeterm-ident:tag)
#:when (= t ref-schemeterm-ident:tag)
(text->ident tx)]
[(unison-data _ t (list tx))
#:when (= t unison-schemeterm-string:tag)
#:when (= t ref-schemeterm-string:tag)
(chunked-string->string tx)]
[(unison-data _ t (list tx))
#:when (= t unison-schemeterm-symbol:tag)
#:when (= t ref-schemeterm-symbol:tag)
`(quote ,(text->ident tx))]
[(unison-data _ t (list ns))
#:when (= t unison-schemeterm-bytevec:tag)
#:when (= t ref-schemeterm-bytevec:tag)
(list->bytes (chunked-list->list ns))]
[else
(raise (format "decode-term: unimplemented case: ~a" tm))]))
@ -131,13 +131,13 @@
(define (decode-syntax dfn)
(match dfn
[(unison-data _ t (list nm vs bd))
#:when (= t unison-schemedefn-define:tag)
#:when (= t ref-schemedefn-define:tag)
(let ([head (map text->ident
(cons nm (chunked-list->list vs)))]
[body (decode-term bd)])
(list 'define-unison head body))]
[(unison-data _ t (list nm bd))
#:when (= t unison-schemedefn-alias:tag)
#:when (= t ref-schemedefn-alias:tag)
(list 'define (text->ident nm) (decode-term bd))]
[else
(raise (format "decode-syntax: unimplemented case: ~a" dfn))]))
@ -167,10 +167,10 @@
(define (decode-ref rf)
(match rf
[(unison-data r t (list name))
#:when (= t unison-reference-builtin:tag)
#:when (= t ref-reference-builtin:tag)
(sum 0 (chunked-string->string name))]
[(unison-data r t (list id))
#:when (= t unison-reference-derived:tag)
#:when (= t ref-reference-derived:tag)
(data-case id
[0 (bs i) (sum 1 bs i)])]))
@ -200,7 +200,7 @@
[(_)
#`(lambda (gr)
(data-case (group-ref-ident gr)
[#,unison-schemeterm-ident:tag (name) name]
[#,ref-schemeterm-ident:tag (name) name]
[else
(raise
(format
@ -242,10 +242,10 @@
(define (termlink->reference rn)
(match rn
[(unison-termlink-builtin name)
(unison-reference-builtin
(ref-reference-builtin
(string->chunked-string name))]
[(unison-termlink-derived bs i)
(unison-reference-derived (unison-id-id bs i))]
(ref-reference-derived (ref-id-id bs i))]
[else (raise "termlink->reference: con case")]))
(define (group-reference gr)
@ -260,19 +260,19 @@
(define runtime-module-map (make-hash))
(define (reflect-derived bs i)
(data unison-reference:typelink unison-reference-derived:tag
(data unison-id:typelink unison-id-id:tag bs i)))
(data ref-reference:typelink ref-reference-derived:tag
(data ref-id:typelink ref-id-id:tag bs i)))
(define (function->groupref f)
(match (lookup-function-link f)
[(unison-termlink-derived h i)
(unison-groupref-group
(unison-reference-derived
(unison-id-id h i))
(ref-groupref-group
(ref-reference-derived
(ref-id-id h i))
0)]
[(unison-termlink-builtin name)
(unison-groupref-group
(unison-reference-builtin (string->chunked-string name))
(ref-groupref-group
(ref-reference-builtin (string->chunked-string name))
0)]
[else (raise "function->groupref: con case")]))
@ -280,19 +280,19 @@
(match vl
[(unison-data _ t (list l))
(cond
[(= t unison-vlit-bytes:tag) l]
[(= t unison-vlit-char:tag) l]
[(= t unison-vlit-bytearray:tag) l]
[(= t unison-vlit-text:tag) l]
[(= t unison-vlit-termlink:tag) (referent->termlink l)]
[(= t unison-vlit-typelink:tag) (reference->typelink l)]
[(= t unison-vlit-float:tag) l]
[(= t unison-vlit-pos:tag) l]
[(= t unison-vlit-neg:tag) (- l)]
[(= t unison-vlit-quote:tag) (unison-quote l)]
[(= t unison-vlit-code:tag) (unison-code l)]
[(= t unison-vlit-array:tag) (vector-map reify-value l)]
[(= t unison-vlit-seq:tag)
[(= t ref-vlit-bytes:tag) l]
[(= t ref-vlit-char:tag) l]
[(= t ref-vlit-bytearray:tag) l]
[(= t ref-vlit-text:tag) l]
[(= t ref-vlit-termlink:tag) (referent->termlink l)]
[(= t ref-vlit-typelink:tag) (reference->typelink l)]
[(= t ref-vlit-float:tag) l]
[(= t ref-vlit-pos:tag) l]
[(= t ref-vlit-neg:tag) (- l)]
[(= t ref-vlit-quote:tag) (unison-quote l)]
[(= t ref-vlit-code:tag) (unison-code l)]
[(= t ref-vlit-array:tag) (vector-map reify-value l)]
[(= t ref-vlit-seq:tag)
; TODO: better map over chunked list
(vector->chunked-list
(vector-map reify-value (chunked-list->vector l)))]
@ -302,19 +302,19 @@
(define (reify-value v)
(match v
[(unison-data _ t (list rf rt bs0))
#:when (= t unison-value-data:tag)
#:when (= t ref-value-data:tag)
(let ([bs (map reify-value (chunked-list->list bs0))])
(make-data (reference->typelink rf) rt bs))]
[(unison-data _ t (list gr bs0))
#:when (= t unison-value-partial:tag)
#:when (= t ref-value-partial:tag)
(let ([bs (map reify-value (chunked-list->list bs0))]
[proc (resolve-proc gr)])
(apply proc bs))]
[(unison-data _ t (list vl))
#:when (= t unison-value-vlit:tag)
#:when (= t ref-value-vlit:tag)
(reify-vlit vl)]
[(unison-data _ t (list bs0 k))
#:when (= t unison-value-cont:tag)
#:when (= t ref-value-cont:tag)
(raise "reify-value: unimplemented cont case")]
[(unison-data r t fs)
(raise "reify-value: unimplemented data case")]
@ -324,75 +324,75 @@
(define (reflect-typelink tl)
(match tl
[(unison-typelink-builtin name)
(unison-reference-builtin
(ref-reference-builtin
(string->chunked-string name))]
[(unison-typelink-derived h i)
(unison-reference-derived (unison-id-id h i))]))
(ref-reference-derived (ref-id-id h i))]))
(define (reflect-termlink tl)
(match tl
[(unison-termlink-con r i)
(unison-referent-con (reflect-typelink r) i)]
(ref-referent-con (reflect-typelink r) i)]
[(unison-termlink-builtin name)
(unison-referent-def
(unison-reference-builtin
(ref-referent-def
(ref-reference-builtin
(string->chunked-string name)))]
[(unison-termlink-derived h i)
(unison-referent-def
(unison-reference-derived
(unison-id-id h i)))]))
(ref-referent-def
(ref-reference-derived
(ref-id-id h i)))]))
(define (number-reference n)
(cond
[(exact-nonnegative-integer? n)
(unison-reference-builtin (string->chunked-string "Nat"))]
(ref-reference-builtin (string->chunked-string "Nat"))]
[(exact-integer? n)
(unison-reference-builtin (string->chunked-string "Int"))]
(ref-reference-builtin (string->chunked-string "Int"))]
[else
(unison-reference-builtin (string->chunked-string "Float"))]))
(ref-reference-builtin (string->chunked-string "Float"))]))
(define (reflect-value v)
(match v
[(? exact-nonnegative-integer?)
(unison-value-vlit (unison-vlit-pos v))]
(ref-value-vlit (ref-vlit-pos v))]
[(? exact-integer?)
(unison-value-vlit (unison-vlit-neg (- v)))]
(ref-value-vlit (ref-vlit-neg (- v)))]
[(? inexact-real?)
(unison-value-vlit (unison-vlit-float v))]
(ref-value-vlit (ref-vlit-float v))]
[(? char?)
(unison-value-vlit (unison-vlit-char v))]
(ref-value-vlit (ref-vlit-char v))]
[(? chunked-bytes?)
(unison-value-vlit (unison-vlit-bytes v))]
(ref-value-vlit (ref-vlit-bytes v))]
[(? bytes?)
(unison-value-vlit (unison-vlit-bytearray v))]
(ref-value-vlit (ref-vlit-bytearray v))]
[(? vector?)
(unison-value-vlit
(unison-vlit-array
(ref-value-vlit
(ref-vlit-array
(vector-map reflect-value v)))]
[(? chunked-string?)
(unison-value-vlit (unison-vlit-text v))]
(ref-value-vlit (ref-vlit-text v))]
; TODO: better map over chunked lists
[(? chunked-list?)
(unison-value-vlit
(unison-vlit-seq
(ref-value-vlit
(ref-vlit-seq
(list->chunked-list
(map reflect-value (chunked-list->list v)))))]
[(? unison-termlink?)
(unison-value-vlit (unison-vlit-termlink (reflect-termlink v)))]
(ref-value-vlit (ref-vlit-termlink (reflect-termlink v)))]
[(? unison-typelink?)
(unison-value-vlit (unison-vlit-typelink (reflect-typelink v)))]
[(unison-code sg) (unison-value-vlit (unison-vlit-code sg))]
[(unison-quote q) (unison-value-vlit (unison-vlit-quote q))]
(ref-value-vlit (ref-vlit-typelink (reflect-typelink v)))]
[(unison-code sg) (ref-value-vlit (ref-vlit-code sg))]
[(unison-quote q) (ref-value-vlit (ref-vlit-quote q))]
[(unison-closure f as)
(unison-value-partial
(ref-value-partial
(function->groupref f)
(list->chunked-list (map reflect-value as)))]
[(? procedure?)
(unison-value-partial
(ref-value-partial
(function->groupref v)
empty-chunked-list)]
[(unison-data rf t fs)
(unison-value-data
(ref-value-data
(reflect-typelink rf)
t
(list->chunked-list (map reflect-value fs)))]))
@ -428,8 +428,8 @@
#:result
(if (null? unkn)
(unison-either-right (list->chunked-list sdbx))
(unison-either-left (list->chunked-list unkn))))
(ref-either-right (list->chunked-list sdbx))
(ref-either-left (list->chunked-list unkn))))
([r (in-chunked-list (value-term-dependencies v))])
@ -593,7 +593,7 @@
,@sdefs
(handle [unison-exception:typelink] top-exn-handler
(handle [ref-exception:typelink] top-exn-handler
(,pname #f)))))
(define (build-runtime-module mname tylinks tmlinks defs)
@ -646,23 +646,22 @@
[fdeps (filter need-dependency? deps)]
[rdeps (remove* refs fdeps)])
(cond
[(null? fdeps) #f]
[(null? fdeps) empty-chunked-list]
[(null? rdeps)
(let ([ndefs (map gen-code udefs)] [sdefs (flatten (map gen-code udefs))]
(let ([ndefs (map gen-code udefs)]
[sdefs (flatten (map gen-code udefs))]
[mname (or mname0 (generate-module-name tmlinks))])
(expand-sandbox tmlinks (map-links depss))
(register-code udefs)
(add-module-associations tmlinks mname)
(add-runtime-module mname tylinks tmlinks sdefs)
#f)]
[else (list->chunked-list rdeps)]))]
[else #f])))
empty-chunked-list)]
[else
(list->chunked-list
(map reference->termlink rdeps))]))]
[else empty-chunked-list])))
(define (unison-POp-CACH dfns0)
(let ([result (add-runtime-code #f dfns0)])
(if result
(sum 1 result)
(sum 0 '()))))
(define (unison-POp-CACH dfns0) (add-runtime-code #f dfns0))
(define (unison-POp-LOAD v0)
(let* ([val (unison-quote-val v0)]
@ -671,14 +670,16 @@
[fdeps (filter need-dependency? (chunked-list->list deps))])
(if (null? fdeps)
(sum 1 (reify-value val))
(sum 0 (list->chunked-list fdeps)))))
(sum 0
(list->chunked-list
(map reference->termlink fdeps))))))
(define (unison-POp-LKUP tl) (lookup-code tl))
(define-unison (builtin-Code.lookup tl)
(match (lookup-code tl)
[(unison-sum 0 (list)) unison-optional-none]
[(unison-sum 1 (list co)) (unison-optional-some co)]))
[(unison-sum 0 (list)) ref-optional-none]
[(unison-sum 1 (list co)) (ref-optional-some co)]))
(define-unison (builtin-validateSandboxed ok v)
(let ([l (sandbox-scheme-value (chunked-list->list ok) v)])

View File

@ -24,13 +24,6 @@
#!r6rs
(library (unison primops)
(export
builtin-Any:typelink
builtin-Char:typelink
builtin-Float:typelink
builtin-Int:typelink
builtin-Nat:typelink
builtin-Text:typelink
builtin-Float.*
builtin-Float.*:termlink
builtin-Float.>=
@ -255,6 +248,8 @@
builtin-Char.Class.is:termlink
builtin-Pattern.captureAs
builtin-Pattern.captureAs:termlink
builtin-Pattern.many.corrected
builtin-Pattern.many.corrected:termlink
builtin-Pattern.isMatch
builtin-Pattern.isMatch:termlink
builtin-IO.fileExists.impl.v3
@ -645,13 +640,6 @@
(unison concurrent)
(racket random))
(define builtin-Any:typelink unison-any:typelink)
(define builtin-Char:typelink unison-char:typelink)
(define builtin-Float:typelink unison-float:typelink)
(define builtin-Int:typelink unison-int:typelink)
(define builtin-Nat:typelink unison-nat:typelink)
(define builtin-Text:typelink unison-text:typelink)
(define-builtin-link Float.*)
(define-builtin-link Float.fromRepresentation)
(define-builtin-link Float.toRepresentation)
@ -754,6 +742,7 @@
(define-builtin-link Universal.compare)
(define-builtin-link Universal.murmurHash)
(define-builtin-link Pattern.captureAs)
(define-builtin-link Pattern.many.corrected)
(define-builtin-link Pattern.isMatch)
(define-builtin-link Char.Class.is)
(define-builtin-link Scope.bytearrayOf)
@ -780,13 +769,13 @@
(define-unison (builtin-List.splitLeft n s)
(match (unison-POp-SPLL n s)
[(unison-sum 0 fs) unison-seqview-empty]
[(unison-sum 1 (list l r)) (unison-seqview-elem l r)]))
[(unison-sum 0 fs) ref-seqview-empty]
[(unison-sum 1 (list l r)) (ref-seqview-elem l r)]))
(define-unison (builtin-List.splitRight n s)
(match (unison-POp-SPLR n s)
[(unison-sum 0 fs) unison-seqview-empty]
[(unison-sum 1 (list l r)) (unison-seqview-elem l r)]))
[(unison-sum 0 fs) ref-seqview-empty]
[(unison-sum 1 (list l r)) (ref-seqview-elem l r)]))
(define-unison (builtin-Float.> x y) (fl> x y))
(define-unison (builtin-Float.< x y) (fl< x y))
@ -876,6 +865,8 @@
(define-unison (builtin-Pattern.captureAs c p)
(capture-as c p))
(define-unison (builtin-Pattern.many.corrected p) (many p))
(define-unison (builtin-Pattern.isMatch p s)
(pattern-match? p s))
@ -896,7 +887,7 @@
(define (reify-exn thunk)
(guard
(e [else
(sum 0 '() (exception->string e) e)])
(sum 0 '() (exception->string e) ref-unit-unit)])
(thunk)))
; Core implemented primops, upon which primops-in-unison can be built.
@ -923,7 +914,7 @@
(define (unison-POp-EQLT s t) (bool (equal? s t)))
(define (unison-POp-LEQT s t) (bool (chunked-string<? s t)))
(define (unison-POp-EQLU x y) (bool (universal=? x y)))
(define (unison-POp-EROR fnm x) ;; TODO raise the correct failure, use display
(define (unison-POp-EROR fnm x)
(let-values ([(p g) (open-string-output-port)])
(put-string p (chunked-string->string fnm))
(put-string p ": ")
@ -977,8 +968,8 @@
(define (->optional v)
(if v
(unison-optional-some v)
unison-optional-none))
(ref-optional-some v)
ref-optional-none))
(define-unison (builtin-Text.indexOf n h)
(->optional (chunked-string-index-of h n)))
@ -1130,7 +1121,7 @@
([exn:fail:contract?
(lambda (e)
(exception
unison-iofailure:typelink
ref-iofailure:typelink
(string->chunked-string
(string-append
"Invalid UTF-8 stream: "
@ -1143,7 +1134,7 @@
(bytes->chunked-bytes (string->bytes/utf-8 (chunked-string->string s))))
(define-unison (builtin-IO.isFileEOF.impl.v3 p)
(unison-either-right (port-eof? p)))
(ref-either-right (port-eof? p)))
(define (unison-FOp-IO.closeFile.impl.v3 h)
(if (input-port? h)
@ -1471,5 +1462,6 @@
(declare-builtin-link builtin-Pattern.isMatch)
(declare-builtin-link builtin-Scope.bytearrayOf)
(declare-builtin-link builtin-Char.Class.is)
(declare-builtin-link builtin-Pattern.many.corrected)
(declare-builtin-link builtin-unsafe.coerceAbilities)
)

View File

@ -30,21 +30,22 @@
[[exn:fail:network?
(lambda (e)
(exception
unison-iofailure:typelink
(exception->string e) '()))]
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]
[exn:fail:contract?
(lambda (e)
(exception
unison-miscfailure:typelink
ref-miscfailure:typelink
(exception->string e)
'()))]
ref-unit-unit))]
[(lambda _ #t)
(lambda (e)
(exception
unison-miscfailure:typelink
ref-miscfailure:typelink
(chunked-string->string
(format "Unknown exception ~a" (exn->string e)))
e))]]
ref-unit-unit))]]
(fn)))
(define (closeSocket.impl.v3 socket)
@ -66,9 +67,9 @@
(define (socketSend.impl.v3 socket data) ; socket bytes -> ()
(if (not (socket-pair? socket))
(exception
unison-iofailure:typelink
"Cannot send on a server socket"
'())
ref-iofailure:typelink
(string->chunked-string "Cannot send on a server socket")
ref-unit-unit)
(begin
(write-bytes (chunked-bytes->bytes data) (socket-pair-output socket))
(flush-output (socket-pair-output socket))
@ -77,8 +78,8 @@
(define (socketReceive.impl.v3 socket amt) ; socket int -> bytes
(if (not (socket-pair? socket))
(exception
unison-iofailure:typelink
"Cannot receive on a server socket")
ref-iofailure:typelink
(string->chunked-string "Cannot receive on a server socket"))
(handle-errors
(lambda ()
(begin
@ -106,20 +107,21 @@
[[exn:fail:network?
(lambda (e)
(exception
unison-iofailure:typelink
(exception->string e) '()))]
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]
[exn:fail:contract?
(lambda (e)
(exception
unison-iofailure:typelink
ref-iofailure:typelink
(exception->string e)
'()))]
ref-unit-unit))]
[(lambda _ #t)
(lambda (e)
(exception
unison-miscfailure:typelink
ref-miscfailure:typelink
(string->chunked-string "Unknown exception")
e))] ]
ref-unit-unit))] ]
(let ([listener (tcp-listen (string->number port ) 4 #f (if (equal? 0 hostname) #f hostname))])
(right listener))))))
@ -135,9 +137,9 @@
(define (socketAccept.impl.v3 listener)
(if (socket-pair? listener)
(exception
unison-iofailure:typelink
ref-iofailure:typelink
(string->chunked-string "Cannot accept on a non-server socket")
'())
ref-unit-unit)
(begin
(let-values ([(input output) (tcp-accept listener)])
(right (socket-pair input output))))))

View File

@ -63,8 +63,9 @@
(if (= 1 (length certs))
(right bytes)
(exception
unison-tlsfailure:typelink
(string->chunked-string "nope") certs)))) ; TODO passing certs is wrong, should either be converted to chunked-list or removed
ref-tlsfailure:typelink
(string->chunked-string "nope")
bytes))))
; We don't actually "decode" certificates, we just validate them
(define (encodeCert bytes) bytes)
@ -112,42 +113,41 @@
(define (ClientConfig.certificates.set certs config) ; list tlsSignedCert tlsClientConfig -> tlsClientConfig
(client-config (client-config-host config) certs))
; TODO: have someone familiar with TLS verify these exception
; classifications
(define (handle-errors fn)
(with-handlers
[[exn:fail:network?
(lambda (e)
(exception
unison-iofailure:typelink
(exception->string e) '()))]
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]
[exn:fail:contract?
(lambda (e)
(exception
unison-miscfailure:typelink
ref-miscfailure:typelink
(exception->string e)
'()))]
ref-unit-unit))]
[(lambda err
(string-contains? (exn->string err) "not valid for hostname"))
(lambda (e)
(exception
unison-tlsfailure:typelink
ref-tlsfailure:typelink
(string->chunked-string "NameMismatch")
'()))]
ref-unit-unit))]
[(lambda err
(string-contains? (exn->string err) "certificate verify failed"))
(lambda (e)
(exception
unison-tlsfailure:typelink
ref-tlsfailure:typelink
(string->chunked-string "certificate verify failed")
'()))]
ref-unit-unit))]
[(lambda _ #t)
(lambda (e)
(exception
unison-miscfailure:typelink
ref-miscfailure:typelink
(string->chunked-string
(format "Unknown exception ~a" (exn->string e)))
e))]]
ref-unit-unit))]]
(fn)))
(define (newClient.impl.v3 config socket)

View File

@ -110,7 +110,7 @@
[[exn:fail?
(lambda (e)
(exception
unison-miscfailure:typelink
ref-miscfailure:typelink
(exception->string e)
'()))]]
(right

View File

@ -13,10 +13,10 @@ usage() {
prev_version="${prev_tag#release/}"
prefix="${prev_version%.*}"
next_version="${prefix}.$(( ${prev_version##*.} + 1 ))"
echo "usage: $0 <version> [target]"
echo "usage: $0 <version> [ref]"
echo ""
echo "version: The new version number"
echo "target: The Git revision to make the release from, defaults to 'origin/trunk'"
echo "ref: The Git revision to make the release from, defaults to 'origin/trunk'"
echo ""
echo "Try: $0 $next_version"
}
@ -53,8 +53,8 @@ git fetch origin trunk
git tag "${tag}" "${target}"
git push origin "${tag}"
gh workflow run release --repo unisonweb/unison \
--field "version=${version}" \
--field "target=${target}"
--ref "${tag}" \
--field "version=${version}"
echo "Kicking off Homebrew update task"
gh workflow run release --repo unisonweb/homebrew-unison --field "version=${version}"

View File

@ -34,6 +34,8 @@ packages:
- lib/unison-util-rope
- parser-typechecker
- unison-cli
- unison-cli-integration
- unison-cli-main
- unison-core
- unison-hashing-v2
- unison-merge

View File

@ -12,7 +12,7 @@ import System.Process (readProcessWithExitCode)
import Text.Printf
integrationTestsDir :: FilePath
integrationTestsDir = "unison-cli" </> "integration-tests" </> "IntegrationTests"
integrationTestsDir = "unison-cli-integration" </> "integration-tests" </> "IntegrationTests"
uFile :: FilePath
uFile = integrationTestsDir </> "print.u"

View File

@ -40,5 +40,5 @@ main = do
```ucm
.> add
.> compile main ./unison-cli/integration-tests/IntegrationTests/main
.> compile main ./unison-cli-integration/integration-tests/IntegrationTests/main
```

View File

@ -29,6 +29,8 @@ main = do
```ucm
Loading changes detected in scratch.u.
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
@ -36,7 +38,7 @@ main = do
⍟ These new definitions are ok to `add`:
structural ability Break
unique type MyBool
type MyBool
main : '{IO, Exception} ()
resume : Request {g, Break} x -> x
@ -47,10 +49,10 @@ main = do
⍟ I've added these definitions:
structural ability Break
unique type MyBool
type MyBool
main : '{IO, Exception} ()
resume : Request {g, Break} x -> x
.> compile main ./unison-cli/integration-tests/IntegrationTests/main
.> compile main ./unison-cli-integration/integration-tests/IntegrationTests/main
```

View File

@ -0,0 +1,68 @@
name: unison-cli-integration
github: unisonweb/unison
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
flags:
optimized:
manual: true
default: false
ghc-options: -Wall
executables:
cli-integration-tests:
when:
- condition: false
other-modules: Paths_unison_cli_integration
source-dirs: integration-tests
main: Suite.hs
ghc-options: -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0
dependencies:
- base
- code-page
- filepath
- directory
- easytest
- process
- shellmet
- time
build-tools:
- unison-cli-main:unison
when:
- condition: flag(optimized)
ghc-options: -O2 -funbox-strict-fields
default-extensions:
- ApplicativeDo
- BangPatterns
- BlockArguments
- DeriveAnyClass
- DeriveFunctor
- DeriveFoldable
- DeriveTraversable
- DeriveGeneric
- DerivingStrategies
- DerivingVia
- DoAndIfThenElse
- DuplicateRecordFields
- FlexibleContexts
- FlexibleInstances
- GADTs
- GeneralizedNewtypeDeriving
- ImportQualifiedPost
- InstanceSigs
- KindSignatures
- LambdaCase
- MultiParamTypeClasses
- MultiWayIf
- NamedFieldPuns
- NumericUnderscores
- OverloadedLabels
- OverloadedStrings
- PatternSynonyms
- RankNTypes
- ScopedTypeVariables
- TupleSections
- TypeApplications
- ViewPatterns

View File

@ -0,0 +1,75 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
--
-- see: https://github.com/sol/hpack
name: unison-cli-integration
version: 0.0.0
homepage: https://github.com/unisonweb/unison#readme
bug-reports: https://github.com/unisonweb/unison/issues
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
build-type: Simple
source-repository head
type: git
location: https://github.com/unisonweb/unison
flag optimized
manual: True
default: False
executable cli-integration-tests
main-is: Suite.hs
other-modules:
IntegrationTests.ArgumentParsing
hs-source-dirs:
integration-tests
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0
build-tool-depends:
unison-cli-main:unison
build-depends:
base
, code-page
, directory
, easytest
, filepath
, process
, shellmet
, time
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields

19
unison-cli-main/LICENSE Normal file
View File

@ -0,0 +1,19 @@
Copyright (c) 2021, Unison Computing, public benefit corp and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,63 @@
name: unison-cli-main
github: unisonweb/unison
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
flags:
optimized:
manual: true
default: false
ghc-options: -Wall
executables:
unison:
when:
- condition: false
other-modules: Paths_unison_cli_main
source-dirs: unison
main: Main.hs
ghc-options: -threaded -rtsopts "-with-rtsopts=-I0 -N -qn4 -qg1 -T" -optP-Wno-nonportable-include-path
dependencies:
- base
- shellmet
- template-haskell
- text
- unison-cli
when:
- condition: flag(optimized)
ghc-options: -O2 -funbox-strict-fields
default-extensions:
- ApplicativeDo
- BangPatterns
- BlockArguments
- DeriveAnyClass
- DeriveFunctor
- DeriveFoldable
- DeriveTraversable
- DeriveGeneric
- DerivingStrategies
- DerivingVia
- DoAndIfThenElse
- DuplicateRecordFields
- FlexibleContexts
- FlexibleInstances
- GADTs
- GeneralizedNewtypeDeriving
- ImportQualifiedPost
- InstanceSigs
- KindSignatures
- LambdaCase
- MultiParamTypeClasses
- MultiWayIf
- NamedFieldPuns
- NumericUnderscores
- OverloadedLabels
- OverloadedStrings
- PatternSynonyms
- RankNTypes
- ScopedTypeVariables
- TupleSections
- TypeApplications
- ViewPatterns

View File

@ -0,0 +1,72 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.36.0.
--
-- see: https://github.com/sol/hpack
name: unison-cli-main
version: 0.0.0
homepage: https://github.com/unisonweb/unison#readme
bug-reports: https://github.com/unisonweb/unison/issues
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
license: MIT
license-file: LICENSE
build-type: Simple
source-repository head
type: git
location: https://github.com/unisonweb/unison
flag optimized
manual: True
default: False
executable unison
main-is: Main.hs
other-modules:
Version
hs-source-dirs:
unison
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall -threaded -rtsopts "-with-rtsopts=-I0 -N -qn4 -qg1 -T" -optP-Wno-nonportable-include-path
build-depends:
base
, shellmet
, template-haskell
, text
, unison-cli
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields

View File

@ -0,0 +1,15 @@
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-partial-type-signatures #-}
module Main (main) where
import Unison.Main qualified
import Version (version)
main :: IO ()
main = Unison.Main.main version

View File

@ -1,7 +1,7 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Version where
module Version (version) where
import Data.Bifunctor
import Data.Text
@ -9,6 +9,10 @@ import Data.Text qualified as Text
import Language.Haskell.TH (Exp (TupE), runIO)
import Language.Haskell.TH.Syntax (Exp (LitE), Lit (StringL))
import Shellmet
import Unison.Version (CommitDate, GitRef, Version (Version))
version :: Version
version = Version gitDescribeWithDate gitDescribe
-- | A formatted descriptor of when and against which commit this unison executable was built
-- E.g. latest-149-g5cef8f851 (built on 2021-10-04)
@ -16,13 +20,9 @@ import Shellmet
gitDescribeWithDate :: Text
gitDescribeWithDate =
let formatDate d = " (built on " <> d <> ")"
(gitRef, date) = gitDescribe
(gitRef, date) = Version.gitDescribe
in gitRef <> formatDate date
type CommitDate = Text
type GitRef = Text
-- | Uses Template Haskell to embed a git descriptor of the commit
-- which was used to build the executable.
-- E.g. latest-149-g5cef8f851 (built on 2021-10-04)

View File

@ -20,6 +20,7 @@ dependencies:
- bytes
- bytestring
- co-log-core
- code-page
- concurrent-output
- configurator
- containers >= 0.6.3
@ -53,6 +54,7 @@ dependencies:
- network-uri
- nonempty-containers
- open-browser
- optparse-applicative >= 0.16.1.0
- pretty-simple
- process
- random >= 1.2.0
@ -63,7 +65,10 @@ dependencies:
- semigroups
- servant
- servant-client
- shellmet
- stm
- template-haskell
- temporary
- text
- text-builder
- text-rope
@ -97,16 +102,29 @@ dependencies:
- warp
- witch
- witherable
- witherable
library:
internal-libraries:
unison-cli-lib:
source-dirs: src
when:
- condition: '!os(windows)'
- condition: "!os(windows)"
dependencies: unix
- condition: false
other-modules: Paths_unison_cli
library:
source-dirs: unison
when:
- condition: false
other-modules: Paths_unison_cli
dependencies:
- code-page
- optparse-applicative >= 0.16.1.0
- shellmet
- template-haskell
- temporary
- unison-cli-lib
tests:
cli-tests:
when:
@ -118,26 +136,11 @@ tests:
- here
- shellmet
- temporary
- unison-cli
- unison-cli-lib
main: Main.hs
source-dirs: tests
executables:
unison:
when:
- condition: false
other-modules: Paths_unison_cli
source-dirs: unison
main: Main.hs
ghc-options: -threaded -rtsopts "-with-rtsopts=-I0 -N -qn4 -qg1 -T" -optP-Wno-nonportable-include-path
dependencies:
- code-page
- optparse-applicative >= 0.16.1.0
- shellmet
- template-haskell
- temporary
- unison-cli
transcripts:
when:
- condition: false
@ -150,25 +153,9 @@ executables:
- easytest
- process
- shellmet
- unison-cli
- unison-cli-lib
- silently
cli-integration-tests:
when:
- condition: false
other-modules: Paths_unison_cli
source-dirs: integration-tests
main: Suite.hs
ghc-options: -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0
dependencies:
- code-page
- easytest
- process
- shellmet
- time
build-tools:
- unison-cli:unison
when:
- condition: flag(optimized)
ghc-options: -O2 -funbox-strict-fields

View File

@ -2523,7 +2523,7 @@ runScheme =
InputPattern
"run.native"
[]
I.Visible
I.Hidden
[("definition to run", Required, exactDefinitionTermQueryArg), ("arguments", ZeroPlus, noCompletionsArg)]
( P.wrapColumn2
[ ( makeExample runScheme ["main", "args"],
@ -2540,7 +2540,7 @@ compileScheme =
InputPattern
"compile.native"
[]
I.Visible
I.Hidden
[("definition to compile", Required, exactDefinitionTermQueryArg), ("output file", Required, filePathArg)]
( P.wrapColumn2
[ ( makeExample compileScheme ["main", "file"],

View File

@ -1,4 +1,4 @@
cabal-version: 1.12
cabal-version: 2.0
-- This file has been generated from package.yaml by hpack version 0.36.0.
--
@ -22,6 +22,147 @@ flag optimized
default: False
library
exposed-modules:
ArgParse
Stats
System.Path
Unison.Main
Unison.Version
hs-source-dirs:
unison
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall
build-depends:
IntervalMap
, ListLike
, aeson >=2.0.0.0
, aeson-pretty
, ansi-terminal
, async
, base
, bytes
, bytestring
, co-log-core
, code-page
, concurrent-output
, configurator
, containers >=0.6.3
, cryptonite
, directory
, either
, errors
, exceptions
, extra
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
, http-client >=0.7.6
, http-client-tls
, http-types
, jwt
, ki
, lens
, lock-file
, lsp >=2.2.0.0
, lsp-types >=2.0.2.0
, megaparsec
, memory
, mtl
, network
, network-simple
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
, random-shuffle
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet
, stm
, template-haskell
, temporary
, text
, text-builder
, text-rope
, these
, these-lens
, time
, transformers
, unison-cli-lib
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2
, unison-core
, unison-core1
, unison-hash
, unison-merge
, unison-parser-typechecker
, unison-prelude
, unison-pretty-printer
, unison-share-api
, unison-share-projects-api
, unison-sqlite
, unison-syntax
, unison-util-base32hex
, unison-util-nametree
, unison-util-relation
, unliftio
, unordered-containers
, uri-encode
, uuid
, vector
, wai
, warp
, witch
, witherable
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields
library unison-cli-lib
exposed-modules:
Compat
Unison.Auth.CredentialFile
@ -180,6 +321,7 @@ library
, bytes
, bytestring
, co-log-core
, code-page
, concurrent-output
, configurator
, containers >=0.6.3
@ -213,6 +355,7 @@ library
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
@ -223,7 +366,10 @@ library
, semigroups
, servant
, servant-client
, shellmet
, stm
, template-haskell
, temporary
, text
, text-builder
, text-rope
@ -264,143 +410,6 @@ library
build-depends:
unix
executable cli-integration-tests
main-is: Suite.hs
other-modules:
IntegrationTests.ArgumentParsing
hs-source-dirs:
integration-tests
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0
build-tools:
unison
build-depends:
IntervalMap
, ListLike
, aeson >=2.0.0.0
, aeson-pretty
, ansi-terminal
, async
, base
, bytes
, bytestring
, co-log-core
, code-page
, concurrent-output
, configurator
, containers >=0.6.3
, cryptonite
, directory
, easytest
, either
, errors
, exceptions
, extra
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
, http-client >=0.7.6
, http-client-tls
, http-types
, jwt
, ki
, lens
, lock-file
, lsp >=2.2.0.0
, lsp-types >=2.0.2.0
, megaparsec
, memory
, mtl
, network
, network-simple
, network-uri
, nonempty-containers
, open-browser
, pretty-simple
, process
, random >=1.2.0
, random-shuffle
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet
, stm
, text
, text-builder
, text-rope
, these
, these-lens
, time
, transformers
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2
, unison-core
, unison-core1
, unison-hash
, unison-merge
, unison-parser-typechecker
, unison-prelude
, unison-pretty-printer
, unison-share-api
, unison-share-projects-api
, unison-sqlite
, unison-syntax
, unison-util-base32hex
, unison-util-nametree
, unison-util-relation
, unliftio
, unordered-containers
, uri-encode
, uuid
, vector
, wai
, warp
, witch
, witherable
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields
executable transcripts
main-is: Transcripts.hs
hs-source-dirs:
@ -485,6 +494,7 @@ executable transcripts
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
@ -498,145 +508,6 @@ executable transcripts
, shellmet
, silently
, stm
, text
, text-builder
, text-rope
, these
, these-lens
, time
, transformers
, unison-cli
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2
, unison-core
, unison-core1
, unison-hash
, unison-merge
, unison-parser-typechecker
, unison-prelude
, unison-pretty-printer
, unison-share-api
, unison-share-projects-api
, unison-sqlite
, unison-syntax
, unison-util-base32hex
, unison-util-nametree
, unison-util-relation
, unliftio
, unordered-containers
, uri-encode
, uuid
, vector
, wai
, warp
, witch
, witherable
default-language: Haskell2010
if flag(optimized)
ghc-options: -O2 -funbox-strict-fields
executable unison
main-is: Main.hs
other-modules:
ArgParse
Stats
System.Path
Version
hs-source-dirs:
unison
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFunctor
DeriveFoldable
DeriveTraversable
DeriveGeneric
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall -threaded -rtsopts "-with-rtsopts=-I0 -N -qn4 -qg1 -T" -optP-Wno-nonportable-include-path
build-depends:
IntervalMap
, ListLike
, aeson >=2.0.0.0
, aeson-pretty
, ansi-terminal
, async
, base
, bytes
, bytestring
, co-log-core
, code-page
, concurrent-output
, configurator
, containers >=0.6.3
, cryptonite
, directory
, either
, errors
, exceptions
, extra
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
, http-client >=0.7.6
, http-client-tls
, http-types
, jwt
, ki
, lens
, lock-file
, lsp >=2.2.0.0
, lsp-types >=2.0.2.0
, megaparsec
, memory
, mtl
, network
, network-simple
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
, random-shuffle
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet
, stm
, template-haskell
, temporary
, text
@ -646,7 +517,7 @@ executable unison
, these-lens
, time
, transformers
, unison-cli
, unison-cli-lib
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2
@ -770,6 +641,7 @@ test-suite cli-tests
, network-uri
, nonempty-containers
, open-browser
, optparse-applicative >=0.16.1.0
, pretty-simple
, process
, random >=1.2.0
@ -782,6 +654,7 @@ test-suite cli-tests
, servant-client
, shellmet
, stm
, template-haskell
, temporary
, text
, text-builder
@ -790,7 +663,7 @@ test-suite cli-tests
, these-lens
, time
, transformers
, unison-cli
, unison-cli-lib
, unison-codebase
, unison-codebase-sqlite
, unison-codebase-sqlite-hashing-v2

View File

@ -6,7 +6,7 @@
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-partial-type-signatures #-}
module Main
module Unison.Main
( main,
)
where
@ -89,9 +89,10 @@ import Unison.Server.Backend qualified as Backend
import Unison.Server.CodebaseServer qualified as Server
import Unison.Symbol (Symbol)
import Unison.Util.Pretty qualified as P
import Unison.Version (Version)
import Unison.Version qualified as Version
import UnliftIO qualified
import UnliftIO.Directory (getHomeDirectory)
import Version qualified
type Runtimes =
(RTI.Runtime Symbol, RTI.Runtime Symbol, RTI.Runtime Symbol)
@ -102,8 +103,8 @@ fixNativeRuntimePath override = do
let ucr = takeDirectory ucm </> "runtime" </> "unison-runtime" <.> exeExtension
pure $ maybe ucr id override
main :: IO ()
main = do
main :: Version -> IO ()
main version = do
-- Replace the default exception handler with one that complains loudly, because we shouldn't have any uncaught exceptions.
-- Sometimes `show` and `displayException` are different strings; in this case, we want to show them both, so this
-- issue is easier to debug.
@ -131,17 +132,17 @@ main = do
withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do
interruptHandler <- defaultInterruptHandler
withInterruptHandler interruptHandler $ do
void $ Ki.fork scope initHTTPClient
void $ Ki.fork scope (initHTTPClient version)
progName <- getProgName
-- hSetBuffering stdout NoBuffering -- cool
(renderUsageInfo, globalOptions, command) <- parseCLIArgs progName (Text.unpack Version.gitDescribeWithDate)
(renderUsageInfo, globalOptions, command) <- parseCLIArgs progName (Text.unpack (Version.gitDescribeWithDate version))
nrtp <- fixNativeRuntimePath (nativeRuntimePath globalOptions)
let GlobalOptions {codebasePathOption = mCodePathOption, exitOption, lspFormattingConfig} = globalOptions
withConfig mCodePathOption \config -> do
currentDir <- getCurrentDirectory
case command of
PrintVersion ->
Text.putStrLn $ Text.pack progName <> " version: " <> Version.gitDescribeWithDate
Text.putStrLn $ Text.pack progName <> " version: " <> Version.gitDescribeWithDate version
Init -> do
exitError
( P.lines
@ -156,7 +157,7 @@ main = do
)
Run (RunFromSymbol mainName) args -> do
getCodebaseOrExit mCodePathOption (SC.MigrateAutomatically SC.Backup SC.Vacuum) \(_, _, theCodebase) -> do
RTI.withRuntime False RTI.OneOff Version.gitDescribeWithDate \runtime -> do
RTI.withRuntime False RTI.OneOff (Version.gitDescribeWithDate version) \runtime -> do
withArgs args (execute theCodebase runtime mainName) >>= \case
Left err -> exitError err
Right () -> pure ()
@ -175,6 +176,7 @@ main = do
let serverUrl = Nothing
let startPath = Nothing
launch
version
currentDir
config
rt
@ -201,6 +203,7 @@ main = do
let serverUrl = Nothing
let startPath = Nothing
launch
version
currentDir
config
rt
@ -244,7 +247,7 @@ main = do
Left err -> exitError err
Right () -> pure ()
where
vmatch = v == Version.gitDescribeWithDate
vmatch = v == Version.gitDescribeWithDate version
ws s = P.wrap (P.text s)
ifile
| 'c' : 'u' : '.' : rest <- reverse file = reverse rest
@ -260,7 +263,7 @@ main = do
P.indentN 4 $ P.text v,
"",
"Your version",
P.indentN 4 $ P.text Version.gitDescribeWithDate,
P.indentN 4 $ P.text $ Version.gitDescribeWithDate version,
"",
P.wrap $
"The program was compiled from hash "
@ -279,7 +282,7 @@ main = do
\that matches your version of Unison."
]
Transcript shouldFork shouldSaveCodebase mrtsStatsFp transcriptFiles -> do
let action = runTranscripts Verbosity.Verbose renderUsageInfo shouldFork shouldSaveCodebase mCodePathOption nrtp transcriptFiles
let action = runTranscripts version Verbosity.Verbose renderUsageInfo shouldFork shouldSaveCodebase mCodePathOption nrtp transcriptFiles
case mrtsStatsFp of
Nothing -> action
Just fp -> recordRtsStats fp action
@ -334,6 +337,7 @@ main = do
PT.putPrettyLn $ P.string "Now starting the Unison Codebase Manager (UCM)..."
launch
version
currentDir
config
runtime
@ -352,11 +356,11 @@ main = do
-- (runtime, sandboxed runtime)
withRuntimes :: FilePath -> RTI.RuntimeHost -> (Runtimes -> IO a) -> IO a
withRuntimes nrtp mode action =
RTI.withRuntime False mode Version.gitDescribeWithDate \runtime -> do
RTI.withRuntime True mode Version.gitDescribeWithDate \sbRuntime ->
RTI.withRuntime False mode (Version.gitDescribeWithDate version) \runtime -> do
RTI.withRuntime True mode (Version.gitDescribeWithDate version) \sbRuntime ->
action . (runtime,sbRuntime,)
-- startNativeRuntime saves the path to `unison-runtime`
=<< RTI.startNativeRuntime Version.gitDescribeWithDate nrtp
=<< RTI.startNativeRuntime (Version.gitDescribeWithDate version) nrtp
withConfig :: Maybe CodebasePathOption -> (Config -> IO a) -> IO a
withConfig mCodePathOption action = do
UnliftIO.bracket
@ -371,9 +375,9 @@ main = do
-- | Set user agent and configure TLS on global http client.
-- Note that the authorized http client is distinct from the global http client.
initHTTPClient :: IO ()
initHTTPClient = do
let (ucmVersion, _date) = Version.gitDescribe
initHTTPClient :: Version -> IO ()
initHTTPClient version = do
let (ucmVersion, _date) = Version.gitDescribe version
let userAgent = Text.encodeUtf8 $ "UCM/" <> ucmVersion
let addUserAgent req = do
pure $ req {HTTP.requestHeaders = ("User-Agent", userAgent) : HTTP.requestHeaders req}
@ -405,18 +409,19 @@ prepareTranscriptDir verbosity shouldFork mCodePathOption shouldSaveCodebase = d
pure tmp
runTranscripts' ::
Version ->
String ->
Maybe FilePath ->
FilePath ->
FilePath ->
NonEmpty MarkdownFile ->
IO Bool
runTranscripts' progName mcodepath nativeRtp transcriptDir markdownFiles = do
runTranscripts' version progName mcodepath nativeRtp transcriptDir markdownFiles = do
currentDir <- getCurrentDirectory
configFilePath <- getConfigFilePath mcodepath
-- We don't need to create a codebase through `getCodebaseOrExit` as we've already done so previously.
and <$> getCodebaseOrExit (Just (DontCreateCodebaseWhenMissing transcriptDir)) (SC.MigrateAutomatically SC.Backup SC.Vacuum) \(_, codebasePath, theCodebase) -> do
TR.withTranscriptRunner Verbosity.Verbose Version.gitDescribeWithDate nativeRtp (Just configFilePath) $ \runTranscript -> do
TR.withTranscriptRunner Verbosity.Verbose (Version.gitDescribeWithDate version) nativeRtp (Just configFilePath) $ \runTranscript -> do
for markdownFiles $ \(MarkdownFile fileName) -> do
transcriptSrc <- readUtf8 fileName
result <- runTranscript fileName transcriptSrc (codebasePath, theCodebase)
@ -459,6 +464,7 @@ runTranscripts' progName mcodepath nativeRtp transcriptDir markdownFiles = do
pure succeeded
runTranscripts ::
Version ->
Verbosity.Verbosity ->
UsageRenderer ->
ShouldForkCodebase ->
@ -467,7 +473,7 @@ runTranscripts ::
FilePath ->
NonEmpty String ->
IO ()
runTranscripts verbosity renderUsageInfo shouldFork shouldSaveTempCodebase mCodePathOption nativeRtp args = do
runTranscripts version verbosity renderUsageInfo shouldFork shouldSaveTempCodebase mCodePathOption nativeRtp args = do
markdownFiles <- case traverse (first (pure @[]) . markdownFile) args of
Failure invalidArgs -> do
PT.putPrettyLn $
@ -485,7 +491,7 @@ runTranscripts verbosity renderUsageInfo shouldFork shouldSaveTempCodebase mCode
progName <- getProgName
transcriptDir <- prepareTranscriptDir verbosity shouldFork mCodePathOption shouldSaveTempCodebase
completed <-
runTranscripts' progName (Just transcriptDir) nativeRtp transcriptDir markdownFiles
runTranscripts' version progName (Just transcriptDir) nativeRtp transcriptDir markdownFiles
case shouldSaveTempCodebase of
DontSaveCodebase -> removeDirectoryRecursive transcriptDir
SaveCodebase _ ->
@ -510,6 +516,7 @@ defaultInitialPath :: Path.Absolute
defaultInitialPath = Path.absoluteEmpty
launch ::
Version ->
FilePath ->
Config ->
Rt.Runtime Symbol ->
@ -524,12 +531,12 @@ launch ::
(Path.Absolute -> STM ()) ->
CommandLine.ShouldWatchFiles ->
IO ()
launch dir config runtime sbRuntime nRuntime codebase inputs serverBaseUrl mayStartingPath initResult notifyRootChange notifyPathChange shouldWatchFiles = do
launch version dir config runtime sbRuntime nRuntime codebase inputs serverBaseUrl mayStartingPath initResult notifyRootChange notifyPathChange shouldWatchFiles = do
showWelcomeHint <- Codebase.runTransaction codebase Queries.doProjectsExist
let isNewCodebase = case initResult of
CreatedCodebase -> NewlyCreatedCodebase
OpenedCodebase -> PreviouslyCreatedCodebase
(ucmVersion, _date) = Version.gitDescribe
(ucmVersion, _date) = Version.gitDescribe version
welcome = Welcome.welcome isNewCodebase ucmVersion showWelcomeHint
in CommandLine.main
dir
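
The net effect of the `Unison.Main` hunks above is a dependency-injection refactor: `main`, `initHTTPClient`, `runTranscripts`, and `launch` now take the `Version` as an explicit argument instead of importing a Template-Haskell-generated `Version` module, which lets `Unison.Main` move out of the executable into the `unison-cli` library so the new `unison-cli-main` executable (and the integration-test package) can reuse it. A minimal sketch of the resulting call shape; `placeholderVersion` is illustrative and not part of the commit (the real `unison-cli-main` fills these fields in with Template Haskell):

```haskell
{-# LANGUAGE OverloadedStrings #-}

-- Sketch only: an executable constructs its own Version value and injects it
-- into the shared entry point. placeholderVersion is a made-up value.
import qualified Unison.Main
import Unison.Version (Version (..))

placeholderVersion :: Version
placeholderVersion =
  Version
    { gitDescribeWithDate = "dev (built locally)"
    , gitDescribe = ("dev", "unknown date")
    }

main :: IO ()
main = Unison.Main.main placeholderVersion
```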

View File

@ -0,0 +1,12 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Unison.Version where
import Data.Text (Text)
data Version = Version {gitDescribeWithDate :: Text, gitDescribe :: (GitRef, CommitDate)}
type CommitDate = Text
type GitRef = Text
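
The new `Unison.Version` module is just this record plus the `GitRef` and `CommitDate` aliases that previously lived in the executable's `Version` module. A minimal sketch of how downstream code reads it, mirroring the destructuring `initHTTPClient` uses to build the HTTP `User-Agent` header; `describeUserAgent` is an illustrative name, not part of the commit:

```haskell
{-# LANGUAGE OverloadedStrings #-}

-- Sketch only: pull the short git ref out of a Version, as the User-Agent
-- code in Unison.Main does with Version.gitDescribe.
import Data.Text (Text)
import Unison.Version (Version (..))

describeUserAgent :: Version -> Text
describeUserAgent v =
  let (ucmVersion, _date) = gitDescribe v
   in "UCM/" <> ucmVersion
```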

View File

@ -277,7 +277,7 @@ data TermEntry v a = TermEntry
deriving (Eq, Ord, Show, Generic)
termEntryLabeledDependencies :: Ord v => TermEntry v a -> Set LD.LabeledDependency
termEntryLabeledDependencies TermEntry {termEntryType, termEntryReferent, termEntryTag} =
termEntryLabeledDependencies TermEntry {termEntryType, termEntryReferent, termEntryTag, termEntryName} =
foldMap Type.labeledDependencies termEntryType
<> Set.singleton (LD.TermReferent (Cv.referent2to1UsingCT ct termEntryReferent))
where
@ -285,7 +285,8 @@ termEntryLabeledDependencies TermEntry {termEntryType, termEntryReferent, termEn
ct = case termEntryTag of
ServerTypes.Constructor ServerTypes.Ability -> V2Referent.EffectConstructor
ServerTypes.Constructor ServerTypes.Data -> V2Referent.DataConstructor
_ -> error "termEntryLabeledDependencies: not a constructor, but one was required"
ServerTypes.Doc -> V2Referent.DataConstructor
_ -> error $ "termEntryLabeledDependencies: Term is not a constructor, but the referent was a constructor. Tag: " <> show termEntryTag <> " Name: " <> show termEntryName <> " Referent: " <> show termEntryReferent
termEntryDisplayName :: TermEntry v a -> Text
termEntryDisplayName = HQ'.toTextWith Name.toText . termEntryHQName

View File

@ -21,6 +21,11 @@ to `Tests.check` and `Tests.checkEqual`).
.> add
```
```ucm:hide
.> load unison-src/builtin-tests/link-tests.u
.> add
```
```ucm:hide
.> load unison-src/builtin-tests/math-tests.u
.> add

View File

@ -20,6 +20,9 @@ io.tests = Tests.main do
!io.test_isFileOpen
!io.test_ready
!io.test_now
!io.test_monotonic
!io.test_processCPUTime
!io.test_threadCPUTime
!io.test_isSeekable
!io.test_handlePosition
!io.test_renameDirectory
@ -79,6 +82,29 @@ io.test_now = do
else
Tests.fail "!now" "now is too small"
io.test_threadCPUTime = do
match !threadCPUTime with
Duration s ns
| (s == +0) && (ns == 0) ->
Tests.pass "!threadCPUTime"
| otherwise ->
Tests.pass "!threadCPUTime"
io.test_processCPUTime = do
match !processCPUTime with
Duration s ns
| (s == +0) && (ns == 0) ->
Tests.pass "!processCPUTime"
| otherwise ->
Tests.pass "!processCPUTime"
io.test_monotonic = do
match !Clock.monotonic with
Duration s ns
| (s == +0) && (ns == 0) ->
Tests.pass "!Clock.monotonic"
| otherwise ->
Tests.pass "!Clock.monotonic"
io.test_createTempDirectory = do
tmp = (createTempDirectory (FilePath "prefix-"))

View File

@ -21,6 +21,11 @@ to `Tests.check` and `Tests.checkEqual`).
.> add
```
```ucm:hide
.> load unison-src/builtin-tests/link-tests.u
.> add
```
```ucm:hide
.> load unison-src/builtin-tests/math-tests.u
.> add
@ -98,3 +103,17 @@ to `Tests.check` and `Tests.checkEqual`).
```ucm
.> run.native tests.jit.only
```
```unison
foo = do
go : Nat ->{Exception} ()
go = cases
0 -> ()
n -> go (decrement n)
go 1000
```
```ucm
.> run.native foo
.> run.native foo
```

View File

@ -17,3 +17,35 @@ to `Tests.check` and `Tests.checkEqual`).
()
```
```unison
foo = do
go : Nat ->{Exception} ()
go = cases
0 -> ()
n -> go (decrement n)
go 1000
```
```ucm
Loading changes detected in scratch.u.
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These new definitions are ok to `add`:
foo : '{Exception} ()
```
```ucm
.> run.native foo
()
.> run.native foo
()
```

View File

@ -0,0 +1,58 @@
linkstuff.termlinks =
[ termLink data.Map.adjust
, termLink data.Map.alter
, termLink data.Map.contains
, termLink data.Map.delete
, termLink data.Map.difference
, termLink data.List.any
, termLink data.List.apply
, termLink data.List.compare
, termLink data.List.contains
, termLink data.List.count
, termLink data.List.diagonal
, termLink data.List.distinct
, termLink data.NatSet.alter
, termLink data.NatSet.any
, termLink data.NatSet.empty
, termLink data.NatSet.filter
, termLink data.Tuple.at1
, termLink data.Tuple.at2
, termLink data.Tuple.at3
, termLink data.Tuple.bimap
, termLink data.Tuple.mapLeft
, termLink data.graph.SCC.map
]
linkstuff.typelinks =
[ typeLink data.Map
, typeLink Nat
, typeLink Char
, typeLink data.List
, typeLink data.NatSet
, typeLink data.Tuple
]
linkstuff.tmlpairs =
flatMap (l -> map (r -> (l,r)) termlinks) termlinks
linkstuff.tylpairs =
flatMap (l -> map (r -> (l,r)) typelinks) typelinks
linkstuff.tests : '{Tests,IO} ()
linkstuff.tests = do
use Universal gteq
if all (cases (l,r) -> (l === r) || (l !== r)) tmlpairs
then pass "term link equality"
else fail "term link equality" ""
if all (cases (l,r) -> (l === r) || (l !== r)) tylpairs
then pass "type link equality"
else fail "type link equality" ""
if all (cases (l,r) -> gteq l r || gteq r l) tmlpairs
then pass "term link comparison"
else fail "term link comparison" ""
if all (cases (l,r) -> gteq l r || gteq r l) tylpairs
then pass "type link comparison"
else fail "type link comparison" ""

View File

@ -95,7 +95,10 @@ serial.loadSelfContained name path =
Right [] -> pass (name ++ " links validated")
Right _ -> fail name "failed link validation"
_ = cache_ deps
match cache_ deps with
[] -> ()
miss -> raiseFailure "code missing deps" miss
checkCached name deps
match Value.load v with
Left l -> raiseFailure "value missing deps" l

View File

@ -17,6 +17,7 @@ tests = Tests.main do
!array.tests
!codelookup.tests
!sandbox.tests
!linkstuff.tests
murmur.hash.tests = do
targets =

View File

@ -5,7 +5,7 @@ Next, we'll download the jit project and generate a few Racket files from it.
```ucm
.> project.create-empty jit-setup
jit-setup/main> pull @unison/internal/releases/0.0.12 lib.jit
jit-setup/main> pull @unison/internal/releases/0.0.13 lib.jit
```
```unison

View File

@ -20,9 +20,9 @@ Next, we'll download the jit project and generate a few Racket files from it.
🎉 🥳 Happy coding!
jit-setup/main> pull @unison/internal/releases/0.0.12 lib.jit
jit-setup/main> pull @unison/internal/releases/0.0.13 lib.jit
Downloaded 15048 entities.
Downloaded 15053 entities.

File diff suppressed because it is too large

View File

@ -52,7 +52,7 @@ The `builtins.merge` command adds the known builtins to a `builtin` subnamespace
41. Optional (type)
42. Optional/ (2 terms)
43. Pattern (builtin type)
44. Pattern/ (8 terms)
44. Pattern/ (9 terms)
45. Ref (builtin type)
46. Ref/ (2 terms)
47. Request (builtin type)

View File

@ -23,7 +23,7 @@ Technically, the definitions all exist, but they have no names. `builtins.merge`
.foo> ls
1. builtin/ (455 terms, 71 types)
1. builtin/ (456 terms, 71 types)
```
And for a limited time, you can get even more builtin goodies:
@ -35,7 +35,7 @@ And for a limited time, you can get even more builtin goodies:
.foo> ls
1. builtin/ (627 terms, 89 types)
1. builtin/ (628 terms, 89 types)
```
More typically, you'd start out by pulling `base`.

View File

@ -119,13 +119,13 @@ it's still in the `history` of the parent namespace and can be resurrected at an
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #1qpabd7ooq
⊙ 1. #mqis95ft23
- Deletes:
feature1.y
⊙ 2. #jhqb98218p
⊙ 2. #5ro9c9692q
+ Adds / updates:
@ -136,26 +136,26 @@ it's still in the `history` of the parent namespace and can be resurrected at an
Original name New name(s)
feature1.y master.y
⊙ 3. #n25372gm2b
⊙ 3. #da33td9rni
+ Adds / updates:
feature1.y
⊙ 4. #b9s4c5ut48
⊙ 4. #ks6rftepdv
> Moves:
Original name New name
x master.x
⊙ 5. #9uq9mhup43
⊙ 5. #dgcqc7jftr
+ Adds / updates:
x
□ 6. #8f47abto6r (start of history)
□ 6. #ms344fdodl (start of history)
```
To resurrect an old version of a namespace, you can learn its hash via the `history` command, then use `fork #namespacehash .newname`.

View File

@ -80,7 +80,7 @@ Should be able to move the term, type, and namespace, including its types, terms
1. Bar (Nat)
2. Bar (type)
3. Bar/ (4 terms, 1 type)
4. builtin/ (627 terms, 89 types)
4. builtin/ (628 terms, 89 types)
.> ls Bar
@ -145,7 +145,7 @@ bonk = 5
.z> ls
1. builtin/ (455 terms, 71 types)
1. builtin/ (456 terms, 71 types)
2. zonk (Nat)
```
@ -188,7 +188,7 @@ bonk.zonk = 5
.a> ls
1. builtin/ (455 terms, 71 types)
1. builtin/ (456 terms, 71 types)
2. zonk/ (1 term)
.a> view zonk.zonk

View File

@ -277,7 +277,7 @@ I should be able to move the root into a sub-namespace
.> ls
1. root/ (1370 terms, 214 types)
1. root/ (1373 terms, 214 types)
.> history
@ -286,22 +286,22 @@ I should be able to move the root into a sub-namespace
□ 1. #p1ltr60tg9 (start of history)
□ 1. #vrn80pdffk (start of history)
```
```ucm
.> ls .root.at.path
1. existing/ (456 terms, 71 types)
2. happy/ (458 terms, 72 types)
3. history/ (456 terms, 71 types)
1. existing/ (457 terms, 71 types)
2. happy/ (459 terms, 72 types)
3. history/ (457 terms, 71 types)
.> history .root.at.path
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #nndiivp3ng
⊙ 1. #g3ri07hi09
- Deletes:
@ -312,7 +312,7 @@ I should be able to move the root into a sub-namespace
Original name New name
existing.a.termInA existing.b.termInA
⊙ 2. #1he7dqonrt
⊙ 2. #ifjg1bj57v
+ Adds / updates:
@ -324,26 +324,26 @@ I should be able to move the root into a sub-namespace
happy.b.termInA existing.a.termInA
history.b.termInA existing.a.termInA
⊙ 3. #fbm4gr3975
⊙ 3. #bdn8f7vhg1
+ Adds / updates:
existing.a.termInA existing.b.termInB
⊙ 4. #v7j1f8vgni
⊙ 4. #5dqmgnr0lt
> Moves:
Original name New name
history.a.termInA history.b.termInA
⊙ 5. #ofsvuc0cgu
⊙ 5. #vd3d37rn3c
- Deletes:
history.b.termInB
⊙ 6. #s3afu924g2
⊙ 6. #gi32sh566a
+ Adds / updates:
@ -354,13 +354,13 @@ I should be able to move the root into a sub-namespace
Original name New name(s)
happy.b.termInA history.a.termInA
⊙ 7. #0bb30gq2b1
⊙ 7. #u2bs53f2hl
+ Adds / updates:
history.a.termInA history.b.termInB
⊙ 8. #aoclegh6j7
⊙ 8. #48hsm89mgl
> Moves:
@ -370,7 +370,7 @@ I should be able to move the root into a sub-namespace
happy.a.T.T2 happy.b.T.T2
happy.a.termInA happy.b.termInA
⊙ 9. #509sbqajct
⊙ 9. #pqd79g3q7l
+ Adds / updates:
@ -380,7 +380,7 @@ I should be able to move the root into a sub-namespace
happy.a.T.T
⊙ 10. #8erj1uau9u
⊙ 10. #allrjqq7ga
+ Adds / updates:
@ -392,7 +392,7 @@ I should be able to move the root into a sub-namespace
⊙ 11. #v4nrp8uols
⊙ 11. #ohd0a9rim1
```
@ -414,26 +414,26 @@ I should be able to move a sub namespace _over_ the root.
.> ls
1. b/ (3 terms, 1 type)
2. builtin/ (455 terms, 71 types)
2. builtin/ (456 terms, 71 types)
.> history
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #buu0h3vir1
⊙ 1. #lf3m1s2e7i
+ Adds / updates:
b.T b.T.T1 b.T.T2 b.termInA
⊙ 2. #rck0cngerk
⊙ 2. #b1cg22v7s1
- Deletes:
a.T a.T.T1 a.T.T2 a.termInA
⊙ 3. #k6m6gfsvd6
⊙ 3. #r83v608ifd
+ Adds / updates:
@ -443,13 +443,13 @@ I should be able to move a sub namespace _over_ the root.
a.T.T
⊙ 4. #2rvval9cn9
⊙ 4. #pmm6a0f6fj
+ Adds / updates:
a.T a.T.T a.termInA
□ 5. #schnold03v (start of history)
□ 5. #nmcjvlnbk1 (start of history)
```
```ucm

View File

@ -8,4 +8,5 @@ Some tests of pattern behavior.
p1 = join [literal "blue", literal "frog"]
> Pattern.run (many p1) "bluefrogbluegoat"
> Pattern.run (many.corrected p1) "bluefrogbluegoat"
```

View File

@ -4,6 +4,7 @@ Some tests of pattern behavior.
p1 = join [literal "blue", literal "frog"]
> Pattern.run (many p1) "bluefrogbluegoat"
> Pattern.run (many.corrected p1) "bluefrogbluegoat"
```
```ucm
@ -22,6 +23,10 @@ p1 = join [literal "blue", literal "frog"]
`>`)... Ctrl+C cancels.
3 | > Pattern.run (many p1) "bluefrogbluegoat"
Some ([], "goat")
4 | > Pattern.run (many.corrected p1) "bluefrogbluegoat"
Some ([], "bluegoat")

View File

@ -63,17 +63,17 @@ y = 2
most recent, along with the command that got us there. Try:
`fork 2 .old`
`fork #lbg8tf1sdh .old` to make an old namespace
`fork #mq4oqhiuuq .old` to make an old namespace
accessible again,
`reset-root #lbg8tf1sdh` to reset the root namespace and
`reset-root #mq4oqhiuuq` to reset the root namespace and
its history to that of the
specified namespace.
When Root Hash Action
1. now #5gonu2p9gp add
2. now #lbg8tf1sdh add
3. now #schnold03v builtins.merge
1. now #1n5tjujeu7 add
2. now #mq4oqhiuuq add
3. now #nmcjvlnbk1 builtins.merge
4. #sg60bvjo91 history starts here
Tip: Use `diff.namespace 1 7` to compare namespaces between

View File

@ -28,13 +28,13 @@ a = 5
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #havp29or07
⊙ 1. #0nv4t3770d
+ Adds / updates:
a
□ 2. #schnold03v (start of history)
□ 2. #nmcjvlnbk1 (start of history)
.> reset 2
@ -47,7 +47,7 @@ a = 5
□ 1. #schnold03v (start of history)
□ 1. #nmcjvlnbk1 (start of history)
```
```unison
@ -83,13 +83,13 @@ foo.a = 5
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #i2199da947
⊙ 1. #3s91aop8k9
+ Adds / updates:
foo.a
□ 2. #schnold03v (start of history)
□ 2. #nmcjvlnbk1 (start of history)
.> reset 1 foo

View File

@ -13,7 +13,7 @@ Let's look at some examples. We'll start with a namespace with just the builtins
□ 1. #iq58l8umv4 (start of history)
□ 1. #3pq2vvggng (start of history)
.> fork builtin builtin2
@ -42,21 +42,21 @@ Now suppose we `fork` a copy of builtin, then rename `Nat.+` to `frobnicate`, th
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #cb1ngbi7os
⊙ 1. #4g884gq7lc
> Moves:
Original name New name
Nat.frobnicate Nat.+
⊙ 2. #evasbqug8s
⊙ 2. #hnah4l7s0j
> Moves:
Original name New name
Nat.+ Nat.frobnicate
□ 3. #iq58l8umv4 (start of history)
□ 3. #3pq2vvggng (start of history)
```
If we merge that back into `builtin`, we get that same chain of history:
@ -73,21 +73,21 @@ If we merge that back into `builtin`, we get that same chain of history:
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #cb1ngbi7os
⊙ 1. #4g884gq7lc
> Moves:
Original name New name
Nat.frobnicate Nat.+
⊙ 2. #evasbqug8s
⊙ 2. #hnah4l7s0j
> Moves:
Original name New name
Nat.+ Nat.frobnicate
□ 3. #iq58l8umv4 (start of history)
□ 3. #3pq2vvggng (start of history)
```
Let's try again, but using a `merge.squash` (or just `squash`) instead. The history will be unchanged:
@ -108,7 +108,7 @@ Let's try again, but using a `merge.squash` (or just `squash`) instead. The hist
□ 1. #iq58l8umv4 (start of history)
□ 1. #3pq2vvggng (start of history)
```
The churn that happened in `mybuiltin` namespace ended up back in the same spot, so the squash merge of that namespace with our original namespace had no effect.
@ -493,13 +493,13 @@ This checks to see that squashing correctly preserves deletions:
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #272p6p79u5
⊙ 1. #jdptkosbfp
- Deletes:
Nat.* Nat.+
□ 2. #iq58l8umv4 (start of history)
□ 2. #3pq2vvggng (start of history)
```
Notice that `Nat.+` and `Nat.*` are deleted by the squash, and we see them deleted in one atomic step in the history.

View File

@ -57,7 +57,7 @@ proj/main> upgrade old new
proj/main> ls lib
1. builtin/ (455 terms, 71 types)
1. builtin/ (456 terms, 71 types)
2. new/ (1 term)
proj/main> view thingy