Merge branch 'trunk' into cp/definition-diffs

Arya Irani 2024-05-18 10:31:26 -04:00 committed by GitHub
commit 8b762c7908
330 changed files with 10399 additions and 3321 deletions

3
.github/CODEOWNERS vendored Normal file
View File

@ -0,0 +1,3 @@
# Require approval from ucm team when editing repository workflows
# This helps prevent users from sneaking in malicious changes to CI workflows.
/.github/ @unisonweb/ucm

View File

@ -159,14 +159,15 @@ jobs:
distribution: "full"
variant: "CS"
version: ${{env.racket_version}}
- uses: awalsh128/cache-apt-pkgs-action@latest
if: runner.os == 'Linux'
with:
packages: libb2-dev
version: 1.0 # cache key version afaik
- name: install unison racket lib
if: steps.cache-racket-deps.outputs.cache-hit != 'true'
run: raco pkg install --auto scheme-libs/racket/unison.zip
- name: install libb2 (macos)
if: runner.os == 'macOS'
run: |
brew install libb2
ln -s "$(brew --prefix)"/lib/libb2.*.dylib \
"$(dirname "$(readlink -f "$(which raco)")")"/../lib/
- name: build unison-runtime
run: |
raco exe --embed-dlls --orig-exe scheme-libs/racket/unison-runtime.rkt
@ -232,7 +233,8 @@ jobs:
file: ucm
content: |
#!/bin/bash
"$(dirname "$0")/unison/unison" --runtime-path "$(dirname "$0")/runtime/bin/unison-runtime" "$@"
unison_root="$(dirname "$(readlink -f "$0")")"
"${unison_root}/unison/unison" --runtime-path "${unison_root}/runtime/bin/unison-runtime" "$@"
- name: create startup script (Windows)
if: runner.os == 'Windows'
uses: 1arp/create-a-file-action@0.4.4

View File

@ -0,0 +1,20 @@
name: Contributor signed CONTRIBUTORS.markdown
on:
pull_request:
jobs:
check-contributor:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v4
with:
sparse-checkout: CONTRIBUTORS.markdown
- name: Look for @${{github.event.pull_request.user.login}} in CONTRIBUTORS.markdown
shell: bash
run: |
echo "If this step fails, make sure you've added yourself to CONTRIBUTORS.markdown"
echo "to acknowledge Unison's MIT license."
egrep '\* .* \(@${{github.event.pull_request.user.login}}\)' \
CONTRIBUTORS.markdown

View File

@ -0,0 +1,126 @@
name: build jit binary
on:
workflow_call:
defaults:
run:
shell: bash
env:
jit_src: unison-jit-src/
jit_dist: unison-jit-dist/
racket_version: "8.7"
jobs:
build-jit-binary:
name: build jit binary
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, macOS-12, windows-2019]
runs-on: ${{matrix.os}}
steps:
- name: set up environment
run: |
jit_src="$GITHUB_WORKSPACE/${{ env.jit_src }}" # scheme source
jit_exe="${jit_src}/unison-runtime" # initially built jit
jit_dist="${{ runner.temp }}/${{ env.jit_dist }}" # jit binary with libraries destination
jit_dist_exe="${jit_dist}/bin/unison-runtime" # jit binary itself
ucm="${{ runner.temp }}/unison"
if [[ ${{runner.os}} = "Windows" ]]; then
jit_src="${jit_src//\\//}"
jit_dist="${jit_dist//\\//}"
jit_exe="${jit_exe//\\//}.exe"
jit_dist_exe="${jit_dist//\\//}/unison-runtime.exe"
ucm="${ucm//\\//}.exe"
fi
echo "jit_src=$jit_src" >> $GITHUB_ENV
echo "jit_exe=$jit_exe" >> $GITHUB_ENV
echo "jit_dist=$jit_dist" >> $GITHUB_ENV
echo "jit_dist_exe=$jit_dist_exe" >> $GITHUB_ENV
echo "ucm=$ucm" >> $GITHUB_ENV
- name: get workflow files, for checking hashes
uses: actions/checkout@v4
with:
sparse-checkout: .github
- name: download jit source
uses: actions/download-artifact@v4
with:
name: jit-source
path: ${{ env.jit_src }}
- name: cache/restore jit binaries
id: cache-jit-binaries
uses: actions/cache/restore@v4
with:
path: ${{ env.jit_dist }}
key: jit-dist_${{matrix.os}}.racket_${{env.racket_version}}.jit-src_${{hashFiles(format('{0}/**/*.rkt',env.jit_src),format('{0}/**/*.ss',env.jit_src))}}.yaml_${{hashFiles('**/ci-build-jit-binary.yaml')}}
- name: cache racket dependencies
if: steps.cache-jit-binaries.outputs.cache-hit != 'true'
uses: actions/cache@v4
with:
path: |
~/.cache/racket
~/.local/share/racket
key: ${{ matrix.os }}.racket_${{env.racket_version}}.yaml_${{hashFiles('**/ci-build-jit-binary.yaml')}}
- name: install racket
if: steps.cache-jit-binaries.outputs.cache-hit != 'true'
uses: unisonweb/actions/racket/install@buildjet-cache
with:
version: ${{env.racket_version}}
- name: set up redistributables (macos)
if: runner.os == 'macOS' && steps.cache-jit-binaries.outputs.cache-hit != 'true'
run: |
brew install libb2
brew_lib_dir=$(brew --prefix)/lib
racket_lib_dir=$(dirname $(dirname $(readlink -f $(which raco))))/lib
# link libb2 if not already present/cached
for dll in $brew_lib_dir/libb2.*.dylib; do
file=$(basename "$dll")
if [ ! -e "$racket_lib_dir/$file" ]; then
ln -s "$brew_lib_dir/$file" "$racket_lib_dir/$file"
else
echo "$racket_lib_dir/$file" already exists.
fi
done
- name: build jit binary
if: steps.cache-jit-binaries.outputs.cache-hit != 'true'
shell: bash
run: |
if [[ ${{runner.os}} = "Windows" ]]; then
raco pkg install --auto --skip-installed --scope installation x509-lib
elif [[ ${{runner.os}} = "macOS" ]]; then
raco pkg install --auto --skip-installed --scope installation x509-lib
elif [[ ${{runner.os}} = "Linux" ]]; then
sudo raco pkg install --auto --skip-installed --scope installation x509-lib
fi
raco pkg install --auto --skip-installed "$jit_src"/unison
raco exe --embed-dlls "$jit_src"/unison-runtime.rkt
raco distribute -v "$jit_dist" "$jit_exe"
- name: cache/save jit binaries
if: steps.cache-jit-binaries.outputs.cache-hit != 'true'
uses: actions/cache/save@v4
with:
path: ${{ env.jit_dist }}
key: jit-dist_${{matrix.os}}.racket_${{env.racket_version}}.jit-src_${{hashFiles(format('{0}/**/*.rkt',env.jit_src),format('{0}/**/*.ss',env.jit_src))}}.yaml_${{hashFiles('**/ci-build-jit-binary.yaml')}}
- name: save jit binary
uses: actions/upload-artifact@v4
with:
name: jit-binary-${{ matrix.os }}
path: ${{ env.jit_dist }}/**
# - name: setup tmate session
# uses: mxschmitt/action-tmate@v3

129
.github/workflows/ci-test-jit.yaml vendored Normal file
View File

@ -0,0 +1,129 @@
name: test jit
on:
workflow_call:
env:
runtime_tests_version: "@unison/runtime-tests/main"
# for best results, this should match the path in ci.yaml too; but GH doesn't make it easy to share them.
runtime_tests_codebase: "~/.cache/unisonlanguage/runtime-tests.unison"
jit_src_rel: unison-jit-src
jit_dist_rel: unison-jit-dist
jit_test_results: jit-test-results
defaults:
run:
shell: bash
jobs:
run-jit-tests:
name: test jit
strategy:
fail-fast: false
matrix:
os:
- ubuntu-20.04
- macOS-12
# - windows-2019
runs-on: ${{matrix.os}}
steps:
- name: set up environment
run: |
jit_src="$GITHUB_WORKSPACE/${{ env.jit_src_rel }}" # scheme source, for hashing
jit_dist="$GITHUB_WORKSPACE/${{ env.jit_dist_rel }}" # jit binary with libraries destination
jit_dist_exe="${jit_dist}/bin/unison-runtime" # jit binary itself
jit_dist_rel_exe="${jit_dist_rel}/bin/unison-runtime" # jit binary itself
ucm="${{ runner.temp }}/unison"
if [[ ${{runner.os}} = "Windows" ]]; then
jit_src="${jit_src//\\//}"
jit_dist="${jit_dist//\\//}"
jit_dist_exe="${jit_dist//\\//}/unison-runtime.exe"
jit_dist_rel_exe="${jit_dist_rel//\\//}/unison-runtime.exe"
ucm="${ucm//\\//}.exe"
fi
echo "jit_src=$jit_src" >> $GITHUB_ENV
echo "jit_dist=$jit_dist" >> $GITHUB_ENV
echo "jit_dist_exe=$jit_dist_exe" >> $GITHUB_ENV
echo "jit_dist_rel_exe=$jit_dist_rel_exe" >> $GITHUB_ENV
echo "ucm=$ucm" >> $GITHUB_ENV
- uses: actions/checkout@v4
with:
sparse-checkout: |
.github
scripts/get-share-hash.sh
unison-src/builtin-tests/jit-tests.tpl.md
unison-src/transcripts-using-base/serialized-cases/case-00.v4.ser
- name: download jit binaries
uses: actions/download-artifact@v4
with:
name: jit-binary-${{ matrix.os }}
path: ${{ env.jit_dist }}
- name: look up hash for runtime tests
run: |
echo "runtime_tests_causalhash=$(scripts/get-share-hash.sh ${{ env.runtime_tests_version }})" >> $GITHUB_ENV
- name: cache jit test results
id: cache-jit-test-results
uses: actions/cache@v4
with:
path: ${{env.jit_test_results}}
key: jit-test-results.dist-exe_${{ hashFiles(env.jit_dist_rel_exe) }}.tests_${{ env.runtime_tests_causalhash }}.yaml_${{ hashFiles('**/ci-test-jit.yaml') }}
- name: install libb2 (linux)
uses: awalsh128/cache-apt-pkgs-action@latest
if: runner.os == 'Linux' && steps.cache-jit-test-results.outputs.cache-hit != 'true'
with:
packages: libb2-1
version: 1.0 # cache key version afaik
- name: cache testing codebase
id: cache-testing-codebase
if: steps.cache-jit-test-results.outputs.cache-hit != 'true'
uses: actions/cache@v4
with:
path: ${{ env.runtime_tests_codebase }}
key: runtime-tests-codebase-${{ matrix.os }}-${{env.runtime_tests_causalhash}}
restore-keys: runtime-tests-codebase-${{ matrix.os }}-
- name: download ucm
if: steps.cache-jit-test-results.outputs.cache-hit != 'true'
uses: actions/download-artifact@v4
with:
name: unison-${{ matrix.os }}
path: ${{ runner.temp }}
- name: set ucm & runtime permissions
if: steps.cache-jit-test-results.outputs.cache-hit != 'true'
run: |
chmod +x ${{ env.ucm }}
chmod +x ${{ env.jit_dist_exe }}
if [[ ${{runner.os}} = "Linux" ]]; then
chmod +x ${{env.jit_dist}}/lib/plt/*
fi
- name: jit integration test ${{ matrix.os }}
if: steps.cache-jit-test-results.outputs.cache-hit != 'true'
run: |
envsubst '${runtime_tests_version}' \
< unison-src/builtin-tests/jit-tests.tpl.md \
> unison-src/builtin-tests/jit-tests.md
${{ env.ucm }} transcript.fork --runtime-path ${{ env.jit_dist_exe }} -C ${{env.runtime_tests_codebase}} unison-src/builtin-tests/jit-tests.md
cat unison-src/builtin-tests/jit-tests.output.md
git diff --exit-code unison-src/builtin-tests/jit-tests.output.md
- name: mark jit tests as passing
if: steps.cache-jit-test-results.outputs.cache-hit != 'true'
run: |
echo "passing=true" >> "${{env.jit_test_results}}"
# - name: Setup tmate session
# uses: mxschmitt/action-tmate@v3
# if: ${{ failure() }}
# timeout-minutes: 15

View File

@ -9,7 +9,7 @@ At a high level, the CI process is:
Some version numbers that are used during CI:
- `ormolu_version: "0.5.0.1"`
- `racket_version: "8.7"`
- `jit_version: "@unison/internal/releases/0.0.15"`
- `jit_version: "@unison/internal/releases/0.0.17"`
Some cached directories:
- `ucm_local_bin` a temp path for caching a built `ucm`

View File

@ -18,20 +18,18 @@ on:
workflow_dispatch:
env:
ormolu_version: "0.5.2.0"
racket_version: "8.7"
ucm_local_bin: "ucm-local-bin"
jit_version: "@unison/internal/releases/0.0.15"
jit_src_scheme: "unison-jit-src/scheme-libs/racket"
jit_dist: "unison-jit-dist"
ormolu_version: 0.5.2.0
ucm_local_bin: ucm-local-bin
jit_version: "@unison/internal/releases/0.0.17"
jit_src_scheme: unison-jit-src/scheme-libs/racket
jit_dist: unison-jit-dist
jit_generator_os: ubuntu-20.04
runtime_tests_version: "@unison/runtime-tests/main"
runtime_tests_codebase: "~/.cache/unisonlanguage/runtime-tests.unison"
# locations of some files that will indicate whether we need to re-run certain steps
transcript_test_results: "transcript-test-results"
interpreter_test_results: "interpreter-test-results"
jit_test_results: "jit-test-results"
transcript_test_results: transcript-test-results
interpreter_test_results: interpreter-test-results
jobs:
ormolu:
@ -354,38 +352,25 @@ jobs:
steps:
- name: set up environment
run: |
echo "jit_generated_src_scheme=${{ runner.temp }}/jit-generated-src" >> $GITHUB_ENV
echo "jit_src_scheme=${{ runner.temp }}/${{ env.jit_src_scheme }}" >> $GITHUB_ENV
echo "ucm=${{ runner.temp }}/unison" >> $GITHUB_ENV
- uses: actions/cache@v4
name: cache jit source
- name: download scheme-libs
uses: actions/checkout@v4
with:
path: ${{ env.jit_src_scheme }}
key: jit_src_scheme-racket_${{env.racket_version}}.jit_${{env.jit_version}}-${{hashFiles('**/scheme-libs/**')}}
- name: check source exists
id: jit_src_exists
if: steps.cache-jit-source.outputs.cache-hit != 'true'
run: |
files=(data-info boot-generated simple-wrappers builtin-generated compound-wrappers)
all_exist=true
for file in "${files[@]}"; do
if [[ ! -f "${{ env.jit_src_scheme }}/unison/$file.ss" ]]; then
echo "$file does not exist."
all_exist=false
# Uncomment the next line if you want to stop checking after the first missing file
# break
fi
done
if $all_exist; then
echo "files_exists=true" >> $GITHUB_OUTPUT
else
echo "files_exists=false" >> $GITHUB_OUTPUT
fi
sparse-checkout: |
scripts/get-share-hash.sh
scheme-libs
- name: look up hash for jit source generator
run: echo "jit_causalhash=$(scripts/get-share-hash.sh ${{env.jit_version}})" >> $GITHUB_ENV
- uses: actions/cache@v4
name: cache auto-generated jit source
id: cache-generated-source
with:
path: ${{ env.jit_generated_src_scheme }}
key: jit_generated_src_scheme-jit_${{env.jit_version}}-${{env.jit_causalhash}}
- name: create transcript
if: steps.jit_src_exists.outputs.files_exists == 'false'
if: steps.cache-generated-source.outputs.cache-hit != 'true'
uses: DamianReeves/write-file-action@v1.3
with:
path: ${{ runner.temp }}/setup-jit.md
@ -396,34 +381,31 @@ jobs:
jit-setup/main> pull ${{ env.jit_version }} lib.jit
```
```unison
go = generateSchemeBoot "${{ env.jit_src_scheme }}"
go = generateSchemeBoot "${{ env.jit_generated_src_scheme }}"
```
```ucm
jit-setup/main> run go
```
- name: download ucm artifact
if: steps.jit_src_exists.outputs.files_exists == 'false'
if: steps.cache-generated-source.outputs.cache-hit != 'true'
uses: actions/download-artifact@v4
with:
name: unison-${{ env.jit_generator_os }}
path: ${{ runner.temp }}
- name: set ucm permissions
if: steps.jit_src_exists.outputs.files_exists == 'false'
if: steps.cache-generated-source.outputs.cache-hit != 'true'
run: chmod +x ${{ env.ucm }}
- name: download scheme-libs
if: steps.jit_src_exists.outputs.files_exists == 'false'
uses: actions/checkout@v4
- name: generate source
if: steps.jit_src_exists.outputs.files_exists == 'false'
if: steps.cache-generated-source.outputs.cache-hit != 'true'
run: |
mkdir -p ${{ env.jit_generated_src_scheme }}
${{ env.ucm }} transcript ${{ runner.temp }}/setup-jit.md
- name: bundle source
run: |
mkdir -p ${{ env.jit_src_scheme }}
cp -R scheme-libs/racket/* ${{ env.jit_src_scheme }}
${{ env.ucm }} transcript ${{ runner.temp }}/setup-jit.md
cp -R "${{ env.jit_generated_src_scheme }}"/* ${{ env.jit_src_scheme }}
- name: save jit source
if: always()
uses: actions/upload-artifact@v4
@ -433,162 +415,11 @@ jobs:
if-no-files-found: error
build-jit-binary:
if: always() && needs.generate-jit-source.result == 'success'
name: build jit binary
needs: generate-jit-source
runs-on: ${{ matrix.os }}
strategy:
# Run each build to completion, regardless of if any have failed
fail-fast: false
matrix:
os:
# While iterating on this file, you can disable one or more of these to speed things up
- ubuntu-20.04
- macOS-12
- windows-2019
steps:
- name: set up environment
id: checks
run: |
jit_src_scheme="${{ runner.temp }}/${{ env.jit_src_scheme }}" # scheme source
jit_exe="${jit_src_scheme}/unison-runtime" # initially built jit
jit_dist="${{ runner.temp }}/${{ env.jit_dist }}" # jit binary with libraries destination
jit_dist_exe="${jit_dist}/bin/unison-runtime" # jit binary itself
ucm="${{ runner.temp }}/unison"
uses: ./.github/workflows/ci-build-jit-binary.yaml
if [[ ${{runner.os}} = "Windows" ]]; then
jit_src_scheme="${jit_src_scheme//\\//}"
jit_dist="${jit_dist//\\//}"
jit_exe="${jit_exe//\\//}.exe"
jit_dist_exe="${jit_dist//\\//}/unison-runtime.exe"
ucm="${ucm//\\//}.exe"
fi
echo "jit_src_scheme=$jit_src_scheme" >> $GITHUB_ENV
echo "jit_exe=$jit_exe" >> $GITHUB_ENV
echo "jit_dist=$jit_dist" >> $GITHUB_ENV
echo "jit_dist_exe=$jit_dist_exe" >> $GITHUB_ENV
echo "ucm=$ucm" >> $GITHUB_ENV
- name: cache jit binaries
id: cache-jit-binaries
uses: actions/cache@v4
with:
path: ${{ env.jit_dist }}
key: jit_dist-${{ matrix.os }}.racket_${{ env.racket_version }}.jit_${{ env.jit_version }}
- name: Cache Racket dependencies
if: steps.cache-jit-binaries.outputs.cache-hit != 'true'
uses: actions/cache@v4
with:
path: |
~/.cache/racket
~/.local/share/racket
key: ${{ matrix.os }}-racket-${{env.racket_version}}
- uses: Bogdanp/setup-racket@v1.11
if: steps.cache-jit-binaries.outputs.cache-hit != 'true'
with:
architecture: x64
distribution: full
variant: CS
version: ${{env.racket_version}}
- uses: actions/checkout@v4
with:
sparse-checkout: |
scripts/get-share-hash.sh
scheme-libs
unison-src/builtin-tests/jit-tests.tpl.md
unison-src/transcripts-using-base/serialized-cases/case-00.v4.ser
- name: look up hash for runtime tests
run: |
echo "runtime_tests_causalhash=$(scripts/get-share-hash.sh ${{env.runtime_tests_version}})" >> $GITHUB_ENV
- name: cache jit test results
id: cache-jit-test-results
uses: actions/cache@v4
with:
path: ${{env.jit_test_results}}
key: jit-test-results.${{ matrix.os }}.racket_${{ env.racket_version }}.jit_${{ env.jit_version }}.tests_${{env.runtime_tests_causalhash}}
- name: install libb2 (linux)
uses: awalsh128/cache-apt-pkgs-action@latest
if: |
runner.os == 'Linux'
&& steps.cache-jit-test-results.outputs.cache-hit != 'true'
# read this if a package isn't installing correctly
# https://github.com/awalsh128/cache-apt-pkgs-action#caveats
with:
packages: libb2-dev
version: 1.0 # cache key version afaik
- name: install libb2 (macos)
if: |
runner.os == 'macOS'
&& steps.cache-jit-test-results.outputs.cache-hit != 'true'
run: brew install libb2
- name: download jit source
if: steps.cache-jit-binaries.outputs.cache-hit != 'true'
uses: actions/download-artifact@v4
with:
name: jit-source
path: ${{ env.jit_src_scheme }}
- name: build jit binary
if: steps.cache-jit-binaries.outputs.cache-hit != 'true'
shell: bash
run: |
cp -R scheme-libs/racket/* "$jit_src_scheme"
raco pkg install --auto --skip-installed "$jit_src_scheme"/unison
raco exe --embed-dlls "$jit_src_scheme"/unison-runtime.rkt
raco distribute "$jit_dist" "$jit_exe"
- name: save jit binary
uses: actions/upload-artifact@v4
with:
name: jit-binary-${{ matrix.os }}
path: ${{ env.jit_dist }}/**
- name: cache testing codebase
id: cache-testing-codebase
if: steps.cache-jit-test-results.outputs.cache-hit != 'true'
uses: actions/cache@v4
with:
path: ${{ env.runtime_tests_codebase }}
key: runtime-tests-codebase-${{ matrix.os }}-${{env.runtime_tests_causalhash}}
restore-keys: runtime-tests-codebase-${{ matrix.os }}-
- name: download ucm
if: steps.cache-jit-test-results.outputs.cache-hit != 'true'
uses: actions/download-artifact@v4
with:
name: unison-${{ matrix.os }}
path: ${{ runner.temp }}
- name: set ucm permissions
if: steps.cache-jit-test-results.outputs.cache-hit != 'true'
run: chmod +x ${{ env.ucm }}
- name: jit integration test ${{ matrix.os }}
if: runner.os != 'Windows' && steps.cache-jit-test-results.outputs.cache-hit != 'true'
run: |
envsubst '${runtime_tests_version}' \
< unison-src/builtin-tests/jit-tests.tpl.md \
> unison-src/builtin-tests/jit-tests.md
${{ env.ucm }} transcript.fork --runtime-path ${{ env.jit_dist_exe }} -C ${{env.runtime_tests_codebase}} unison-src/builtin-tests/jit-tests.md
cat unison-src/builtin-tests/jit-tests.output.md
git diff --exit-code unison-src/builtin-tests/jit-tests.output.md
- name: mark jit tests as passing
if: runner.os != 'Windows' && steps.cache-jit-test-results.outputs.cache-hit != 'true'
run: |
echo "passing=true" >> "${{env.jit_test_results}}"
# - name: Setup tmate session
# uses: mxschmitt/action-tmate@v3
# if: ${{ failure() }}
# timeout-minutes: 15
test-jit:
name: test jit
needs: build-jit-binary
uses: ./.github/workflows/ci-test-jit.yaml

View File

@ -52,7 +52,7 @@ jobs:
gh release create "release/${{inputs.version}}" \
--repo unisonweb/unison \
--target "${{github.ref}}" \
--target "${{github.sha}}" \
--generate-notes \
--notes-start-tag "${prev_tag}" \
\

1
.gitignore vendored
View File

@ -11,6 +11,7 @@ interpreter-tests.md
# Stack
.stack-work
.stack-work-hls
# Cabal
dist-newstyle

View File

@ -82,3 +82,7 @@ The format for this list: name, GitHub handle
* Kyle Goetz (@kylegoetz)
* Ethan Morgan (@sixfourtwelve)
* Johan Winther (@JohanWinther)
* Greg Pfeil (@sellout)
* Upendra Upadhyay (@upendra1997)
* Dan Doel (@dolio)
* Eric Torreborre (@etorreborre)

View File

@ -15,7 +15,7 @@ be listed here, please [file a ticket](https://github.com/unisonweb/unison/issue
This file was generated using [unisonweb/credits-generator](http://github.com/unisonweb/credits-generator).
### Listing
### Listing
These are listed in alphabetical order.
| Package name | License |
@ -109,6 +109,7 @@ These are listed in alphabetical order.
| [network-bsd-2.8.1.0](https://hackage.haskell.org/package/network-bsd-2.8.1.0) | [BSD3](https://hackage.haskell.org/package/network-bsd-2.8.1.0/src/LICENSE) |
| [network-info-0.2.0.10](https://hackage.haskell.org/package/network-info-0.2.0.10) | [BSD3](https://hackage.haskell.org/package/network-info-0.2.0.10/src/LICENSE) |
| [network-simple-0.4.5](https://hackage.haskell.org/package/network-simple-0.4.5) | [BSD3](https://hackage.haskell.org/package/network-simple-0.4.5/src/LICENSE) |
| [network-udp-0.0.0](https://hackage.haskell.org/package/network-udp-0.0.0) | [BSD3](https://hackage.haskell.org/package/network-udp-0.0.0/src/LICENSE) |
| [nonempty-containers-0.3.3.0](https://hackage.haskell.org/package/nonempty-containers-0.3.3.0) | [BSD3](https://hackage.haskell.org/package/nonempty-containers-0.3.3.0/src/LICENSE) |
| [nonempty-vector-0.2.0.2](https://hackage.haskell.org/package/nonempty-vector-0.2.0.2) | [BSD3](https://hackage.haskell.org/package/nonempty-vector-0.2.0.2/src/LICENSE) |
| [parallel-3.2.2.0](https://hackage.haskell.org/package/parallel-3.2.2.0) | [BSD3](https://hackage.haskell.org/package/parallel-3.2.2.0/src/LICENSE) |

View File

@ -11,8 +11,8 @@ import Unison.Sqlite (FromRow, ToRow)
-- | A project.
data Project = Project
{ projectId :: ProjectId,
name :: ProjectName
{ projectId :: !ProjectId,
name :: !ProjectName
}
deriving stock (Generic, Show)
deriving anyclass (ToRow, FromRow)

View File

@ -11,10 +11,10 @@ import Unison.Sqlite (FromRow, ToRow)
-- | A project branch.
data ProjectBranch = ProjectBranch
{ projectId :: ProjectId,
branchId :: ProjectBranchId,
name :: ProjectBranchName,
parentBranchId :: Maybe ProjectBranchId
{ projectId :: !ProjectId,
branchId :: !ProjectBranchId,
name :: !ProjectBranchName,
parentBranchId :: !(Maybe ProjectBranchId)
}
deriving stock (Eq, Generic, Show)
deriving anyclass (ToRow, FromRow)

View File

@ -251,6 +251,7 @@ module U.Codebase.Sqlite.Queries
addMostRecentNamespaceTable,
addSquashResultTable,
addSquashResultTableIfNotExists,
cdToProjectRoot,
-- ** schema version
currentSchemaVersion,
@ -411,7 +412,7 @@ type TextPathSegments = [Text]
-- * main squeeze
currentSchemaVersion :: SchemaVersion
currentSchemaVersion = 15
currentSchemaVersion = 16
createSchema :: Transaction ()
createSchema = do
@ -475,6 +476,10 @@ addSquashResultTableIfNotExists :: Transaction ()
addSquashResultTableIfNotExists =
executeStatements $(embedProjectStringFile "sql/010-ensure-squash-cache-table.sql")
cdToProjectRoot :: Transaction ()
cdToProjectRoot =
executeStatements $(embedProjectStringFile "sql/011-cd-to-project-root.sql")
schemaVersion :: Transaction SchemaVersion
schemaVersion =
queryOneCol
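
The new `cdToProjectRoot` statement is an ordinary `Transaction ()`, so callers can run it like any other query. A minimal sketch, assuming a `Connection` is already open (the `parkAtProjectRoots` name is hypothetical; `runTransaction` and `cdToProjectRoot` come from the modules in this diff):

```haskell
import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.Sqlite qualified as Sqlite

-- Hypothetical helper: run the 011-cd-to-project-root.sql statement,
-- parking any namespace that was cd'd below a project root back at that root.
parkAtProjectRoots :: Sqlite.Connection -> IO ()
parkAtProjectRoots conn =
  Sqlite.runTransaction conn Queries.cdToProjectRoot
```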

View File

@ -94,6 +94,7 @@ import Unison.Hash32 qualified as Hash32
import Unison.Prelude
import Unison.Util.Monoid qualified as Monoid
import Prelude hiding (getChar, putChar)
import U.Codebase.Decl (Modifier)
debug :: Bool
debug = False
@ -497,11 +498,13 @@ getDeclElement =
0 -> pure Decl.Data
1 -> pure Decl.Effect
other -> unknownTag "DeclType" other
getModifier =
getWord8 >>= \case
0 -> pure Decl.Structural
1 -> Decl.Unique <$> getText
other -> unknownTag "DeclModifier" other
getModifier :: MonadGet m => m Modifier
getModifier =
getWord8 >>= \case
0 -> pure Decl.Structural
1 -> Decl.Unique <$> getText
other -> unknownTag "DeclModifier" other
-- | Get the number of constructors in a decl element.
getDeclElementNumConstructors :: (MonadGet m) => m Int
@ -512,7 +515,7 @@ getDeclElementNumConstructors = do
getListLength
where
skipDeclType = void getWord8
skipDeclModifier = void getWord8
skipDeclModifier = void getModifier
skipDeclTypeVariables = void (getList skipSymbol)
lookupDeclElement ::

View File

@ -0,0 +1,5 @@
-- As part of deprecating cd, we want users who have cd'd deeper than their project root to be parked at the root instead.
UPDATE "most_recent_namespace"
SET "namespace" = json_array("namespace" -> 0, "namespace" -> 1, "namespace" -> 2, "namespace" -> 3)
WHERE "namespace" ->> 0 = '__projects'
AND json_array_length("namespace") > 4;

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
-- This file has been generated from package.yaml by hpack version 0.36.0.
--
-- see: https://github.com/sol/hpack
@ -20,6 +20,7 @@ extra-source-files:
sql/008-add-most-recent-namespace-table.sql
sql/009-add-squash-cache-table.sql
sql/010-ensure-squash-cache-table.sql
sql/011-cd-to-project-root.sql
sql/create.sql
source-repository head

View File

@ -7,6 +7,7 @@ module U.Codebase.Causal
)
where
import Data.Function (on)
import Data.Map.Strict qualified as Map
import Unison.Prelude
@ -18,6 +19,9 @@ data Causal m hc he pe e = Causal
}
deriving stock (Functor, Generic)
instance Eq hc => Eq (Causal m hc he pe e) where
(==) = (==) `on` causalHash
-- | @emap f g@ maps over the values and parents' values with @f@ and @g@.
emap :: Functor m => (e -> e') -> (pe -> pe') -> Causal m hc he pe e -> Causal m hc he pe' e'
emap f g causal@Causal {parents, value} =

View File

@ -48,6 +48,11 @@ toReference = \case
Ref termRef -> termRef
Con typeRef _ -> typeRef
toReferenceId :: Referent -> Maybe Reference.Id
toReferenceId = \case
Ref termRef -> Reference.toId termRef
Con typeRef _ -> Reference.toId typeRef
toTermReference :: Referent' termRef typeRef -> Maybe termRef
toTermReference = \case
Ref termRef -> Just termRef

View File

@ -42,7 +42,7 @@ Some tests are executables instead:
* `stack exec transcripts` runs the transcripts-related integration tests, found in `unison-src/transcripts`. You can add more tests to this directory.
* `stack exec transcripts -- prefix-of-filename` runs only transcript tests with a matching filename prefix.
* `stack exec integration-tests` runs the additional integration tests for cli. These tests are not triggered by `tests` or `trancscripts`.
* `stack exec integration-tests` runs the additional integration tests for cli. These tests are not triggered by `tests` or `transcripts`.
* `stack exec unison -- transcript unison-src/transcripts-round-trip/main.md` runs the pretty-printing round trip tests
* `stack exec unison -- transcript unison-src/transcripts-manual/benchmarks.md` runs the benchmark suite. Output goes in unison-src/transcripts-manual/benchmarks/output.txt.

View File

@ -26,6 +26,9 @@ e.g.
It's not clear to me when to use `$GITHUB_OUTPUT` vs `$GITHUB_ENV`, but I have been favoring `$GITHUB_ENV` because it requires fewer characters to access.
However, it seems a little wrong.
### `hashFiles()`
`hashFiles()` can only access files inside of and relative to `$GITHUB_WORKSPACE`.
### `if:`
Although the type rules don't totally make sense in Github Actions, `if:` takes a Boolean.

View File

@ -4,9 +4,12 @@
* [Overview](#overview)
* [Installation and setup](#installation-and-setup)
* [Settings](#settings)
* [NeoVim](#neovim)
* [VSCode](#vscode)
* [Helix Editor](#helix-editor)
* [Emacs](#emacs)
* [other editors](#other-editors)
* [Configuration](#configuration)
## Overview
@ -31,7 +34,7 @@ Note for Windows users: Due to an outstanding issue with GHC's IO manager on Win
Enabling the LSP on windows can cause UCM to hang on exit and may require the process to be killed by the operating system or via Ctrl-C.
Note that this doesn't pose any risk of codebase corruption or cause any known issues, it's simply an annoyance.
If you accept this annoyance, you can enable the LSP server on Windows by exporting the `UNISON_LSP_ENABLED=true` environment variable.
If you accept this annoyance, you can enable the LSP server on Windows by exporting the `UNISON_LSP_ENABLED=true` environment variable.
You can set this persistently in powershell using:
@ -41,17 +44,6 @@ You can set this persistently in powershell using:
See [this issue](https://github.com/unisonweb/unison/issues/3487) for more details.
### Settings
Supported settings and their defaults. See information for your language server client about where to provide these.
```json
{
// A suggestion for the formatter about how wide (in columns) to print definitions.
"formattingWidth": 80
}
```
### NeoVim
Before configuring the LSP, install the Vim plugin for filetype detection and syntax highlighting.
@ -193,6 +185,18 @@ language-servers = [ "ucm" ]
or follow the instructions for Unison in "[How to install the default language servers](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers#unison)" wiki page.
### Emacs
In Emacs 29 (or earlier, if you install the [Eglot](https://elpa.gnu.org/packages/eglot.html) package), add the following to your init file:
```elisp
(push '((unison-ts-mode unisonlang-mode) "127.0.0.1" 5757)
eglot-server-programs)
```
This requires having either [unison-ts-mode](https://github.com/fmguerreiro/unison-ts-mode) or [unisonlang-mode](https://melpa.org/#/unisonlang-mode) installed. unison-ts-mode is newer, supported, and more complete, but isn't in [MELPA](https://melpa.org/) yet and requires a couple commands to set up [tree-sitter-unison](https://github.com/kylegoetz/tree-sitter-unison).
You can then use `M-x eglot` in a Unison scratch file buffer. You can also [configure Eglot to start automatically](https://www.gnu.org/software/emacs/manual/html_node/eglot/Starting-Eglot.html).
### Other Editors
@ -205,11 +209,14 @@ Note that some editors require passing the command and arguments as separate par
Supported settings and their defaults. See information for your language server client about where to provide these.
* `formattingWidth`: A suggestion for the formatter about how wide (in columns) to print definitions.
* `maxCompletions`: The number of completions the server should collect and send based on a single query. Increasing this limit will provide more completion results, but at the cost of being slower to respond.
If explicitly set to `null` the server will return ALL completions available.
```json
{
// The number of completions the server should collect and send based on a single query.
// Increasing this limit will provide more completion results, but at the cost of being slower to respond.
// If explicitly set to `null` the server will return ALL completions available.
"formattingWidth": 80,
"maxCompletions": 100
}
```

View File

@ -85,5 +85,5 @@ curl -L https://github.com/unisonweb/unison/releases/latest/download/ucm-linux.t
**Windows manual install:**
* Recommended: [Set your default Terminal application](https://devblogs.microsoft.com/commandline/windows-terminal-as-your-default-command-line-experience/) to “Windows Terminal”.
* Download [the release](https://github.com/unisonweb/unison/releases/download/release%2FM5h/ucm-windows.zip) and extract it to a location of your choosing.
* Download [the release](https://github.com/unisonweb/unison/releases/latest/download/ucm-windows.zip) and extract it to a location of your choosing.
* Run `ucm.exe`

View File

@ -95,7 +95,9 @@
assert nixpkgs-packages.hpack.version == versions.hpack;
{
packages = nixpkgs-packages // {
default = haskell-nix-flake.defaultPackage;
haskell-nix = haskell-nix-flake.packages;
docker = import ./nix/docker.nix { inherit pkgs; haskell-nix = haskell-nix-flake.packages; };
build-tools = pkgs.symlinkJoin {
name = "build-tools";
paths = self.devShells."${system}".only-tools-nixpkgs.buildInputs;

View File

@ -129,6 +129,9 @@ cradle:
- path: "unison-hashing-v2/src"
component: "unison-hashing-v2:lib"
- path: "unison-merge/src"
component: "unison-merge:lib"
- path: "unison-share-api/src"
component: "unison-share-api:lib"

View File

@ -22,6 +22,7 @@ dependencies:
- pretty-simple
- safe
- text
- these
- time
- transformers
- unliftio

View File

@ -27,6 +27,7 @@ data DebugFlag
| Codebase
| Git
| Integrity
| Merge
| Migration
| Sqlite
| Sync
@ -60,6 +61,7 @@ debugFlags = case (unsafePerformIO (lookupEnv "UNISON_DEBUG")) of
"CODEBASE" -> pure Codebase
"GIT" -> pure Git
"INTEGRITY" -> pure Integrity
"MERGE" -> pure Merge
"MIGRATION" -> pure Migration
"SQLITE" -> pure Sqlite
"SYNC" -> pure Sync
@ -91,6 +93,10 @@ debugAuth :: Bool
debugAuth = Auth `Set.member` debugFlags
{-# NOINLINE debugAuth #-}
debugMerge :: Bool
debugMerge = Merge `Set.member` debugFlags
{-# NOINLINE debugMerge #-}
debugMigration :: Bool
debugMigration = Migration `Set.member` debugFlags
{-# NOINLINE debugMigration #-}
@ -183,6 +189,7 @@ shouldDebug = \case
Codebase -> debugCodebase
Git -> debugGit
Integrity -> debugIntegrity
Merge -> debugMerge
Migration -> debugMigration
Sqlite -> debugSqlite
Sync -> debugSync
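
Since `debugMerge` is just a `Bool` derived from `UNISON_DEBUG`, merge code can gate its tracing on it directly. A hedged sketch (the `reportMergeStep` helper is hypothetical; `debugMerge` is the flag added above):

```haskell
import Control.Monad (when)
import Unison.Debug qualified as Debug

-- Hypothetical call site: emit merge tracing only when UNISON_DEBUG
-- includes the MERGE flag.
reportMergeStep :: String -> IO ()
reportMergeStep msg =
  when Debug.debugMerge (putStrLn ("[merge] " <> msg))
```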

View File

@ -1,10 +1,17 @@
-- | @Map@ utilities.
module Unison.Util.Map
( bimap,
( alignWithKey,
bimap,
bitraverse,
bitraversed,
deleteLookup,
elemsSet,
foldM,
foldMapM,
for_,
insertLookup,
invert,
mergeMap,
unionWithM,
remap,
traverseKeys,
@ -12,6 +19,7 @@ module Unison.Util.Map
swap,
upsert,
upsertF,
upsertLookup,
valuesVector,
asList_,
)
@ -22,10 +30,22 @@ import Control.Monad qualified as Monad
import Data.Bifunctor qualified as B
import Data.Bitraversable qualified as B
import Data.Foldable (foldlM)
import Data.Map.Internal qualified as Map (Map (Bin, Tip))
import Data.Map.Merge.Strict qualified as Map
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Data.These (These (..))
import Data.Vector (Vector)
import Data.Vector qualified as Vector
import Unison.Prelude hiding (bimap)
import Unison.Prelude hiding (bimap, foldM, for_)
-- | A common case of @Map.merge@. Like @alignWith@, but includes the key.
alignWithKey :: Ord k => (k -> These a b -> c) -> Map k a -> Map k b -> Map k c
alignWithKey f =
Map.merge
(Map.mapMissing \k x -> f k (This x))
(Map.mapMissing \k y -> f k (That y))
(Map.zipWithMatched \k x y -> f k (These x y))
bimap :: (Ord a') => (a -> a') -> (b -> b') -> Map a b -> Map a' b'
bimap fa fb = Map.fromList . map (B.bimap fa fb) . Map.toList
@ -51,6 +71,17 @@ swap :: (Ord b) => Map a b -> Map b a
swap =
Map.foldlWithKey' (\z a b -> Map.insert b a z) mempty
-- | Like 'Map.insert', but returns the old value as well.
insertLookup :: Ord k => k -> v -> Map k v -> (Maybe v, Map k v)
insertLookup k v =
upsertLookup (const v) k
-- | Invert a map's keys and values. This probably only makes sense with injective maps, but otherwise, later key/value
-- pairs (ordered by the original map's keys) overwrite earlier ones.
invert :: Ord v => Map k v -> Map v k
invert =
Map.foldlWithKey' (\m k v -> Map.insert v k m) Map.empty
-- | Upsert an element into a map.
upsert :: (Ord k) => (Maybe v -> v) -> k -> Map k v -> Map k v
upsert f =
@ -61,6 +92,11 @@ upsertF :: (Functor f, Ord k) => (Maybe v -> f v) -> k -> Map k v -> f (Map k v)
upsertF f =
Map.alterF (fmap Just . f)
-- | Like 'upsert', but returns the old value as well.
upsertLookup :: Ord k => (Maybe v -> v) -> k -> Map k v -> (Maybe v, Map k v)
upsertLookup f =
upsertF (\v -> (v, f v))
valuesVector :: Map k v -> Vector v
valuesVector =
Vector.fromList . Map.elems
@ -70,6 +106,23 @@ deleteLookup :: (Ord k) => k -> Map k v -> (Maybe v, Map k v)
deleteLookup =
Map.alterF (,Nothing)
-- | Like 'Map.elems', but return the values as a set.
elemsSet :: Ord v => Map k v -> Set v
elemsSet =
Set.fromList . Map.elems
-- | Like 'Map.foldlWithKey'', but with a monadic accumulator.
foldM :: Monad m => (acc -> k -> v -> m acc) -> acc -> Map k v -> m acc
foldM f acc0 =
go acc0
where
go !acc = \case
Map.Tip -> pure acc
Map.Bin _ k v xs ys -> do
acc1 <- go acc xs
acc2 <- f acc1 k v
go acc2 ys
-- | Construct a map from a foldable container by mapping each element to monadic action that returns a key and a value.
--
-- The map is constructed from the left: if two elements map to the same key, the second will overwrite the first.
@ -81,6 +134,18 @@ foldMapM f =
(k, v) <- f x
pure $! Map.insert k v acc
-- | Run a monadic action for each key/value pair in a map.
for_ :: Monad m => Map k v -> (k -> v -> m ()) -> m ()
for_ m f =
go m
where
go = \case
Map.Tip -> pure ()
Map.Bin _ k v xs ys -> do
go xs
f k v
go ys
unionWithM ::
forall m k a.
(Monad m, Ord k) =>
@ -111,3 +176,23 @@ traverseKeys f = bitraverse f pure
traverseKeysWith :: (Applicative f, Ord k') => (v -> v -> v) -> (k -> f k') -> Map k v -> f (Map k' v)
traverseKeysWith combine f m =
Map.fromListWith combine <$> (Map.toList m & traversed . _1 %%~ f)
-- | @mergeMap@ is like a @foldMap@ version of @merge@: summarize the merging of two maps together as a monoidal value.
mergeMap ::
forall a b k m.
(Monoid m, Ord k) =>
-- | Function to apply when a key exists in the first map, but not the second.
(k -> a -> m) ->
-- | Function to apply when a key exists in the second map, but not the first.
(k -> b -> m) ->
-- | Function to apply when a key exists in both maps.
(k -> a -> b -> m) ->
Map k a ->
Map k b ->
m
mergeMap f g h =
coerce @(Map k a -> Map k b -> Const m (Map k ())) do
Map.mergeA
(Map.traverseMissing (coerce f))
(Map.traverseMissing (coerce g))
(Map.zipWithAMatched (coerce h))
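
A small usage sketch of a few of the helpers added above (the example values are hypothetical; the functions come from `Unison.Util.Map`):

```haskell
import Data.Map.Strict qualified as Map
import Data.Monoid (Sum (..))
import Unison.Util.Map qualified as UMap

example :: IO ()
example = do
  let m1 = Map.fromList [(1 :: Int, "a"), (2, "b")]
      m2 = Map.fromList [(2 :: Int, "B"), (3, "C")]
  -- insertLookup returns the previous value (if any) alongside the updated map
  print (UMap.insertLookup 1 "z" m1)  -- (Just "a", fromList [(1,"z"),(2,"b")])
  -- invert swaps keys and values (later keys win when values collide)
  print (UMap.invert m1)              -- fromList [("a",1),("b",2)]
  -- alignWithKey tags each key with This / That / These depending on presence
  print (UMap.alignWithKey (\k t -> (k, t)) m1 m2)
  -- mergeMap summarizes a merge monoidally; here, count keys present in both maps
  print (getSum (UMap.mergeMap (\_ _ -> mempty) (\_ _ -> mempty) (\_ _ _ -> Sum (1 :: Int)) m1 m2))
```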

View File

@ -74,6 +74,7 @@ library
, pretty-simple
, safe
, text
, these
, time
, transformers
, unliftio

View File

@ -173,7 +173,7 @@ defaultColors :: ST.Element r -> Maybe Color
defaultColors = \case
ST.NumericLiteral -> Nothing
ST.TextLiteral -> Nothing
ST.BytesLiteral -> Just HiBlack
ST.BytesLiteral -> Just HiPurple
ST.CharLiteral -> Nothing
ST.BooleanLiteral -> Nothing
ST.Blank -> Nothing
@ -182,21 +182,21 @@ defaultColors = \case
ST.TermReference _ -> Nothing
ST.Op _ -> Nothing
ST.Unit -> Nothing
ST.AbilityBraces -> Just HiBlack
ST.ControlKeyword -> Just Bold
ST.LinkKeyword -> Just HiBlack
ST.TypeOperator -> Just HiBlack
ST.AbilityBraces -> Just HiPurple
ST.ControlKeyword -> Just HiCyan
ST.LinkKeyword -> Just HiPurple
ST.TypeOperator -> Just HiPurple
ST.BindingEquals -> Nothing
ST.TypeAscriptionColon -> Just Blue
ST.DataTypeKeyword -> Nothing
ST.DataTypeParams -> Nothing
ST.DataTypeModifier -> Nothing
ST.UseKeyword -> Just HiBlack
ST.UsePrefix -> Just HiBlack
ST.UseSuffix -> Just HiBlack
ST.HashQualifier _ -> Just HiBlack
ST.UseKeyword -> Just HiPurple
ST.UsePrefix -> Just HiPurple
ST.UseSuffix -> Just HiPurple
ST.HashQualifier _ -> Just HiPurple
ST.DelayForceChar -> Just Yellow
ST.DelimiterChar -> Nothing
ST.Parenthesis -> Nothing
ST.DocDelimiter -> Just Green
ST.DocKeyword -> Just Bold
ST.DocKeyword -> Just HiCyan

View File

@ -44,6 +44,7 @@ dependencies:
- text-builder
- transformers
- unison-prelude
- unison-util-cache
- unliftio
- unliftio-core
@ -68,6 +69,7 @@ default-extensions:
- GADTs
- GeneralizedNewtypeDeriving
- ImportQualifiedPost
- InstanceSigs
- KindSignatures
- LambdaCase
- MultiParamTypeClasses

View File

@ -21,9 +21,12 @@ module Unison.Sqlite
runTransactionWithRollback,
runReadOnlyTransaction,
runWriteTransaction,
unsafeUnTransaction,
cacheTransaction,
savepoint,
-- ** Unsafe things
unsafeIO,
unsafeUnTransaction,
-- * Executing queries
Sql,

View File

@ -5,10 +5,13 @@ module Unison.Sqlite.Transaction
runTransactionWithRollback,
runReadOnlyTransaction,
runWriteTransaction,
unsafeUnTransaction,
cacheTransaction,
savepoint,
-- ** Unsafe things
unsafeIO,
unsafeGetConnection,
unsafeUnTransaction,
-- * Executing queries
@ -52,6 +55,8 @@ import Unison.Sqlite.Connection (Connection (..))
import Unison.Sqlite.Connection qualified as Connection
import Unison.Sqlite.Exception (SqliteExceptionReason, SqliteQueryException, pattern SqliteBusyException)
import Unison.Sqlite.Sql (Sql)
import Unison.Util.Cache (Cache)
import Unison.Util.Cache qualified as Cache
import UnliftIO.Exception (bracketOnError_, catchAny, trySyncOrAsync, uninterruptibleMask)
import Unsafe.Coerce (unsafeCoerce)
@ -61,8 +66,26 @@ newtype Transaction a
-- Omit MonadThrow instance so we always throw SqliteException (via *Check) with lots of context
deriving (Applicative, Functor, Monad) via (ReaderT Connection IO)
unsafeGetConnection :: Transaction Connection
unsafeGetConnection = Transaction pure
instance Monoid a => Monoid (Transaction a) where
mempty :: Monoid a => Transaction a
mempty = pure mempty
instance Semigroup a => Semigroup (Transaction a) where
(<>) :: Transaction a -> Transaction a -> Transaction a
(<>) = liftA2 (<>)
-- Internal newtype that equips Transaction with a MonadIO instance
newtype TransactionWithMonadIO a
= TransactionWithMonadIO (Transaction a)
deriving newtype (Applicative, Functor, Monad)
unTransactionWithMonadIO :: TransactionWithMonadIO a -> Transaction a
unTransactionWithMonadIO (TransactionWithMonadIO m) = m
instance MonadIO TransactionWithMonadIO where
liftIO :: forall a. IO a -> TransactionWithMonadIO a
liftIO =
coerce @(IO a -> Transaction a) unsafeIO
-- | Run a transaction on the given connection.
runTransaction :: (MonadIO m) => Connection -> Transaction a -> m a
@ -170,10 +193,10 @@ ignoringExceptions :: IO () -> IO ()
ignoringExceptions action =
action `catchAny` \_ -> pure ()
-- | Unwrap the transaction newtype, throwing away the sending of BEGIN/COMMIT + automatic retry.
unsafeUnTransaction :: Transaction a -> Connection -> IO a
unsafeUnTransaction (Transaction action) =
action
-- | Wrap a transaction with a cache; cache hits will not hit SQLite.
cacheTransaction :: forall k v. Cache k v -> (k -> Transaction v) -> (k -> Transaction v)
cacheTransaction cache f k =
unTransactionWithMonadIO (Cache.apply cache (TransactionWithMonadIO . f) k)
-- | Perform an atomic sub-computation within a transaction; if it returns 'Left', it's rolled back.
savepoint :: Transaction (Either a a) -> Transaction a
@ -198,6 +221,15 @@ unsafeIO :: IO a -> Transaction a
unsafeIO action =
Transaction \_ -> action
unsafeGetConnection :: Transaction Connection
unsafeGetConnection =
Transaction pure
-- | Unwrap the transaction newtype, throwing away the sending of BEGIN/COMMIT + automatic retry.
unsafeUnTransaction :: Transaction a -> Connection -> IO a
unsafeUnTransaction (Transaction action) =
action
-- Without results
execute :: Sql -> Transaction ()
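
A hedged sketch of using the new `cacheTransaction`: given any `Cache` (constructed elsewhere with `Unison.Util.Cache`) and any keyed loader, it produces a loader whose cache hits skip SQLite entirely. The `memoizedLoader` name is hypothetical:

```haskell
import Unison.Sqlite (Transaction)
import Unison.Sqlite qualified as Sqlite
import Unison.Util.Cache (Cache)

-- Hypothetical wrapper: consult the cache before running the underlying query;
-- on a miss, run the query inside the enclosing transaction and record the result.
memoizedLoader :: Cache k v -> (k -> Transaction v) -> (k -> Transaction v)
memoizedLoader cache load =
  Sqlite.cacheTransaction cache load
```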

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
-- This file has been generated from package.yaml by hpack version 0.36.0.
--
-- see: https://github.com/sol/hpack
@ -47,6 +47,7 @@ library
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
@ -79,6 +80,7 @@ library
, text-builder
, transformers
, unison-prelude
, unison-util-cache
, unliftio
, unliftio-core
default-language: Haskell2010
@ -106,6 +108,7 @@ test-suite tests
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
KindSignatures
LambdaCase
MultiParamTypeClasses
@ -141,6 +144,7 @@ test-suite tests
, transformers
, unison-prelude
, unison-sqlite
, unison-util-cache
, unliftio
, unliftio-core
default-language: Haskell2010

View File

@ -46,6 +46,7 @@ default-extensions:
- NamedFieldPuns
- NumericUnderscores
- OverloadedLabels
- OverloadedRecordDot
- OverloadedStrings
- PatternSynonyms
- RankNTypes

View File

@ -0,0 +1,101 @@
module Unison.Util.Defns
( Defns (..),
DefnsF,
DefnsF2,
DefnsF3,
DefnsF4,
alignDefnsWith,
defnsAreEmpty,
hoistDefnsF,
mapDefns,
unzipDefns,
unzipDefnsWith,
zipDefns,
zipDefnsWith,
zipDefnsWith3,
)
where
import Data.Align (Semialign, alignWith)
import Data.Bifoldable (Bifoldable, bifoldMap)
import Data.Bitraversable (Bitraversable, bitraverse)
import Data.Semigroup.Generic (GenericSemigroupMonoid (..))
import Data.These (These)
import Unison.Prelude
-- | Definitions (terms and types) in a namespace.
data Defns terms types = Defns
{ terms :: terms,
types :: types
}
deriving stock (Generic, Show)
deriving (Monoid, Semigroup) via GenericSemigroupMonoid (Defns terms types)
instance Bifoldable Defns where
bifoldMap f g (Defns x y) =
f x <> g y
instance Bifunctor Defns where
bimap f g (Defns x y) =
Defns (f x) (g y)
instance Bitraversable Defns where
bitraverse f g (Defns x y) =
Defns <$> f x <*> g y
-- | A common shape of definitions - terms and types are stored in the same structure.
type DefnsF f terms types =
Defns (f terms) (f types)
type DefnsF2 f g terms types =
Defns (f (g terms)) (f (g types))
type DefnsF3 f g h terms types =
Defns (f (g (h terms))) (f (g (h types)))
type DefnsF4 f g h i terms types =
Defns (f (g (h (i terms)))) (f (g (h (i types))))
alignDefnsWith :: Semialign f => (These a b -> c) -> Defns (f a) (f b) -> f c
alignDefnsWith f defns =
alignWith f defns.terms defns.types
defnsAreEmpty :: (Foldable f, Foldable g) => Defns (f a) (g b) -> Bool
defnsAreEmpty defns =
null defns.terms && null defns.types
hoistDefnsF :: (forall x. f x -> g x) -> DefnsF f a b -> DefnsF g a b
hoistDefnsF f (Defns x y) =
Defns (f x) (f y)
mapDefns :: (a -> b) -> Defns a a -> Defns b b
mapDefns f =
bimap f f
unzipDefns :: Defns (tm1, tm2) (ty1, ty2) -> (Defns tm1 ty1, Defns tm2 ty2)
unzipDefns =
unzipDefnsWith id id
unzipDefnsWith :: (tm1 -> (tm2, tm3)) -> (ty1 -> (ty2, ty3)) -> Defns tm1 ty1 -> (Defns tm2 ty2, Defns tm3 ty3)
unzipDefnsWith f g (Defns terms1 types1) =
let (terms2, terms3) = f terms1
(types2, types3) = g types1
in (Defns terms2 types2, Defns terms3 types3)
zipDefns :: Defns tm1 ty1 -> Defns tm2 ty2 -> Defns (tm1, tm2) (ty1, ty2)
zipDefns =
zipDefnsWith (,) (,)
zipDefnsWith :: (tm1 -> tm2 -> tm3) -> (ty1 -> ty2 -> ty3) -> Defns tm1 ty1 -> Defns tm2 ty2 -> Defns tm3 ty3
zipDefnsWith f g (Defns terms1 types1) (Defns terms2 types2) =
Defns (f terms1 terms2) (g types1 types2)
zipDefnsWith3 ::
(tm1 -> tm2 -> tm3 -> tm4) ->
(ty1 -> ty2 -> ty3 -> ty4) ->
Defns tm1 ty1 ->
Defns tm2 ty2 ->
Defns tm3 ty3 ->
Defns tm4 ty4
zipDefnsWith3 f g (Defns terms1 types1) (Defns terms2 types2) (Defns terms3 types3) =
Defns (f terms1 terms2 terms3) (g types1 types2 types3)
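
A small sketch of the `Defns` shape in use: term and type collections stay split while being combined component-wise. The `unionDefns` name and the `Set name` element type are hypothetical; `zipDefnsWith` comes from the module above:

```haskell
import Data.Set (Set)
import Data.Set qualified as Set
import Unison.Util.Defns (Defns, zipDefnsWith)

-- Hypothetical: union the term names and type names of two namespaces,
-- keeping the term/type split intact.
unionDefns ::
  Ord name =>
  Defns (Set name) (Set name) ->
  Defns (Set name) (Set name) ->
  Defns (Set name) (Set name)
unionDefns = zipDefnsWith Set.union Set.union
```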

View File

@ -7,11 +7,6 @@ module Unison.Util.Nametree
-- ** Flattening and unflattening
flattenNametree,
unflattenNametree,
-- * Definitions
Defns (..),
mapDefns,
bimapDefns,
)
where
@ -19,11 +14,10 @@ import Data.List.NonEmpty (NonEmpty, pattern (:|))
import Data.List.NonEmpty qualified as List.NonEmpty
import Data.Map.Strict qualified as Map
import Data.Semialign (Semialign (alignWith), Unzip (unzipWith), Zip (zipWith))
import Data.Semigroup.Generic (GenericSemigroupMonoid (..))
import Data.These (These (..), these)
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment
import Unison.NameSegment (NameSegment)
import Unison.Prelude
import Unison.Util.BiMultimap (BiMultimap)
import Unison.Util.BiMultimap qualified as BiMultimap
@ -34,7 +28,7 @@ data Nametree a = Nametree
{ value :: !a,
children :: !(Map NameSegment (Nametree a))
}
deriving stock (Functor, Generic, Show)
deriving stock (Functor, Foldable, Traversable, Generic, Show)
instance Semialign Nametree where
alignWith :: (These a b -> c) -> Nametree a -> Nametree b -> Nametree c
@ -78,7 +72,7 @@ unfoldNametree f x =
-- > }
-- > }
--
-- into an equivalent-but-flatter association between names and definitions, like
-- into an equivalent-but-flat association between names and definitions, like
--
-- > {
-- > "foo" = #bar,
@ -147,21 +141,3 @@ pattern NameThere :: a -> NonEmpty a -> NonEmpty a
pattern NameThere x xs <- x :| (List.NonEmpty.nonEmpty -> Just xs)
{-# COMPLETE NameHere, NameThere #-}
-- | Definitions (terms and types) in a namespace.
--
-- FIXME this doesn't belong in this module
data Defns terms types = Defns
{ terms :: !terms,
types :: !types
}
deriving stock (Generic, Show)
deriving (Monoid, Semigroup) via GenericSemigroupMonoid (Defns terms types)
mapDefns :: (a -> b) -> Defns a a -> Defns b b
mapDefns f (Defns terms types) =
Defns (f terms) (f types)
bimapDefns :: (terms -> terms') -> (types -> types') -> Defns terms types -> Defns terms' types'
bimapDefns f g (Defns terms types) =
Defns (f terms) (g types)

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
-- This file has been generated from package.yaml by hpack version 0.36.0.
--
-- see: https://github.com/sol/hpack
@ -17,6 +17,7 @@ source-repository head
library
exposed-modules:
Unison.Util.Defns
Unison.Util.Nametree
hs-source-dirs:
src
@ -44,6 +45,7 @@ library
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedRecordDot
OverloadedStrings
PatternSynonyms
RankNTypes

View File

@ -7,6 +7,7 @@ module Unison.Util.BiMultimap
memberDom,
lookupDom,
lookupRan,
unsafeLookupRan,
lookupPreimage,
-- ** Mapping / traversing
@ -83,6 +84,13 @@ lookupRan :: Ord b => b -> BiMultimap a b -> Maybe a
lookupRan b (BiMultimap _ r) =
Map.lookup b r
-- | Look up the @a@ related to a @b@.
--
-- /O(log b)/.
unsafeLookupRan :: Ord b => b -> BiMultimap a b -> a
unsafeLookupRan b (BiMultimap _ r) =
r Map.! b
-- | Look up the preimage of a @b@, that is, the set of @b@ that are related to the same @a@ as the input @b@.
--
-- /O(log a + log b)

10
nix/docker.nix Normal file
View File

@ -0,0 +1,10 @@
{ pkgs, haskell-nix }:
{
ucm = pkgs.dockerTools.buildLayeredImage {
name = "ucm";
tag = "latest";
contents = with pkgs; [ cacert fzf ];
config.Cmd = [ "${haskell-nix."unison-cli:exe:unison"}/bin/unison" ];
};
}

View File

@ -23,6 +23,8 @@ dependencies:
- NanoID
- aeson
- ansi-terminal
- asn1-encoding
- asn1-types
- async
- atomic-primops
- base
@ -60,6 +62,7 @@ dependencies:
- http-media
- http-types
- IntervalMap
- iproute
- lens
- lucid
- megaparsec
@ -73,6 +76,7 @@ dependencies:
- natural-transformation
- network
- network-simple
- network-udp
- network-uri
- nonempty-containers
- open-browser
@ -158,6 +162,7 @@ tests:
- easytest
- filemanip
- split
- hex-text
- unison-parser-typechecker
when:
- condition: false
@ -183,6 +188,7 @@ default-extensions:
- MultiParamTypeClasses
- NamedFieldPuns
- OverloadedLabels
- OverloadedRecordDot
- OverloadedStrings
- PatternSynonyms
- RankNTypes

View File

@ -246,7 +246,10 @@ builtinTypesSrc =
B' "MutableArray" CT.Data,
B' "ImmutableByteArray" CT.Data,
B' "MutableByteArray" CT.Data,
B' "Char.Class" CT.Data
B' "Char.Class" CT.Data,
B' "UDPSocket" CT.Data,
B' "ListenSocket" CT.Data,
B' "ClientSockAddr" CT.Data
]
-- rename these to "builtin" later, when builtin means intrinsic as opposed to
@ -772,6 +775,10 @@ cryptoBuiltins =
[ B "crypto.Ed25519.sign.impl" $
bytes --> bytes --> bytes --> eithert failure bytes,
B "crypto.Ed25519.verify.impl" $
bytes --> bytes --> bytes --> eithert failure boolean,
B "crypto.Rsa.sign.impl" $
bytes --> bytes --> eithert failure bytes,
B "crypto.Rsa.verify.impl" $
bytes --> bytes --> bytes --> eithert failure boolean
]
@ -815,6 +822,17 @@ ioBuiltins =
("IO.serverSocket.impl.v3", optionalt text --> text --> iof socket),
("IO.listen.impl.v3", socket --> iof unit),
("IO.clientSocket.impl.v3", text --> text --> iof socket),
("IO.UDP.clientSocket.impl.v1", text --> text --> iof udpSocket),
("IO.UDP.ClientSockAddr.toText.v1", udpClientSockAddr --> text),
("IO.UDP.UDPSocket.toText.impl.v1", udpSocket --> text),
("IO.UDP.UDPSocket.close.impl.v1", udpSocket --> iof unit),
("IO.UDP.serverSocket.impl.v1", text --> text --> iof udpListenSocket),
("IO.UDP.ListenSocket.recvFrom.impl.v1", udpListenSocket --> iof (tuple [bytes, udpClientSockAddr])),
("IO.UDP.ListenSocket.sendTo.impl.v1", udpListenSocket --> bytes --> udpClientSockAddr --> iof unit),
("IO.UDP.ListenSocket.toText.impl.v1", udpListenSocket --> text),
("IO.UDP.ListenSocket.close.impl.v1", udpListenSocket --> iof unit),
("IO.UDP.UDPSocket.recv.impl.v1", udpSocket --> iof bytes),
("IO.UDP.UDPSocket.send.impl.v1", udpSocket --> bytes --> iof unit),
("IO.closeSocket.impl.v3", socket --> iof unit),
("IO.socketPort.impl.v3", socket --> iof nat),
("IO.socketAccept.impl.v3", socket --> iof socket),
@ -1055,6 +1073,12 @@ handle = Type.fileHandle ()
phandle = Type.processHandle ()
unit = DD.unitType ()
udpSocket, udpListenSocket, udpClientSockAddr :: Type
udpSocket = Type.udpSocket ()
udpListenSocket = Type.udpListenSocket ()
udpClientSockAddr = Type.udpClientSockAddr ()
tls, tlsClientConfig, tlsServerConfig, tlsSignedCert, tlsPrivateKey, tlsVersion, tlsCipher :: Type
tls = Type.ref () Type.tlsRef
tlsClientConfig = Type.ref () Type.tlsClientConfigRef

View File

@ -88,15 +88,11 @@ module Unison.Codebase
-- ** Remote sync
viewRemoteBranch,
importRemoteBranch,
Preprocessing (..),
pushGitBranch,
PushGitBranchOpts (..),
-- * Codebase path
getCodebaseDir,
CodebasePath,
SyncToDir,
-- * Direct codebase access
runTransaction,
@ -114,8 +110,6 @@ module Unison.Codebase
)
where
import Control.Monad.Except (ExceptT (ExceptT), runExceptT)
import Control.Monad.Trans.Except (throwE)
import Data.Map qualified as Map
import Data.Set qualified as Set
import U.Codebase.Branch qualified as V2
@ -130,21 +124,13 @@ import Unison.Codebase.Branch (Branch)
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation (builtinAnnotation))
import Unison.Codebase.CodeLookup qualified as CL
import Unison.Codebase.Editor.Git (withStatus)
import Unison.Codebase.Editor.Git qualified as Git
import Unison.Codebase.Editor.RemoteRepo (ReadGitRemoteNamespace)
import Unison.Codebase.GitError qualified as GitError
import Unison.Codebase.Path
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.SqliteCodebase.Conversions qualified as Cv
import Unison.Codebase.SqliteCodebase.Operations qualified as SqliteCodebase.Operations
import Unison.Codebase.SyncMode (SyncMode)
import Unison.Codebase.Type
( Codebase (..),
GitError (GitCodebaseError),
PushGitBranchOpts (..),
SyncToDir,
)
import Unison.Codebase.Type (Codebase (..), GitError)
import Unison.CodebasePath (CodebasePath, getCodebaseDir)
import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..))
import Unison.DataDeclaration (Decl)
@ -168,7 +154,6 @@ import Unison.Typechecker.TypeLookup (TypeLookup (TypeLookup))
import Unison.Typechecker.TypeLookup qualified as TL
import Unison.UnisonFile qualified as UF
import Unison.Util.Relation qualified as Rel
import Unison.Util.Timing (time)
import Unison.Var (Var)
import Unison.WatchKind qualified as WK
@ -483,40 +468,6 @@ isType c r = case r of
-- * Git stuff
-- | An optional preprocessing step to run on branches
-- before they're imported into the local codebase.
data Preprocessing m
= Unmodified
| Preprocessed (Branch m -> m (Branch m))
-- | Sync elements as needed from a remote git codebase into the local one.
-- If `sch` is supplied, we try to load the specified branch hash;
-- otherwise we try to load the root branch.
importRemoteBranch ::
forall m v a.
(MonadUnliftIO m) =>
Codebase m v a ->
ReadGitRemoteNamespace ->
SyncMode ->
Preprocessing m ->
m (Either GitError (Branch m))
importRemoteBranch codebase ns mode preprocess = runExceptT $ do
branchHash <- ExceptT . viewRemoteBranch' codebase ns Git.RequireExistingBranch $ \(branch, cacheDir) -> do
withStatus "Importing downloaded files into local codebase..." $ do
processedBranch <- preprocessOp branch
time "SyncFromDirectory" $ do
syncFromDirectory codebase cacheDir mode processedBranch
pure $ Branch.headHash processedBranch
time "load fresh local branch after sync" $ do
lift (getBranchForHash codebase branchHash) >>= \case
Nothing -> throwE . GitCodebaseError $ GitError.CouldntLoadSyncedBranch ns branchHash
Just result -> pure $ result
where
preprocessOp :: Branch m -> m (Branch m)
preprocessOp = case preprocess of
Preprocessed f -> f
Unmodified -> pure
-- | Pull a git branch and view it from the cache, without syncing into the
-- local codebase.
viewRemoteBranch ::

View File

@ -14,12 +14,14 @@ module Unison.Codebase.Branch
branch0,
one,
cons,
mergeNode,
uncons,
empty,
empty0,
discardHistory,
discardHistory0,
transform,
transform0,
-- * Branch tests
isEmpty,
@ -98,7 +100,7 @@ import Data.Sequence qualified as Seq
import Data.Set qualified as Set
import Data.These (These (..))
import U.Codebase.Branch.Type (NamespaceStats (..))
import U.Codebase.HashTags (PatchHash (..))
import U.Codebase.HashTags (CausalHash, PatchHash (..))
import Unison.Codebase.Branch.Raw (Raw)
import Unison.Codebase.Branch.Type
( Branch (..),
@ -473,6 +475,15 @@ stepM f = \case
cons :: (Applicative m) => Branch0 m -> Branch m -> Branch m
cons = step . const
-- | Construct a two-parent merge node.
mergeNode :: forall m. Applicative m => Branch0 m -> Branch m -> Branch m -> Branch m
mergeNode child parent1 parent2 =
Branch (Causal.mergeNode child (Map.fromList [f parent1, f parent2]))
where
f :: Branch m -> (CausalHash, m (Causal m (Branch0 m)))
f parent =
(headHash parent, pure (_history parent))
isOne :: Branch m -> Bool
isOne (Branch Causal.One {}) = True
isOne _ = False
@ -722,19 +733,15 @@ transform :: (Functor m) => (forall a. m a -> n a) -> Branch m -> Branch n
transform f b = case _history b of
causal -> Branch . Causal.transform f $ transformB0s f causal
where
transformB0 :: (Functor m) => (forall a. m a -> n a) -> Branch0 m -> Branch0 n
transformB0 f b =
b
{ _children = transform f <$> _children b,
_edits = second f <$> _edits b
}
transformB0s :: (Functor m) => (forall a. m a -> n a) -> Causal m (Branch0 m) -> Causal m (Branch0 n)
transformB0s f = Causal.unsafeMapHashPreserving (transform0 f)
transformB0s ::
(Functor m) =>
(forall a. m a -> n a) ->
Causal m (Branch0 m) ->
Causal m (Branch0 n)
transformB0s f = Causal.unsafeMapHashPreserving (transformB0 f)
transform0 :: (Functor m) => (forall a. m a -> n a) -> Branch0 m -> Branch0 n
transform0 f b =
b
{ _children = transform f <$> _children b,
_edits = second f <$> _edits b
}
-- | Traverse the head branch of all direct children.
-- The index of the traversal is the name of that child branch according to the parent.

View File

@ -1,6 +1,3 @@
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
module Unison.Codebase.Causal
@ -12,6 +9,7 @@ module Unison.Codebase.Causal
one,
cons,
consDistinct,
mergeNode,
uncons,
predecessors,
threeWayMerge,

View File

@ -1,9 +1,12 @@
{-# LANGUAGE DeriveAnyClass #-}
module Unison.Codebase.GitError
( CodebasePath,
GitProtocolError (..),
GitCodebaseError (..),
)
where
module Unison.Codebase.GitError where
import Unison.Codebase.Editor.RemoteRepo (ReadGitRemoteNamespace, ReadGitRepo, WriteGitRepo)
import Unison.Codebase.Path
import Unison.Codebase.Editor.RemoteRepo (ReadGitRepo, WriteGitRepo)
import Unison.Codebase.Path (Path)
import Unison.Codebase.ShortCausalHash (ShortCausalHash)
import Unison.Prelude
@ -30,8 +33,5 @@ data GitProtocolError
data GitCodebaseError h
= NoRemoteNamespaceWithHash ReadGitRepo ShortCausalHash
| RemoteNamespaceHashAmbiguous ReadGitRepo ShortCausalHash (Set h)
| CouldntLoadRootBranch ReadGitRepo h
| CouldntParseRemoteBranch ReadGitRepo String
| CouldntLoadSyncedBranch ReadGitRemoteNamespace h
| CouldntFindRemoteBranch ReadGitRepo Path
deriving (Show)

View File

@ -24,6 +24,7 @@ module Unison.Codebase.Path
prefix,
unprefix,
prefixName,
prefixName2,
unprefixName,
HQSplit,
Split,
@ -192,6 +193,11 @@ prefix (Absolute (Path prefix)) = \case
AbsolutePath' abs -> unabsolute abs
RelativePath' rel -> Path $ prefix <> toSeq (unrelative rel)
prefix2 :: Path -> Path' -> Path
prefix2 (Path prefix) = \case
AbsolutePath' abs -> unabsolute abs
RelativePath' rel -> Path $ prefix <> toSeq (unrelative rel)
-- | Finds the longest shared path prefix of two paths.
-- Returns (shared prefix, path to first location from shared prefix, path to second location from shared prefix)
--
@ -273,6 +279,9 @@ unprefixName prefix = toName . unprefix prefix . fromName'
prefixName :: Absolute -> Name -> Name
prefixName p n = fromMaybe n . toName . prefix p . fromName' $ n
prefixName2 :: Path -> Name -> Name
prefixName2 p n = fromMaybe n . toName . prefix2 p . fromName' $ n
singleton :: NameSegment -> Path
singleton n = fromList [n]

View File

@ -1,10 +1,5 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE ViewPatterns #-}
module Unison.Codebase.SqliteCodebase
( Unison.Codebase.SqliteCodebase.init,
@ -64,8 +59,7 @@ import Unison.Codebase.SqliteCodebase.Migrations qualified as Migrations
import Unison.Codebase.SqliteCodebase.Operations qualified as CodebaseOps
import Unison.Codebase.SqliteCodebase.Paths
import Unison.Codebase.SqliteCodebase.SyncEphemeral qualified as SyncEphemeral
import Unison.Codebase.SyncMode (SyncMode)
import Unison.Codebase.Type (LocalOrRemote (..), PushGitBranchOpts (..))
import Unison.Codebase.Type (GitPushBehavior, LocalOrRemote (..))
import Unison.Codebase.Type qualified as C
import Unison.DataDeclaration (Decl)
import Unison.Hash (Hash)
@ -325,8 +319,8 @@ sqliteCodebase debugName root localOrRemote lockOption migrationStrategy action
withRunInIO \runInIO ->
runInIO (runTransaction (CodebaseOps.putBranch (Branch.transform (Sqlite.unsafeIO . runInIO) branch)))
syncFromDirectory :: Codebase1.CodebasePath -> SyncMode -> Branch m -> m ()
syncFromDirectory srcRoot _syncMode b =
syncFromDirectory :: Codebase1.CodebasePath -> Branch m -> m ()
syncFromDirectory srcRoot b =
withConnection (debugName ++ ".sync.src") srcRoot \srcConn ->
withConn \destConn -> do
progressStateRef <- liftIO (newIORef emptySyncProgressState)
@ -334,8 +328,8 @@ sqliteCodebase debugName root localOrRemote lockOption migrationStrategy action
Sqlite.runWriteTransaction destConn \runDest -> do
syncInternal (syncProgress progressStateRef) runSrc runDest b
syncToDirectory :: Codebase1.CodebasePath -> SyncMode -> Branch m -> m ()
syncToDirectory destRoot _syncMode b =
syncToDirectory :: Codebase1.CodebasePath -> Branch m -> m ()
syncToDirectory destRoot b =
withConn \srcConn ->
withConnection (debugName ++ ".sync.dest") destRoot \destConn -> do
progressStateRef <- liftIO (newIORef emptySyncProgressState)
@ -635,11 +629,11 @@ pushGitBranch ::
(MonadUnliftIO m) =>
Sqlite.Connection ->
WriteGitRepo ->
PushGitBranchOpts ->
GitPushBehavior ->
-- An action which accepts the current root branch on the remote and computes a new branch.
(Branch m -> m (Either e (Branch m))) ->
m (Either C.GitError (Either e (Branch m)))
pushGitBranch srcConn repo (PushGitBranchOpts behavior _syncMode) action = UnliftIO.try do
pushGitBranch srcConn repo behavior action = UnliftIO.try do
-- Pull the latest remote into our git cache
-- Use a local git clone to copy this git repo into a temp-dir
-- Delete the codebase in our temp-dir

View File

@ -80,7 +80,8 @@ migrations getDeclType termBuffer declBuffer rootCodebasePath =
(12, migrateSchema11To12),
sqlMigration 13 Q.addMostRecentNamespaceTable,
sqlMigration 14 Q.addSquashResultTable,
sqlMigration 15 Q.addSquashResultTableIfNotExists
sqlMigration 15 Q.addSquashResultTableIfNotExists,
sqlMigration 16 Q.cdToProjectRoot
]
where
sqlMigration :: SchemaVersion -> Sqlite.Transaction () -> (SchemaVersion, Sqlite.Transaction ())

View File

@ -228,6 +228,13 @@ getDeclComponent h =
decl2 <- Ops.loadDeclComponent h
pure (map (Cv.decl2to1 h) decl2)
-- | Like 'getDeclComponent', for when the decl component is known to exist in the codebase.
expectDeclComponent :: (HasCallStack) => Hash -> Transaction [Decl Symbol Ann]
expectDeclComponent hash =
getDeclComponent hash <&> \case
Nothing -> error (reportBug "E101611" ("decl component " ++ show hash ++ " not found"))
Just decls -> decls
putTermComponent ::
TVar (Map Hash TermBufferEntry) ->
TVar (Map Hash DeclBufferEntry) ->

View File

@ -1,3 +0,0 @@
module Unison.Codebase.SyncMode where
data SyncMode = ShortCircuit | Complete deriving (Eq, Show)

View File

@ -4,10 +4,8 @@
module Unison.Codebase.Type
( Codebase (..),
CodebasePath,
PushGitBranchOpts (..),
GitPushBehavior (..),
GitError (..),
SyncToDir,
LocalOrRemote (..),
gitErrorFromOpenCodebaseError,
)
@ -21,7 +19,6 @@ import Unison.Codebase.Editor.RemoteRepo (ReadGitRemoteNamespace, ReadGitRepo, W
import Unison.Codebase.GitError (GitCodebaseError, GitProtocolError)
import Unison.Codebase.Init.OpenCodebaseError (OpenCodebaseError (..))
import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError (..))
import Unison.Codebase.SyncMode (SyncMode)
import Unison.CodebasePath (CodebasePath)
import Unison.ConstructorType qualified as CT
import Unison.DataDeclaration (Decl)
@ -36,12 +33,6 @@ import Unison.Term (Term)
import Unison.Type (Type)
import Unison.WatchKind qualified as WK
type SyncToDir m =
CodebasePath -> -- dest codebase
SyncMode ->
Branch m -> -- branch to sync to dest codebase
m ()
-- | Abstract interface to a user's codebase.
data Codebase m v a = Codebase
{ -- | Get a user-defined term from the codebase.
@ -86,12 +77,12 @@ data Codebase m v a = Codebase
-- The terms and type declarations that a branch references must already exist in the codebase.
putBranch :: Branch m -> m (),
-- | Copy a branch and all of its dependencies from the given codebase into this one.
syncFromDirectory :: CodebasePath -> SyncMode -> Branch m -> m (),
syncFromDirectory :: CodebasePath -> Branch m -> m (),
-- | Copy a branch and all of its dependencies from this codebase into the given codebase.
syncToDirectory :: CodebasePath -> SyncMode -> Branch m -> m (),
syncToDirectory :: CodebasePath -> Branch m -> m (),
viewRemoteBranch' :: forall r. ReadGitRemoteNamespace -> Git.GitBranchBehavior -> ((Branch m, CodebasePath) -> m r) -> m (Either GitError r),
-- | Push the given branch to the given repo, and optionally set it as the root branch.
pushGitBranch :: forall e. WriteGitRepo -> PushGitBranchOpts -> (Branch m -> m (Either e (Branch m))) -> m (Either GitError (Either e (Branch m))),
pushGitBranch :: forall e. WriteGitRepo -> GitPushBehavior -> (Branch m -> m (Either e (Branch m))) -> m (Either GitError (Either e (Branch m))),
-- | @getWatch k r@ returns watch result @t@ that was previously put by @putWatch k r t@.
getWatch :: WK.WatchKind -> Reference.Id -> Sqlite.Transaction (Maybe (Term v a)),
-- | Get the set of user-defined terms-or-constructors that have the given type.
@ -116,11 +107,6 @@ data LocalOrRemote
| Remote
deriving (Show, Eq, Ord)
data PushGitBranchOpts = PushGitBranchOpts
{ behavior :: GitPushBehavior,
syncMode :: SyncMode
}
data GitPushBehavior
= -- | Don't set root, just sync entities.
GitPushBehaviorGist

View File

@ -356,6 +356,9 @@ builtinConstraintTree =
flip Type.ref Type.filePathRef,
Type.threadId,
Type.socket,
Type.udpSocket,
Type.udpListenSocket,
Type.udpClientSockAddr,
Type.processHandle,
Type.ibytearrayType,
flip Type.ref Type.charClassRef,

View File

@ -10,6 +10,8 @@ module Unison.PrettyPrintEnv
typeName,
termNameOrHashOnly,
typeNameOrHashOnly,
termNameOrHashOnlyFq,
typeNameOrHashOnlyFq,
biasTo,
labeledRefName,
-- | Exported only for cases where the codebase's configured hash length is unavailable.
@ -42,6 +44,7 @@ data PrettyPrintEnv = PrettyPrintEnv
-- names for types; e.g. [(original name, possibly suffixified name)]
typeNames :: Reference -> [(HQ'.HashQualified Name, HQ'.HashQualified Name)]
}
deriving stock (Generic)
allTermNames :: PrettyPrintEnv -> Referent -> [HQ'.HashQualified Name]
allTermNames ppe = fmap snd . termNames ppe
@ -61,6 +64,16 @@ termNameOrHashOnly ppe r = maybe (HQ.fromReferent r) HQ'.toHQ $ terms ppe r
typeNameOrHashOnly :: PrettyPrintEnv -> Reference -> HQ.HashQualified Name
typeNameOrHashOnly ppe r = maybe (HQ.fromReference r) HQ'.toHQ $ types ppe r
-- Like 'termNameOrHashOnly' but returns the fully qualified name
termNameOrHashOnlyFq :: PrettyPrintEnv -> Referent -> HQ.HashQualified Name
termNameOrHashOnlyFq ppe r =
maybe (HQ.fromReferent r) HQ'.toHQ . fmap fst . listToMaybe $ termNames ppe r
-- Like 'typeNameOrHashOnly' but returns the fully qualified name
typeNameOrHashOnlyFq :: PrettyPrintEnv -> Reference -> HQ.HashQualified Name
typeNameOrHashOnlyFq ppe r =
maybe (HQ.fromReference r) HQ'.toHQ . fmap fst . listToMaybe $ typeNames ppe r
patterns :: PrettyPrintEnv -> ConstructorReference -> Maybe (HQ'.HashQualified Name)
patterns ppe r =
terms ppe (Referent.Con r CT.Data)

View File

@ -9,6 +9,7 @@ module Unison.PrettyPrintEnvDecl
where
import Unison.Name (Name)
import Unison.Prelude hiding (empty)
import Unison.PrettyPrintEnv (PrettyPrintEnv (..))
import Unison.PrettyPrintEnv qualified as PPE
@ -24,7 +25,7 @@ data PrettyPrintEnvDecl = PrettyPrintEnvDecl
{ unsuffixifiedPPE :: PrettyPrintEnv,
suffixifiedPPE :: PrettyPrintEnv
}
deriving (Show)
deriving stock (Generic, Show)
-- | Lifts 'biasTo' over a PrettyPrintEnvDecl
biasTo :: [Name] -> PrettyPrintEnvDecl -> PrettyPrintEnvDecl

View File

@ -962,8 +962,8 @@ serializeGroupForRehash fops (Derived h _) sg =
f _ = Nothing
refrep = Map.fromList . mapMaybe f $ groupTermLinks sg
deserializeValue :: ByteString -> Either String Value
deserializeValue bs = runGetS (getVersion >>= getValue) bs
getVersionedValue :: MonadGet m => m Value
getVersionedValue = getVersion >>= getValue
where
getVersion =
getWord32be >>= \case
@ -973,6 +973,9 @@ deserializeValue bs = runGetS (getVersion >>= getValue) bs
| n <= 4 -> pure n
| otherwise -> fail $ "deserializeValue: unknown version: " ++ show n
deserializeValue :: ByteString -> Either String Value
deserializeValue bs = runGetS getVersionedValue bs
serializeValue :: Value -> ByteString
serializeValue v = runPutS (putVersion *> putValue v)
where

View File

@ -11,6 +11,7 @@
-- Checking is toggled using the `arraychecks` flag.
module Unison.Runtime.Array
( module EPA,
byteArrayToList,
readArray,
writeArray,
copyArray,
@ -54,6 +55,9 @@ import Data.Primitive.PrimArray as EPA hiding
)
import Data.Primitive.PrimArray qualified as PA
import Data.Primitive.Types
import Data.Word (Word8)
import GHC.Exts (toList)
#ifdef ARRAY_CHECK
import GHC.Stack
@ -376,3 +380,6 @@ indexPrimArray ::
a
indexPrimArray = checkIPArray "indexPrimArray" PA.indexPrimArray
{-# INLINE indexPrimArray #-}
byteArrayToList :: ByteArray -> [Word8]
byteArrayToList = toList

View File

@ -39,6 +39,7 @@ import Crypto.Error (CryptoError (..), CryptoFailable (..))
import Crypto.Hash qualified as Hash
import Crypto.MAC.HMAC qualified as HMAC
import Crypto.PubKey.Ed25519 qualified as Ed25519
import Crypto.PubKey.RSA.PKCS15 qualified as RSA
import Crypto.Random (getRandomBytes)
import Data.Bits (shiftL, shiftR, (.|.))
import Data.ByteArray qualified as BA
@ -52,6 +53,7 @@ import Data.IORef as SYS
readIORef,
writeIORef,
)
import Data.IP (IP)
import Data.Map qualified as Map
import Data.PEM (PEM, pemContent, pemParseLBS)
import Data.Set (insert)
@ -81,9 +83,23 @@ import Network.Simple.TCP as SYS
import Network.Socket as SYS
( Socket,
accept,
socketPort,
socketPort, PortNumber,
)
import Network.TLS as TLS
import Network.UDP as UDP
( UDPSocket (..),
ClientSockAddr,
ListenSocket,
clientSocket,
close,
recv,
recvFrom,
send,
sendTo,
serverSocket,
stop,
)
import Network.TLS.Extra.Cipher as Cipher
import System.Clock (Clock (..), getTime, nsec, sec)
import System.Directory as SYS
@ -138,6 +154,7 @@ import System.Process as SYS
)
import System.X509 qualified as X
import Unison.ABT.Normalized hiding (TTm)
import Unison.Runtime.Crypto.Rsa as Rsa
import Unison.Builtin qualified as Ty (builtinTypes)
import Unison.Builtin.Decls qualified as Ty
import Unison.Prelude hiding (Text, some)
@ -1544,6 +1561,22 @@ outIoFailBool stack1 stack2 stack3 extra fail result =
)
]
outIoFailTup :: forall v . (Var v) => v -> v -> v -> v -> v -> v -> v -> v -> ANormal v
outIoFailTup stack1 stack2 stack3 stack4 stack5 extra fail result =
TMatch result . MatchSum $
mapFromList
[ failureCase stack1 stack2 stack3 extra fail,
( 1,
([BX, BX],
TAbss [stack1, stack2]
. TLetD stack3 BX (TCon Ty.unitRef 0 [])
. TLetD stack4 BX (TCon Ty.pairRef 0 [stack2, stack3])
. TLetD stack5 BX (TCon Ty.pairRef 0 [stack1, stack4])
$ right stack5
)
)
]
outIoFailG ::
(Var v) =>
v ->
@ -1767,6 +1800,14 @@ boxToEFBox =
where
(arg, result, stack1, stack2, stack3, any, fail) = fresh
-- a -> Either Failure (b, c)
boxToEFTup :: ForeignOp
boxToEFTup =
inBx arg result $
outIoFailTup stack1 stack2 stack3 stack4 stack5 extra fail result
where
(arg, result, stack1, stack2, stack3, stack4, stack5, extra, fail) = fresh
-- a -> Either Failure (Maybe b)
boxToEFMBox :: ForeignOp
boxToEFMBox =
@ -1858,6 +1899,14 @@ boxBoxToEF0 =
where
(arg1, arg2, result, stack1, stack2, stack3, fail, unit) = fresh
-- a -> b -> c -> Either Failure ()
boxBoxBoxToEF0 :: ForeignOp
boxBoxBoxToEF0 =
inBxBxBx arg1 arg2 arg3 result $
outIoFailUnit stack1 stack2 stack3 fail unit result
where
(arg1, arg2, arg3, result, stack1, stack2, stack3, fail, unit) = fresh
-- a -> Either Failure Nat
boxToEFNat :: ForeignOp
boxToEFNat =
@ -2290,8 +2339,64 @@ mkForeignTlsE f = mkForeign $ \a -> fmap flatten (tryIO2 (tryIO1 (f a)))
flatten (Right (Right (Left e))) = Left e
flatten (Right (Right (Right a))) = Right a
declareUdpForeigns :: FDecl Symbol ()
declareUdpForeigns = do
declareForeign Tracked "IO.UDP.clientSocket.impl.v1" boxBoxToEFBox
. mkForeignIOF
$ \(host :: Util.Text.Text, port :: Util.Text.Text) ->
let hostStr = Util.Text.toString host
portStr = Util.Text.toString port
in UDP.clientSocket hostStr portStr True
declareForeign Tracked "IO.UDP.UDPSocket.recv.impl.v1" boxToEFBox
. mkForeignIOF
$ \(sock :: UDPSocket) -> Bytes.fromArray <$> UDP.recv sock
declareForeign Tracked "IO.UDP.UDPSocket.send.impl.v1" boxBoxToEF0
. mkForeignIOF
$ \(sock :: UDPSocket, bytes :: Bytes.Bytes) ->
UDP.send sock (Bytes.toArray bytes)
declareForeign Tracked "IO.UDP.UDPSocket.close.impl.v1" boxToEF0
. mkForeignIOF
$ \(sock :: UDPSocket) -> UDP.close sock
declareForeign Tracked "IO.UDP.ListenSocket.close.impl.v1" boxToEF0
. mkForeignIOF
$ \(sock :: ListenSocket) -> UDP.stop sock
declareForeign Tracked "IO.UDP.UDPSocket.toText.impl.v1" boxDirect
. mkForeign
$ \(sock :: UDPSocket) -> pure $ show sock
declareForeign Tracked "IO.UDP.serverSocket.impl.v1" boxBoxToEFBox
. mkForeignIOF
$ \(ip :: Util.Text.Text, port :: Util.Text.Text) ->
let maybeIp = readMaybe $ Util.Text.toString ip :: Maybe IP
maybePort = readMaybe $ Util.Text.toString port :: Maybe PortNumber
in case (maybeIp, maybePort) of
(Nothing, _) -> fail "Invalid IP Address"
(_, Nothing) -> fail "Invalid Port Number"
(Just ip, Just pt) -> UDP.serverSocket (ip, pt)
declareForeign Tracked "IO.UDP.ListenSocket.toText.impl.v1" boxDirect
. mkForeign
$ \(sock :: ListenSocket) -> pure $ show sock
declareForeign Tracked "IO.UDP.ListenSocket.recvFrom.impl.v1" boxToEFTup .
mkForeignIOF $ fmap (first Bytes.fromArray) <$> UDP.recvFrom
declareForeign Tracked "IO.UDP.ClientSockAddr.toText.v1" boxDirect
. mkForeign
$ \(sock :: ClientSockAddr) -> pure $ show sock
declareForeign Tracked "IO.UDP.ListenSocket.sendTo.impl.v1" boxBoxBoxToEF0 .
mkForeignIOF $ \(socket :: ListenSocket, bytes :: Bytes.Bytes, addr :: ClientSockAddr) ->
UDP.sendTo socket (Bytes.toArray bytes) addr
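These declarations are thin wrappers over the network-udp package; below is a standalone sketch of the underlying calls, mirroring the usage above. The host, port, and payload are placeholder values and the snippet is illustrative rather than part of the change.

-- Illustrative only: open a UDP client socket, send a datagram, read a reply.
import Data.ByteString.Char8 qualified as C8
import Network.UDP qualified as UDP

udpRoundTrip :: IO ()
udpRoundTrip = do
  sock <- UDP.clientSocket "127.0.0.1" "9999" True -- same Bool flag as in the wrapper above
  UDP.send sock (C8.pack "ping")
  reply <- UDP.recv sock
  C8.putStrLn reply
  UDP.close sock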
declareForeigns :: FDecl Symbol ()
declareForeigns = do
declareUdpForeigns
declareForeign Tracked "IO.openFile.impl.v3" boxIomrToEFBox $
mkForeignIOF $ \(fnameText :: Util.Text.Text, n :: Int) ->
let fname = Util.Text.toString fnameText
@ -2830,6 +2935,14 @@ declareForeigns = do
. mkForeign
$ pure . verifyEd25519Wrapper
declareForeign Untracked "crypto.Rsa.sign.impl" boxBoxToEFBox
. mkForeign
$ pure . signRsaWrapper
declareForeign Untracked "crypto.Rsa.verify.impl" boxBoxBoxToEFBool
. mkForeign
$ pure . verifyRsaWrapper
let catchAll :: (MonadCatch m, MonadIO m, NFData a) => m a -> m (Either Util.Text.Text a)
catchAll e = do
e <- Exception.tryAnyDeep e
@ -3471,6 +3584,31 @@ verifyEd25519Wrapper (public0, msg0, sig0) = case validated of
"ed25519: Secret key structure invalid"
errMsg _ = "ed25519: unexpected error"
signRsaWrapper ::
(Bytes.Bytes, Bytes.Bytes) -> Either Failure Bytes.Bytes
signRsaWrapper (secret0, msg0) = case validated of
Left err ->
Left (Failure Ty.cryptoFailureRef err unitValue)
Right secret ->
case RSA.sign Nothing (Just Hash.SHA256) secret msg of
Left err -> Left (Failure Ty.cryptoFailureRef (Rsa.rsaErrorToText err) unitValue)
Right signature -> Right $ Bytes.fromByteString signature
where
msg = Bytes.toArray msg0 :: ByteString
validated = Rsa.parseRsaPrivateKey (Bytes.toArray secret0 :: ByteString)
verifyRsaWrapper ::
(Bytes.Bytes, Bytes.Bytes, Bytes.Bytes) -> Either Failure Bool
verifyRsaWrapper (public0, msg0, sig0) = case validated of
Left err ->
Left $ Failure Ty.cryptoFailureRef err unitValue
Right public ->
Right $ RSA.verify (Just Hash.SHA256) public msg sig
where
msg = Bytes.toArray msg0 :: ByteString
sig = Bytes.toArray sig0 :: ByteString
validated = Rsa.parseRsaPublicKey (Bytes.toArray public0 :: ByteString)
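For reference, a self-contained sketch of the cryptonite calls these wrappers delegate to; it generates a throwaway key pair instead of parsing DER input, and all names are local to the example.

-- Illustrative only: sign a message and verify the signature with RSA/SHA-256.
import Crypto.Hash qualified as Hash
import Crypto.PubKey.RSA qualified as RSA
import Crypto.PubKey.RSA.PKCS15 qualified as PKCS15
import Data.ByteString.Char8 qualified as C8

rsaRoundTrip :: IO Bool
rsaRoundTrip = do
  (pub, priv) <- RSA.generate 128 65537 -- 1024-bit key, for illustration only
  let msg = C8.pack "hello"
  case PKCS15.sign Nothing (Just Hash.SHA256) priv msg of
    Left err -> error (show err)
    Right sig -> pure (PKCS15.verify (Just Hash.SHA256) pub msg sig)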
typeReferences :: [(Reference, Word64)]
typeReferences = zip rs [1 ..]
where

View File

@ -0,0 +1,127 @@
module Unison.Runtime.Crypto.Rsa (
parseRsaPublicKey,
parseRsaPrivateKey,
rsaErrorToText,
) where
import Crypto.Number.Basic qualified as Crypto
import Crypto.PubKey.RSA qualified as RSA
import Data.ASN1.BinaryEncoding qualified as ASN1
import Data.ASN1.BitArray qualified as ASN1
import Data.ASN1.Encoding qualified as ASN1
import Data.ASN1.Error qualified as ASN1
import Data.ASN1.Types qualified as ASN1
import Data.ByteString qualified as BS
import Data.ByteString.Lazy qualified as BSL
import Unison.Util.Text (Text)
import Unison.Util.Text qualified as Util.Text
-- | Parse a RSA public key from a ByteString
-- The input bytestring is a hex-encoded string of the DER file for the public key.
-- It can be generated with those commands:
-- # generate a RSA key of a given size
-- openssl genrsa -out private_key.pem <size>
-- # output the DER format as a hex string
-- openssl rsa -in private_key.pem -outform DER -pubout | xxd -p
parseRsaPublicKey :: BS.ByteString -> Either Text RSA.PublicKey
parseRsaPublicKey bs = case ASN1.decodeASN1 ASN1.DER (BSL.fromStrict bs) of
Left err -> Left $ "rsa: cannot decode as an ASN.1 structure. " <> asn1ErrorToText err
Right asn1 ->
case asn1 of
[ ASN1.Start ASN1.Sequence,
ASN1.Start ASN1.Sequence,
ASN1.OID _,
ASN1.Null,
ASN1.End ASN1.Sequence,
ASN1.BitString (ASN1.BitArray _ bits),
ASN1.End ASN1.Sequence
] -> case ASN1.decodeASN1 ASN1.DER (BSL.fromStrict bits) of
Left err -> Left $ "rsa: cannot decode as an ASN.1 inner structure. " <> asn1ErrorToText err
Right asn1 -> case asn1 of
[ASN1.Start ASN1.Sequence, ASN1.IntVal n, ASN1.IntVal e, ASN1.End ASN1.Sequence] ->
Right
RSA.PublicKey
{ public_size = Crypto.numBytes n,
public_n = n,
public_e = e
}
other -> Left ("rsa: unexpected ASN.1 inner structure for a RSA public key" <> Util.Text.pack (show other))
other -> Left ("rsa: unexpected ASN.1 outer structure for a RSA public key" <> Util.Text.pack (show other))
-- | Parse a RSA private key from a ByteString
-- The input bytestring is a hex-encoded string of the DER file for the private key.
-- It can be generated with those commands:
-- # generate a RSA key of a given size
-- openssl genrsa -out private_key.pem <size>
-- # output the DER format as a hex string
-- openssl rsa -in private_key.pem -outform DER | xxd -p
parseRsaPrivateKey :: BS.ByteString -> Either Text RSA.PrivateKey
parseRsaPrivateKey bs = case ASN1.decodeASN1 ASN1.DER (BSL.fromStrict bs) of
Left err -> Left $ "Error decoding ASN.1: " <> asn1ErrorToText err
Right asn1 ->
case asn1 of
[ ASN1.Start ASN1.Sequence,
ASN1.IntVal 0,
ASN1.Start ASN1.Sequence,
ASN1.OID _,
ASN1.Null,
ASN1.End ASN1.Sequence,
ASN1.OctetString bits,
ASN1.End ASN1.Sequence
] ->
case ASN1.decodeASN1 ASN1.DER (BSL.fromStrict bits) of
Left err -> Left $ "Error decoding inner ASN.1: " <> asn1ErrorToText err
Right asn1 ->
case asn1 of
[ ASN1.Start ASN1.Sequence,
ASN1.IntVal _,
ASN1.IntVal n,
ASN1.IntVal e,
ASN1.IntVal d,
ASN1.IntVal p,
ASN1.IntVal q,
ASN1.IntVal dP,
ASN1.IntVal dQ,
ASN1.IntVal qinv,
ASN1.End ASN1.Sequence
] ->
Right
RSA.PrivateKey
{ private_pub = RSA.PublicKey {public_size = Crypto.numBytes n, public_n = n, public_e = e},
private_d = d,
private_p = p,
private_q = q,
private_dP = dP,
private_dQ = dQ,
private_qinv = qinv
}
other -> Left ("rsa: unexpected inner ASN.1 structure for a RSA private key" <> Util.Text.pack (show other))
other -> Left ("rsa: unexpected outer ASN.1 structure for a RSA private key" <> Util.Text.pack (show other))
-- | Display an ASN1 Error
asn1ErrorToText :: ASN1.ASN1Error -> Text
asn1ErrorToText = \case
ASN1.StreamUnexpectedEOC -> "Unexpected EOC in the stream"
ASN1.StreamInfinitePrimitive -> "Invalid primitive with infinite length in a stream"
ASN1.StreamConstructionWrongSize -> "A construction goes over the size specified in the header"
ASN1.StreamUnexpectedSituation s -> "An unexpected situation has come up parsing an ASN1 event stream: " <> Util.Text.pack s
ASN1.ParsingHeaderFail s -> "Parsing an invalid header: " <> Util.Text.pack s
ASN1.ParsingPartial -> "Parsing is not finished, the key is not complete"
ASN1.TypeNotImplemented s -> "Decoding of a type that is not implemented: " <> Util.Text.pack s
ASN1.TypeDecodingFailed s -> "Decoding of a known type failed: " <> Util.Text.pack s
ASN1.TypePrimitiveInvalid s -> "Invalid primitive type: " <> Util.Text.pack s
ASN1.PolicyFailed s1 s2 -> "Policy failed. Policy name: " <> Util.Text.pack s1 <> ", reason: " <> Util.Text.pack s2
-- | Display a RSA Error
rsaErrorToText :: RSA.Error -> Text
rsaErrorToText = \case
RSA.MessageSizeIncorrect ->
"rsa: The message to decrypt is not of the correct size (need to be == private_size)"
RSA.MessageTooLong ->
"rsa: The message to encrypt is too long"
RSA.MessageNotRecognized ->
"rsa: The message decrypted doesn't have a PKCS15 structure (0 2 .. 0 msg)"
RSA.SignatureTooLong ->
"rsa: The message's digest is too long"
RSA.InvalidParameters ->
"rsa: Some parameters lead to breaking assumptions"

View File

@ -20,12 +20,18 @@ import Unison.Prelude
import Unison.Reference (Reference, pattern Builtin)
import Unison.Referent (pattern Ref)
import Unison.Runtime.ANF (maskTags)
import Unison.Runtime.Array
( Array
, ByteArray
, byteArrayToList
)
import Unison.Runtime.Foreign
( Foreign (..),
HashAlgorithm (..),
maybeUnwrapBuiltin,
maybeUnwrapForeign,
)
import Unison.Runtime.IOSource (iarrayFromListRef, ibarrayFromBytesRef)
import Unison.Runtime.MCode (CombIx (..))
import Unison.Runtime.Stack
( Closure (..),
@ -63,6 +69,8 @@ import Unison.Type
natRef,
termLinkRef,
typeLinkRef,
iarrayRef,
ibytearrayRef,
)
import Unison.Util.Bytes qualified as By
import Unison.Util.Pretty (indentN, lines, lit, syntaxToColor, wrap)
@ -210,6 +218,15 @@ decompileForeign backref topTerms f
_ -> l
| Just l <- maybeUnwrapForeign typeLinkRef f =
pure $ typeLink () l
| Just (a :: Array Closure) <- maybeUnwrapForeign iarrayRef f =
app () (ref () iarrayFromListRef) . list () <$>
traverse (decompile backref topTerms) (toList a)
| Just (a :: ByteArray) <- maybeUnwrapForeign ibytearrayRef f =
pure $
app
()
(ref () ibarrayFromBytesRef)
(decompileBytes . By.fromWord8s $ byteArrayToList a)
| Just s <- unwrapSeq f =
list' () <$> traverse (decompile backref topTerms) s
decompileForeign _ _ (Wrap r _) =

View File

@ -27,6 +27,7 @@ import Data.Primitive (ByteArray, MutableArray, MutableByteArray)
import Data.Tagged (Tagged (..))
import Data.X509 qualified as X509
import Network.Socket (Socket)
import Network.UDP (ListenSocket, UDPSocket, ClientSockAddr)
import Network.TLS qualified as TLS (ClientParams, Context, ServerParams)
import System.Clock (TimeSpec)
import System.IO (Handle)
@ -81,6 +82,10 @@ socketEq :: Socket -> Socket -> Bool
socketEq l r = l == r
{-# NOINLINE socketEq #-}
udpSocketEq :: UDPSocket -> UDPSocket -> Bool
udpSocketEq l r = l == r
{-# NOINLINE udpSocketEq #-}
refEq :: IORef () -> IORef () -> Bool
refEq l r = l == r
{-# NOINLINE refEq #-}
@ -157,6 +162,7 @@ ref2eq r
-- Ditto
| r == Ty.tvarRef = Just $ promote tvarEq
| r == Ty.socketRef = Just $ promote socketEq
| r == Ty.udpSocketRef = Just $ promote udpSocketEq
| r == Ty.refRef = Just $ promote refEq
| r == Ty.threadIdRef = Just $ promote tidEq
| r == Ty.marrayRef = Just $ promote marrEq
@ -230,6 +236,12 @@ instance BuiltinForeign Referent where foreignRef = Tagged Ty.termLinkRef
instance BuiltinForeign Socket where foreignRef = Tagged Ty.socketRef
instance BuiltinForeign ListenSocket where foreignRef = Tagged Ty.udpListenSocketRef
instance BuiltinForeign ClientSockAddr where foreignRef = Tagged Ty.udpClientSockAddrRef
instance BuiltinForeign UDPSocket where foreignRef = Tagged Ty.udpSocketRef
instance BuiltinForeign ThreadId where foreignRef = Tagged Ty.threadIdRef
instance BuiltinForeign TLS.ClientParams where foreignRef = Tagged Ty.tlsClientConfigRef

View File

@ -26,6 +26,7 @@ import Data.Time.Clock.POSIX (POSIXTime)
import Data.Word (Word16, Word32, Word64, Word8)
import GHC.IO.Exception (IOErrorType (..), IOException (..))
import Network.Socket (Socket)
import Network.UDP (UDPSocket)
import System.IO (BufferMode (..), Handle, IOMode, SeekMode)
import Unison.Builtin.Decls qualified as Ty
import Unison.Reference (Reference)
@ -139,6 +140,10 @@ instance ForeignConvention Socket where
readForeign = readForeignBuiltin
writeForeign = writeForeignBuiltin
instance ForeignConvention UDPSocket where
readForeign = readForeignBuiltin
writeForeign = writeForeignBuiltin
instance ForeignConvention ThreadId where
readForeign = readForeignBuiltin
writeForeign = writeForeignBuiltin

View File

@ -486,6 +486,12 @@ pattern ConsoleTextUnderline ct <- Term.App' (Term.Constructor' (ConstructorRefe
pattern ConsoleTextInvert ct <- Term.App' (Term.Constructor' (ConstructorReference ConsoleTextRef ((==) consoleTextInvertId -> True))) ct
iarrayFromListRef :: R.Reference
iarrayFromListRef = termNamed "ImmutableArray.fromList"
ibarrayFromBytesRef :: R.Reference
ibarrayFromBytesRef = termNamed "ImmutableByteArray.fromBytes"
constructorNamed :: R.Reference -> Text -> DD.ConstructorId
constructorNamed ref name =
case runIdentity . getTypeDeclaration codeLookup $ R.unsafeId ref of
@ -986,6 +992,35 @@ syntax.docFormatConsole d =
Image alt _link None -> go alt
Special sf -> Pretty.lit (Left sf)
go d
ImmutableArray.fromList l = Scope.run do
sz = List.size l
dst = Scope.array sz
go i = cases
[] -> ()
x +: xs ->
MutableArray.write dst i x
go (i+1) xs
handle go 0 l with cases
{ r } -> ()
{ raise _ -> _ } -> ()
MutableArray.freeze! dst
ImmutableByteArray.fromBytes : Bytes -> ImmutableByteArray
ImmutableByteArray.fromBytes bs = Scope.run do
sz = Bytes.size bs
arr = Scope.bytearray sz
fill i =
match Bytes.at i bs with
Some b ->
MutableByteArray.write8 arr i b
fill (i + 1)
None -> ()
handle fill 0
with cases
{ _ } -> ()
{ raise _ -> _ } -> ()
MutableByteArray.freeze! arr
|]
type Note = Result.Note Symbol Ann

View File

@ -27,10 +27,11 @@ import Data.Binary.Get (runGetOrFail)
-- import Data.Bits (shiftL)
import Data.ByteString qualified as BS
import Data.ByteString.Lazy qualified as BL
import Data.Bytes.Get (MonadGet)
import Data.Bytes.Get (MonadGet, getWord8, runGetS)
import Data.Bytes.Put (MonadPut, putWord32be, runPutL, runPutS)
import Data.Bytes.Serial
import Data.Foldable
import Data.Function (on)
import Data.IORef
import Data.List qualified as L
import Data.Map.Strict qualified as Map
@ -44,14 +45,17 @@ import Data.Set as Set
(\\),
)
import Data.Set qualified as Set
import Data.Text (isPrefixOf, unpack)
import Data.Text as Text (isPrefixOf, pack, unpack)
import GHC.IO.Exception (IOErrorType (NoSuchThing, OtherError, PermissionDenied), IOException (ioe_description, ioe_type))
import GHC.Stack (callStack)
import Network.Simple.TCP (Socket, acceptFork, listen, recv, send)
import Network.Socket (PortNumber, socketPort)
import System.Directory
( XdgDirectory (XdgCache),
createDirectoryIfMissing,
getXdgDirectory,
)
import System.Environment (getArgs)
import System.Exit (ExitCode (..))
import System.FilePath ((<.>), (</>))
import System.Process
@ -85,6 +89,7 @@ import Unison.Runtime.ANF as ANF
import Unison.Runtime.ANF.Rehash as ANF (rehashGroups)
import Unison.Runtime.ANF.Serialize as ANF
( getGroup,
getVersionedValue,
putGroup,
serializeValue,
)
@ -460,7 +465,18 @@ nativeEval executable ctxVar cl ppe tm = catchInternalErrors $ do
(ctx, codes) <- loadDeps cl ppe ctx tyrs tmrs
(ctx, tcodes, base) <- prepareEvaluation ppe tm ctx
writeIORef ctxVar ctx
nativeEvalInContext executable ppe ctx (codes ++ tcodes) base
-- Note: port 0 means choosing an arbitrary available port.
-- We then ask what port was actually chosen.
listen "127.0.0.1" "0" $ \(serv, _) ->
socketPort serv >>= \port ->
nativeEvalInContext
executable
ppe
ctx
serv
port
(L.nubBy ((==) `on` fst) $ tcodes ++ codes)
base
interpEval ::
ActiveThreads ->
@ -790,14 +806,46 @@ backReferenceTm ws frs irs dcm c i = do
bs <- Map.lookup r dcm
Map.lookup i bs
ucrProc :: FilePath -> [String] -> CreateProcess
ucrProc executable args =
ucrEvalProc :: FilePath -> [String] -> CreateProcess
ucrEvalProc executable args =
(proc executable args)
{ std_in = Inherit,
std_out = Inherit,
std_err = Inherit
}
ucrCompileProc :: FilePath -> [String] -> CreateProcess
ucrCompileProc executable args =
(proc executable args)
{ std_in = CreatePipe,
std_out = Inherit,
std_err = Inherit
}
receiveAll :: Socket -> IO ByteString
receiveAll sock = read []
where
read acc =
recv sock 4096 >>= \case
Just chunk -> read (chunk : acc)
Nothing -> pure . BS.concat $ reverse acc
data NativeResult
= Success Value
| Bug Text Value
| Error Text
deserializeNativeResponse :: ByteString -> NativeResult
deserializeNativeResponse =
run $
getWord8 >>= \case
0 -> Success <$> getVersionedValue
1 -> Bug <$> getText <*> getVersionedValue
2 -> Error <$> getText
_ -> pure $ Error "Unexpected result bytes tag"
where
run e bs = either (Error . pack) id (runGetS e bs)
-- Note: this currently does not support yielding values; instead it
-- just produces a result appropriate for unitary `run` commands. The
-- reason is that the executed code can cause output to occur, which
@ -813,37 +861,53 @@ nativeEvalInContext ::
FilePath ->
PrettyPrintEnv ->
EvalCtx ->
Socket ->
PortNumber ->
[(Reference, SuperGroup Symbol)] ->
Reference ->
IO (Either Error ([Error], Term Symbol))
nativeEvalInContext executable _ ctx codes base = do
nativeEvalInContext executable ppe ctx serv port codes base = do
ensureRuntimeExists executable
let cc = ccache ctx
crs <- readTVarIO $ combRefs cc
-- Seems a bit weird, but apparently this is how we do it
args <- getArgs
let bytes = serializeValue . compileValue base $ codes
decodeResult (Left msg) = pure . Left $ fromString msg
decodeResult (Right val) =
decodeResult (Error msg) = pure . Left $ text msg
decodeResult (Bug msg val) =
reifyValue cc val >>= \case
Left _ -> pure . Left $ "missing references from bug result"
Right cl ->
pure . Left . bugMsg ppe [] msg $ decompileCtx crs ctx cl
decodeResult (Success val) =
reifyValue cc val >>= \case
Left _ -> pure . Left $ "missing references from result"
Right cl -> case decompileCtx crs ctx cl of
(errs, dv) -> pure $ Right (listErrors errs, dv)
callout (Just pin) _ _ ph = do
BS.hPut pin . runPutS . putWord32be . fromIntegral $ BS.length bytes
BS.hPut pin bytes
UnliftIO.hClose pin
let unit = Data RF.unitRef 0 [] []
sunit = Data RF.pairRef 0 [] [unit, unit]
comm mv (sock, _) = do
let encodeNum = runPutS . putWord32be . fromIntegral
send sock . encodeNum $ BS.length bytes
send sock bytes
send sock . encodeNum $ length args
for_ args $ \arg -> do
let bs = encodeUtf8 $ pack arg
send sock . encodeNum $ BS.length bs
send sock bs
UnliftIO.putMVar mv =<< receiveAll sock
callout _ _ _ ph = do
mv <- UnliftIO.newEmptyMVar
tid <- acceptFork serv $ comm mv
waitForProcess ph >>= \case
ExitSuccess -> decodeResult $ Right sunit
ExitFailure _ ->
ExitSuccess ->
decodeResult . deserializeNativeResponse
=<< UnliftIO.takeMVar mv
ExitFailure _ -> do
UnliftIO.killThread tid
pure . Left $ "native evaluation failed"
-- TODO: actually receive output from subprocess
-- decodeResult . deserializeValue =<< BS.hGetContents pout
callout _ _ _ _ =
pure . Left $ "withCreateProcess didn't provide handles"
p = ucrProc executable []
p = ucrEvalProc executable ["-p", show port]
ucrError (e :: IOException) = pure $ Left (runtimeErrMsg (cmdspec p) (Right e))
withCreateProcess p callout
`UnliftIO.catch` ucrError
@ -872,7 +936,7 @@ nativeCompileCodes executable codes base path = do
throwIO $ PE callStack (runtimeErrMsg (cmdspec p) (Right e))
racoError (e :: IOException) =
throwIO $ PE callStack (racoErrMsg (makeRacoCmd RawCommand) (Right e))
p = ucrProc executable ["-G", srcPath]
p = ucrCompileProc executable ["-G", srcPath]
makeRacoCmd :: (FilePath -> [String] -> a) -> a
makeRacoCmd f = f "raco" ["exe", "-o", path, srcPath]
withCreateProcess p callout
@ -953,7 +1017,7 @@ bugMsg ::
Pretty ColorText
bugMsg ppe tr name (errs, tm)
| name == "blank expression" =
P.callout icon . P.lines $
P.callout icon . P.linesNonEmpty $
[ P.wrap
( "I encountered a"
<> P.red (P.text name)
@ -965,7 +1029,7 @@ bugMsg ppe tr name (errs, tm)
stackTrace ppe tr
]
| "pattern match failure" `isPrefixOf` name =
P.callout icon . P.lines $
P.callout icon . P.linesNonEmpty $
[ P.wrap
( "I've encountered a"
<> P.red (P.text name)
@ -980,7 +1044,7 @@ bugMsg ppe tr name (errs, tm)
stackTrace ppe tr
]
| name == "builtin.raise" =
P.callout icon . P.lines $
P.callout icon . P.linesNonEmpty $
[ P.wrap ("The program halted with an unhandled exception:"),
"",
P.indentN 2 $ pretty ppe tm,
@ -990,7 +1054,7 @@ bugMsg ppe tr name (errs, tm)
| name == "builtin.bug",
RF.TupleTerm' [Tm.Text' msg, x] <- tm,
"pattern match failure" `isPrefixOf` msg =
P.callout icon . P.lines $
P.callout icon . P.linesNonEmpty $
[ P.wrap
( "I've encountered a"
<> P.red (P.text msg)
@ -1005,7 +1069,7 @@ bugMsg ppe tr name (errs, tm)
stackTrace ppe tr
]
bugMsg ppe tr name (errs, tm) =
P.callout icon . P.lines $
P.callout icon . P.linesNonEmpty $
[ P.wrap
( "I've encountered a call to"
<> P.red (P.text name)
@ -1018,7 +1082,8 @@ bugMsg ppe tr name (errs, tm) =
]
stackTrace :: PrettyPrintEnv -> [(Reference, Int)] -> Pretty ColorText
stackTrace ppe tr = "Stack trace:\n" <> P.indentN 2 (P.lines $ f <$> tr)
stackTrace _ [] = mempty
stackTrace ppe tr = "\nStack trace:\n" <> P.indentN 2 (P.lines $ f <$> tr)
where
f (rf, n) = name <> count
where
@ -1165,10 +1230,11 @@ listErrors :: Set DecompError -> [Error]
listErrors = fmap (P.indentN 2 . renderDecompError) . toList
tabulateErrors :: Set DecompError -> Error
tabulateErrors errs | null errs = "\n"
tabulateErrors errs | null errs = mempty
tabulateErrors errs =
P.indentN 2 . P.lines $
P.wrap "The following errors occured while decompiling:"
""
: P.wrap "The following errors occured while decompiling:"
: (listErrors errs)
restoreCache :: StoredCache -> IO CCache

View File

@ -1950,8 +1950,10 @@ reserveIds :: Word64 -> TVar Word64 -> IO Word64
reserveIds n free = atomically . stateTVar free $ \i -> (i, i + n)
updateMap :: (Semigroup s) => s -> TVar s -> STM s
updateMap new r = stateTVar r $ \old ->
let total = new <> old in (total, total)
updateMap new0 r = do
new <- evaluateSTM new0
stateTVar r $ \old ->
let total = new <> old in (total, total)
refLookup :: String -> M.Map Reference Word64 -> Reference -> Word64
refLookup s m r
@ -2080,6 +2082,11 @@ checkValueSandboxing cc allowed0 v = do
where
allowed = S.fromList allowed0
-- Just evaluating to force exceptions. Shouldn't actually be that
-- unsafe.
evaluateSTM :: a -> STM a
evaluateSTM x = unsafeIOToSTM (evaluate x)
cacheAdd0 ::
S.Set Reference ->
[(Reference, SuperGroup Symbol)] ->

View File

@ -19,6 +19,7 @@ import Unison.DataDeclaration.Dependencies qualified as DD
import Unison.HashQualified qualified as HQ
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.PrettyPrintEnv qualified as PPE
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (..))
@ -37,10 +38,11 @@ import Unison.Util.Pretty qualified as P
import Unison.Util.SyntaxText qualified as S
import Unison.Var (Var)
import Unison.Var qualified as Var (freshenId, name, named)
import qualified Data.Set as Set
type SyntaxText = S.SyntaxText' Reference
type AccessorName = HQ.HashQualified Name
type AccessorName = Name
prettyDeclW ::
(Var v) =>
@ -48,8 +50,8 @@ prettyDeclW ::
TypeReference ->
HQ.HashQualified Name ->
DD.Decl v a ->
Writer [AccessorName] (Pretty SyntaxText)
prettyDeclW ppe r hq d = case d of
Writer (Set AccessorName) (Pretty SyntaxText)
prettyDeclW ppe r hq = \case
Left e -> pure $ prettyEffectDecl ppe r hq e
Right dd -> prettyDataDecl ppe r hq dd
@ -81,10 +83,7 @@ prettyGADT ::
Pretty SyntaxText
prettyGADT env ctorType r name dd =
P.hang header . P.lines $
constructor
<$> zip
[0 ..]
(DD.constructors' dd)
constructor <$> zip [0 ..] (DD.constructors' dd)
where
constructor (n, (_, _, t)) =
prettyPattern (PPED.unsuffixifiedPPE env) ctorType name (ConstructorReference r n)
@ -116,14 +115,10 @@ prettyDataDecl ::
TypeReference ->
HQ.HashQualified Name ->
DataDeclaration v a ->
Writer [AccessorName] (Pretty SyntaxText)
Writer (Set AccessorName) (Pretty SyntaxText)
prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd =
(header <>)
. P.sep (fmt S.DelimiterChar (" | " `P.orElse` "\n | "))
<$> constructor
`traverse` zip
[0 ..]
(DD.constructors' dd)
(header <>) . P.sep (fmt S.DelimiterChar (" | " `P.orElse` "\n | "))
<$> constructor `traverse` zip [0 ..] (DD.constructors' dd)
where
constructor (n, (_, _, Type.ForallsNamed' _ t)) = constructor' n t
constructor (n, (_, _, t)) = constructor' n t
@ -136,10 +131,10 @@ prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd =
. P.hang' (prettyPattern unsuffixifiedPPE CT.Data name (ConstructorReference r n)) " "
$ P.spaced (runPretty suffixifiedPPE (traverse (TypePrinter.prettyRaw Map.empty 10) (init ts)))
Just fs -> do
tell
tell $ Set.fromList $
[ case accessor of
Nothing -> HQ.NameOnly $ declName `Name.joinDot` fieldName
Just accessor -> HQ.NameOnly $ declName `Name.joinDot` fieldName `Name.joinDot` accessor
Nothing -> declName `Name.joinDot` fieldName
Just accessor -> declName `Name.joinDot` fieldName `Name.joinDot` accessor
| HQ.NameOnly declName <- [name],
fieldName <- fs,
accessor <- [Nothing, Just (Name.fromSegment "set"), Just (Name.fromSegment "modify")]

View File

@ -5,13 +5,14 @@ where
import Control.Lens
import Control.Monad.Reader (asks, local)
import Data.List qualified as List
import Data.List.NonEmpty (pattern (:|))
import Data.Map qualified as Map
import Data.Set qualified as Set
import Data.Text qualified as Text
import Text.Megaparsec qualified as P
import Unison.ABT qualified as ABT
import Unison.DataDeclaration (DataDeclaration)
import Unison.DataDeclaration (DataDeclaration, EffectDeclaration)
import Unison.DataDeclaration qualified as DD
import Unison.DataDeclaration.Records (generateRecordAccessors)
import Unison.Name qualified as Name
@ -21,6 +22,7 @@ import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann)
import Unison.Parser.Ann qualified as Ann
import Unison.Prelude
import Unison.Reference (TypeReferenceId)
import Unison.Syntax.DeclParser (declarations)
import Unison.Syntax.Lexer qualified as L
import Unison.Syntax.Name qualified as Name (toText, unsafeParseVar)
@ -30,12 +32,12 @@ import Unison.Syntax.Var qualified as Var (namespaced)
import Unison.Term (Term)
import Unison.Term qualified as Term
import Unison.UnisonFile (UnisonFile (..))
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Env qualified as UF
import Unison.UnisonFile.Names qualified as UFN
import Unison.Util.List qualified as List
import Unison.Var (Var)
import Unison.Var qualified as Var
import Unison.WatchKind (WatchKind)
import Unison.WatchKind qualified as UF
import Prelude hiding (readFile)
@ -114,29 +116,35 @@ file = do
watches <- case List.validate (traverseOf (traversed . _3) bindNames) watches of
Left es -> resolutionFailures (toList es)
Right ws -> pure ws
let uf =
UnisonFileId
(UF.datasId env)
(UF.effectsId env)
(terms <> join accessors)
(List.multimap watches)
validateUnisonFile uf
pure uf
validateUnisonFile
(UF.datasId env)
(UF.effectsId env)
(terms <> join accessors)
(List.multimap watches)
-- | Final validations and sanity checks to perform before finishing parsing.
validateUnisonFile :: (Var v) => UnisonFile v Ann -> P v m ()
validateUnisonFile uf =
checkForDuplicateTermsAndConstructors uf
validateUnisonFile ::
Ord v =>
Map v (TypeReferenceId, DataDeclaration v Ann) ->
Map v (TypeReferenceId, EffectDeclaration v Ann) ->
[(v, Ann, Term v Ann)] ->
Map WatchKind [(v, Ann, Term v Ann)] ->
P v m (UnisonFile v Ann)
validateUnisonFile datas effects terms watches =
checkForDuplicateTermsAndConstructors datas effects terms watches
-- | Because types and abilities can introduce their own constructors and fields it's difficult
-- to detect all duplicate terms during parsing itself. Here we collect all terms and
-- constructors and verify that no duplicates exist in the file, triggering an error if needed.
checkForDuplicateTermsAndConstructors ::
forall m v.
(Ord v) =>
UnisonFile v Ann ->
P v m ()
checkForDuplicateTermsAndConstructors uf = do
Ord v =>
Map v (TypeReferenceId, DataDeclaration v Ann) ->
Map v (TypeReferenceId, EffectDeclaration v Ann) ->
[(v, Ann, Term v Ann)] ->
Map WatchKind [(v, Ann, Term v Ann)] ->
P v m (UnisonFile v Ann)
checkForDuplicateTermsAndConstructors datas effects terms watches = do
when (not . null $ duplicates) $ do
let dupeList :: [(v, [Ann])]
dupeList =
@ -144,11 +152,18 @@ checkForDuplicateTermsAndConstructors uf = do
& fmap Set.toList
& Map.toList
P.customFailure (DuplicateTermNames dupeList)
pure
UnisonFileId
{ dataDeclarationsId = datas,
effectDeclarationsId = effects,
terms = List.foldl (\acc (v, ann, term) -> Map.insert v (ann, term) acc) Map.empty terms,
watches
}
where
effectDecls :: [DataDeclaration v Ann]
effectDecls = (Map.elems . fmap (DD.toDataDecl . snd) $ (effectDeclarationsId uf))
effectDecls = Map.elems . fmap (DD.toDataDecl . snd) $ effects
dataDecls :: [DataDeclaration v Ann]
dataDecls = fmap snd $ Map.elems (dataDeclarationsId uf)
dataDecls = fmap snd $ Map.elems datas
allConstructors :: [(v, Ann)]
allConstructors =
(dataDecls <> effectDecls)
@ -156,13 +171,13 @@ checkForDuplicateTermsAndConstructors uf = do
& fmap (\(ann, v, _typ) -> (v, ann))
allTerms :: [(v, Ann)]
allTerms =
UF.terms uf
<&> (\(v, bindingAnn, _t) -> (v, bindingAnn))
map (\(v, ann, _term) -> (v, ann)) terms
mergedTerms :: Map v (Set Ann)
mergedTerms =
(allConstructors <> allTerms)
& (fmap . fmap) Set.singleton
& Map.fromListWith (<>)
& Map.fromListWith Set.union
duplicates :: Map v (Set Ann)
duplicates =
-- Any vars with multiple annotations are duplicates.

View File

@ -1,4 +1,3 @@
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE TemplateHaskell #-}
-- | This module is the primary interface to the Unison typechecker

View File

@ -16,6 +16,8 @@ module Unison.UnisonFile
typecheckingTerm,
watchesOfKind,
definitionLocation,
termBindings,
leftBiasedMerge,
-- * TypecheckedUnisonFile
TypecheckedUnisonFile (..),
@ -72,10 +74,41 @@ emptyUnisonFile =
UnisonFileId
{ dataDeclarationsId = Map.empty,
effectDeclarationsId = Map.empty,
terms = [],
terms = Map.empty,
watches = Map.empty
}
leftBiasedMerge :: forall v a. Ord v => UnisonFile v a -> UnisonFile v a -> UnisonFile v a
leftBiasedMerge lhs rhs =
let mergedTerms = Map.foldlWithKey' (addNotIn lhsTermNames) (terms lhs) (terms rhs)
mergedWatches = Map.foldlWithKey' addWatch (watches lhs) (watches rhs)
mergedDataDecls = Map.foldlWithKey' (addNotIn lhsTypeNames) (dataDeclarationsId lhs) (dataDeclarationsId rhs)
mergedEffectDecls = Map.foldlWithKey' (addNotIn lhsTypeNames) (effectDeclarationsId lhs) (effectDeclarationsId rhs)
in UnisonFileId
{ dataDeclarationsId = mergedDataDecls,
effectDeclarationsId = mergedEffectDecls,
terms = mergedTerms,
watches = mergedWatches
}
where
lhsTermNames =
Map.keysSet (terms lhs)
<> foldMap (\x -> Set.fromList [v | (v, _, _) <- x]) (watches lhs)
lhsTypeNames =
Map.keysSet (dataDeclarationsId lhs)
<> Map.keysSet (effectDeclarationsId lhs)
addNotIn :: forall x. Set v -> Map v x -> v -> x -> Map v x
addNotIn namesToAvoid b k v = case Set.member k namesToAvoid of
True -> b
False -> Map.insert k v b
addWatch :: Map WatchKind [(v, a, Term v a)] -> WatchKind -> [(v, a, Term v a)] -> Map WatchKind [(v, a, Term v a)]
addWatch b k v = case filter (\(x, _, _) -> not $ Set.member x lhsTermNames) v of
[] -> b
v -> Map.insertWith (++) k v b
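A rough property sketch of the intended semantics (illustrative only; `lhs` and `rhs` are arbitrary files and the helper is not part of this module): a term name defined on the left, including names bound by left-hand watches, shadows any same-named right-hand definition, while right-only names survive the merge.

-- Illustrative property, not exported: the merged term map keeps every
-- left-hand term plus exactly the right-hand terms whose names are unclaimed.
-- (Map and Set refer to Data.Map.Strict and Data.Set, as elsewhere in this module.)
leftBiasedMergeTermsProp :: Ord v => UnisonFile v a -> UnisonFile v a -> Bool
leftBiasedMergeTermsProp lhs rhs =
  Map.keysSet (terms (leftBiasedMerge lhs rhs))
    == Map.keysSet (terms lhs)
      <> (Map.keysSet (terms rhs) `Set.difference` lhsNames)
  where
    lhsNames =
      Map.keysSet (terms lhs)
        <> foldMap (\ws -> Set.fromList [v | (v, _, _) <- ws]) (watches lhs)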
dataDeclarations :: UnisonFile v a -> Map v (Reference, DataDeclaration v a)
dataDeclarations = fmap (first Reference.DerivedId) . dataDeclarationsId
@ -95,7 +128,7 @@ allWatches = join . Map.elems . watches
-- | Get the location of a given definition in the file.
definitionLocation :: (Var v) => v -> UnisonFile v a -> Maybe a
definitionLocation v uf =
terms uf ^? folded . filteredBy (_1 . only v) . _2
terms uf ^? ix v . _1
<|> watches uf ^? folded . folded . filteredBy (_1 . only v) . _2
<|> dataDeclarations uf ^? ix v . _2 . to DD.annotation
<|> effectDeclarations uf ^? ix v . _2 . to (DD.annotation . DD.toDataDecl)
@ -108,11 +141,15 @@ typecheckingTerm uf =
DD.unitTerm mempty
where
bindings =
terms uf <> testWatches <> watchesOfOtherKinds TestWatch uf
termBindings uf <> testWatches <> watchesOfOtherKinds TestWatch uf
-- we make sure each test has type Test.Result
f w = let wa = ABT.annotation w in Term.ann wa w (DD.testResultListType wa)
testWatches = map (second f) $ watchesOfKind TestWatch uf
termBindings :: UnisonFile v a -> [(v, a, Term v a)]
termBindings uf =
Map.foldrWithKey (\k (a, t) b -> (k, a, t) : b) [] (terms uf)
-- backwards compatibility with the old data type
dataDeclarations' :: TypecheckedUnisonFile v a -> Map v (Reference, DataDeclaration v a)
dataDeclarations' = fmap (first Reference.DerivedId) . dataDeclarationsId'
@ -127,8 +164,8 @@ mapTerms :: (Term v a -> Term v a) -> UnisonFile v a -> UnisonFile v a
mapTerms f (UnisonFileId datas effects terms watches) =
UnisonFileId datas effects terms' watches'
where
terms' = over _3 f <$> terms
watches' = fmap (over _3 f) <$> watches
terms' = over (mapped . _2) f terms
watches' = over (mapped . mapped . _3) f watches
-- | This function should be called in preparation for a call to
-- UnisonFile.rewrite. It prevents the possibility of accidental
@ -157,7 +194,7 @@ mapTerms f (UnisonFileId datas effects terms watches) =
-- then converting back to a "regular" UnisonFile with free variables in the
-- terms.
prepareRewrite :: (Monoid a, Var v) => UnisonFile v a -> ([v] -> Term v a -> Term v a, UnisonFile v a, UnisonFile v a -> UnisonFile v a)
prepareRewrite uf@(UnisonFileId _datas _effects terms watches) =
prepareRewrite uf@(UnisonFileId _datas _effects _terms watches) =
(freshen, mapTerms substs uf, mapTerms refToVar)
where
-- fn to replace free vars with unique refs
@ -178,7 +215,7 @@ prepareRewrite uf@(UnisonFileId _datas _effects terms watches) =
varToRef =
[(v, Term.ref () (Reference.Derived h i)) | (v, i) <- vs `zip` [0 ..]]
where
vs = (view _1 <$> terms) <> (toList watches >>= map (view _1))
vs = (view _1 <$> (termBindings uf)) <> (toList watches >>= map (view _1))
vars = Vector.fromList (fst <$> varToRef)
-- function to convert unique refs back to free variables
refToVar = ABT.rebuildUp' go
@ -194,17 +231,18 @@ prepareRewrite uf@(UnisonFileId _datas _effects terms watches) =
-- This function returns what symbols were modified.
-- The `Set v` is symbols that should be left alone.
rewrite :: (Var v, Eq a) => Set v -> (Term v a -> Maybe (Term v a)) -> UnisonFile v a -> ([v], UnisonFile v a)
rewrite leaveAlone rewriteFn (UnisonFileId datas effects terms watches) =
(rewritten, UnisonFileId datas effects (unEither terms') (unEither <$> watches'))
rewrite leaveAlone rewriteFn uf@(UnisonFileId datas effects _terms watches) =
(rewritten, UnisonFileId datas effects (Map.fromList $ unEitherTerms terms') (unEither <$> watches'))
where
terms' = go terms
terms' = go (termBindings uf)
watches' = go <$> watches
go tms = [(v, a, tm') | (v, a, tm) <- tms, tm' <- f v tm]
where
f v tm | Set.member v leaveAlone = [Left tm]
f _ tm = maybe [Left tm] (pure . Right) (rewriteFn tm)
rewritten = [v | (v, _, Right _) <- terms' <> join (toList watches')]
unEither = fmap (\(v, a, e) -> (v, a, case e of Left tm -> tm; Right tm -> tm))
unEitherTerms = fmap (\(v, a, e) -> (v, (a, either id id e)))
unEither = fmap (\(v, a, e) -> (v, a, either id id e))
typecheckedUnisonFile ::
forall v a.
@ -299,14 +337,14 @@ dependencies :: (Monoid a, Var v) => UnisonFile v a -> Set Reference
dependencies (UnisonFile ds es ts ws) =
foldMap (DD.typeDependencies . snd) ds
<> foldMap (DD.typeDependencies . DD.toDataDecl . snd) es
<> foldMap (Term.dependencies . view _3) ts
<> foldMap (Term.dependencies . snd) ts
<> foldMap (foldMap (Term.dependencies . view _3)) ws
discardTypes :: TypecheckedUnisonFile v a -> UnisonFile v a
discardTypes :: Ord v => TypecheckedUnisonFile v a -> UnisonFile v a
discardTypes (TypecheckedUnisonFileId datas effects terms watches _) =
let watches' = g . mconcat <$> List.multimap watches
g tup3s = [(v, a, e) | (v, a, e, _t) <- tup3s]
in UnisonFileId datas effects [(v, a, trm) | (v, a, trm, _typ) <- join terms] watches'
in UnisonFileId (coerce datas) (coerce effects) (Map.fromList [(v, (a, trm)) | (v, a, trm, _typ) <- join terms]) watches'
declsToTypeLookup :: (Var v) => UnisonFile v a -> TL.TypeLookup v a
declsToTypeLookup uf =

View File

@ -28,7 +28,7 @@ import Unison.Var (Var)
import Unison.Var qualified as Var
import Unison.WatchKind qualified as WK
toNames :: (Var v) => UnisonFile v a -> Names
toNames :: Var v => UnisonFile v a -> Names
toNames uf = datas <> effects
where
datas = foldMap (DD.Names.dataDeclToNames' Name.unsafeParseVar) (Map.toList (UF.dataDeclarationsId uf))
@ -84,10 +84,9 @@ bindNames names (UnisonFileId d e ts ws) = do
-- todo: consider having some kind of binding structure for terms & watches
-- so that you don't weirdly have free vars to tiptoe around.
-- The free vars should just be the things that need to be bound externally.
let termVars = (view _1 <$> ts) ++ (Map.elems ws >>= map (view _1))
termVarsSet = Set.fromList termVars
let termVarsSet = Map.keysSet ts <> Set.fromList (Map.elems ws >>= map (view _1))
-- todo: can we clean up this lambda using something like `second`
ts' <- traverse (\(v, a, t) -> (v,a,) <$> Term.bindNames Name.unsafeParseVar termVarsSet names t) ts
ts' <- traverse (\(a, t) -> (a,) <$> Term.bindNames Name.unsafeParseVar termVarsSet names t) ts
ws' <- traverse (traverse (\(v, a, t) -> (v,a,) <$> Term.bindNames Name.unsafeParseVar termVarsSet names t)) ws
pure $ UnisonFileId d e ts' ws'

View File

@ -82,9 +82,7 @@ mkFileSummary parsed typechecked = case (parsed, typechecked) of
fileNames = UF.typecheckedToNames tf
}
(Just uf@(UF.UnisonFileId {dataDeclarationsId, effectDeclarationsId, terms, watches}), _) ->
let trms =
terms & foldMap \(sym, ann, trm) ->
(Map.singleton sym (ann, Nothing, trm, Nothing))
let trms = (\(ann, trm) -> (ann, Nothing, trm, Nothing)) <$> terms
(testWatches, exprWatches) =
watches & ifoldMap \wk tms ->
tms & foldMap \(v, ann, trm) ->
@ -121,7 +119,7 @@ mkFileSummary parsed typechecked = case (parsed, typechecked) of
getUserTypeAnnotation :: Symbol -> Maybe (Type Symbol Ann)
getUserTypeAnnotation v = do
UF.UnisonFileId {terms, watches} <- parsed
trm <- (terms <> fold watches) ^? folded . filteredBy (_1 . only v) . _3
trm <- terms ^? ix v . _2 <|> watches ^? folded . folded . filteredBy (_1 . only v) . _3
typ <- Term.getTypeAnnotation trm
pure typ

View File

@ -1,4 +1,3 @@
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE RecordWildCards #-}
module Unison.UnisonFile.Type where
@ -18,15 +17,15 @@ import Unison.WatchKind (WatchKind)
data UnisonFile v a = UnisonFileId
{ dataDeclarationsId :: Map v (TypeReferenceId, DataDeclaration v a),
effectDeclarationsId :: Map v (TypeReferenceId, EffectDeclaration v a),
terms :: [(v, a {- ann for whole binding -}, Term v a)],
terms :: Map v (a {- ann for whole binding -}, Term v a),
watches :: Map WatchKind [(v, a {- ann for whole watch -}, Term v a)]
}
deriving (Generic, Show)
deriving stock (Generic, Show)
pattern UnisonFile ::
Map v (TypeReference, DataDeclaration v a) ->
Map v (TypeReference, EffectDeclaration v a) ->
[(v, a, Term v a)] ->
Map v (a, Term v a) ->
Map WatchKind [(v, a, Term v a)] ->
UnisonFile v a
pattern UnisonFile ds es tms ws <-

View File

@ -0,0 +1,48 @@
module Unison.Test.Runtime.Crypto.Rsa where
import Crypto.PubKey.RSA qualified as RSA
import Data.Maybe (fromJust)
import EasyTest
import Text.Hex
import Unison.Runtime.Crypto.Rsa
test :: Test ()
test =
scope "parsing" $
tests
[ scope "parseRsaPublicKey" parseRsaPublicKeyTest,
scope "parseRsaPrivateKey" parseRsaPrivateKeyTest
]
parseRsaPublicKeyTest :: Test ()
parseRsaPublicKeyTest = do
let publicKey = fromJust $ decodeHex "30819f300d06092a864886f70d010101050003818d0030818902818100a7104b2f20725896076e629ccedbcd6907b16694c6e3d8768b5e0e685670b49616e796c588e5aafb92ef986c1a42c021fed0bdc99212c969cdab98087a0ee4c2f4acd4b6049a87a96afc45668329a3cf21a86fb13b488bbe9fefa1cd5a459014f0d0101378e9661e11b73acf54c8a91141ac90309e7fb6ed69b4e63230ab29150203010001"
let actual = parseRsaPublicKey publicKey
let expected =
RSA.PublicKey
{ public_size = 128,
public_n = 117316082691067466889305872575557202673362950667744445659499028356561021937142613205104589546643406309814005581397307365793352915031830083408196867291689544964758311244905648512755140288413724266536406258908443053617981341387254220659107167969619543916073994027510270571746462643891169516098953507692950006037,
public_e = 65537
}
expectEqual actual (Right expected)
parseRsaPrivateKeyTest :: Test ()
parseRsaPrivateKeyTest = do
let privateKey = fromJust $ decodeHex "30820276020100300d06092a864886f70d0101010500048202603082025c02010002818100a7104b2f20725896076e629ccedbcd6907b16694c6e3d8768b5e0e685670b49616e796c588e5aafb92ef986c1a42c021fed0bdc99212c969cdab98087a0ee4c2f4acd4b6049a87a96afc45668329a3cf21a86fb13b488bbe9fefa1cd5a459014f0d0101378e9661e11b73acf54c8a91141ac90309e7fb6ed69b4e63230ab291502030100010281807cdc23a4fc3619d93f8293b728af848d0c0fdd603269d5bd7b99f760a9c22065d08693dbdcddf1f5863306133d694819e04d789aef4e95343b601507b8d9eac4492e6d7031b035c5d84eceaa9686b292712632d33b3303af84314d7920bc3d45f90d7818fc2587b129196d378ee4ed3e6b8d9010d504bb6470ff53e7c5fb17a1024100d67cbcf113d24325fcef12a778dc47c7060055290b68287649ef092558daccb61c4e7bc290740b75a29d4356dcbd66d18b0860dbff394cc8ff3d94d57617adbd024100c765d8261dd3d8e0d3caf11ab7b212eed181354215687ca6387283e4f0be16e79c8f298be0a70c7734dea78ea65128517d693cabfa4c0ff5328f2abb85d2023902403ca41dc347285e65c22251b2d9bfe5e7463217e1b7e0e5f7b3a58a7f6da4c6d60220ca6ad2ee8c42e10bf77afa83ee2af6551315800e52404db1ba7fb398b43d02410084877d85c0177933ddb12a554eb8edfa8b872c85d2c2d2ee8be019280696e19469ab81bab5c371f69d4e4be1f54b45d7fbda017870f1333e0eafb78051ee8689024061f694c12e934c44b7734f62d1b2a3d3624a4980e1b8e066d78dbabd2436654fbb9d9701425900daaafa1e031310e8a580520bb9e1c1288c669fce252bad1e65"
let actual = parseRsaPrivateKey privateKey
let expected =
RSA.PrivateKey
{ private_pub =
RSA.PublicKey
{ public_size = 128,
public_n = 117316082691067466889305872575557202673362950667744445659499028356561021937142613205104589546643406309814005581397307365793352915031830083408196867291689544964758311244905648512755140288413724266536406258908443053617981341387254220659107167969619543916073994027510270571746462643891169516098953507692950006037,
public_e = 65537
},
private_d = 87679616801061623139678211462583995973938243841750319557622746050821908471598979773246073219465960975647341309221073776399960619667883322633274192544886774496262613234964971623744931197514942326521327825606791139576216469817618072158660015124292686556025876602526093941289386692302798356532230087066424907681,
private_p = 11233609214744923027767175501352593646202568021007351512424743595719525825944483790453654486119375677127184086533073126720964060366977171672432803562630589,
private_q = 10443311712951670023099443962737058583295522901049380734330015511797675780053495867511334370071427510893202629294375157939437054042246322949533759718949433,
private_dP = 3176031022781156885141187342486873181111240716865972140527001145690023864823311109042460960576558461960260523664057127500690343997127119244373520564139069,
private_dQ = 6941120510619372179626602981107825119089517097926514417911731475020140673258620725588998791918173107511741662411060736754565186643059761376912904765212297,
private_qinv = 5130749483925715543854508655089227892147425255568362503702389513480166321367311031864242660308321705497233758877799126086240198385610964125158868020698725
}
expectEqual actual (Right expected)

View File

@ -35,7 +35,6 @@ library
Unison.Codebase
Unison.Codebase.Branch
Unison.Codebase.Branch.BranchDiff
Unison.Codebase.Branch.DeclCoherencyCheck
Unison.Codebase.Branch.Merge
Unison.Codebase.Branch.Names
Unison.Codebase.Branch.Raw
@ -85,7 +84,6 @@ library
Unison.Codebase.SqliteCodebase.Operations
Unison.Codebase.SqliteCodebase.Paths
Unison.Codebase.SqliteCodebase.SyncEphemeral
Unison.Codebase.SyncMode
Unison.Codebase.TermEdit
Unison.Codebase.TermEdit.Typing
Unison.Codebase.Type
@ -143,6 +141,7 @@ library
Unison.Runtime.ANF.Serialize
Unison.Runtime.Array
Unison.Runtime.Builtin
Unison.Runtime.Crypto.Rsa
Unison.Runtime.Debug
Unison.Runtime.Decompile
Unison.Runtime.Exception
@ -216,6 +215,7 @@ library
MultiParamTypeClasses
NamedFieldPuns
OverloadedLabels
OverloadedRecordDot
OverloadedStrings
PatternSynonyms
RankNTypes
@ -232,6 +232,8 @@ library
, NanoID
, aeson
, ansi-terminal
, asn1-encoding
, asn1-types
, async
, atomic-primops
, base
@ -268,6 +270,7 @@ library
, http-client
, http-media
, http-types
, iproute
, lens
, lucid
, megaparsec
@ -281,6 +284,7 @@ library
, natural-transformation
, network
, network-simple
, network-udp
, network-uri
, nonempty-containers
, open-browser
@ -370,6 +374,7 @@ test-suite parser-typechecker-tests
Unison.Test.DataDeclaration
Unison.Test.MCode
Unison.Test.Referent
Unison.Test.Runtime.Crypto.Rsa
Unison.Test.Syntax.FileParser
Unison.Test.Syntax.TermParser
Unison.Test.Syntax.TypePrinter
@ -407,6 +412,7 @@ test-suite parser-typechecker-tests
MultiParamTypeClasses
NamedFieldPuns
OverloadedLabels
OverloadedRecordDot
OverloadedStrings
PatternSynonyms
RankNTypes
@ -423,6 +429,8 @@ test-suite parser-typechecker-tests
, NanoID
, aeson
, ansi-terminal
, asn1-encoding
, asn1-types
, async
, atomic-primops
, base
@ -459,9 +467,11 @@ test-suite parser-typechecker-tests
, hashable
, hashtables
, haskeline
, hex-text
, http-client
, http-media
, http-types
, iproute
, lens
, lucid
, megaparsec
@ -475,6 +485,7 @@ test-suite parser-typechecker-tests
, natural-transformation
, network
, network-simple
, network-udp
, network-uri
, nonempty-containers
, open-browser

View File

@ -30,17 +30,31 @@
unison/data-info
unison/chunked-seq
unison/primops
unison/builtin
unison/primops-generated
unison/builtin-generated)
(define (grab-num port)
(integer-bytes->integer (read-bytes 4 port) #f #t 0 4))
; Gets bytes using the expected input format. The format is simple:
;
; - 4 bytes indicating how many bytes follow
; - the actual payload, with size matching the above
(define (grab-bytes)
(let* ([size-bytes (read-bytes 4)]
[size (integer-bytes->integer size-bytes #f #t 0 4)])
(read-bytes size)))
(define (grab-bytes port)
(let ([size (grab-num port)])
(read-bytes size port)))
; Gets args sent after the code payload. Format is:
;
; - 4 bytes indicating how many arguments
; - for each argument
; - 4 bytes indicating length of argument
; - utf-8 bytes of that length
(define (grab-args port)
(let ([n (grab-num port)])
(for/list ([i (range n)])
(bytes->string/utf-8 (grab-bytes port)))))
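
For orientation, this is the framing the host has to produce when it hands work to unison-runtime (over stdin or, further down, the TCP port): a 4-byte big-endian length followed by the payload, then a 4-byte big-endian argument count with each argument length-prefixed the same way and encoded as UTF-8. A rough host-side sketch in Haskell, hand-rolled for illustration rather than the code UCM actually uses:

import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as B
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import System.IO (Handle)

-- 4-byte big-endian length, then the raw bytes.
framed :: BS.ByteString -> B.Builder
framed bs = B.word32BE (fromIntegral (BS.length bs)) <> B.byteString bs

-- Serialized payload first, then the argument block described above.
writeRequest :: Handle -> BS.ByteString -> [T.Text] -> IO ()
writeRequest h payload args =
  B.hPutBuilder h $
    framed payload
      <> B.word32BE (fromIntegral (length args))
      <> foldMap (framed . T.encodeUtf8) args
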
; Reads and decodes the input. First uses `grab-bytes` to read the
; payload, then uses unison functions to deserialize the `Value` that
@ -50,8 +64,8 @@
; definition should be executed. In unison types, it is:
;
; ([(Link.Term, Code)], Link.Term)
(define (decode-input)
(let ([bs (grab-bytes)])
(define (decode-input port)
(let ([bs (grab-bytes port)])
(match (builtin-Value.deserialize (bytes->chunked-bytes bs))
[(unison-data _ t (list q))
(= t ref-either-right:tag)
@ -61,15 +75,67 @@
[else
(raise "unexpected input")])))
(define (natural->bytes/variable n)
(let rec ([i n] [acc '()])
(cond
[(< i #x80) (list->bytes (reverse (cons i acc)))]
[else
(rec (arithmetic-shift i -7)
(cons (bitwise-and i #x7f) acc))])))
(define (write-string-bytes str port)
(define bs (string->bytes/utf-8 str))
(write-bytes (natural->bytes/variable (bytes-length bs)) port)
(write-bytes bs port))
(define (write-value-bytes val port)
(define qval (unison-quote (reflect-value val)))
(define bs (chunked-bytes->bytes (builtin-Value.serialize qval)))
(write-bytes bs port))
(define (encode-success result port)
(write-bytes #"\0" port)
(write-value-bytes result port)
(void))
(define (encode-error ex port)
(match ex
[(exn:bug msg val)
(write-bytes #"\1" port)
(write-string-bytes msg port)
(write-value-bytes val port)]
[else
(write-bytes #"\2" port)
(write-string-bytes (exception->string ex) port)])
(void))
(define (encode-exception fail port)
(write-bytes #"\1" port)
(write-string-bytes "builtin.raise" port)
(write-value-bytes fail port)
(void))
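
Taken together, the reply stream starts with one status byte: 0 for success followed by a serialized Value, 1 for an exn:bug or a raised Unison exception (message, then value), and 2 for any other error (message only). A hedged sketch of the dispatch a reader performs on that byte; the payload is left undecoded here since message lengths use the variable-length natural encoding defined above:

import qualified Data.ByteString as BS
import System.IO (Handle)

data RuntimeReply
  = ReplySuccess BS.ByteString  -- serialized Value
  | ReplyBug BS.ByteString      -- message then value, left undecoded in this sketch
  | ReplyError BS.ByteString    -- message, left undecoded in this sketch

readReply :: Handle -> IO RuntimeReply
readReply h = do
  tag <- BS.hGet h 1
  rest <- BS.hGetContents h
  pure $ case BS.unpack tag of
    [0] -> ReplySuccess rest
    [1] -> ReplyBug rest
    [2] -> ReplyError rest
    _   -> ReplyError rest  -- unknown tag: treated as an error in this sketch
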
(define ((eval-exn-handler port) rq)
(request-case rq
[pure (result) (encode-success result port)]
[ref-exception:typelink
[0 (fail)
(control ref-exception:typelink k
(encode-exception fail port))]]))
; Implements the evaluation mode of operation. First decodes the
; input. Then uses the dynamic loading machinery to add the code to
; the runtime. Finally executes a specified main reference.
(define (do-evaluate)
(let-values ([(code main-ref) (decode-input)])
(define (do-evaluate in out)
(let-values ([(code main-ref) (decode-input in)]
[(args) (list->vector (grab-args in))])
(add-runtime-code 'unison-main code)
(handle [ref-exception:typelink] top-exn-handler
((termlink->proc main-ref))
(data 'unit 0))))
(with-handlers
([exn:bug? (lambda (e) (encode-error e out))])
(parameterize ([current-command-line-arguments args])
(handle [ref-exception:typelink] (eval-exn-handler out)
((termlink->proc main-ref)))))))
; Uses racket pretty printing machinery to instead generate a file
; containing the given code, and which executes the main definition on
@ -89,11 +155,12 @@
; Decodes input and writes a module to the specified file.
(define (do-generate srcf)
(define-values (icode main-ref) (decode-input))
(define-values (icode main-ref) (decode-input (current-input-port)))
(write-module srcf main-ref icode))
(define generate-to (make-parameter #f))
(define show-version (make-parameter #f))
(define use-port-num (make-parameter #f))
(define (handle-command-line)
(command-line
@ -102,14 +169,36 @@
["--version"
"display version"
(show-version #t)]
[("-p" "--port")
port-num
"runtime communication port"
(use-port-num port-num)]
[("-G" "--generate-file")
file
"generate code to <file>"
(generate-to file)]))
(generate-to file)]
#:args remaining
(list->vector remaining)))
(begin
(handle-command-line)
(let ([sub-args (handle-command-line)])
(current-command-line-arguments sub-args))
(cond
[(show-version) (displayln "unison-runtime version 0.0.11")]
[(generate-to) (do-generate (generate-to))]
[else (do-evaluate)]))
[(use-port-num)
(match (string->number (use-port-num))
[port
#:when (port-number? port)
(let-values ([(in out) (tcp-connect "localhost" port)])
(do-evaluate in out)
(close-output-port out)
(close-input-port in))]
[#f
(displayln "could not parse port number")
(exit 1)]
[port
(displayln "bad port number")
(exit 1)])]
[else
(do-evaluate (current-input-port) (open-output-bytes))]))
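
With -p/--port the runtime is the TCP client: it connects to localhost on the given port, evaluates, and closes both directions, so the host must already be listening when it launches the process. A minimal listener sketch using Haskell's network package, illustrative only:

import Network.Socket
import System.IO (Handle, IOMode (ReadWriteMode))

-- Listen on 127.0.0.1:port and accept the single connection that
-- unison-runtime makes after being started with `-p port`.
acceptRuntime :: PortNumber -> IO Handle
acceptRuntime port = do
  sock <- socket AF_INET Stream defaultProtocol
  setSocketOption sock ReuseAddr 1
  bind sock (SockAddrInet port (tupleToHostAddress (127, 0, 0, 1)))
  listen sock 1
  (conn, _peer) <- accept sock
  socketToHandle conn ReadWriteMode
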

View File

@ -47,6 +47,10 @@
builtin-tls.signedcert:typelink
builtin-tls.version:typelink
builtin-udpsocket:typelink
builtin-listensocket:typelink
builtin-clientsockaddr:typelink
bytevector
bytes
control
@ -78,7 +82,7 @@
declare-function-link
declare-code
exn:bug?
(struct-out exn:bug)
exn:bug->exception
exception->string
raise-unison-exception
@ -568,7 +572,7 @@
(let ([disp (describe-value f)])
(raise
(make-exn:bug
(string->chunked-string "builtin.bug")
(string->chunked-string "unhandled top level exception")
disp))))]]))
(begin-encourage-inline
@ -607,5 +611,5 @@
(define (exn:bug->exception b)
(raise-unison-exception
ref-runtimefailure:typelink
(exn:bug-msg b)
(string->chunked-string (exn:bug-msg b))
(exn:bug-val b)))

View File

@ -0,0 +1,4 @@
#lang racket/base
(require unison/udp)
(provide (all-from-out))

View File

@ -37,7 +37,6 @@
bytevector
bytevector-append
directory-contents
current-microseconds
decode-value
@ -227,10 +226,6 @@
(define (current-microseconds)
(fl->fx (* 1000 (current-inexact-milliseconds))))
(define (directory-contents path-str)
(define (extract path) (string->chunked-string (path->string path)))
(map extract (directory-list (chunked-string->string path-str))))
(define (list-head l n)
(let rec ([c l] [m n])
(cond
@ -476,19 +471,17 @@
(next (fx1- i)))))))
(define (write-exn:bug ex port mode)
(when mode
(write-string "<exn:bug " port))
(when mode (write-string "<exn:bug " port))
(let ([recur (case mode
[(#t) write]
[(#f) display]
[else (lambda (v port) (print v port mode))])])
(recur (chunked-string->string (exn:bug-msg ex)) port)
(recur (exn:bug-msg ex) port)
(if mode (write-string " " port) (newline port))
(write-string (describe-value (exn:bug-val ex)) port))
(when mode
(write-string ">")))
(when mode (write-string ">" port)))
(struct exn:bug (msg val)
#:constructor-name make-exn:bug

View File

@ -2,8 +2,14 @@
(require ffi/unsafe
ffi/unsafe/define
racket/exn
racket/runtime-path
(for-syntax racket/base)
openssl/libcrypto
unison/chunked-seq)
unison/chunked-seq
racket/bool
(only-in openssl/sha1 bytes->hex-string hex-string->bytes)
)
(provide (prefix-out unison-FOp-crypto.
(combine-out
@ -17,15 +23,16 @@
HashAlgorithm.Blake2b_256
HashAlgorithm.Blake2b_512
hashBytes
hmacBytes)))
hmacBytes
Ed25519.sign.impl
Ed25519.verify.impl
)))
(define libcrypto
(with-handlers [[exn:fail? exn->string]]
(ffi-lib "libcrypto" openssl-lib-versions)))
(define-runtime-path libb2-so '(so "libb2" ("1" #f)))
(define libb2
(with-handlers [[exn:fail? exn->string]]
(ffi-lib "libb2" '("" "1"))))
(ffi-lib libb2-so '("1" #f))))
(define _EVP-pointer (_cpointer 'EVP))
@ -68,7 +75,7 @@
_int ; key-len
_pointer ; input
_int ; input-len
_pointer ; md
_pointer ; output pointer
_pointer ; null
-> _pointer ; unused
))))
@ -99,6 +106,134 @@
(define HashAlgorithm.Blake2s_256 (lc-algo "EVP_blake2s256" 256))
(define HashAlgorithm.Blake2b_512 (lc-algo "EVP_blake2b512" 512))
(define _EVP_PKEY-pointer (_cpointer 'EVP_PKEY))
(define _EVP_MD_CTX-pointer (_cpointer 'EVP_MD_CTX))
(define EVP_MD_CTX_new
(if (string? libcrypto)
(lambda _ (raise (error 'libcrypto "EVP_MD_CTX_create\n~a" libcrypto)))
(get-ffi-obj "EVP_MD_CTX_new" libcrypto
(_fun -> _EVP_MD_CTX-pointer
))))
; EVP_PKEY_new_raw_private_key(int type, NULL, const unsigned char *key, size_t keylen);
(define EVP_PKEY_new_raw_private_key
(if (string? libcrypto)
(lambda _ (raise (error 'libcrypto "EVP_PKEY_new_raw_private_key\n~a" libcrypto)))
(get-ffi-obj "EVP_PKEY_new_raw_private_key" libcrypto
(_fun
_int ; type
_pointer ; engine (null)
_pointer ; key
_int ; key-len
-> _EVP_PKEY-pointer
))))
; EVP_DigestSignInit(hctx, NULL, EVP_sha256(), NULL, pkey)
(define EVP_DigestSignInit
(if (string? libcrypto)
(lambda _ (raise (error 'libcrypto "EVP_DigestSignInit\n~a" libcrypto)))
(get-ffi-obj "EVP_DigestSignInit" libcrypto
(_fun
_EVP_MD_CTX-pointer
_pointer ; (null)
_pointer ; (null)
_pointer ; (null)
_EVP_PKEY-pointer ; pkey
-> _int
))))
; EVP_DigestSign(hctx, output, output-len-ptr, input-data, input-data-len)
(define EVP_DigestSign
(if (string? libcrypto)
(lambda _ (raise (error 'libcrypto "EVP_DigestSign\n~a" libcrypto)))
(get-ffi-obj "EVP_DigestSign" libcrypto
(_fun
_EVP_MD_CTX-pointer
_pointer ; output
(_ptr o _int) ; output-len (null prolly)
_pointer ; input-data
_int ; input-data-len
-> _int
))))
; EVP_PKEY_new_raw_public_key(int type, NULL, const unsigned char *key, size_t keylen);
(define EVP_PKEY_new_raw_public_key
(if (string? libcrypto)
(lambda _ (raise (error 'libcrypto "EVP_PKEY_new_raw_public_key\n~a" libcrypto)))
(get-ffi-obj "EVP_PKEY_new_raw_public_key" libcrypto
(_fun
_int ; type
_pointer ; engine (null)
_pointer ; key
_int ; key-len
-> _EVP_PKEY-pointer
))))
; int EVP_DigestVerifyInit(EVP_MD_CTX *ctx, EVP_PKEY_CTX **pctx,
; const EVP_MD *type, ENGINE *e, EVP_PKEY *pkey);
(define EVP_DigestVerifyInit
(if (string? libcrypto)
(lambda _ (raise (error 'libcrypto "EVP_DigestVerifyInit\n~a" libcrypto)))
(get-ffi-obj "EVP_DigestVerifyInit" libcrypto
(_fun
_EVP_MD_CTX-pointer
_pointer ; (null)
_pointer ; (null)
_pointer ; (null)
_EVP_PKEY-pointer ; pkey
-> _int
))))
; int EVP_DigestVerify(EVP_MD_CTX *ctx, const unsigned char *sig,
; size_t siglen, const unsigned char *tbs, size_t tbslen);
(define EVP_DigestVerify
(if (string? libcrypto)
(lambda _ (raise (error 'libcrypto "EVP_DigestVerify\n~a" libcrypto)))
(get-ffi-obj "EVP_DigestVerify" libcrypto
(_fun
_EVP_MD_CTX-pointer
_pointer ; signature
_int ; signature-len
_pointer ; input-data
_int ; input-data-len
-> _int
))))
(define EVP_PKEY_ED25519 1087)
(define (evpSign-raw seed input)
(let* ([ctx (EVP_MD_CTX_new)]
[pkey (EVP_PKEY_new_raw_private_key EVP_PKEY_ED25519 #f seed (bytes-length seed))])
(if (false? pkey)
(raise (error "Invalid seed provided."))
(if (<= (EVP_DigestSignInit ctx #f #f #f pkey) 0)
(raise (error "Initializing signing failed"))
(let* ([output (make-bytes 64)])
(if (<= (EVP_DigestSign ctx output input (bytes-length input)) 0)
(raise (error "Running digest failed"))
output))))))
(define (evpVerify-raw public-key input signature)
(let* ([ctx (EVP_MD_CTX_new)]
[pkey (EVP_PKEY_new_raw_public_key EVP_PKEY_ED25519 #f public-key (bytes-length public-key))])
(if (false? pkey)
(raise (error "Invalid seed provided."))
(if (<= (EVP_DigestVerifyInit ctx #f #f #f pkey) 0)
(raise (error "Initializing Verify failed"))
(if (<= (EVP_DigestVerify ctx signature (bytes-length signature) input (bytes-length input)) 0)
#f
#t)))))
(define (Ed25519.sign.impl seed _ignored_pubkey input)
(bytes->chunked-bytes (evpSign-raw (chunked-bytes->bytes seed) (chunked-bytes->bytes input))))
(define (Ed25519.verify.impl public-key input signature)
(evpVerify-raw
(chunked-bytes->bytes public-key)
(chunked-bytes->bytes input)
(chunked-bytes->bytes signature)))
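
Ed25519.sign.impl signs with the 32-byte seed only (the public-key argument is ignored; libcrypto re-derives it) and produces a 64-byte signature, while Ed25519.verify.impl checks a signature against the raw public key. A hedged cross-check of the same operations in Haskell using cryptonite's Crypto.PubKey.Ed25519, which is an assumption of this note rather than part of the diff:

import Crypto.Error (throwCryptoErrorIO)
import qualified Crypto.PubKey.Ed25519 as Ed25519
import qualified Data.ByteString as BS

-- Sign a message with a 32-byte seed and verify it, mirroring
-- Ed25519.sign.impl / Ed25519.verify.impl above.
signAndVerify :: BS.ByteString -> BS.ByteString -> IO Bool
signAndVerify seed msg = do
  sk <- throwCryptoErrorIO (Ed25519.secretKey seed)  -- seed must be 32 bytes
  let pk  = Ed25519.toPublic sk
      sig = Ed25519.sign sk pk msg                   -- 64-byte signature
  pure (Ed25519.verify pk msg sig)
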
; This one isn't provided by libcrypto, for some reason
(define (HashAlgorithm.Blake2b_256) (cons 'blake2b 256))
@ -154,17 +289,17 @@
(hashBytes-raw kind full)))
(define (hmacBytes kind key input)
(let ([key (chunked-bytes->bytes key)]
[input (chunked-bytes->bytes input)])
(bytes->chunked-bytes
(case (car kind)
['blake2b (hmacBlake kind key input)]
[else
(let* ([bytes (/ (cdr kind) 8)]
[output (make-bytes bytes)]
[algo (car kind)])
(HMAC algo key (bytes-length key) input (bytes-length input) output #f)
output)]))))
(bytes->chunked-bytes (hmacBytes-raw kind (chunked-bytes->bytes key) (chunked-bytes->bytes input))))
(define (hmacBytes-raw kind key input)
(case (car kind)
['blake2b (hmacBlake kind key input)]
[else
(let* ([bytes (/ (cdr kind) 8)]
[output (make-bytes bytes)]
[algo (car kind)])
(HMAC algo key (bytes-length key) input (bytes-length input) output #f)
output)]))
; These will only be evaluated by `raco test`
@ -172,67 +307,83 @@
(require rackunit
(only-in openssl/sha1 bytes->hex-string hex-string->bytes))
(test-case "ed25519 sign"
(check-equal?
(bytes->hex-string
(evpSign-raw
(hex-string->bytes "0000000000000000000000000000000000000000000000000000000000000000") #""))
"8f895b3cafe2c9506039d0e2a66382568004674fe8d237785092e40d6aaf483e4fc60168705f31f101596138ce21aa357c0d32a064f423dc3ee4aa3abf53f803"))
(test-case "ed25519 verify"
(check-equal?
(evpVerify-raw
(hex-string->bytes "3b6a27bcceb6a42d62a3a8d02a6f0d73653215771de243a63ac048a18b59da29")
#""
(hex-string->bytes "8f895b3cafe2c9506039d0e2a66382568004674fe8d237785092e40d6aaf483e4fc60168705f31f101596138ce21aa357c0d32a064f423dc3ee4aa3abf53f803")
)
#t))
(test-case "sha1 hmac"
(check-equal?
(bytes->hex-string (hmacBytes (HashAlgorithm.Sha1) #"key" #"message"))
(bytes->hex-string (hmacBytes-raw (HashAlgorithm.Sha1) #"key" #"message"))
"2088df74d5f2146b48146caf4965377e9d0be3a4"))
(test-case "blake2b-256 hmac"
(check-equal?
(bytes->hex-string (hmacBytes (HashAlgorithm.Blake2b_256) #"key" #"message"))
(bytes->hex-string (hmacBytes-raw (HashAlgorithm.Blake2b_256) #"key" #"message"))
"442d98a3872d3f56220f89e2b23d0645610b37c33dd3315ef224d0e39ada6751"))
(test-case "blake2b-512 hmac"
(check-equal?
(bytes->hex-string (hmacBytes (HashAlgorithm.Blake2b_512) #"key" #"message"))
(bytes->hex-string (hmacBytes-raw (HashAlgorithm.Blake2b_512) #"key" #"message"))
"04e9ada930688cde75eec939782eed653073dd621d7643f813702976257cf037d325b50eedd417c01b6ad1f978fbe2980a93d27d854044e8626df6fa279d6680"))
(test-case "blake2s-256 hmac"
(check-equal?
(bytes->hex-string (hmacBytes (HashAlgorithm.Blake2s_256) #"key" #"message"))
(bytes->hex-string (hmacBytes-raw (HashAlgorithm.Blake2s_256) #"key" #"message"))
"bba8fa28708ae80d249e317318c95c859f3f77512be23910d5094d9110454d6f"))
(test-case "md5 basic"
(check-equal?
(bytes->hex-string (hashBytes (HashAlgorithm.Md5) #""))
(bytes->hex-string (hashBytes-raw (HashAlgorithm.Md5) #""))
"d41d8cd98f00b204e9800998ecf8427e"))
(test-case "sha1 basic"
(check-equal?
(bytes->hex-string (hashBytes (HashAlgorithm.Sha1) #""))
(bytes->hex-string (hashBytes-raw (HashAlgorithm.Sha1) #""))
"da39a3ee5e6b4b0d3255bfef95601890afd80709"))
(test-case "sha2-256 basic"
(check-equal?
(bytes->hex-string (hashBytes (HashAlgorithm.Sha2_256) #""))
(bytes->hex-string (hashBytes-raw (HashAlgorithm.Sha2_256) #""))
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"))
(test-case "sha2-512 basic"
(check-equal?
(bytes->hex-string (hashBytes (HashAlgorithm.Sha2_512) #""))
(bytes->hex-string (hashBytes-raw (HashAlgorithm.Sha2_512) #""))
"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e"))
(test-case "sha3-256 basic"
(check-equal?
(bytes->hex-string (hashBytes (HashAlgorithm.Sha3_256) #""))
(bytes->hex-string (hashBytes-raw (HashAlgorithm.Sha3_256) #""))
"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a"))
(test-case "sha3-512 basic"
(check-equal?
(bytes->hex-string (hashBytes (HashAlgorithm.Sha3_512) #""))
(bytes->hex-string (hashBytes-raw (HashAlgorithm.Sha3_512) #""))
"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26"))
(test-case "blake2s_256 basic"
(check-equal?
(bytes->hex-string (hashBytes (HashAlgorithm.Blake2s_256) #""))
(bytes->hex-string (hashBytes-raw (HashAlgorithm.Blake2s_256) #""))
"69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9"))
(test-case "blake2b_256 basic"
(check-equal?
(bytes->hex-string (hashBytes (HashAlgorithm.Blake2b_256) #""))
(bytes->hex-string (hashBytes-raw (HashAlgorithm.Blake2b_256) #""))
"0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8"))
(test-case "blake2b_512 basic"
(check-equal?
(bytes->hex-string (hashBytes (HashAlgorithm.Blake2b_512) #""))
(bytes->hex-string (hashBytes-raw (HashAlgorithm.Blake2b_512) #""))
"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce")))

View File

@ -80,6 +80,9 @@
builtin-timespec:typelink
builtin-threadid:typelink
builtin-value:typelink
builtin-udpsocket:typelink
builtin-listensocket:typelink
builtin-clientsockaddr:typelink
builtin-crypto.hashalgorithm:typelink
builtin-char.class:typelink
@ -440,6 +443,9 @@
(define builtin-timespec:typelink (unison-typelink-builtin "TimeSpec"))
(define builtin-threadid:typelink (unison-typelink-builtin "ThreadId"))
(define builtin-value:typelink (unison-typelink-builtin "Value"))
(define builtin-udpsocket:typelink (unison-typelink-builtin "UDPSocket"))
(define builtin-listensocket:typelink (unison-typelink-builtin "ListenSocket"))
(define builtin-clientsockaddr:typelink (unison-typelink-builtin "ClientSockAddr"))
(define builtin-crypto.hashalgorithm:typelink
(unison-typelink-builtin "crypto.HashAlgorithm"))

View File

@ -14,6 +14,7 @@
(provide
unison-FOp-IO.stdHandle
unison-FOp-IO.openFile.impl.v3
(prefix-out
builtin-IO.
(combine-out
@ -100,13 +101,23 @@
ref-unit-unit)
(ref-either-right char))))
(define-unison (getSomeBytes.impl.v1 handle bytes)
(let* ([buffer (make-bytes bytes)]
(define-unison (getSomeBytes.impl.v1 handle nbytes)
(let* ([buffer (make-bytes nbytes)]
[line (read-bytes-avail! buffer handle)])
(if (eof-object? line)
(ref-either-right (bytes->chunked-bytes #""))
(ref-either-right (bytes->chunked-bytes buffer))
)))
(cond
[(eof-object? line)
(ref-either-right (bytes->chunked-bytes #""))]
[(procedure? line)
(Exception
ref-iofailure:typelink
"getSomeBytes.impl: special value returned"
ref-unit-unit)]
[else
(ref-either-right
(bytes->chunked-bytes
(if (< line nbytes)
(subbytes buffer 0 line)
buffer)))])))
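
The rewrite above matters when fewer than nbytes bytes are available: the old code handed back the whole zero-padded buffer, while the new code trims to the count actually read. bytestring's hGetSome already has exactly that behaviour, shown here only for comparison:

import qualified Data.ByteString as BS
import System.IO (Handle)

-- Returns at most n bytes and only the bytes actually read,
-- the same trimming the Racket code now does with subbytes.
getSomeBytes :: Handle -> Int -> IO BS.ByteString
getSomeBytes = BS.hGetSome
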
(define-unison (getBuffering.impl.v3 handle)
(case (file-stream-buffer-mode handle)
@ -194,6 +205,15 @@
(ref-either-right
(string->chunked-string (bytes->string/utf-8 value))))))
(define (unison-FOp-IO.openFile.impl.v3 fn0 mode)
(define fn (chunked-string->string fn0))
(right (case mode
[(0) (open-input-file fn)]
[(1) (open-output-file fn #:exists 'truncate)]
[(2) (open-output-file fn #:exists 'append)]
[else (open-input-output-file fn #:exists 'can-update)])))
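
The numeric mode decodes as 0 read, 1 write (truncating), 2 append, and anything else read/write. The same mapping spelled out with Haskell's IOMode, an assumed correspondence shown only to name the modes:

import System.IO (IOMode (..))

-- Numeric file mode as interpreted by the Racket `case` above.
decodeFileMode :: Int -> IOMode
decodeFileMode 0 = ReadMode
decodeFileMode 1 = WriteMode      -- truncates, like #:exists 'truncate
decodeFileMode 2 = AppendMode
decodeFileMode _ = ReadWriteMode  -- 'can-update
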
;; From https://github.com/sorawee/shlex/blob/5de06500e8c831cfc8dffb99d57a76decc02c569/main.rkt (MIT License)
;; which is a port of https://github.com/python/cpython/blob/bf2f76ec0976c09de79c8827764f30e3b6fba776/Lib/shlex.py#L325
(define unsafe-pattern #rx"[^a-zA-Z0-9_@%+=:,./-]")

View File

@ -5,6 +5,10 @@
unison/data-info
racket/file
racket/flonum
(only-in racket
date-dst?
date-time-zone-offset
date*-time-zone-name)
(only-in unison/boot data-case define-unison)
(only-in
rnrs/arithmetic/flonums-6
@ -12,6 +16,7 @@
(require racket/file)
(provide
builtin-Clock.internals.systemTimeZone.v1
(prefix-out
unison-FOp-Clock.internals.
(combine-out
@ -35,6 +40,7 @@
renameFile.impl.v3
createDirectory.impl.v3
removeDirectory.impl.v3
directoryContents.impl.v3
setCurrentDirectory.impl.v3
renameDirectory.impl.v3
isDirectory.impl.v3
@ -42,6 +48,13 @@
systemTimeMicroseconds.impl.v3
createTempDirectory.impl.v3)))
(define (failure-result ty msg vl)
(ref-either-left
(ref-failure-failure
ty
(string->chunked-string msg)
(unison-any-any vl))))
(define (getFileSize.impl.v3 path)
(with-handlers
[[exn:fail:filesystem?
@ -81,6 +94,24 @@
(current-directory (chunked-string->string path))
(ref-either-right none))
(define-unison (directoryContents.impl.v3 path)
(with-handlers
[[exn:fail:filesystem?
(lambda (e)
(failure-result
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]]
(let* ([dirps (directory-list (chunked-string->string path))]
[dirss (map path->string dirps)])
(ref-either-right
(vector->chunked-list
(list->vector
(map
string->chunked-string
(list* "." ".." dirss))))))))
(define-unison (createTempDirectory.impl.v3 prefix)
(ref-either-right
(string->chunked-string
@ -117,6 +148,14 @@
(define-unison (systemTimeMicroseconds.impl.v3 unit)
(ref-either-right (inexact->exact (* 1000 (current-inexact-milliseconds)))))
(define-unison (builtin-Clock.internals.systemTimeZone.v1 secs)
(let* ([d (seconds->date secs)])
(list->unison-tuple
(list
(date-time-zone-offset d)
(if (date-dst? d) 1 0)
(date*-time-zone-name d)))))
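
systemTimeZone returns a three-tuple of UTC offset in seconds, a DST flag (1 or 0), and the zone name for the given epoch second. A hedged Haskell counterpart using Data.Time (timeZoneMinutes is in minutes, hence the scaling):

import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Data.Time.LocalTime (getTimeZone, timeZoneMinutes, timeZoneName, timeZoneSummerOnly)

-- (offset in seconds, DST flag as 0/1, zone name) for a given epoch second,
-- mirroring the tuple the Racket builtin above constructs.
systemTimeZone :: Integer -> IO (Int, Int, String)
systemTimeZone secs = do
  tz <- getTimeZone (posixSecondsToUTCTime (fromIntegral secs))
  pure ( timeZoneMinutes tz * 60
       , if timeZoneSummerOnly tz then 1 else 0
       , timeZoneName tz )
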
(define (threadCPUTime.v1)
(right
(integer->time

View File

@ -0,0 +1,31 @@
#lang racket/base
(require racket/exn
unison/data ; exception
unison/data-info ; ref-*
unison/chunked-seq
unison/core) ; exception->string, chunked-string
(provide handle-errors)
(define (handle-errors fn)
(with-handlers
[[exn:fail:network?
(lambda (e)
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]
[exn:fail:contract?
(lambda (e)
(exception
ref-miscfailure:typelink
(exception->string e)
ref-unit-unit))]
[(lambda _ #t)
(lambda (e)
(exception
ref-miscfailure:typelink
(string->chunked-string
(format "Unknown exception ~a" (exn->string e)))
ref-unit-unit))]]
(fn)))

View File

@ -46,6 +46,7 @@
; some exports of internal machinery for use elsewhere
gen-code
reify-value
reflect-value
termlink->name
add-runtime-code
@ -220,10 +221,11 @@
[(unison-termlink-builtin name)
(string-append "builtin-" name)]
[(unison-termlink-derived bs i)
(let ([hs (bytevector->base32-string bs #:alphabet 'hex)]
[po (if (= i 0) "" (string-append "." (number->string i)))])
(let* ([hs (bytevector->base32-string bs #:alphabet 'hex)]
[tm (string-trim hs "=" #:repeat? #t)]
[po (if (= i 0) "" (string-append "." (number->string i)))])
(string->symbol
(string-append "ref-" (substring hs 0 8) po)))]))
(string-append "ref-" tm po)))]))
(define (ref-bytes r)
(sum-case (decode-ref r)
@ -303,8 +305,24 @@
(match v
[(unison-data _ t (list rf rt bs0))
#:when (= t ref-value-data:tag)
(let ([bs (map reify-value (chunked-list->list bs0))])
(make-data (reference->typelink rf) rt bs))]
(let ([bs (map reify-value (chunked-list->list bs0))]
[tl (reference->typelink rf)])
(cond
[(equal? tl builtin-boolean:typelink)
(cond
[(not (null? bs))
(raise
(make-exn:bug
"reify-value: boolean with arguments"
bs0))]
[(= rt 0) #f]
[(= rt 1) #t]
[else
(raise
(make-exn:bug
"reify-value: unknown boolean tag"
rt))])]
[else (make-data tl rt bs)]))]
[(unison-data _ t (list gr bs0))
#:when (= t ref-value-partial:tag)
(let ([bs (map reify-value (chunked-list->list bs0))]
@ -315,11 +333,18 @@
(reify-vlit vl)]
[(unison-data _ t (list bs0 k))
#:when (= t ref-value-cont:tag)
(raise "reify-value: unimplemented cont case")]
(raise
(make-exn:bug
"reify-value: unimplemented cont case"
ref-unit-unit))]
[(unison-data r t fs)
(raise "reify-value: unimplemented data case")]
(raise
(make-exn:bug
"reify-value: unrecognized tag"
ref-unit-unit))]
[else
(raise (format "reify-value: unknown tag"))]))
(raise
(make-exn:bug "reify-value: unrecognized value" v))]))
(define (reflect-typelink tl)
(match tl
@ -353,6 +378,11 @@
(define (reflect-value v)
(match v
[(? boolean?)
(ref-value-data
(reflect-typelink builtin-boolean:typelink)
(if v 1 0) ; boolean pseudo-data tags
empty-chunked-list)]
[(? exact-nonnegative-integer?)
(ref-value-vlit (ref-vlit-pos v))]
[(? exact-integer?)

View File

@ -186,6 +186,29 @@
builtin-TypeLink.toReference
builtin-TypeLink.toReference:termlink
builtin-IO.UDP.clientSocket.impl.v1
builtin-IO.UDP.clientSocket.impl.v1:termlink
builtin-IO.UDP.UDPSocket.recv.impl.v1
builtin-IO.UDP.UDPSocket.recv.impl.v1:termlink
builtin-IO.UDP.UDPSocket.send.impl.v1
builtin-IO.UDP.UDPSocket.send.impl.v1:termlink
builtin-IO.UDP.UDPSocket.close.impl.v1
builtin-IO.UDP.UDPSocket.close.impl.v1:termlink
builtin-IO.UDP.ListenSocket.close.impl.v1
builtin-IO.UDP.ListenSocket.close.impl.v1:termlink
builtin-IO.UDP.UDPSocket.toText.impl.v1
builtin-IO.UDP.UDPSocket.toText.impl.v1:termlink
builtin-IO.UDP.serverSocket.impl.v1
builtin-IO.UDP.serverSocket.impl.v1:termlink
builtin-IO.UDP.ListenSocket.toText.impl.v1
builtin-IO.UDP.ListenSocket.toText.impl.v1:termlink
builtin-IO.UDP.ListenSocket.recvFrom.impl.v1
builtin-IO.UDP.ListenSocket.recvFrom.impl.v1:termlink
builtin-IO.UDP.ClientSockAddr.toText.v1
builtin-IO.UDP.ClientSockAddr.toText.v1:termlink
builtin-IO.UDP.ListenSocket.sendTo.impl.v1
builtin-IO.UDP.ListenSocket.sendTo.impl.v1:termlink
unison-FOp-internal.dataTag
unison-FOp-Char.toText
; unison-FOp-Code.dependencies
@ -314,13 +337,16 @@
unison-FOp-Clock.internals.processCPUTime.v1
unison-FOp-Clock.internals.realtime.v1
unison-FOp-Clock.internals.monotonic.v1
builtin-Clock.internals.systemTimeZone.v1
builtin-Clock.internals.systemTimeZone.v1:termlink
; unison-FOp-Value.serialize
unison-FOp-IO.stdHandle
unison-FOp-IO.getArgs.impl.v1
unison-FOp-IO.directoryContents.impl.v3
builtin-IO.directoryContents.impl.v3
builtin-IO.directoryContents.impl.v3:termlink
unison-FOp-IO.systemTimeMicroseconds.v1
unison-FOp-ImmutableArray.copyTo!
@ -645,6 +671,7 @@
(unison murmurhash)
(unison tls)
(unison tcp)
(unison udp)
(unison gzip)
(unison zlib)
(unison concurrent)
@ -732,6 +759,7 @@
(define-builtin-link IO.getEnv.impl.v1)
(define-builtin-link IO.getChar.impl.v1)
(define-builtin-link IO.getCurrentDirectory.impl.v3)
(define-builtin-link IO.directoryContents.impl.v3)
(define-builtin-link IO.removeDirectory.impl.v3)
(define-builtin-link IO.renameFile.impl.v3)
(define-builtin-link IO.createTempDirectory.impl.v3)
@ -758,6 +786,7 @@
(define-builtin-link Char.Class.is)
(define-builtin-link Scope.bytearrayOf)
(define-builtin-link unsafe.coerceAbilities)
(define-builtin-link Clock.internals.systemTimeZone.v1)
(begin-encourage-inline
(define-unison (builtin-Value.toBuiltin v) (unison-quote v))
@ -902,11 +931,13 @@
(define (unison-POp-LEQT s t) (bool (chunked-string<? s t)))
(define (unison-POp-EQLU x y) (bool (universal=? x y)))
(define (unison-POp-EROR fnm x)
(let-values ([(p g) (open-string-output-port)])
(put-string p (chunked-string->string fnm))
(let-values
([(p g) (open-string-output-port)]
[(snm) (chunked-string->string fnm)])
(put-string p snm)
(put-string p ": ")
(display (describe-value x) p)
(raise (make-exn:bug fnm x))))
(raise (make-exn:bug snm x))))
(define (unison-POp-FTOT f)
(define base (number->string f))
(define dotted
@ -1095,11 +1126,6 @@
(define (unison-FOp-IO.getArgs.impl.v1)
(sum 1 (cdr (command-line))))
(define (unison-FOp-IO.directoryContents.impl.v3 path)
(reify-exn
(lambda ()
(sum 1 (directory-contents path)))))
(define unison-FOp-IO.systemTimeMicroseconds.v1 current-microseconds)
;; TODO should we convert Bytes -> Text directly without the intermediate conversions?
@ -1129,13 +1155,6 @@
(close-output-port h))
(right none))
(define (unison-FOp-IO.openFile.impl.v3 fn mode)
(right (case mode
[(0) (open-file-input-port (chunked-string->string fn))]
[(1) (open-file-output-port (chunked-string->string fn))]
[(2) (open-file-output-port (chunked-string->string fn) 'no-truncate)]
[else (open-file-input/output-port (chunked-string->string fn))])))
(define (unison-FOp-Text.repeat n t)
(let loop ([cnt 0]
[acc empty-chunked-string])
@ -1470,6 +1489,7 @@
(declare-builtin-link builtin-IO.getArgs.impl.v1)
(declare-builtin-link builtin-IO.getEnv.impl.v1)
(declare-builtin-link builtin-IO.getChar.impl.v1)
(declare-builtin-link builtin-IO.directoryContents.impl.v3)
(declare-builtin-link builtin-IO.getCurrentDirectory.impl.v3)
(declare-builtin-link builtin-IO.removeDirectory.impl.v3)
(declare-builtin-link builtin-IO.renameFile.impl.v3)
@ -1495,4 +1515,5 @@
(declare-builtin-link builtin-Char.Class.is)
(declare-builtin-link builtin-Pattern.many.corrected)
(declare-builtin-link builtin-unsafe.coerceAbilities)
(declare-builtin-link builtin-Clock.internals.systemTimeZone.v1)
)

View File

@ -103,6 +103,17 @@
(sandbox-builtin "IO.getFileSize.impl.v3")
(sandbox-builtin "IO.serverSocket.impl.v3")
(sandbox-builtin "Socket.toText")
(sandbox-builtin "UDP.clientSocket.impl.v1")
(sandbox-builtin "UDP.serverSocket.impl.v1")
(sandbox-builtin "UDP.UDPSocket.close.impl.v1")
(sandbox-builtin "UDP.UDPSocket.recv.impl.v1")
(sandbox-builtin "UDP.UDPSocket.send.impl.v1")
(sandbox-builtin "UDP.ListenSocket.close.impl.v1")
(sandbox-builtin "UDP.UDPSocket.toText.impl.v1")
(sandbox-builtin "UDP.ListenSocket.toText.impl.v1")
(sandbox-builtin "UDP.ListenSocket.recvFrom.impl.v1")
(sandbox-builtin "UDP.ClientSockAddr.toText.v1")
(sandbox-builtin "UDP.ListenSocket.sendTo.impl.v1")
(sandbox-builtin "Handle.toText")
(sandbox-builtin "ThreadId.toText")
(sandbox-builtin "IO.socketPort.impl.v3")

View File

@ -6,6 +6,7 @@
unison/data
unison/data-info
unison/chunked-seq
unison/network-utils
unison/core)
(provide
@ -25,29 +26,6 @@
(struct socket-pair (input output))
(define (handle-errors fn)
(with-handlers
[[exn:fail:network?
(lambda (e)
(exception
ref-iofailure:typelink
(exception->string e)
ref-unit-unit))]
[exn:fail:contract?
(lambda (e)
(exception
ref-miscfailure:typelink
(exception->string e)
ref-unit-unit))]
[(lambda _ #t)
(lambda (e)
(exception
ref-miscfailure:typelink
(chunked-string->string
(format "Unknown exception ~a" (exn->string e)))
ref-unit-unit))]]
(fn)))
(define (closeSocket.impl.v3 socket)
(handle-errors
(lambda ()

View File

@ -0,0 +1,179 @@
; UDP primitives!
#lang racket/base
(require racket/udp
racket/format
(only-in unison/boot define-unison)
unison/data
unison/data-info
unison/chunked-seq
(only-in unison/boot sum-case)
unison/network-utils
unison/core)
(provide
(prefix-out
builtin-IO.UDP.
(combine-out
clientSocket.impl.v1
clientSocket.impl.v1:termlink
UDPSocket.recv.impl.v1
UDPSocket.recv.impl.v1:termlink
UDPSocket.send.impl.v1
UDPSocket.send.impl.v1:termlink
UDPSocket.close.impl.v1
UDPSocket.close.impl.v1:termlink
ListenSocket.close.impl.v1
ListenSocket.close.impl.v1:termlink
UDPSocket.toText.impl.v1
UDPSocket.toText.impl.v1:termlink
serverSocket.impl.v1
serverSocket.impl.v1:termlink
ListenSocket.toText.impl.v1
ListenSocket.toText.impl.v1:termlink
ListenSocket.recvFrom.impl.v1
ListenSocket.recvFrom.impl.v1:termlink
ClientSockAddr.toText.v1
ClientSockAddr.toText.v1:termlink
ListenSocket.sendTo.impl.v1
ListenSocket.sendTo.impl.v1:termlink)))
(struct client-sock-addr (host port))
; Haskell's Network.UDP choice of buffer size is 2048, so mirror that here
(define buffer-size 2048)
(define ; a -> Either Failure a
(wrap-in-either a)
(sum-case a
(0 (type msg meta)
(ref-either-left (ref-failure-failure type msg (unison-any-any meta))))
(1 (data)
(ref-either-right data))))
(define
(format-socket socket)
(let*-values ([(local-hn local-port remote-hn remote-port) (udp-addresses socket #t)]
[(rv) (~a "<socket local=" local-hn ":" local-port " remote=" remote-hn ":" remote-port ">")])
(string->chunked-string rv)))
(define (close-socket socket)
(let ([rv (handle-errors (lambda() (begin
(udp-close socket)
(right ref-unit-unit))))])
(wrap-in-either rv)))
;; define termlink builtins
(define clientSocket.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.clientSocket.impl.v1"))
(define UDPSocket.recv.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.UDPSocket.recv.impl.v1"))
(define UDPSocket.send.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.UDPSocket.send.impl.v1"))
(define UDPSocket.close.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.UDPSocket.close.impl.v1"))
(define ListenSocket.close.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.ListenSocket.close.impl.v1"))
(define UDPSocket.toText.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.UDPSocket.toText.impl.v1"))
(define serverSocket.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.serverSocket.impl.v1"))
(define ListenSocket.toText.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.ListenSocket.toText.impl.v1"))
(define ListenSocket.recvFrom.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.ListenSocket.recvFrom.impl.v1"))
(define ClientSockAddr.toText.v1:termlink
(unison-termlink-builtin "IO.UDP.ClientSockAddr.toText.v1"))
(define ListenSocket.sendTo.impl.v1:termlink
(unison-termlink-builtin "IO.UDP.ListenSocket.sendTo.impl.v1"))
;; define builtins
(define-unison
(UDPSocket.recv.impl.v1 socket) ; socket -> Either Failure Bytes
(let
([rv (handle-errors (lambda()
(let*-values
([(buffer) (make-bytes buffer-size)]
[(len a b) (udp-receive! socket buffer)])
(right (bytes->chunked-bytes (subbytes buffer 0 len))))))])
(wrap-in-either rv)))
(define-unison
(ListenSocket.close.impl.v1 socket) ; socket -> Either Failure ()
(close-socket socket))
(define-unison
(serverSocket.impl.v1 ip port) ; string string -> Either Failure socket
(let
([result (handle-errors (lambda()
(let* ([iip (chunked-string->string ip)]
[pport (string->number (chunked-string->string port))]
[sock (udp-open-socket iip pport)])
(begin
(udp-bind! sock iip pport)
(right sock)))))])
(wrap-in-either result)))
(define-unison
(ListenSocket.recvFrom.impl.v1 socket) ; socket -> Either Failure (Bytes, ClientSockAddr)
(let ([result (handle-errors (lambda()
(if (not (udp? socket))
(raise-argument-error 'socket "a UDP socket" socket)
(let*-values
([(buffer) (make-bytes buffer-size)]
[(len host port) (udp-receive! socket buffer)]
[(csa) (client-sock-addr host port)]
[(bs) (subbytes buffer 0 len)]
[(chunked) (bytes->chunked-bytes bs)])
(right (ref-tuple-pair chunked (ref-tuple-pair csa ref-unit-unit)))))))])
(wrap-in-either result)))
(define-unison
(UDPSocket.send.impl.v1 socket data) ; socket -> Bytes -> Either Failure ()
(let
([result (handle-errors (lambda () (begin
(udp-send socket (chunked-bytes->bytes data))
(right ref-unit-unit))))])
(wrap-in-either result)))
(define-unison
(ListenSocket.sendTo.impl.v1 sock bytes addr) ; socket -> Bytes -> ClientSockAddr -> Either Failure ()
(let
([result (handle-errors (lambda()
(let* ([host (client-sock-addr-host addr)]
[port (client-sock-addr-port addr)]
[bytes (chunked-bytes->bytes bytes)])
(begin
(udp-send-to sock host port bytes)
(right ref-unit-unit)))))])
(wrap-in-either result)))
(define-unison
(UDPSocket.toText.impl.v1 socket) ; socket -> string
(format-socket socket))
(define-unison
(ClientSockAddr.toText.v1 addr) ; ClientSockAddr -> string
(string->chunked-string (format "<client-sock-addr ~a ~a>" (client-sock-addr-host addr) (client-sock-addr-port addr))))
(define-unison
(ListenSocket.toText.impl.v1 socket) ; socket -> string
(format-socket socket))
(define-unison
(UDPSocket.close.impl.v1 socket) ; socket -> Either Failure ()
(let
([rv (handle-errors (lambda() (begin
(udp-close socket)
(right ref-unit-unit))))])
(wrap-in-either rv)))
(define-unison
(clientSocket.impl.v1 host port) ; string string -> Either Failure socket
(let ([rv (handle-errors (lambda() (let* ([pport (string->number (chunked-string->string port))]
[hhost (chunked-string->string host)]
[sock (udp-open-socket hhost pport)]
[_ (udp-bind! sock #f 0)]
[res (udp-connect! sock hhost pport)]) (right sock))))])
(wrap-in-either rv)))
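
These builtins are thin wrappers over racket/udp: clientSocket binds an ephemeral port and connects, serverSocket binds the given address and port, ListenSocket.recvFrom hands back the datagram plus a ClientSockAddr, and ListenSocket.sendTo replies to that address. A rough Haskell analogue of the same operations with the network package, an illustration of the semantics rather than the Unison-side API:

import Network.Socket
import Network.Socket.ByteString (recvFrom, sendTo)

-- Bind a UDP socket to host:port (serverSocket.impl.v1's role).
udpServer :: String -> String -> IO Socket
udpServer host port = do
  addr : _ <- getAddrInfo (Just defaultHints {addrSocketType = Datagram}) (Just host) (Just port)
  sock <- socket (addrFamily addr) Datagram defaultProtocol
  bind sock (addrAddress addr)
  pure sock

-- Receive one datagram and its sender (ListenSocket.recvFrom.impl.v1's role),
-- then echo it back (ListenSocket.sendTo.impl.v1's role); 2048 matches buffer-size above.
echoOnce :: Socket -> IO ()
echoOnce sock = do
  (datagram, sender) <- recvFrom sock 2048
  _ <- sendTo sock datagram sender
  pure ()
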

View File

@ -31,13 +31,14 @@
;; ADLER32 implementation
;; https://www.ietf.org/rfc/rfc1950.txt
;; Modified from racket/collects/net/git-checkout.rkt
(define (adler32-through-ports in out)
(define (adler32-through-ports in out #:close-out [close #f])
(define ADLER 65521)
(define bstr (make-bytes 4096))
(let loop ([s1 1] [s2 0])
(define n (read-bytes! bstr in))
(cond
[(eof-object? n)
(when close (close-output-port out))
(bitwise-ior (arithmetic-shift s2 16) s1)]
[else
(write-bytes bstr out 0 n)
@ -65,7 +66,10 @@
(define uncompressed-adler #f)
(define checksum-thread
(thread
(lambda () (set! uncompressed-adler (adler32-through-ports checksum-in o)))))
(lambda ()
(set! uncompressed-adler
(adler32-through-ports checksum-in o)))))
;; Inflate, sending output to checksum (and then to `o`):
(inflate i checksum-out)
(close-output-port checksum-out)
@ -84,11 +88,12 @@
(define uncompressed-adler #f)
(define checksum-thread
(thread
(lambda () (set! uncompressed-adler (adler32-through-ports i checksum-out)))))
(lambda ()
(set! uncompressed-adler
(adler32-through-ports i checksum-out #:close-out #t)))))
(sync checksum-thread)
(close-output-port checksum-out)
(deflate checksum-in o)
(sync checksum-thread)
(write-bytes (integer->integer-bytes uncompressed-adler 4 #f #t) o)
(void))

View File

@ -6,4 +6,4 @@ true \
&& stack exec transcripts \
&& stack exec unison transcript unison-src/transcripts-round-trip/main.md \
&& stack exec unison transcript unison-src/transcripts-manual/rewrites.md \
&& stack exec integration-tests
&& stack exec cli-integration-tests

View File

@ -22,7 +22,7 @@ getHash() {
if [[ -z "$name" ]]; then
name="${parts[i]}"
else
name="$name/${parts[i]}"
name="$name%2F${parts[i]}"
fi
done
fi

View File

@ -38,7 +38,7 @@ if ! [[ "$1" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]] ; then
fi
version="${1}"
target=${2:-origin/trunk}
src=${2:-origin/trunk}
tag="release/$version"
echo "Creating release in unison-local-ui."
@ -50,7 +50,7 @@ gh release create "release/${version}" \
echo "Kicking off release workflow in unisonweb/unison"
# Make sure our origin/trunk ref is up to date, since that's usually what gets tagged.
git fetch origin trunk
git tag "${tag}" "${target}"
git tag "${tag}" "${src}"
git push origin "${tag}"
gh workflow run release --repo unisonweb/unison \
--ref "${tag}" \

View File

@ -38,6 +38,7 @@ packages:
- unison-cli-main
- unison-core
- unison-hashing-v2
- unison-merge
- unison-share-api
- unison-share-projects-api
- unison-syntax
@ -65,6 +66,7 @@ extra-deps:
- lsp-2.2.0.0@sha256:82fbf4b69d94d8d22543be71f89986b3e90050032d671fb3de3f8253ea1e5b6f,3550
- lsp-types-2.0.2.0@sha256:a9a51c3cea0726d91fe63fa0670935ee720f7b31bc3f3b33b2483fc538152677,29421
- row-types-1.0.1.2@sha256:4d4c7cb95d06a32b28ba977852d52a26b4c1f695ef083a6fd874ab6d79933b64,3071
- network-udp-0.0.0
ghc-options:
# All packages

View File

@ -82,6 +82,13 @@ packages:
size: 1060
original:
hackage: row-types-1.0.1.2@sha256:4d4c7cb95d06a32b28ba977852d52a26b4c1f695ef083a6fd874ab6d79933b64,3071
- completed:
hackage: network-udp-0.0.0@sha256:408d2d4fa1a25e49e95752ee124cca641993404bb133ae10fb81daef22d876ae,1075
pantry-tree:
sha256: ee19a66c9d420861c5cc1dfad3210e2a53cdc6088ff3dd90b44f7961f5caebee
size: 284
original:
hackage: network-udp-0.0.0
snapshots:
- completed:
sha256: 5a59b2a405b3aba3c00188453be172b85893cab8ebc352b1ef58b0eae5d248a2

View File

@ -34,9 +34,9 @@ main = do
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These new definitions are ok to `add`:
structural ability Break
type MyBool
main : '{IO, Exception} ()
@ -47,7 +47,7 @@ main = do
.> add
⍟ I've added these definitions:
structural ability Break
type MyBool
main : '{IO, Exception} ()

View File

@ -19,6 +19,7 @@ dependencies:
- base
- bytes
- bytestring
- co-log-core
- code-page
- concurrent-output
- configurator
@ -31,9 +32,9 @@ dependencies:
- extra
- filepath
- free
- friendly-time
- fsnotify
- fuzzyfind
- friendly-time
- generic-lens
- haskeline
- http-client >= 0.7.6
@ -48,11 +49,10 @@ dependencies:
- megaparsec
- memory
- mtl
- network-uri
- network-simple
- network
- co-log-core
- uri-encode
- network-simple
- network-udp
- network-uri
- nonempty-containers
- open-browser
- optparse-applicative >= 0.16.1.0
@ -71,6 +71,7 @@ dependencies:
- template-haskell
- temporary
- text
- text-ansi
- text-builder
- text-rope
- these
@ -83,6 +84,7 @@ dependencies:
- unison-core
- unison-core1
- unison-hash
- unison-merge
- unison-parser-typechecker
- unison-prelude
- unison-pretty-printer
@ -95,9 +97,9 @@ dependencies:
- unison-util-relation
- unliftio
- unordered-containers
- uri-encode
- uuid
- vector
- witherable
- wai
- warp
- witch
@ -178,6 +180,7 @@ default-extensions:
- NamedFieldPuns
- NumericUnderscores
- OverloadedLabels
- OverloadedRecordDot
- OverloadedStrings
- PatternSynonyms
- RankNTypes

View File

@ -0,0 +1,138 @@
-- | Utility functions for downloading remote entities and storing them locally in SQLite.
--
-- These are shared by commands like `pull` and `clone`.
module Unison.Cli.DownloadUtils
( downloadProjectBranchFromShare,
downloadLooseCodeFromShare,
GitNamespaceHistoryTreatment (..),
downloadLooseCodeFromGitRepo,
)
where
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TVar (modifyTVar', newTVarIO, readTVar, readTVarIO)
import Data.List.NonEmpty (pattern (:|))
import System.Console.Regions qualified as Console.Regions
import U.Codebase.HashTags (CausalHash)
import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.Share.Projects qualified as Share
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Editor.Git qualified as Git
import Unison.Codebase.Editor.HandleInput.AuthLogin (ensureAuthenticatedWithCodeserver)
import Unison.Codebase.Editor.Output qualified as Output
import Unison.Codebase.Editor.RemoteRepo (ReadGitRemoteNamespace, ReadShareLooseCode, shareUserHandleToText)
import Unison.Codebase.Editor.RemoteRepo qualified as RemoteRepo
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Type (GitError)
import Unison.Codebase.Type qualified as Codebase (viewRemoteBranch')
import Unison.Core.Project (ProjectAndBranch (..))
import Unison.NameSegment qualified as NameSegment
import Unison.Parser.Ann (Ann)
import Unison.Prelude
import Unison.Share.API.Hash qualified as Share
import Unison.Share.Codeserver qualified as Codeserver
import Unison.Share.Sync qualified as Share
import Unison.Share.Sync.Types qualified as Share
import Unison.Share.Types (codeserverBaseURL)
import Unison.Symbol (Symbol)
import Unison.Sync.Common qualified as Sync.Common
import Unison.Sync.Types qualified as Share
-- | Download a project/branch from Share.
downloadProjectBranchFromShare ::
HasCallStack =>
Share.IncludeSquashedHead ->
Share.RemoteProjectBranch ->
Cli (Either Output.ShareError CausalHash)
downloadProjectBranchFromShare useSquashed branch =
Cli.labelE \done -> do
let remoteProjectBranchName = branch.branchName
let repoInfo = Share.RepoInfo (into @Text (ProjectAndBranch branch.projectName remoteProjectBranchName))
causalHashJwt <-
case (useSquashed, branch.squashedBranchHead) of
(Share.IncludeSquashedHead, Nothing) -> done Output.ShareExpectedSquashedHead
(Share.IncludeSquashedHead, Just squashedHead) -> pure squashedHead
(Share.NoSquashedHead, _) -> pure branch.branchHead
exists <- Cli.runTransaction (Queries.causalExistsByHash32 (Share.hashJWTHash causalHashJwt))
when (not exists) do
(result, numDownloaded) <-
Cli.with withEntitiesDownloadedProgressCallback \(downloadedCallback, getNumDownloaded) -> do
result <- Share.downloadEntities Share.hardCodedBaseUrl repoInfo causalHashJwt downloadedCallback
numDownloaded <- liftIO getNumDownloaded
pure (result, numDownloaded)
result & onLeft \err0 -> do
done case err0 of
Share.SyncError err -> Output.ShareErrorDownloadEntities err
Share.TransportError err -> Output.ShareErrorTransport err
Cli.respond (Output.DownloadedEntities numDownloaded)
pure (Sync.Common.hash32ToCausalHash (Share.hashJWTHash causalHashJwt))
-- | Download loose code from Share.
downloadLooseCodeFromShare :: ReadShareLooseCode -> Cli (Either Output.ShareError CausalHash)
downloadLooseCodeFromShare namespace = do
let codeserver = Codeserver.resolveCodeserver namespace.server
let baseURL = codeserverBaseURL codeserver
-- Auto-login to share if pulling from a non-public path
when (not (RemoteRepo.isPublic namespace)) do
_userInfo <- ensureAuthenticatedWithCodeserver codeserver
pure ()
let shareFlavoredPath =
Share.Path $
shareUserHandleToText namespace.repo
:| map NameSegment.toUnescapedText (Path.toList namespace.path)
Cli.labelE \done -> do
(causalHash, numDownloaded) <-
Cli.with withEntitiesDownloadedProgressCallback \(downloadedCallback, getNumDownloaded) -> do
causalHash <-
Share.pull baseURL shareFlavoredPath downloadedCallback & onLeftM \err0 ->
done case err0 of
Share.SyncError err -> Output.ShareErrorPull err
Share.TransportError err -> Output.ShareErrorTransport err
numDownloaded <- liftIO getNumDownloaded
pure (causalHash, numDownloaded)
Cli.respond (Output.DownloadedEntities numDownloaded)
pure causalHash
-- Provide the given action a callback that displays progress to the terminal.
withEntitiesDownloadedProgressCallback :: ((Int -> IO (), IO Int) -> IO a) -> IO a
withEntitiesDownloadedProgressCallback action = do
entitiesDownloadedVar <- newTVarIO 0
Console.Regions.displayConsoleRegions do
Console.Regions.withConsoleRegion Console.Regions.Linear \region -> do
Console.Regions.setConsoleRegion region do
entitiesDownloaded <- readTVar entitiesDownloadedVar
pure $
"\n Downloaded "
<> tShow entitiesDownloaded
<> " entities...\n\n"
action ((\n -> atomically (modifyTVar' entitiesDownloadedVar (+ n))), readTVarIO entitiesDownloadedVar)
data GitNamespaceHistoryTreatment
= -- | Don't touch the history
GitNamespaceHistoryTreatment'LetAlone
| -- | Throw away all history at all levels
GitNamespaceHistoryTreatment'DiscardAllHistory
-- | Download loose code that's in a SQLite codebase in a Git repo.
downloadLooseCodeFromGitRepo ::
MonadIO m =>
Codebase IO Symbol Ann ->
GitNamespaceHistoryTreatment ->
ReadGitRemoteNamespace ->
m (Either GitError CausalHash)
downloadLooseCodeFromGitRepo codebase historyTreatment namespace = liftIO do
Codebase.viewRemoteBranch' codebase namespace Git.RequireExistingBranch \(branch0, cacheDir) -> do
let branch =
case historyTreatment of
GitNamespaceHistoryTreatment'LetAlone -> branch0
GitNamespaceHistoryTreatment'DiscardAllHistory -> Branch.discardHistory branch0
Codebase.syncFromDirectory codebase cacheDir branch
pure (Branch.headHash branch)

View File

@ -25,6 +25,7 @@ module Unison.Cli.Monad
-- * Short-circuiting
label,
labelE,
returnEarly,
returnEarlyWithoutOutput,
haltRepl,
@ -336,6 +337,12 @@ label f =
| otherwise -> throwIO err
Right a -> feed k a
-- | A variant of @label@ for the common case where early-return values are tagged with Left.
labelE :: ((forall void. a -> Cli void) -> Cli b) -> Cli (Either a b)
labelE f =
label \goto ->
Right <$> f (goto . Left)
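
labelE is the Either-shaped variant of label used by the new DownloadUtils module: calling the supplied continuation aborts the block with a Left, and normal completion is wrapped in Right. A tiny, hypothetical usage sketch (assumes Data.Text and Text.Read.readMaybe; not code from this commit):

-- Parse a numeric version, early-returning an error message as Left.
parseVersion :: Text -> Cli (Either Text Int)
parseVersion txt =
  labelE \bad -> do
    n <- case readMaybe (Text.unpack txt) of
      Nothing -> bad ("not a number: " <> txt)
      Just n -> pure n
    when (n < 0) (bad "version must be non-negative")
    pure n
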
-- | Time an action.
time :: String -> Cli a -> Cli a
time label action =

View File

@ -474,8 +474,8 @@ updateRoot new reason =
let newHash = Branch.headHash new
oldHash <- getLastSavedRootHash
when (oldHash /= newHash) do
setRootBranch new
liftIO (Codebase.putRootBranch codebase reason new)
setRootBranch new
setLastSavedRootHash newHash
------------------------------------------------------------------------------------------------------------------------

View File

@ -53,7 +53,7 @@ module Unison.Cli.Pretty
where
import Control.Lens hiding (at)
import Control.Monad.Writer (Writer, mapWriter, runWriter)
import Control.Monad.Writer (Writer, runWriter)
import Data.List qualified as List
import Data.Map qualified as Map
import Data.Set qualified as Set
@ -119,6 +119,7 @@ import Unison.Sync.Types qualified as Share
import Unison.Syntax.DeclPrinter (AccessorName)
import Unison.Syntax.DeclPrinter qualified as DeclPrinter
import Unison.Syntax.HashQualified qualified as HQ (unsafeFromVar)
import Unison.Syntax.Name qualified as Name (unsafeParseVar)
import Unison.Syntax.NamePrinter (SyntaxText, prettyHashQualified, styleHashQualified')
import Unison.Syntax.TermPrinter qualified as TermPrinter
import Unison.Syntax.TypePrinter qualified as TypePrinter
@ -411,7 +412,7 @@ prettyUnisonFile ppe uf@(UF.UnisonFileId datas effects terms watches) =
where
prettyEffects = map prettyEffectDecl (Map.toList effects)
(prettyDatas, accessorNames) = runWriter $ traverse prettyDataDecl (Map.toList datas)
prettyTerms = map (prettyTerm accessorNames) terms
prettyTerms = Map.foldrWithKey (\k v -> (prettyTerm accessorNames k v :)) [] terms
prettyWatches = Map.toList watches >>= \(wk, tms) -> map (prettyWatch . (wk,)) tms
prettyEffectDecl :: (v, (Reference.Id, DD.EffectDeclaration v a)) -> (a, P.Pretty P.ColorText)
@ -419,16 +420,16 @@ prettyUnisonFile ppe uf@(UF.UnisonFileId datas effects terms watches) =
(DD.annotation . DD.toDataDecl $ et, st $ DeclPrinter.prettyDecl ppe' (rd r) (hqv n) (Left et))
prettyDataDecl :: (v, (Reference.Id, DD.DataDeclaration v a)) -> Writer (Set AccessorName) (a, P.Pretty P.ColorText)
prettyDataDecl (n, (r, dt)) =
(DD.annotation dt,) . st <$> (mapWriter (second Set.fromList) $ DeclPrinter.prettyDeclW ppe' (rd r) (hqv n) (Right dt))
prettyTerm :: Set (AccessorName) -> (v, a, Term v a) -> Maybe (a, P.Pretty P.ColorText)
prettyTerm skip (n, a, tm) =
(DD.annotation dt,) . st <$> DeclPrinter.prettyDeclW ppe' (rd r) (hqv n) (Right dt)
prettyTerm :: Set AccessorName -> v -> (a, Term v a) -> Maybe (a, P.Pretty P.ColorText)
prettyTerm skip n (a, tm) =
if traceMember isMember then Nothing else Just (a, pb hq tm)
where
traceMember =
if Debug.shouldDebug Debug.Update
then trace (show hq ++ " -> " ++ if isMember then "skip" else "print")
else id
isMember = Set.member hq skip
isMember = Set.member (Name.unsafeParseVar n) skip
hq = hqv n
prettyWatch :: (String, (v, a, Term v a)) -> (a, P.Pretty P.ColorText)
prettyWatch (wk, (n, a, tm)) = (a, go wk n tm)
@ -444,7 +445,7 @@ prettyUnisonFile ppe uf@(UF.UnisonFileId datas effects terms watches) =
sppe = PPED.suffixifiedPPE ppe'
pb v tm = st $ TermPrinter.prettyBinding sppe v tm
ppe' = PPED.PrettyPrintEnvDecl dppe dppe `PPED.addFallback` ppe
dppe = PPE.makePPE (PPE.hqNamer 8 (UF.toNames uf)) PPE.dontSuffixify
dppe = PPE.makePPE (PPE.namer (UF.toNames uf)) PPE.dontSuffixify
rd = Reference.DerivedId
hqv v = HQ.unsafeFromVar v

Some files were not shown because too many files have changed in this diff