diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4c44f1a2f..18b7cfc05 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -17,9 +17,19 @@ on: - release/* jobs: + + ormolu: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + with: + path: unison + - uses: mrkkrp/ormolu-action@v10 # v10 uses ormolu 0.5.3.0 + build: name: ${{ matrix.os }} runs-on: ${{ matrix.os }} + needs: ormolu defaults: run: working-directory: unison @@ -101,7 +111,7 @@ jobs: working-directory: ${{ github.workspace }} run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-linux-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH - name: install stack (macOS) @@ -109,7 +119,7 @@ jobs: if: runner.os == 'macOS' run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-osx-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-osx-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH - name: install stack (windows) @@ -117,7 +127,7 @@ jobs: if: runner.os == 'Windows' run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-windows-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-windows-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH # One of the transcripts fails if the user's git name hasn't been set. diff --git a/.github/workflows/haddocks.yaml b/.github/workflows/haddocks.yaml index 3f6bdbbf7..923283c82 100644 --- a/.github/workflows/haddocks.yaml +++ b/.github/workflows/haddocks.yaml @@ -62,7 +62,7 @@ jobs: working-directory: ${{ github.workspace }} run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-linux-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH # One of the transcripts fails if the user's git name hasn't been set. diff --git a/.github/workflows/pre-release.yaml b/.github/workflows/pre-release.yaml index 1e2ece8c1..18473efd9 100644 --- a/.github/workflows/pre-release.yaml +++ b/.github/workflows/pre-release.yaml @@ -23,7 +23,7 @@ jobs: working-directory: ${{ github.workspace }} run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-linux-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH # One of the transcripts fails if the user's git name hasn't been set. @@ -61,7 +61,7 @@ jobs: working-directory: ${{ github.workspace }} run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-osx-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-osx-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH # One of the transcripts fails if the user's git name hasn't been set. 
@@ -104,7 +104,7 @@ jobs: if: runner.os == 'Windows' run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-windows-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-windows-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH - name: build @@ -118,7 +118,7 @@ jobs: run: | mkdir -p tmp\ui mkdir -p release\ui - $UCM = .\stack\stack-2.7.5-windows-x86_64\stack.exe exec -- where unison + $UCM = .\stack\stack-2.9.1-windows-x86_64\stack.exe exec -- where unison cp $UCM .\release\ucm.exe Invoke-WebRequest -Uri https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip -OutFile tmp\unisonLocal.zip Expand-Archive -Path tmp\unisonLocal.zip -DestinationPath release\ui diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index c4f5ce6d5..fa91052bc 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -104,7 +104,7 @@ jobs: working-directory: ${{ github.workspace }} run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-linux-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH - name: build @@ -176,7 +176,7 @@ jobs: working-directory: ${{ github.workspace }} run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-osx-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-osx-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH - name: remove ~/.stack/setup-exe-cache on macOS @@ -252,7 +252,7 @@ jobs: working-directory: ${{ github.workspace }} run: | mkdir stack && cd stack - curl -L https://github.com/commercialhaskell/stack/releases/download/v2.7.5/stack-2.7.5-windows-x86_64.tar.gz | tar -xz + curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-windows-x86_64.tar.gz | tar -xz echo "$PWD/stack-"* >> $GITHUB_PATH - name: build @@ -277,7 +277,7 @@ jobs: run: | mkdir -p tmp\ui mkdir -p release\ui - $UCM = .\stack\stack-2.7.5-windows-x86_64\stack.exe exec -- where unison + $UCM = .\stack\stack-2.9.1-windows-x86_64\stack.exe exec -- where unison cp $UCM .\release\ucm.exe Invoke-WebRequest -Uri https://github.com/unisonweb/unison-local-ui/releases/download/latest/unisonLocal.zip -OutFile tmp\unisonLocal.zip Expand-Archive -Path tmp\unisonLocal.zip -DestinationPath release\ui diff --git a/.gitignore b/.gitignore index 6ef1f5047..48699c390 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,8 @@ dist-newstyle # GHC *.hie *.prof + +# Mac developers +**/.DS_Store + +/libb2.dylib diff --git a/CONTRIBUTORS.markdown b/CONTRIBUTORS.markdown index f9dd79b70..8b90b4e64 100644 --- a/CONTRIBUTORS.markdown +++ b/CONTRIBUTORS.markdown @@ -72,3 +72,6 @@ The format for this list: name, GitHub handle * Emil Hotkowski (@emilhotkowski) * Jesse Looney (@jesselooney) * Vlad Posmangiu Luchian (@cstml) +* Andrii Uvarov (@unorsk) +* Mario Bašić (@mabasic) +* Chris Krycho (@chriskrycho) diff --git a/README.md b/README.md index feb396101..629d35a66 100644 --- a/README.md +++ b/README.md @@ -42,6 +42,8 @@ If these instructions don't work for you or are incomplete, please file an issue The build uses [Stack](http://docs.haskellstack.org/). 
If you don't already have it installed, [follow the install instructions](http://docs.haskellstack.org/en/stable/README.html#how-to-install) for your platform. (Hint: `brew update && brew install stack`) +If you have not set up the Haskell toolchain before and are trying to contribute to Unison on an M1 Mac, we have [some tips specifically for you](docs/m1-mac-setup-tips.markdown). + ```sh $ git clone https://github.com/unisonweb/unison.git $ cd unison @@ -49,7 +51,7 @@ $ stack --version # we'll want to know this version if you run into trouble $ stack build --fast --test && stack exec unison ``` -To run the Unison Local UI while building from source, you can use the `/dev-ui-install.sh` script. It will download the latest release of [unison-local-ui](https://github.com/unisonweb/unison-local-ui) and put it in the expected location for the unison executable created by `stack build`. When you start unison, you'll see a url where Unison Local UI is running. +To run the Unison Local UI while building from source, you can use the `/dev-ui-install.sh` script. It will download the latest release of [unison-local-ui](https://github.com/unisonweb/unison-local-ui) and put it in the expected location for the unison executable created by `stack build`. When you start unison, you'll see a url where Unison Local UI is running. See [`development.markdown`](development.markdown) for a list of build commands you'll likely use during development. @@ -61,7 +63,7 @@ View Language Server setup instructions [here](docs/language-server.markdown). Codebase Server --------------- -When `ucm` starts it starts a Codebase web server that is used by the +When `ucm` starts it starts a Codebase web server that is used by the [Unison Local UI](https://github.com/unisonweb/unison-local-ui). It selects a random port and a unique token that must be used when starting the UI to correctly connect to the server. diff --git a/chez-libs/readme.md b/chez-libs/readme.md deleted file mode 100644 index 2e242c814..000000000 --- a/chez-libs/readme.md +++ /dev/null @@ -1,49 +0,0 @@ -This directory contains libraries necessary for building and running -unison programs via Chez Scheme. At the moment, they need to be -manually installed in the expected location. The default location is -the `unisonlanguage` directory in the XDG data directory. -Specifically, the `unison` subdirectory should be (recursively) copied -to: - - $XDG_DATA_DIRECTORY/unisonlanguage/scheme-libs - -On unix systems, the XDG directory is ~/.local/share by default, ~ -being the home directory. On Windows, this is instead the %APPDATA% -directory. See: - - https://hackage.haskell.org/package/directory/docs/System-Directory.html#t:XdgDirectory - -for more information. - -UCM can also be told to look in another directory by setting the -`SchemeLibs.Static` item in the unison config file. If this path is -denoted by `$CUSTOM`, then the compiler commands will look in: - - $CUSTOM/scheme-libs/ - -for the `unison/` directory containing the library files. - -The compiler commands also expect Chez Scheme to be installed -separately, and for `scheme` to be callable on the user's path. For -information on how to install, see: - - https://github.com/cisco/ChezScheme/blob/main/BUILDING - -For more information on Chez Scheme in general, see: - - https://cisco.github.io/ChezScheme/csug9.5/csug.html - -There are two ways to run code via scheme. The first is to use -`run.native`, which compiles and immediately runs a given unison -definition. 
The second is `compile.native`, which produces a Chez -scheme `.boot` file with a specified name. The boot file can then be -used with the scheme binary to run the precompiled program, like so: - - scheme -b ./my-program.boot - -It is also possible to install the boot file in a particular -directory, and make a renamed copy of the scheme binary, which will -automatically execute the boot file with the corresponding name on -start up. For more information on how to accomplish that, see: - - https://cisco.github.io/ChezScheme/csug9.5/use.html#./use:h8 diff --git a/chez-libs/unison/boot.ss b/chez-libs/unison/boot.ss deleted file mode 100644 index 96ab92f76..000000000 --- a/chez-libs/unison/boot.ss +++ /dev/null @@ -1,130 +0,0 @@ -; This library implements various syntactic constructs and functions -; that are used in the compilation of unison (intermediate) source to -; scheme. The intent is to provide for writing scheme definitions that -; more directly match the source, so that the compiler doesn't need to -; emit all the code necessary to fix up the difference itself. -; -; Probably the best example of this is the define-unison macro, which -; looks similar to scheme's define, but the function being defined is -; allowed to be under/over applied similar to a unison function. It -; has an 'arity' at which computation happens, but the function -; automatically handles being applied to fewer or more arguments than -; that arity appropriately. -(library (unison boot) - (export - name - define-unison - func-wrap - handle - request - unison-force) - - (import (chezscheme) - (unison cont)) - - ; Helper function. Turns a list of syntax objects into a list-syntax object. - (meta define (list->syntax l) #`(#,@l)) - - ; Concatenates - (meta define fun-sym - (case-lambda - [(pfx sfx) (string->symbol (string-append pfx "-" sfx))] - [(pfx ifx sfx) - (string->symbol (string-append pfx "-" ifx "-" sfx))])) - - ; Computes a symbol for automatically generated partial application - ; cases, based on number of arguments applied. The partial - ; application of `f` is (locally) named `f-partial-N` - (meta define (partial-symbol name m) - (fun-sym (symbol->string name) "partial" (number->string m))) - - ; As above, but takes a syntactic object representing the arguments - ; rather than their count. - (meta define (partial-name name us) - (datum->syntax name (syntax->datum name))) - - (define-syntax with-name - (syntax-rules () - [(with-name name e) (let ([name e]) name)])) - - ; Builds partial application cases for unison functions. It seems - ; most efficient to have a case for each posible under-application. - (meta define (build-partials name formals) - (let rec ([us formals] [acc '()]) - (syntax-case us () - [() (list->syntax (cons #`[() #,name] acc))] - [(a ... z) - (rec #'(a ...) - (cons - #`[(a ... z) - (with-name - #,(partial-name name us) - (lambda r (apply #,name a ... z r)))] - acc))]))) - - ; Given an overall function name, a fast path name, and a list of arguments, - ; builds the case-lambda body of a unison function that enables applying to - ; arbitrary numbers of arguments. - (meta define (func-cases name fast args) - (syntax-case args () - [() #`(case-lambda - [() (#,fast)] - [r (apply (#,fast) r)])] - [(a ... z) - #`(case-lambda - #,@(build-partials name #'(a ...)) - [(a ... z) (#,fast a ... z)] - [(a ... z . r) (apply (#,fast a ... 
z) r)])])) - - (meta define (func-wrap name args body) - #`(let ([fast-path (lambda (#,@args) #,@body)]) - #,(func-cases name #'fast-path args))) - - ; function definition with slow/fast path. Slow path allows for - ; under/overapplication. Fast path is exact application. - ; - ; The intent is for the scheme compiler to be able to recognize and - ; optimize static, fast path calls itself, while still supporting - ; unison-like automatic partial application and such. - (define-syntax (define-unison x) - (syntax-case x () - [(define-unison (name a ...) e ...) - #`(define name - #,(func-wrap #'name #'(a ...) #'(e ...)))])) - - ; call-by-name bindings - (define-syntax name - (syntax-rules () - ((name ([v (f . args)] ...) body) - (let ([v (lambda r (apply f (append (list . args) r)))] - ...) - body)))) - - ; wrapper that more closely matches `handle` constructs - (define-syntax handle - (syntax-rules () - [(handle [r ...] h e ...) - (prompt p (fluid-let ([r (cons p h)] ...) e ...))])) - - ; wrapper that more closely matches ability requests - (define-syntax request - (syntax-rules () - [(request r t . args) - ((cdr r) (list (quote r) t . args))])) - - (define-record-type - data - (fields type-ref payload)) - - (define-syntax data-case - (syntax-rules () - [(data-case scrut c ...) - (record-case (data-payload scrut) c ...)])) - - ; forces something that is expected to be a thunk, defined with - ; e.g. `name` above. In some cases, we might have a normal value, - ; so just do nothing in that case. - (define (unison-force x) - (if (procedure? x) (x) x)) - - ) diff --git a/chez-libs/unison/bytevector.ss b/chez-libs/unison/bytevector.ss deleted file mode 100644 index be58f9540..000000000 --- a/chez-libs/unison/bytevector.ss +++ /dev/null @@ -1,35 +0,0 @@ -; This library implements missing bytevector functionality for unison -; builtins. The main missing bits are better support for immutable -; bytevectors. Chez does provide a way to make an immutable -; bytevector, but it copies its input, so implementing things that way -; would make many unncessary copies. This library instead implements -; functions on immutable bytevectors by directly freezing the newly -; created mutable vector. It also provides the freezing function, -; which is itself a unison builtin. -(library (unison bytevector) - (export - freeze-bv! - ibytevector-drop - ibytevector-take - u8-list->ibytevector) - - (import (chezscheme)) - - (define (freeze-bv! bs) - (($primitive $bytevector-set-immutable!) bs) - bs) - - (define (ibytevector-drop n bs) - (let* ([l (bytevector-length bs)] - [k (max 0 (- l n))] - [br (make-bytevector k)]) - (bytevector-copy! bs n br 0 k) - (freeze-bv! br))) - - (define (ibytevector-take n bs) - (let* ([sz (min n (bytevector-length bs))] - [br (make-bytevector sz)]) - (bytevector-copy! bs 0 br 0 sz) - (freeze-bv! br))) - - (define (u8-list->ibytevector l) (freeze-bv! (u8-list->bytevector l)))) diff --git a/chez-libs/unison/cont.ss b/chez-libs/unison/cont.ss deleted file mode 100644 index 7656c21f4..000000000 --- a/chez-libs/unison/cont.ss +++ /dev/null @@ -1,47 +0,0 @@ -; This library is intended to contain the implementation of -; delimited continuations used in the semantics of abilities. -; -; Currently, it is a somewhat naive implementation based on call/cc. -; This has known issues that seem to still be in force even though a -; tweak has been applied that should fix certain space leaks. 
So, in -; the future it will likely need to be rewritten using some -; implementation specific machinery (possibly making use of -; continuation attachments as in the Racket implementation). -; -; Also, although the API includes prompts, they are currently ignored -; in `control` and `prompt` always uses the same prompt (0). This -; means that nesting handlers will not work in general, since requests -; will not be able to jump over an inner handler of unrelated -; abilities. It should be sufficient for testing simple examples in -; the mean time, though. -(library (unison cont) - (export prompt control) - - (import (chezscheme)) - - (define mk (lambda (x) (raise "fell off end"))) - - (define (prompt-impl h) - ((call/cc - (lambda (k) - (let ([ok mk]) - (set! mk (lambda (x) (set! mk ok) (k x))) - ; (h 0) <-- prompt = 0 - (mk (let ([v (h 0)]) (lambda () v)))))))) - - (define-syntax prompt - (syntax-rules () - [(prompt p e ...) - (prompt-impl (lambda (p) e ...))])) - - (define (control-impl h) - (call/cc - (lambda (k) - (let* ([g (lambda () (prompt p (h k)))]) - (mk g))))) - - (define-syntax control - (syntax-rules () - [(control p k e ...) - (control-impl (lambda (k) e ...))]))) - diff --git a/chez-libs/unison/core.ss b/chez-libs/unison/core.ss deleted file mode 100644 index 474b37cc6..000000000 --- a/chez-libs/unison/core.ss +++ /dev/null @@ -1,43 +0,0 @@ -(library (unison core) - (export - identity - - describe-value - decode-value) - - (import (chezscheme)) - - (define (identity x) x) - - ; Recovers the original function name from the partial - ; application name. - (define (extract-name i) - (string->symbol ((i 'code) 'name))) - - (define (describe-value x) - (let explain ([i (inspect/object x)]) - (case (i 'type) - [(simple) (i 'value)] - [(variable) (explain (i 'ref))] - [(procedure) - (let explain-args ([j (- (i 'length) 1)] [args '()]) - (if (< j 0) - (cons (extract-name i) args) - (explain-args - (- j 1) - (cons (explain (i 'ref j)) args))))]))) - - ; partial, data, cont, lit - (define (decode-value x) - (let reify ([i (inspect/object x)]) - (case (i 'type) - [(simple) (list 3 (i 'value))] - [(variable) (reify (i 'ref))] - [(procedure) - (let reify-args ([j (- (i 'length) 1)] [args '()]) - (if (< j 0) - (cons* 0 (extract-name i) args) - (reify-args - (- j 1) - (cons (reify (i 'ref j)) args))))]))) - ) diff --git a/chez-libs/unison/primops.ss b/chez-libs/unison/primops.ss deleted file mode 100644 index 9b7965ff3..000000000 --- a/chez-libs/unison/primops.ss +++ /dev/null @@ -1,205 +0,0 @@ -; This library implements pimitive operations that are used in -; builtins. There are two different sorts of primitive operations, but -; the difference is essentially irrelevant except for naming schemes. -; -; POps are part of a large enumeration of 'instructions' directly -; implemented in the Haskell runtime. These are referred to using the -; naming scheme `unison-POp-INST` where `INST` is the name of the -; instruction, which is (at the time of this writing) 4 letters. -; -; FOps are 'foreign' functons, which are allowed to be declared more -; flexibly in the Haskell runtime. Each such declaration associates a -; builtin to a Haskell function. For these, the naming shceme is -; `unison-FOp-NAME` where `NAME` is the name of the unison builtin -; associated to the declaration. -; -; Both POps and FOps are always called with exactly the right number -; of arguments, so they may be implemented as ordinary scheme -; definitions with a fixed number of arguments. 
By implementing the -; POp/FOp, you are expecting the associated unison function(s) to be -; implemented by code generation from the wrappers in -; Unison.Runtime.Builtin, so the POp/FOp implementation must -; take/return arguments that match what is expected in those wrappers. - -(library (unison primops) - (export - ; unison-FOp-Bytes.decodeNat16be - ; unison-FOp-Bytes.decodeNat32be - ; unison-FOp-Bytes.decodeNat64be - unison-FOp-Char.toText - ; unison-FOp-Code.dependencies - ; unison-FOp-Code.serialize - unison-FOp-IO.closeFile.impl.v3 - unison-FOp-IO.openFile.impl.v3 - unison-FOp-IO.putBytes.impl.v3 - ; unison-FOp-Text.fromUtf8.impl.v3 - unison-FOp-Text.repeat - unison-FOp-Text.toUtf8 - ; unison-FOp-Value.serialize - unison-FOp-IO.stdHandle - - unison-FOp-ImmutableByteArray.copyTo! - unison-FOp-ImmutableByteArray.read8 - - unison-FOp-MutableByteArray.freeze! - unison-FOp-MutableByteArray.write8 - - unison-FOp-Scope.bytearray - - unison-POp-ADDN - unison-POp-ANDN - unison-POp-BLDS - unison-POp-CATS - unison-POp-CATT - unison-POp-CMPU - unison-POp-COMN - unison-POp-CONS - unison-POp-DECI - unison-POp-DIVN - unison-POp-DRPB - unison-POp-DRPS - unison-POp-EQLN - unison-POp-EQLT - unison-POp-EQLU - unison-POp-EROR - unison-POp-FTOT - unison-POp-IDXB - unison-POp-IDXS - unison-POp-IORN - unison-POp-ITOT - unison-POp-LEQN - ; unison-POp-LKUP - unison-POp-MULN - unison-POp-NTOT - unison-POp-PAKT - unison-POp-SHLI - unison-POp-SHLN - unison-POp-SHRI - unison-POp-SHRN - unison-POp-SIZS - unison-POp-SIZT - unison-POp-SNOC - unison-POp-SUBN - unison-POp-TAKS - unison-POp-TAKT - unison-POp-TRCE - unison-POp-UPKT - unison-POp-VALU - unison-POp-VWLS - - ) - - (import (chezscheme) - (unison core) - (unison string) - (unison bytevector)) - - ; Core implemented primops, upon which primops-in-unison can be built. - (define (unison-POp-ADDN m n) (fx+ m n)) - (define (unison-POp-ANDN m n) (fxlogand m n)) - (define unison-POp-BLDS list) - (define (unison-POp-CATS l r) (append l r)) - (define (unison-POp-CATT l r) (istring-append l r)) - (define (unison-POp-CMPU l r) (equal? l r)) - (define (unison-POp-COMN n) (fxlognot n)) - (define (unison-POp-CONS x xs) (cons x xs)) - (define (unison-POp-DECI n) (+ n 1)) - (define (unison-POp-DIVN m n) (fxdiv m n)) - (define (unison-POp-DRPB n bs) (ibytevector-drop n bs)) - (define (unison-POp-DRPS n l) - (let ([m (max 0 (min n (length l)))]) (list-tail l m))) - (define (unison-POp-DRPT n t) (istring-drop n t)) - (define (unison-POp-EQLN m n) (if (fx= m n) 1 0)) - (define (unison-POp-EQLT s t) (if (string=? s t) 1 0)) - (define (unison-POp-EQLU x y) (equal? 
x y)) - (define (unison-POp-EROR fnm x) (raise fnm)) - (define (unison-POp-FTOT f) (number->istring f)) - (define (unison-POp-IDXB n bs) (bytevector-u8-ref bs n)) - (define (unison-POp-IDXS n l) (list-ref l n)) - (define (unison-POp-IORN m n) (fxlogior m n)) - (define (unison-POp-ITOT i) (signed-number->istring i)) - (define (unison-POp-LEQN m n) (if (fx< m n) 1 0)) - (define (unison-POp-MULN m n) (* m n)) - (define (unison-POp-NTOT m) (number->istring m)) - (define (unison-POp-PAKB l) (u8-list->ibytevector l)) - (define (unison-POp-PAKT l) (list->istring l)) - (define (unison-POp-SHLI i k) (fxarithmetic-shift-left i k)) - (define (unison-POp-SHLN n k) (fxarithmetic-shift-left n k)) - (define (unison-POp-SHRI i k) (fxarithmetic-shift-right i k)) - (define (unison-POp-SHRN n k) (fxarithmetic-shift-right n k)) - (define (unison-POp-SIZS l) (length l)) - (define (unison-POp-SIZT t) (string-length t)) - (define (unison-POp-SNOC xs x) (append xs (list x))) - (define (unison-POp-SUBN m n) (fx- m n)) - (define (unison-POp-TAKS n s) (list-head s n)) - (define (unison-POp-TAKT n t) (istring-take n t)) - (define (unison-POp-TRCE x) (display (describe-value x))) - (define (unison-POp-UPKT t) (string->list t)) - (define (unison-POp-VWLS l) - (if (null? l) - (list 0) - (list 1 (car l) (cdr l)))) - (define (unison-POp-VALU c) (decode-value c)) - - (define (unison-FOp-IO.putBytes.impl.v3 p bs) - (begin - (put-bytevector p bs) - (flush-output-port p) - (list 1 #f))) - - (define (unison-FOp-Char.toText c) (istring c)) - - (define stdin (standard-input-port)) - (define stdout (standard-output-port)) - (define stderr (standard-error-port)) - - (define (unison-FOp-IO.stdHandle n) - (case n - [(0) stdin] - [(1) stdout] - [(2) stderr])) - - (define (unison-FOp-Text.toUtf8 s) - (string->bytevector s utf-8-transcoder)) - - (define (unison-FOp-IO.closeFile.impl.v3 h) - (close-input-port h)) - - (define (unison-FOp-IO.openFile.impl.v3 fn mode) - (case mode - [(0) (open-file-input-port fn)] - [(1) (open-file-output-port fn)] - [(2) (open-file-output-port fn 'no-truncate)] - [else (open-file-input/output-port fn)])) - - (define (unison-FOp-Text.repeat n t) (istring-repeat n t)) - - (define (catch-array thunk) - (with-exception-handler - (lambda (e) (list 0 '() "array index out of boudns" e)) - thunk)) - - (define (unison-FOp-ImmutableByteArray.copyTo! dst doff src soff n) - (catch-array - (lambda () - (bytevector-copy! src soff dst doff n) - (list 1)))) - - (define (unison-FOp-ImmutableByteArray.read8 arr i) - (catch-array - (lambda () - (list 1 (bytevector-u8-ref arr i))))) - - (define (unison-FOp-MutableByteArray.freeze! arr) - (freeze-bv! arr)) - - (define (unison-FOp-MutableByteArray.write8 arr i b) - (catch-array - (lambda () - (bytevector-u8-set! arr i b) - (list 1)))) - - (define (unison-FOp-Scope.bytearray n) (make-bytevector n)) - - ) - diff --git a/chez-libs/unison/string.ss b/chez-libs/unison/string.ss deleted file mode 100644 index 7df0502ad..000000000 --- a/chez-libs/unison/string.ss +++ /dev/null @@ -1,60 +0,0 @@ -; This library wraps some chez functionality to provide immutable -; strings. There is a wrapper for making immutable strings in chez, -; but it copies its input, so relying on that would involve every -; string operation building a fresh mutable string, then making an -; immutable copy. This library instead directly freezes the newly -; created mutable strings. 
-(library (unison string) - (export - istring - istring-append - istring-drop - istring-take - istring-repeat - list->istring - make-istring - number->istring - signed-number->istring - utf8-bytevector->istring - utf-8-transcoder) - - (import (chezscheme)) - - (define (freeze-s! s) - (($primitive $string-set-immutable!) s) - s) - - (define istring (lambda l (freeze-s! (apply string l)))) - - (define (make-istring n c) (freeze-s! (make-string n c))) - - (define (istring-repeat n s) - (let* ([k (string-length s)] - [t (make-string (* k n))]) - (let loop ([i 0]) - (if (< i n) - (begin - (string-copy! s 0 t (* i k) k) - (loop (+ i 1))) - (freeze-s! t))))) - - (define istring-append (lambda l (freeze-s! (apply string-append l)))) - - (define (istring-drop n s) (freeze-s! (substring s n (- (string-length s) n)))) - - (define (number->istring n) (freeze-s! (number->string n))) - - (define (signed-number->istring n) - (freeze-s! - (if (>= n 0) - (string-append "+" (number->string n)) - (number->string n)))) - - (define (list->istring l) (freeze-s! (list->string l))) - - (define (istring-take n s) (freeze-s! (substring s 0 n))) - - (define utf-8-transcoder (make-transcoder (utf-8-codec))) - - (define (utf8-bytevector->istring bs) - (freeze-s! (bytevector->string bs utf-8-transcoder)))) diff --git a/codebase2/codebase-sqlite-hashing-v2/package.yaml b/codebase2/codebase-sqlite-hashing-v2/package.yaml index 46f8fe4f3..a261460dc 100644 --- a/codebase2/codebase-sqlite-hashing-v2/package.yaml +++ b/codebase2/codebase-sqlite-hashing-v2/package.yaml @@ -15,6 +15,7 @@ dependencies: - unison-codebase-sqlite - unison-core - unison-core1 + - unison-hash - unison-hashing-v2 - unison-prelude - unison-sqlite diff --git a/codebase2/codebase-sqlite-hashing-v2/src/U/Codebase/Sqlite/V2/HashHandle.hs b/codebase2/codebase-sqlite-hashing-v2/src/U/Codebase/Sqlite/V2/HashHandle.hs index b2a74ddc8..efb8eb2e2 100644 --- a/codebase2/codebase-sqlite-hashing-v2/src/U/Codebase/Sqlite/V2/HashHandle.hs +++ b/codebase2/codebase-sqlite-hashing-v2/src/U/Codebase/Sqlite/V2/HashHandle.hs @@ -6,14 +6,14 @@ where import qualified Data.Set as Set import U.Codebase.Sqlite.HashHandle import U.Util.Type (removeAllEffectVars) +import qualified Unison.Hashing.V2 as H2 import Unison.Hashing.V2.Convert2 (h2ToV2Reference, v2ToH2Type, v2ToH2TypeD) -import qualified Unison.Hashing.V2.Type as H2 v2HashHandle :: HashHandle v2HashHandle = HashHandle - { toReference = h2ToV2Reference . H2.toReference . v2ToH2Type . removeAllEffectVars, - toReferenceMentions = Set.map h2ToV2Reference . H2.toReferenceMentions . v2ToH2Type . removeAllEffectVars, - toReferenceDecl = \h -> h2ToV2Reference . H2.toReference . v2ToH2TypeD h . removeAllEffectVars, - toReferenceDeclMentions = \h -> Set.map h2ToV2Reference . H2.toReferenceMentions . v2ToH2TypeD h . removeAllEffectVars + { toReference = h2ToV2Reference . H2.typeToReference . v2ToH2Type . removeAllEffectVars, + toReferenceMentions = Set.map h2ToV2Reference . H2.typeToReferenceMentions . v2ToH2Type . removeAllEffectVars, + toReferenceDecl = \h -> h2ToV2Reference . H2.typeToReference . v2ToH2TypeD h . removeAllEffectVars, + toReferenceDeclMentions = \h -> Set.map h2ToV2Reference . H2.typeToReferenceMentions . v2ToH2TypeD h . 
removeAllEffectVars } diff --git a/codebase2/codebase-sqlite-hashing-v2/src/Unison/Hashing/V2/Convert2.hs b/codebase2/codebase-sqlite-hashing-v2/src/Unison/Hashing/V2/Convert2.hs index 81d716f66..a5c193e62 100644 --- a/codebase2/codebase-sqlite-hashing-v2/src/Unison/Hashing/V2/Convert2.hs +++ b/codebase2/codebase-sqlite-hashing-v2/src/Unison/Hashing/V2/Convert2.hs @@ -11,50 +11,48 @@ import qualified U.Codebase.Reference as V2 import qualified U.Codebase.Term as V2 (TypeRef) import qualified U.Codebase.Type as V2.Type import qualified U.Core.ABT as ABT -import qualified U.Util.Hash as V2 (Hash) -import qualified Unison.Hashing.V2.Kind as H2 -import qualified Unison.Hashing.V2.Reference as H2 -import qualified Unison.Hashing.V2.Type as H2.Type +import Unison.Hash (Hash) +import qualified Unison.Hashing.V2 as H2 import Unison.Prelude -convertId :: V2.Hash -> V2.Id' (Maybe V2.Hash) -> H2.Id +convertId :: Hash -> V2.Id' (Maybe Hash) -> H2.ReferenceId convertId defaultHash = \case - V2.Id m p -> H2.Id (fromMaybe defaultHash m) p + V2.Id m p -> H2.ReferenceId (fromMaybe defaultHash m) p convertReference :: V2.Reference -> H2.Reference -convertReference = convertReference' (\(V2.Id a b) -> H2.Id a b) +convertReference = convertReference' (\(V2.Id a b) -> H2.ReferenceId a b) -convertReference' :: (V2.Id' hash -> H2.Id) -> V2.Reference' Text hash -> H2.Reference +convertReference' :: (V2.Id' hash -> H2.ReferenceId) -> V2.Reference' Text hash -> H2.Reference convertReference' idConv = \case - V2.ReferenceBuiltin x -> H2.Builtin x - V2.ReferenceDerived x -> H2.DerivedId (idConv x) + V2.ReferenceBuiltin x -> H2.ReferenceBuiltin x + V2.ReferenceDerived x -> H2.ReferenceDerivedId (idConv x) -v2ToH2Type :: forall v. Ord v => V2.Type.TypeR V2.TypeRef v -> H2.Type.Type v () +v2ToH2Type :: forall v. (Ord v) => V2.Type.TypeR V2.TypeRef v -> H2.Type v () v2ToH2Type = v2ToH2Type' convertReference -v2ToH2TypeD :: forall v. Ord v => V2.Hash -> V2.Type.TypeD v -> H2.Type.Type v () +v2ToH2TypeD :: forall v. (Ord v) => Hash -> V2.Type.TypeD v -> H2.Type v () v2ToH2TypeD defaultHash = v2ToH2Type' (convertReference' (convertId defaultHash)) -v2ToH2Type' :: forall r v. Ord v => (r -> H2.Reference) -> V2.Type.TypeR r v -> H2.Type.Type v () +v2ToH2Type' :: forall r v. (Ord v) => (r -> H2.Reference) -> V2.Type.TypeR r v -> H2.Type v () v2ToH2Type' mkReference = ABT.transform convertF where - convertF :: forall a. V2.Type.F' r a -> H2.Type.F a + convertF :: forall a. 
V2.Type.F' r a -> H2.TypeF a convertF = \case - V2.Type.Ref x -> H2.Type.Ref (mkReference x) - V2.Type.Arrow a b -> H2.Type.Arrow a b - V2.Type.Ann a k -> H2.Type.Ann a (convertKind k) - V2.Type.App a b -> H2.Type.App a b - V2.Type.Effect a b -> H2.Type.Effect a b - V2.Type.Effects a -> H2.Type.Effects a - V2.Type.Forall a -> H2.Type.Forall a - V2.Type.IntroOuter a -> H2.Type.IntroOuter a + V2.Type.Ref x -> H2.TypeRef (mkReference x) + V2.Type.Arrow a b -> H2.TypeArrow a b + V2.Type.Ann a k -> H2.TypeAnn a (convertKind k) + V2.Type.App a b -> H2.TypeApp a b + V2.Type.Effect a b -> H2.TypeEffect a b + V2.Type.Effects a -> H2.TypeEffects a + V2.Type.Forall a -> H2.TypeForall a + V2.Type.IntroOuter a -> H2.TypeIntroOuter a convertKind :: V2.Kind -> H2.Kind convertKind = \case - V2.Star -> H2.Star - V2.Arrow a b -> H2.Arrow (convertKind a) (convertKind b) + V2.Star -> H2.KindStar + V2.Arrow a b -> H2.KindArrow (convertKind a) (convertKind b) h2ToV2Reference :: H2.Reference -> V2.Reference h2ToV2Reference = \case - H2.Builtin txt -> V2.ReferenceBuiltin txt - H2.DerivedId (H2.Id x y) -> V2.ReferenceDerived (V2.Id x y) + H2.ReferenceBuiltin txt -> V2.ReferenceBuiltin txt + H2.ReferenceDerivedId (H2.ReferenceId x y) -> V2.ReferenceDerived (V2.Id x y) diff --git a/codebase2/codebase-sqlite-hashing-v2/unison-codebase-sqlite-hashing-v2.cabal b/codebase2/codebase-sqlite-hashing-v2/unison-codebase-sqlite-hashing-v2.cabal index c9a24489e..285fa96be 100644 --- a/codebase2/codebase-sqlite-hashing-v2/unison-codebase-sqlite-hashing-v2.cabal +++ b/codebase2/codebase-sqlite-hashing-v2/unison-codebase-sqlite-hashing-v2.cabal @@ -59,6 +59,7 @@ library , unison-codebase-sqlite , unison-core , unison-core1 + , unison-hash , unison-hashing-v2 , unison-prelude , unison-sqlite diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/HashHandle.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/HashHandle.hs index f219c1294..0924c6116 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/HashHandle.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/HashHandle.hs @@ -7,7 +7,7 @@ import qualified U.Codebase.Reference as C import U.Codebase.Sqlite.Symbol (Symbol) import qualified U.Codebase.Term as C.Term import qualified U.Codebase.Type as C.Type -import U.Util.Hash (Hash) +import Unison.Hash (Hash) import Unison.Prelude data HashHandle = HashHandle diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs index 186195134..0f857ef7a 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/LocalizeObject.hs @@ -122,7 +122,7 @@ type LocalizeBranchState = ) -- Run a computation that localizes a branch object, returning the local ids recorded within. -runLocalizeBranch :: Monad m => StateT LocalizeBranchState m a -> m (BranchLocalIds, a) +runLocalizeBranch :: (Monad m) => StateT LocalizeBranchState m a -> m (BranchLocalIds, a) runLocalizeBranch action = do (result, (localTexts, localDefns, localPatches, localChildren)) <- State.runStateT action (mempty @LocalizeBranchState) let branchLocalIds :: BranchLocalIds @@ -143,7 +143,7 @@ type LocalizePatchState = ) -- Run a computation that localizes a patch object, returning the local ids recorded within. 
-runLocalizePatch :: Monad m => StateT LocalizePatchState m a -> m (PatchLocalIds, a) +runLocalizePatch :: (Monad m) => StateT LocalizePatchState m a -> m (PatchLocalIds, a) runLocalizePatch action = do (result, (localTexts, localHashes, localDefns)) <- State.runStateT action (mempty @LocalizePatchState) let patchLocalIds :: PatchLocalIds diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/NamedRef.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/NamedRef.hs index ea69c9c64..b76257074 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/NamedRef.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/NamedRef.hs @@ -5,7 +5,7 @@ import qualified Data.List.NonEmpty as NEL import qualified Data.List.NonEmpty as NonEmpty import qualified Data.Text as Text import Unison.Prelude -import Unison.Sqlite (FromField (..), FromRow (..), SQLData (..), ToField (..), ToRow (..), field) +import Unison.Sqlite type ReversedSegments = NonEmpty Text @@ -28,19 +28,43 @@ instance FromField (ConstructorType) where data NamedRef ref = NamedRef {reversedSegments :: ReversedSegments, ref :: ref} deriving stock (Show, Functor, Foldable, Traversable) -instance ToRow ref => ToRow (NamedRef ref) where +instance (ToRow ref) => ToRow (NamedRef ref) where toRow (NamedRef {reversedSegments = segments, ref}) = [toField reversedName] <> toRow ref where - reversedName = Text.intercalate "." . toList $ segments + reversedName = + segments + & toList + & Text.intercalate "." + & (<> ".") -- Add trailing dot, see notes on scoped_term_name_lookup schema -instance FromRow ref => FromRow (NamedRef ref) where +instance (FromRow ref) => FromRow (NamedRef ref) where fromRow = do - reversedSegments <- NonEmpty.fromList . Text.splitOn "." <$> field + reversedSegments <- + field <&> \f -> + f + & Text.init -- Drop trailing dot, see notes on scoped_term_name_lookup schema + & Text.splitOn "." + & NonEmpty.fromList ref <- fromRow pure (NamedRef {reversedSegments, ref}) -toRowWithNamespace :: ToRow ref => NamedRef ref -> [SQLData] +toRowWithNamespace :: (ToRow ref) => NamedRef ref -> [SQLData] toRowWithNamespace nr = toRow nr <> [SQLText namespace] where namespace = Text.intercalate "." . reverse . NEL.tail . reversedSegments $ nr + +-- | The new 'scoped' name lookup format is different from the old version. +-- +-- Specifically, the scoped format adds the 'lastNameSegment' as well as adding a trailing '.' to the db format +-- of both the namespace and reversed_name. +-- +-- Converts a NamedRef to SQLData of the form: +-- [reversedName, namespace, lastNameSegment] <> ref fields... +namedRefToScopedRow :: (ToRow ref) => NamedRef ref -> [SQLData] +namedRefToScopedRow (NamedRef {reversedSegments = revSegments, ref}) = + toRow $ (SQLText reversedName, SQLText namespace, SQLText lastNameSegment) :. ref + where + reversedName = (Text.intercalate "." . toList $ revSegments) <> "." + namespace = (Text.intercalate "." . reverse . NEL.tail $ revSegments) <> "." 
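For concreteness, here is a minimal standalone sketch (not code from this PR) of the three scoped columns that `namedRefToScopedRow` produces for the name `base.List.map`, whose reversed segments are `["map", "List", "base"]`:

```haskell
{-# LANGUAGE OverloadedStrings #-}

-- Standalone illustration only; the real encoding lives in
-- U.Codebase.Sqlite.NamedRef (namedRefToScopedRow) in this diff.
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NEL
import Data.Text (Text)
import qualified Data.Text as Text

-- (reversed_name, namespace, last_name_segment) as stored in the
-- scoped_term_name_lookup / scoped_type_name_lookup tables.
scopedColumns :: NonEmpty Text -> (Text, Text, Text)
scopedColumns revSegments =
  ( Text.intercalate "." (NEL.toList revSegments) <> ".",         -- e.g. "map.List.base."
    Text.intercalate "." (reverse (NEL.tail revSegments)) <> ".", -- e.g. "base.List."
    NEL.head revSegments                                          -- e.g. "map"
  )

main :: IO ()
main = print (scopedColumns ("map" :| ["List", "base"]))
```

The trailing dots appear to be what let the new `termNamesWithinNamespace`/`typeNamesWithinNamespace` queries later in this diff match a namespace and everything beneath it with a single `namespace GLOB 'base.*'`-style pattern, replacing the old `namespace GLOB ? OR namespace = ?` pair.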
+ lastNameSegment = NEL.head revSegments diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs index 02a0c6edc..45bc6e03a 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Operations.hs @@ -67,9 +67,10 @@ module U.Codebase.Sqlite.Operations termsMentioningType, -- ** name lookup index - updateNameIndex, rootNamesByPath, NamesByPath (..), + checkBranchHashNameLookupExists, + buildNameLookupForBranchHash, -- * reflog getReflog, @@ -163,9 +164,9 @@ import qualified U.Codebase.TypeEdit as C import qualified U.Codebase.TypeEdit as C.TypeEdit import U.Codebase.WatchKind (WatchKind) import qualified U.Util.Base32Hex as Base32Hex -import qualified U.Util.Hash as H -import qualified U.Util.Hash32 as Hash32 import qualified U.Util.Serialization as S +import qualified Unison.Hash as H +import qualified Unison.Hash32 as Hash32 import Unison.NameSegment (NameSegment (NameSegment)) import qualified Unison.NameSegment as NameSegment import Unison.Prelude @@ -544,7 +545,8 @@ s2cBranch (S.Branch.Full.Branch tms tps patches children) = Map Db.TextId (Db.BranchObjectId, Db.CausalHashId) -> Transaction (Map NameSegment (C.Causal Transaction CausalHash BranchHash (C.Branch.Branch Transaction))) doChildren = Map.bitraverse (fmap NameSegment . Q.expectText) \(boId, chId) -> - C.Causal <$> Q.expectCausalHash chId + C.Causal + <$> Q.expectCausalHash chId <*> expectValueHashByCausalHashId chId <*> headParents chId <*> pure (expectBranch boId) @@ -572,7 +574,8 @@ s2cBranch (S.Branch.Full.Branch tms tps patches children) = Db.CausalHashId -> Transaction (C.Causal Transaction CausalHash BranchHash (C.Branch.Branch Transaction)) loadCausal chId = do - C.Causal <$> Q.expectCausalHash chId + C.Causal + <$> Q.expectCausalHash chId <*> expectValueHashByCausalHashId chId <*> headParents chId <*> pure (loadValue chId) @@ -771,7 +774,7 @@ expectDbBranch id = (mergePatches patches patches') (mergeChildren children children') mergeChildren :: - Ord ns => + (Ord ns) => Map ns (Db.BranchObjectId, Db.CausalHashId) -> Map ns S.BranchDiff.ChildOp -> Map ns (Db.BranchObjectId, Db.CausalHashId) @@ -794,7 +797,7 @@ expectDbBranch id = S.BranchDiff.ChildAddReplace id -> id S.BranchDiff.ChildRemove -> error "diff tries to remove a nonexistent child" mergePatches :: - Ord ns => + (Ord ns) => Map ns Db.PatchObjectId -> Map ns S.BranchDiff.PatchOp -> Map ns Db.PatchObjectId @@ -826,7 +829,7 @@ expectDbBranch id = S.Branch.Diff.RemoveDef -> error "diff tries to remove a nonexistent definition" S.Branch.Diff.AlterDefMetadata _md -> error "diff tries to change metadata for a nonexistent definition" mergeDefnOp :: - Ord r => + (Ord r) => Map r S.MetadataSet.DbMetadataSet -> Map r S.BranchDiff.DefinitionOp -> Map r S.MetadataSet.DbMetadataSet @@ -872,7 +875,10 @@ saveDbBranchUnderHashId hh bhId@(Db.unBranchHashId -> hashId) stats branch = do let (localBranchIds, localBranch) = LocalizeObject.localizeBranch branch when debug $ traceM $ - "saveBranchObject\n\tid = " ++ show bhId ++ "\n\tli = " ++ show localBranchIds + "saveBranchObject\n\tid = " + ++ show bhId + ++ "\n\tli = " + ++ show localBranchIds ++ "\n\tlBranch = " ++ show localBranch let bytes = S.putBytes S.putBranchFormat $ S.BranchFormat.Full localBranchIds localBranch @@ -1067,19 +1073,37 @@ derivedDependencies cid = do cids <- traverse s2cReferenceId sids pure $ Set.fromList cids --- | Given lists of names to add and remove, update the 
index accordingly. -updateNameIndex :: +-- | Apply a set of name updates to an existing index. +buildNameLookupForBranchHash :: + -- The existing name lookup index to copy before applying the diff. + -- If Nothing, run the diff against an empty index. + -- If Just, the name lookup must exist or an error will be thrown. + Maybe BranchHash -> + BranchHash -> -- | (add terms, remove terms) ([S.NamedRef (C.Referent, Maybe C.ConstructorType)], [S.NamedRef C.Referent]) -> -- | (add types, remove types) ([S.NamedRef C.Reference], [S.NamedRef C.Reference]) -> Transaction () -updateNameIndex (newTermNames, removedTermNames) (newTypeNames, removedTypeNames) = do - Q.ensureNameLookupTables - Q.removeTermNames ((fmap c2sTextReferent <$> removedTermNames)) - Q.removeTypeNames ((fmap c2sTextReference <$> removedTypeNames)) - Q.insertTermNames (fmap (c2sTextReferent *** fmap c2sConstructorType) <$> newTermNames) - Q.insertTypeNames (fmap c2sTextReference <$> newTypeNames) +buildNameLookupForBranchHash mayExistingBranchIndex newBranchHash (newTermNames, removedTermNames) (newTypeNames, removedTypeNames) = do + newBranchHashId <- Q.saveBranchHash newBranchHash + Q.trackNewBranchHashNameLookup newBranchHashId + case mayExistingBranchIndex of + Nothing -> pure () + Just existingBranchIndex -> do + unlessM (checkBranchHashNameLookupExists existingBranchIndex) $ error "buildNameLookupForBranchHash: existingBranchIndex was provided, but no index was found for that branch hash." + existingBranchHashId <- Q.saveBranchHash existingBranchIndex + Q.copyScopedNameLookup existingBranchHashId newBranchHashId + Q.removeScopedTermNames newBranchHashId ((fmap c2sTextReferent <$> removedTermNames)) + Q.removeScopedTypeNames newBranchHashId ((fmap c2sTextReference <$> removedTypeNames)) + Q.insertScopedTermNames newBranchHashId (fmap (c2sTextReferent *** fmap c2sConstructorType) <$> newTermNames) + Q.insertScopedTypeNames newBranchHashId (fmap c2sTextReference <$> newTypeNames) + +-- | Check whether we've already got an index for a given causal hash. +checkBranchHashNameLookupExists :: BranchHash -> Transaction Bool +checkBranchHashNameLookupExists bh = do + bhId <- Q.saveBranchHash bh + Q.checkBranchHashNameLookupExists bhId data NamesByPath = NamesByPath { termNamesInPath :: [S.NamedRef (C.Referent, Maybe C.ConstructorType)], @@ -1087,13 +1111,16 @@ data NamesByPath = NamesByPath } -- | Get all the term and type names for the root namespace from the lookup table. +-- Requires that an index for this branch hash already exists, which is currently +-- only true on Share. rootNamesByPath :: -- | A relative namespace string, e.g. 
Just "base.List" Maybe Text -> Transaction NamesByPath rootNamesByPath path = do - termNamesInPath <- Q.rootTermNamesByPath path - typeNamesInPath <- Q.rootTypeNamesByPath path + bhId <- Q.expectNamespaceRootBranchHashId + termNamesInPath <- Q.termNamesWithinNamespace bhId path + typeNamesInPath <- Q.typeNamesWithinNamespace bhId path pure $ NamesByPath { termNamesInPath = convertTerms <$> termNamesInPath, diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Orphans.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Orphans.hs index 17e1ce303..6fc417472 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Orphans.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Orphans.hs @@ -12,7 +12,7 @@ import U.Codebase.Sqlite.DbId import U.Codebase.WatchKind (WatchKind) import qualified U.Codebase.WatchKind as WatchKind import U.Util.Base32Hex -import qualified U.Util.Hash as Hash +import qualified Unison.Hash as Hash import Unison.Prelude import Unison.Sqlite diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs index c792d00de..d46d9a3e9 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/Format.hs @@ -59,7 +59,7 @@ applyPatchDiffs = addRemove add del src = Map.unionWith (<>) add (Map.differenceWith remove src del) - remove :: Ord b => Set b -> Set b -> Maybe (Set b) + remove :: (Ord b) => Set b -> Set b -> Maybe (Set b) remove src del = let diff = Set.difference src del in if Set.null diff then Nothing else Just diff diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs index 3335684fe..f2007f42b 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Queries.hs @@ -62,6 +62,7 @@ module U.Codebase.Sqlite.Queries loadNamespaceRoot, setNamespaceRoot, expectNamespaceRoot, + expectNamespaceRootBranchHashId, -- * namespace_statistics table saveNamespaceStats, @@ -115,6 +116,7 @@ module U.Codebase.Sqlite.Queries countWatches, getCausalsWithoutBranchObjects, removeHashObjectsByHashingVersion, + fixScopedNameLookupTables, -- ** type index addToTypeIndex, @@ -133,15 +135,16 @@ module U.Codebase.Sqlite.Queries causalHashIdByBase32Prefix, -- * Name Lookup - ensureNameLookupTables, + copyScopedNameLookup, dropNameLookupTables, - insertTermNames, - insertTypeNames, - removeTermNames, - removeTypeNames, - rootTermNamesByPath, - rootTypeNamesByPath, - getNamespaceDefinitionCount, + insertScopedTermNames, + insertScopedTypeNames, + removeScopedTermNames, + removeScopedTypeNames, + termNamesWithinNamespace, + typeNamesWithinNamespace, + checkBranchHashNameLookupExists, + trackNewBranchHashNameLookup, -- * Reflog appendReflog, @@ -270,13 +273,13 @@ import qualified U.Codebase.Term as C.Term import qualified U.Codebase.Type as C.Type import U.Codebase.WatchKind (WatchKind) import qualified U.Core.ABT as ABT -import U.Util.Hash (Hash) -import qualified U.Util.Hash as Hash -import U.Util.Hash32 (Hash32) -import qualified U.Util.Hash32 as Hash32 -import U.Util.Hash32.Orphans.Sqlite () import qualified U.Util.Serialization as S import qualified U.Util.Term as TermUtil +import Unison.Hash (Hash) +import qualified Unison.Hash as Hash +import Unison.Hash32 (Hash32) +import qualified Unison.Hash32 as Hash32 +import Unison.Hash32.Orphans.Sqlite () import Unison.Prelude import Unison.Sqlite import qualified Unison.Util.Alternative 
as Alternative @@ -285,7 +288,7 @@ import qualified Unison.Util.Lens as Lens -- * main squeeze currentSchemaVersion :: SchemaVersion -currentSchemaVersion = 7 +currentSchemaVersion = 9 createSchema :: Transaction () createSchema = do @@ -294,6 +297,7 @@ createSchema = do addTempEntityTables addNamespaceStatsTables addReflogTable + fixScopedNameLookupTables where insertSchemaVersionSql = [here| @@ -312,6 +316,10 @@ addReflogTable :: Transaction () addReflogTable = executeFile [hereFile|unison/sql/002-reflog-table.sql|] +fixScopedNameLookupTables :: Transaction () +fixScopedNameLookupTables = + executeFile [hereFile|unison/sql/004-fix-scoped-name-lookup-tables.sql|] + executeFile :: String -> Transaction () executeFile = traverse_ (execute_ . fromString) . filter (not . null) . List.splitOn ";" @@ -1082,6 +1090,11 @@ loadCausalParentsByHash hash = |] (Only hash) +expectNamespaceRootBranchHashId :: Transaction BranchHashId +expectNamespaceRootBranchHashId = do + chId <- expectNamespaceRoot + expectCausalValueHashId chId + expectNamespaceRoot :: Transaction CausalHashId expectNamespaceRoot = queryOneCol_ loadNamespaceRootSql @@ -1584,81 +1597,96 @@ dropNameLookupTables = do DROP TABLE IF EXISTS type_name_lookup |] --- | Ensure the name lookup tables exist. -ensureNameLookupTables :: Transaction () -ensureNameLookupTables = do - execute_ - [here| - CREATE TABLE IF NOT EXISTS term_name_lookup ( - -- The name of the term: E.g. map.List.base - reversed_name TEXT NOT NULL, - -- The namespace containing this term, not reversed: E.g. base.List - namespace TEXT NOT NULL, - referent_builtin TEXT NULL, - referent_component_hash TEXT NULL, - referent_component_index INTEGER NULL, - referent_constructor_index INTEGER NULL, - referent_constructor_type INTEGER NULL, - PRIMARY KEY (reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index) - ) - |] - execute_ - [here| - CREATE INDEX IF NOT EXISTS term_names_by_namespace ON term_name_lookup(namespace) - |] - -- Don't need this index at the moment, but will likely be useful later. - -- execute_ - -- [here| - -- CREATE INDEX IF NOT EXISTS term_name_by_referent_lookup ON term_name_lookup(referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index) - -- |] - execute_ - [here| - CREATE TABLE IF NOT EXISTS type_name_lookup ( - -- The name of the term: E.g. List.base - reversed_name TEXT NOT NULL, - -- The namespace containing this term, not reversed: E.g. base.List - namespace TEXT NOT NULL, - reference_builtin TEXT NULL, - reference_component_hash INTEGER NULL, - reference_component_index INTEGER NULL, - PRIMARY KEY (reversed_name, reference_builtin, reference_component_hash, reference_component_index) - ); - |] - execute_ - [here| - CREATE INDEX IF NOT EXISTS type_names_by_namespace ON type_name_lookup(namespace) - |] +-- | Copies existing name lookup rows but replaces their branch hash id; +-- This is a low-level operation used as part of deriving a new name lookup index +-- from an existing one as performantly as possible. 
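The `copyScopedNameLookup` operation declared just below is the low-level half of the copy-then-diff scheme driven by `buildNameLookupForBranchHash` in `U.Codebase.Sqlite.Operations` above. As a hedged sketch (not code from this PR), a caller deriving a child branch's index from its parent's might look like this, with the branch hashes and the four name-diff lists supplied by the caller:

```haskell
-- Hypothetical caller sketch. `Ops` is assumed to be the
-- U.Codebase.Sqlite.Operations module changed in this diff; argument types
-- are left to inference so the sketch does not guess at other module layouts.
import Control.Monad (unless)
import qualified U.Codebase.Sqlite.Operations as Ops

ensureChildNameLookup parentBh childBh termDiff typeDiff = do
  -- Skip the work if an index for the child branch hash already exists.
  alreadyIndexed <- Ops.checkBranchHashNameLookupExists childBh
  unless alreadyIndexed $
    -- Copy the parent's scoped rows (that index must already exist),
    -- then apply the (added, removed) term and type names.
    Ops.buildNameLookupForBranchHash (Just parentBh) childBh termDiff typeDiff
```

Passing `Nothing` in place of `Just parentBh` runs the diff against an empty index, per the comment on `buildNameLookupForBranchHash`.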
+copyScopedNameLookup :: BranchHashId -> BranchHashId -> Transaction () +copyScopedNameLookup fromBHId toBHId = do + execute termsCopySql (toBHId, fromBHId) + execute typesCopySql (toBHId, fromBHId) + where + termsCopySql = + [here| + INSERT INTO scoped_term_name_lookup(root_branch_hash_id, reversed_name, last_name_segment, namespace, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type) + SELECT ?, reversed_name, last_name_segment, namespace, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type + FROM scoped_term_name_lookup + WHERE root_branch_hash_id = ? + |] + typesCopySql = + [here| + INSERT INTO scoped_type_name_lookup(root_branch_hash_id, reversed_name, last_name_segment, namespace, reference_builtin, reference_component_hash, reference_component_index) + SELECT ?, reversed_name, last_name_segment, namespace, reference_builtin, reference_component_hash, reference_component_index + FROM scoped_type_name_lookup + WHERE root_branch_hash_id = ? + |] --- Don't need this index at the moment, but will likely be useful later. --- execute_ --- [here| --- CREATE INDEX IF NOT EXISTS type_name_by_reference_lookup ON type_name_lookup(reference_builtin, reference_object_id, reference_component_index); --- |] +-- | Inserts a new record into the name_lookups table +trackNewBranchHashNameLookup :: BranchHashId -> Transaction () +trackNewBranchHashNameLookup bhId = do + execute sql (Only bhId) + where + sql = + [here| + INSERT INTO name_lookups (root_branch_hash_id) + VALUES (?) + |] + +-- | Check if we've already got an index for the desired root branch hash. +checkBranchHashNameLookupExists :: BranchHashId -> Transaction Bool +checkBranchHashNameLookupExists hashId = do + queryOneCol sql (Only hashId) + where + sql = + [here| + SELECT EXISTS ( + SELECT 1 + FROM name_lookups + WHERE root_branch_hash_id = ? + LIMIT 1 + ) + |] -- | Insert the given set of term names into the name lookup table -insertTermNames :: [NamedRef (Referent.TextReferent, Maybe NamedRef.ConstructorType)] -> Transaction () -insertTermNames names = do - executeMany sql (NamedRef.toRowWithNamespace . fmap refToRow <$> names) +insertScopedTermNames :: BranchHashId -> [NamedRef (Referent.TextReferent, Maybe NamedRef.ConstructorType)] -> Transaction () +insertScopedTermNames bhId names = do + executeMany sql (namedRefToRow <$> names) where + namedRefToRow :: NamedRef (S.Referent.TextReferent, Maybe NamedRef.ConstructorType) -> (Only BranchHashId :. [SQLData]) + namedRefToRow namedRef = + namedRef + & fmap refToRow + & NamedRef.namedRefToScopedRow + & \nr -> (Only bhId :. nr) refToRow :: (Referent.TextReferent, Maybe NamedRef.ConstructorType) -> (Referent.TextReferent :. Only (Maybe NamedRef.ConstructorType)) refToRow (ref, ct) = ref :. Only ct sql = [here| - INSERT INTO term_name_lookup (reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type, namespace) - VALUES (?, ?, ?, ?, ?, ?, ?) - ON CONFLICT DO NOTHING + INSERT INTO scoped_term_name_lookup (root_branch_hash_id, reversed_name, namespace, last_name_segment, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
|] --- | Remove the given set of term names into the name lookup table -removeTermNames :: [NamedRef Referent.TextReferent] -> Transaction () -removeTermNames names = do - executeMany sql names +-- | Insert the given set of type names into the name lookup table +insertScopedTypeNames :: BranchHashId -> [NamedRef (Reference.TextReference)] -> Transaction () +insertScopedTypeNames bhId names = + executeMany sql ((Only bhId :.) . NamedRef.namedRefToScopedRow <$> names) where sql = [here| - DELETE FROM term_name_lookup + INSERT INTO scoped_type_name_lookup (root_branch_hash_id, reversed_name, namespace, last_name_segment, reference_builtin, reference_component_hash, reference_component_index) + VALUES (?, ?, ?, ?, ?, ?, ?) + |] + +-- | Remove the given set of term names into the name lookup table +removeScopedTermNames :: BranchHashId -> [NamedRef Referent.TextReferent] -> Transaction () +removeScopedTermNames bhId names = do + executeMany sql ((Only bhId :.) <$> names) + where + sql = + [here| + DELETE FROM scoped_term_name_lookup WHERE - reversed_name IS ? + root_branch_hash_id IS ? + AND reversed_name IS ? AND referent_builtin IS ? AND referent_component_hash IS ? AND referent_component_index IS ? @@ -1666,15 +1694,16 @@ removeTermNames names = do |] -- | Remove the given set of term names into the name lookup table -removeTypeNames :: [NamedRef (Reference.TextReference)] -> Transaction () -removeTypeNames names = do - executeMany sql names +removeScopedTypeNames :: BranchHashId -> [NamedRef (Reference.TextReference)] -> Transaction () +removeScopedTypeNames bhId names = do + executeMany sql ((Only bhId :.) <$> names) where sql = [here| - DELETE FROM type_name_lookup + DELETE FROM scoped_type_name_lookup WHERE - reversed_name IS ? + root_branch_hash_id IS ? + AND reversed_name IS ? AND reference_builtin IS ? AND reference_component_hash IS ? AND reference_component_index IS ? @@ -1721,66 +1750,40 @@ likeEscape escapeChar pat = | c == escapeChar -> Text.pack [escapeChar, escapeChar] | otherwise -> Text.singleton c --- | Gets the count of all definitions within the given namespace. --- NOTE: This requires a working name lookup index. -getNamespaceDefinitionCount :: Text -> Transaction Int -getNamespaceDefinitionCount namespace = do - let subnamespace = globEscape namespace <> ".*" - queryOneCol sql (subnamespace, namespace, subnamespace, namespace) - where - sql = - [here| - SELECT COUNT(*) FROM ( - SELECT 1 FROM term_name_lookup WHERE namespace GLOB ? OR namespace = ? - UNION ALL - SELECT 1 FROM type_name_lookup WHERE namespace GLOB ? OR namespace = ? - ) - |] - --- | Insert the given set of type names into the name lookup table -insertTypeNames :: [NamedRef (Reference.TextReference)] -> Transaction () -insertTypeNames names = - executeMany sql (NamedRef.toRowWithNamespace <$> names) - where - sql = - [here| - INSERT INTO type_name_lookup (reversed_name, reference_builtin, reference_component_hash, reference_component_index, namespace) - VALUES (?, ?, ?, ?, ?) - ON CONFLICT DO NOTHING - |] - -- | Get the list of a term names in the root namespace according to the name lookup index -rootTermNamesByPath :: Maybe Text -> Transaction [NamedRef (Referent.TextReferent, Maybe NamedRef.ConstructorType)] -rootTermNamesByPath mayNamespace = do - let (namespace, subnamespace) = case mayNamespace of - Nothing -> ("", "*") - Just namespace -> (namespace, globEscape namespace <> ".*") - results :: [NamedRef (Referent.TextReferent :. 
Only (Maybe NamedRef.ConstructorType))] <- queryListRow sql (subnamespace, namespace, subnamespace, namespace) +termNamesWithinNamespace :: BranchHashId -> Maybe Text -> Transaction [NamedRef (Referent.TextReferent, Maybe NamedRef.ConstructorType)] +termNamesWithinNamespace bhId mayNamespace = do + let namespaceGlob = case mayNamespace of + Nothing -> "*" + Just namespace -> globEscape namespace <> ".*" + results :: [NamedRef (Referent.TextReferent :. Only (Maybe NamedRef.ConstructorType))] <- queryListRow sql (bhId, namespaceGlob) pure (fmap unRow <$> results) where unRow (a :. Only b) = (a, b) sql = [here| - SELECT reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type FROM term_name_lookup - WHERE (namespace GLOB ? OR namespace = ?) - ORDER BY (namespace GLOB ? OR namespace = ?) DESC + SELECT reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type FROM scoped_term_name_lookup + WHERE + root_branch_hash_id = ? + AND namespace GLOB ? |] -- | Get the list of a type names in the root namespace according to the name lookup index -rootTypeNamesByPath :: Maybe Text -> Transaction [NamedRef Reference.TextReference] -rootTypeNamesByPath mayNamespace = do - let (namespace, subnamespace) = case mayNamespace of - Nothing -> ("", "*") - Just namespace -> (namespace, globEscape namespace <> ".*") - results :: [NamedRef Reference.TextReference] <- queryListRow sql (subnamespace, namespace, subnamespace, namespace) +typeNamesWithinNamespace :: BranchHashId -> Maybe Text -> Transaction [NamedRef Reference.TextReference] +typeNamesWithinNamespace bhId mayNamespace = do + let namespaceGlob = case mayNamespace of + Nothing -> "*" + Just namespace -> globEscape namespace <> ".*" + results :: [NamedRef Reference.TextReference] <- queryListRow sql (bhId, namespaceGlob) pure results where sql = [here| - SELECT reversed_name, reference_builtin, reference_component_hash, reference_component_index FROM type_name_lookup - WHERE namespace GLOB ? OR namespace = ? - ORDER BY (namespace GLOB ? OR namespace = ?) DESC - |] + SELECT reversed_name, reference_builtin, reference_component_hash, reference_component_index FROM scoped_type_name_lookup + WHERE + root_branch_hash_id = ? + AND namespace GLOB ? + |] -- | @before x y@ returns whether or not @x@ occurred before @y@, i.e. @x@ is an ancestor of @y@. before :: CausalHashId -> CausalHashId -> Transaction Bool @@ -2130,7 +2133,7 @@ saveDeclComponent hh@HashHandle {toReferenceDecl, toReferenceDeclMentions} maybe pure oId -- | implementation detail of {s,w}2c*Term* & s2cDecl -localIdsToLookups :: Monad m => (t -> m Text) -> (d -> m Hash) -> LocalIds' t d -> m (LocalTextId -> Text, LocalDefnId -> Hash) +localIdsToLookups :: (Monad m) => (t -> m Text) -> (d -> m Hash) -> LocalIds' t d -> m (LocalTextId -> Text, LocalDefnId -> Hash) localIdsToLookups loadText loadHash localIds = do texts <- traverse loadText $ LocalIds.textLookup localIds hashes <- traverse loadHash $ LocalIds.defnLookup localIds @@ -2174,7 +2177,7 @@ localIdsToTypeRefLookup localIds = do c2sDecl :: forall m t d. - Monad m => + (Monad m) => (Text -> m t) -> (Hash -> m d) -> C.Decl Symbol -> @@ -2210,7 +2213,7 @@ c2sDecl saveText saveDefn (C.Decl.DataDeclaration dt m b cts) = do -- | implementation detail of c2{s,w}Term -- The Type is optional, because we don't store them for watch expression results. -c2xTerm :: forall m t d. 
Monad m => (Text -> m t) -> (Hash -> m d) -> C.Term Symbol -> Maybe (C.Term.Type Symbol) -> m (LocalIds' t d, S.Term.Term, Maybe (S.Term.Type)) +c2xTerm :: forall m t d. (Monad m) => (Text -> m t) -> (Hash -> m d) -> C.Term Symbol -> Maybe (C.Term.Type Symbol) -> m (LocalIds' t d, S.Term.Term, Maybe (S.Term.Type)) c2xTerm saveText saveDefn tm tp = done =<< (runWriterT . flip evalStateT mempty) do sterm <- ABT.transformM go tm diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Reference.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Reference.hs index 7f68e9bed..ca228f83d 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Reference.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Reference.hs @@ -65,9 +65,9 @@ referenceFromRow' = liftA3 mkRef field field field where str = "(" ++ show t ++ ", " ++ show h ++ ", " ++ show i ++ ")" -instance ToField h => ToRow (Id' h) where +instance (ToField h) => ToRow (Id' h) where toRow = \case Id h i -> toRow (Only h) ++ toRow (Only i) -instance FromField h => FromRow (Id' h) where +instance (FromField h) => FromRow (Id' h) where fromRow = Id <$> field <*> field diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Serialization.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Serialization.hs index 784eb91df..146af9fc0 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Serialization.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Serialization.hs @@ -1,10 +1,4 @@ -{-# LANGUAGE BlockArguments #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE ViewPatterns #-} module U.Codebase.Sqlite.Serialization where @@ -44,9 +38,9 @@ import qualified U.Codebase.Term as Term import qualified U.Codebase.Type as Type import qualified U.Core.ABT as ABT import qualified U.Util.Base32Hex as Base32Hex -import U.Util.Hash32 (Hash32) -import qualified U.Util.Hash32 as Hash32 import U.Util.Serialization hiding (debug) +import Unison.Hash32 (Hash32) +import qualified Unison.Hash32 as Hash32 import Unison.Prelude import qualified Unison.Util.Monoid as Monoid import Prelude hiding (getChar, putChar) @@ -121,47 +115,47 @@ putLocalIdsWith putText putDefn LocalIds {textLookup, defnLookup} = do putFoldable putText textLookup putFoldable putDefn defnLookup -getLocalIds :: MonadGet m => m LocalIds +getLocalIds :: (MonadGet m) => m LocalIds getLocalIds = getLocalIdsWith getVarInt getVarInt -getWatchLocalIds :: MonadGet m => m WatchLocalIds +getWatchLocalIds :: (MonadGet m) => m WatchLocalIds getWatchLocalIds = getLocalIdsWith getVarInt getVarInt -getLocalIdsWith :: MonadGet m => m t -> m d -> m (LocalIds' t d) +getLocalIdsWith :: (MonadGet m) => m t -> m d -> m (LocalIds' t d) getLocalIdsWith getText getDefn = LocalIds <$> getVector getText <*> getVector getDefn -putUnit :: Applicative m => () -> m () +putUnit :: (Applicative m) => () -> m () putUnit _ = pure () -getUnit :: Applicative m => m () +getUnit :: (Applicative m) => m () getUnit = pure () -putWatchResultFormat :: MonadPut m => TermFormat.WatchResultFormat -> m () +putWatchResultFormat :: (MonadPut m) => TermFormat.WatchResultFormat -> m () putWatchResultFormat = \case TermFormat.WatchResult ids t -> do putWord8 0 putLocalIds ids putTerm t -getWatchResultFormat :: MonadGet m => m TermFormat.WatchResultFormat +getWatchResultFormat :: (MonadGet m) => m TermFormat.WatchResultFormat getWatchResultFormat = getWord8 >>= \case 0 -> TermFormat.WatchResult <$> getWatchLocalIds <*> 
getTerm other -> unknownTag "getWatchResultFormat" other -putTermFormat :: MonadPut m => TermFormat.TermFormat -> m () +putTermFormat :: (MonadPut m) => TermFormat.TermFormat -> m () putTermFormat = \case TermFormat.Term c -> putWord8 0 *> putTermComponent c -getTermFormat :: MonadGet m => m TermFormat.TermFormat +getTermFormat :: (MonadGet m) => m TermFormat.TermFormat getTermFormat = getWord8 >>= \case 0 -> TermFormat.Term <$> getTermComponent other -> unknownTag "getTermFormat" other putTermComponent :: - MonadPut m => + (MonadPut m) => TermFormat.LocallyIndexedComponent -> m () putTermComponent t | debug && trace ("putTermComponent " ++ show t) False = undefined @@ -172,11 +166,11 @@ putTermComponent (TermFormat.LocallyIndexedComponent v) = ) v -putTerm :: MonadPut m => TermFormat.Term -> m () +putTerm :: (MonadPut m) => TermFormat.Term -> m () putTerm _t | debug && trace "putTerm" False = undefined putTerm t = putABT putSymbol putUnit putF t where - putF :: MonadPut m => (a -> m ()) -> TermFormat.F a -> m () + putF :: (MonadPut m) => (a -> m ()) -> TermFormat.F a -> m () putF putChild = \case Term.Int n -> putWord8 0 *> putInt n @@ -222,10 +216,10 @@ putTerm t = putABT putSymbol putUnit putF t putWord8 20 *> putReferent' putRecursiveReference putReference r Term.TypeLink r -> putWord8 21 *> putReference r - putMatchCase :: MonadPut m => (a -> m ()) -> Term.MatchCase LocalTextId TermFormat.TypeRef a -> m () + putMatchCase :: (MonadPut m) => (a -> m ()) -> Term.MatchCase LocalTextId TermFormat.TypeRef a -> m () putMatchCase putChild (Term.MatchCase pat guard body) = putPattern pat *> putMaybe putChild guard *> putChild body - putPattern :: MonadPut m => Term.Pattern LocalTextId TermFormat.TypeRef -> m () + putPattern :: (MonadPut m) => Term.Pattern LocalTextId TermFormat.TypeRef -> m () putPattern p = case p of Term.PUnbound -> putWord8 0 Term.PVar -> putWord8 1 @@ -255,23 +249,23 @@ putTerm t = putABT putSymbol putUnit putF t *> putPattern r Term.PText t -> putWord8 12 *> putVarInt t Term.PChar c -> putWord8 13 *> putChar c - putSeqOp :: MonadPut m => Term.SeqOp -> m () + putSeqOp :: (MonadPut m) => Term.SeqOp -> m () putSeqOp Term.PCons = putWord8 0 putSeqOp Term.PSnoc = putWord8 1 putSeqOp Term.PConcat = putWord8 2 -getTermComponent :: MonadGet m => m TermFormat.LocallyIndexedComponent +getTermComponent :: (MonadGet m) => m TermFormat.LocallyIndexedComponent getTermComponent = TermFormat.LocallyIndexedComponent <$> getFramedArray (getTuple3 getLocalIds (getFramed getTerm) getTType) -getTermAndType :: MonadGet m => m (TermFormat.Term, TermFormat.Type) +getTermAndType :: (MonadGet m) => m (TermFormat.Term, TermFormat.Type) getTermAndType = (,) <$> getFramed getTerm <*> getTType -getTerm :: MonadGet m => m TermFormat.Term +getTerm :: (MonadGet m) => m TermFormat.Term getTerm = getABT getSymbol getUnit getF where - getF :: MonadGet m => m a -> m (TermFormat.F a) + getF :: (MonadGet m) => m a -> m (TermFormat.F a) getF getChild = getWord8 >>= \case 0 -> Term.Int <$> getInt @@ -302,13 +296,13 @@ getTerm = getABT getSymbol getUnit getF 21 -> Term.TypeLink <$> getReference tag -> unknownTag "getTerm" tag where - getReferent :: MonadGet m => m (Referent' TermFormat.TermRef TermFormat.TypeRef) + getReferent :: (MonadGet m) => m (Referent' TermFormat.TermRef TermFormat.TypeRef) getReferent = getWord8 >>= \case 0 -> Referent.Ref <$> getRecursiveReference 1 -> Referent.Con <$> getReference <*> getVarInt x -> unknownTag "getTermComponent" x - getPattern :: MonadGet m => m (Term.Pattern LocalTextId 
TermFormat.TypeRef) + getPattern :: (MonadGet m) => m (Term.Pattern LocalTextId TermFormat.TypeRef) getPattern = getWord8 >>= \case 0 -> pure Term.PUnbound @@ -336,7 +330,7 @@ getTerm = getABT getSymbol getUnit getF 13 -> Term.PChar <$> getChar x -> unknownTag "Pattern" x where - getSeqOp :: MonadGet m => m Term.SeqOp + getSeqOp :: (MonadGet m) => m Term.SeqOp getSeqOp = getWord8 >>= \case 0 -> pure Term.PCons @@ -344,28 +338,28 @@ getTerm = getABT getSymbol getUnit getF 2 -> pure Term.PConcat tag -> unknownTag "SeqOp" tag -lookupTermElement :: MonadGet m => Reference.Pos -> m (LocalIds, TermFormat.Term, TermFormat.Type) +lookupTermElement :: (MonadGet m) => Reference.Pos -> m (LocalIds, TermFormat.Term, TermFormat.Type) lookupTermElement i = getWord8 >>= \case 0 -> unsafeFramedArrayLookup (getTuple3 getLocalIds (getFramed getTerm) getTType) $ fromIntegral i tag -> unknownTag "lookupTermElement" tag -lookupTermElementDiscardingType :: MonadGet m => Reference.Pos -> m (LocalIds, TermFormat.Term) +lookupTermElementDiscardingType :: (MonadGet m) => Reference.Pos -> m (LocalIds, TermFormat.Term) lookupTermElementDiscardingType i = getWord8 >>= \case 0 -> unsafeFramedArrayLookup ((,) <$> getLocalIds <*> getFramed getTerm) $ fromIntegral i tag -> unknownTag "lookupTermElementDiscardingType" tag -lookupTermElementDiscardingTerm :: MonadGet m => Reference.Pos -> m (LocalIds, TermFormat.Type) +lookupTermElementDiscardingTerm :: (MonadGet m) => Reference.Pos -> m (LocalIds, TermFormat.Type) lookupTermElementDiscardingTerm i = getWord8 >>= \case 0 -> unsafeFramedArrayLookup ((,) <$> getLocalIds <* skipFramed <*> getTType) $ fromIntegral i tag -> unknownTag "lookupTermElementDiscardingTerm" tag -getTType :: MonadGet m => m TermFormat.Type +getTType :: (MonadGet m) => m TermFormat.Type getTType = getType getReference -getType :: forall m r. MonadGet m => m r -> m (Type.TypeR r Symbol) +getType :: forall m r. 
(MonadGet m) => m r -> m (Type.TypeR r Symbol) getType getReference = getABT getSymbol getUnit go where go :: m x -> m (Type.F' r x) @@ -380,19 +374,19 @@ getType getReference = getABT getSymbol getUnit go 6 -> Type.Forall <$> getChild 7 -> Type.IntroOuter <$> getChild tag -> unknownTag "getType" tag - getKind :: MonadGet m => m Kind + getKind :: (MonadGet m) => m Kind getKind = getWord8 >>= \case 0 -> pure Kind.Star 1 -> Kind.Arrow <$> getKind <*> getKind tag -> unknownTag "getKind" tag -putDeclFormat :: MonadPut m => DeclFormat.DeclFormat -> m () +putDeclFormat :: (MonadPut m) => DeclFormat.DeclFormat -> m () putDeclFormat = \case DeclFormat.Decl c -> putWord8 0 *> putDeclComponent c where -- These use a framed array for randomer access - putDeclComponent :: MonadPut m => DeclFormat.LocallyIndexedComponent -> m () + putDeclComponent :: (MonadPut m) => DeclFormat.LocallyIndexedComponent -> m () putDeclComponent t | debug && trace ("putDeclComponent " ++ show t) False = undefined putDeclComponent (DeclFormat.LocallyIndexedComponent v) = putFramedArray (putPair putLocalIds putDeclElement) v @@ -407,18 +401,18 @@ putDeclFormat = \case putModifier Decl.Structural = putWord8 0 putModifier (Decl.Unique t) = putWord8 1 *> putText t -getDeclFormat :: MonadGet m => m DeclFormat.DeclFormat +getDeclFormat :: (MonadGet m) => m DeclFormat.DeclFormat getDeclFormat = getWord8 >>= \case 0 -> DeclFormat.Decl <$> getDeclComponent other -> unknownTag "DeclFormat" other where - getDeclComponent :: MonadGet m => m DeclFormat.LocallyIndexedComponent + getDeclComponent :: (MonadGet m) => m DeclFormat.LocallyIndexedComponent getDeclComponent = DeclFormat.LocallyIndexedComponent <$> getFramedArray (getPair getLocalIds getDeclElement) -getDeclElement :: MonadGet m => m (DeclFormat.Decl Symbol) +getDeclElement :: (MonadGet m) => m (DeclFormat.Decl Symbol) getDeclElement = Decl.DataDeclaration <$> getDeclType @@ -438,13 +432,13 @@ getDeclElement = other -> unknownTag "DeclModifier" other lookupDeclElement :: - MonadGet m => Reference.Pos -> m (LocalIds, DeclFormat.Decl Symbol) + (MonadGet m) => Reference.Pos -> m (LocalIds, DeclFormat.Decl Symbol) lookupDeclElement i = getWord8 >>= \case 0 -> unsafeFramedArrayLookup (getPair getLocalIds getDeclElement) $ fromIntegral i other -> unknownTag "lookupDeclElement" other -putBranchFormat :: MonadPut m => BranchFormat.BranchFormat -> m () +putBranchFormat :: (MonadPut m) => BranchFormat.BranchFormat -> m () putBranchFormat b | debug && trace ("putBranchFormat " ++ show b) False = undefined putBranchFormat b = case b of BranchFormat.Full li b -> do @@ -486,14 +480,14 @@ putBranchFormat b = case b of BranchDiff.ChildRemove -> putWord8 0 BranchDiff.ChildAddReplace b -> putWord8 1 *> putVarInt b -putBranchLocalIds :: MonadPut m => BranchFormat.BranchLocalIds -> m () +putBranchLocalIds :: (MonadPut m) => BranchFormat.BranchLocalIds -> m () putBranchLocalIds (BranchFormat.LocalIds ts os ps cs) = do putFoldable putVarInt ts putFoldable putVarInt os putFoldable putVarInt ps putFoldable (putPair putVarInt putVarInt) cs -putPatchFormat :: MonadPut m => PatchFormat.PatchFormat -> m () +putPatchFormat :: (MonadPut m) => PatchFormat.PatchFormat -> m () putPatchFormat = \case PatchFormat.Full ids p -> do putWord8 0 @@ -505,71 +499,71 @@ putPatchFormat = \case putPatchLocalIds ids putPatchDiff p -getPatchFormat :: MonadGet m => m PatchFormat.PatchFormat +getPatchFormat :: (MonadGet m) => m PatchFormat.PatchFormat getPatchFormat = getWord8 >>= \case 0 -> PatchFormat.Full <$> 
getPatchLocalIds <*> getPatchFull 1 -> PatchFormat.Diff <$> getVarInt <*> getPatchLocalIds <*> getPatchDiff x -> unknownTag "getPatchFormat" x where - getPatchFull :: MonadGet m => m PatchFull.LocalPatch + getPatchFull :: (MonadGet m) => m PatchFull.LocalPatch getPatchFull = PatchFull.Patch <$> getMap getReferent (getSet getTermEdit) <*> getMap getReference (getSet getTypeEdit) - getPatchDiff :: MonadGet m => m PatchDiff.LocalPatchDiff + getPatchDiff :: (MonadGet m) => m PatchDiff.LocalPatchDiff getPatchDiff = PatchDiff.PatchDiff <$> getMap getReferent (getSet getTermEdit) <*> getMap getReference (getSet getTypeEdit) <*> getMap getReferent (getSet getTermEdit) <*> getMap getReference (getSet getTypeEdit) - getTermEdit :: MonadGet m => m TermEdit.LocalTermEdit + getTermEdit :: (MonadGet m) => m TermEdit.LocalTermEdit getTermEdit = getWord8 >>= \case 0 -> pure TermEdit.Deprecate 1 -> TermEdit.Replace <$> getReferent <*> getTyping x -> unknownTag "getTermEdit" x - getTyping :: MonadGet m => m TermEdit.Typing + getTyping :: (MonadGet m) => m TermEdit.Typing getTyping = getWord8 >>= \case 0 -> pure TermEdit.Same 1 -> pure TermEdit.Subtype 2 -> pure TermEdit.Different x -> unknownTag "getTyping" x - getTypeEdit :: MonadGet m => m TypeEdit.LocalTypeEdit + getTypeEdit :: (MonadGet m) => m TypeEdit.LocalTypeEdit getTypeEdit = getWord8 >>= \case 0 -> pure TypeEdit.Deprecate 1 -> TypeEdit.Replace <$> getReference x -> unknownTag "getTypeEdit" x -getPatchLocalIds :: MonadGet m => m PatchFormat.PatchLocalIds +getPatchLocalIds :: (MonadGet m) => m PatchFormat.PatchLocalIds getPatchLocalIds = PatchFormat.LocalIds <$> getVector getVarInt <*> getVector getVarInt <*> getVector getVarInt -putPatchFull :: MonadPut m => PatchFull.LocalPatch -> m () +putPatchFull :: (MonadPut m) => PatchFull.LocalPatch -> m () putPatchFull (PatchFull.Patch termEdits typeEdits) = do putMap putReferent (putFoldable putTermEdit) termEdits putMap putReference (putFoldable putTypeEdit) typeEdits -putPatchDiff :: MonadPut m => PatchDiff.LocalPatchDiff -> m () +putPatchDiff :: (MonadPut m) => PatchDiff.LocalPatchDiff -> m () putPatchDiff (PatchDiff.PatchDiff atm atp rtm rtp) = do putMap putReferent (putFoldable putTermEdit) atm putMap putReference (putFoldable putTypeEdit) atp putMap putReferent (putFoldable putTermEdit) rtm putMap putReference (putFoldable putTypeEdit) rtp -putPatchLocalIds :: MonadPut m => PatchFormat.PatchLocalIds -> m () +putPatchLocalIds :: (MonadPut m) => PatchFormat.PatchLocalIds -> m () putPatchLocalIds (PatchFormat.LocalIds ts hs os) = do putFoldable putVarInt ts putFoldable putVarInt hs putFoldable putVarInt os -putTermEdit :: MonadPut m => TermEdit.LocalTermEdit -> m () +putTermEdit :: (MonadPut m) => TermEdit.LocalTermEdit -> m () putTermEdit TermEdit.Deprecate = putWord8 0 putTermEdit (TermEdit.Replace r t) = putWord8 1 *> putReferent r *> putTyping t where @@ -577,29 +571,29 @@ putTermEdit (TermEdit.Replace r t) = putWord8 1 *> putReferent r *> putTyping t putTyping TermEdit.Subtype = putWord8 1 putTyping TermEdit.Different = putWord8 2 -putTypeEdit :: MonadPut m => TypeEdit.LocalTypeEdit -> m () +putTypeEdit :: (MonadPut m) => TypeEdit.LocalTypeEdit -> m () putTypeEdit TypeEdit.Deprecate = putWord8 0 putTypeEdit (TypeEdit.Replace r) = putWord8 1 *> putReference r -getBranchFormat :: MonadGet m => m BranchFormat.BranchFormat +getBranchFormat :: (MonadGet m) => m BranchFormat.BranchFormat getBranchFormat = getWord8 >>= \case 0 -> getBranchFull 1 -> getBranchDiff x -> unknownTag "getBranchFormat" x where - 
getBranchFull :: MonadGet m => m BranchFormat.BranchFormat + getBranchFull :: (MonadGet m) => m BranchFormat.BranchFormat getBranchFull = BranchFormat.Full <$> getBranchLocalIds <*> getLocalBranch where - getLocalBranch :: MonadGet m => m BranchFull.LocalBranch + getLocalBranch :: (MonadGet m) => m BranchFull.LocalBranch getLocalBranch = BranchFull.Branch <$> getMap getVarInt (getMap getReferent getMetadataSetFormat) <*> getMap getVarInt (getMap getReference getMetadataSetFormat) <*> getMap getVarInt getVarInt <*> getMap getVarInt getVarInt - getMetadataSetFormat :: MonadGet m => m BranchFull.LocalMetadataSet + getMetadataSetFormat :: (MonadGet m) => m BranchFull.LocalMetadataSet getMetadataSetFormat = getWord8 >>= \case 0 -> BranchFull.Inline <$> getSet getReference @@ -610,14 +604,14 @@ getBranchFormat = <*> getBranchLocalIds <*> getLocalBranchDiff where - getLocalBranchDiff :: MonadGet m => m BranchDiff.LocalDiff + getLocalBranchDiff :: (MonadGet m) => m BranchDiff.LocalDiff getLocalBranchDiff = BranchDiff.Diff <$> getMap getVarInt (getMap getReferent getDiffOp) <*> getMap getVarInt (getMap getReference getDiffOp) <*> getMap getVarInt getPatchOp <*> getMap getVarInt getChildOp - getDiffOp :: MonadGet m => m BranchDiff.LocalDefinitionOp + getDiffOp :: (MonadGet m) => m BranchDiff.LocalDefinitionOp getDiffOp = getWord8 >>= \case 0 -> pure BranchDiff.RemoveDef @@ -628,20 +622,20 @@ getBranchFormat = adds <- getMap get (pure True) -- and removes: addToExistingMap get (pure False) adds - getPatchOp :: MonadGet m => m BranchDiff.LocalPatchOp + getPatchOp :: (MonadGet m) => m BranchDiff.LocalPatchOp getPatchOp = getWord8 >>= \case 0 -> pure BranchDiff.PatchRemove 1 -> BranchDiff.PatchAddReplace <$> getVarInt x -> unknownTag "getPatchOp" x - getChildOp :: MonadGet m => m BranchDiff.LocalChildOp + getChildOp :: (MonadGet m) => m BranchDiff.LocalChildOp getChildOp = getWord8 >>= \case 0 -> pure BranchDiff.ChildRemove 1 -> BranchDiff.ChildAddReplace <$> getVarInt x -> unknownTag "getChildOp" x -getBranchLocalIds :: MonadGet m => m BranchFormat.BranchLocalIds +getBranchLocalIds :: (MonadGet m) => m BranchFormat.BranchLocalIds getBranchLocalIds = BranchFormat.LocalIds <$> getVector getVarInt @@ -649,7 +643,7 @@ getBranchLocalIds = <*> getVector getVarInt <*> getVector (getPair getVarInt getVarInt) -decomposeTermFormat :: MonadGet m => m TermFormat.SyncTermFormat +decomposeTermFormat :: (MonadGet m) => m TermFormat.SyncTermFormat decomposeTermFormat = getWord8 >>= \case 0 -> @@ -658,7 +652,7 @@ decomposeTermFormat = <$> decomposeComponent tag -> error $ "todo: unknown term format tag " ++ show tag -decomposeDeclFormat :: MonadGet m => m DeclFormat.SyncDeclFormat +decomposeDeclFormat :: (MonadGet m) => m DeclFormat.SyncDeclFormat decomposeDeclFormat = getWord8 >>= \case 0 -> @@ -667,7 +661,7 @@ decomposeDeclFormat = <$> decomposeComponent tag -> error $ "todo: unknown term format tag " ++ show tag -decomposeComponent :: MonadGet m => m (Vector (LocalIds, BS.ByteString)) +decomposeComponent :: (MonadGet m) => m (Vector (LocalIds, BS.ByteString)) decomposeComponent = do offsets <- getList (getVarInt @_ @Int) componentBytes <- getByteString (last offsets) @@ -677,60 +671,60 @@ decomposeComponent = do split = (,) <$> getLocalIds <*> getRemainingByteString Monoid.foldMapM get1 (zip offsets (tail offsets)) -recomposeTermFormat :: MonadPut m => TermFormat.SyncTermFormat -> m () +recomposeTermFormat :: (MonadPut m) => TermFormat.SyncTermFormat -> m () recomposeTermFormat = \case TermFormat.SyncTerm 
(TermFormat.SyncLocallyIndexedComponent x) -> putWord8 0 >> recomposeComponent x -recomposeDeclFormat :: MonadPut m => DeclFormat.SyncDeclFormat -> m () +recomposeDeclFormat :: (MonadPut m) => DeclFormat.SyncDeclFormat -> m () recomposeDeclFormat = \case DeclFormat.SyncDecl (DeclFormat.SyncLocallyIndexedComponent x) -> putWord8 0 >> recomposeComponent x -recomposeComponent :: MonadPut m => Vector (LocalIds, BS.ByteString) -> m () +recomposeComponent :: (MonadPut m) => Vector (LocalIds, BS.ByteString) -> m () recomposeComponent = putFramedArray \(localIds, bytes) -> do putLocalIds localIds putByteString bytes -decomposeWatchFormat :: MonadGet m => m TermFormat.SyncWatchResultFormat +decomposeWatchFormat :: (MonadGet m) => m TermFormat.SyncWatchResultFormat decomposeWatchFormat = getWord8 >>= \case 0 -> TermFormat.SyncWatchResult <$> getWatchLocalIds <*> getRemainingByteString x -> unknownTag "decomposeWatchFormat" x -recomposeWatchFormat :: MonadPut m => TermFormat.SyncWatchResultFormat -> m () +recomposeWatchFormat :: (MonadPut m) => TermFormat.SyncWatchResultFormat -> m () recomposeWatchFormat (TermFormat.SyncWatchResult wli bs) = putWord8 0 *> putLocalIds wli *> putByteString bs -decomposePatchFormat :: MonadGet m => m PatchFormat.SyncPatchFormat +decomposePatchFormat :: (MonadGet m) => m PatchFormat.SyncPatchFormat decomposePatchFormat = getWord8 >>= \case 0 -> PatchFormat.SyncFull <$> getPatchLocalIds <*> getRemainingByteString 1 -> PatchFormat.SyncDiff <$> getVarInt <*> getPatchLocalIds <*> getRemainingByteString x -> unknownTag "decomposePatchFormat" x -recomposePatchFormat :: MonadPut m => PatchFormat.SyncPatchFormat -> m () +recomposePatchFormat :: (MonadPut m) => PatchFormat.SyncPatchFormat -> m () recomposePatchFormat = \case PatchFormat.SyncFull li bs -> putWord8 0 *> putPatchLocalIds li *> putByteString bs PatchFormat.SyncDiff id li bs -> putWord8 1 *> putVarInt id *> putPatchLocalIds li *> putByteString bs -decomposeBranchFormat :: MonadGet m => m BranchFormat.SyncBranchFormat +decomposeBranchFormat :: (MonadGet m) => m BranchFormat.SyncBranchFormat decomposeBranchFormat = getWord8 >>= \case 0 -> BranchFormat.SyncFull <$> getBranchLocalIds <*> getRemainingByteString 1 -> BranchFormat.SyncDiff <$> getVarInt <*> getBranchLocalIds <*> getRemainingByteString x -> unknownTag "decomposeBranchFormat" x -recomposeBranchFormat :: MonadPut m => BranchFormat.SyncBranchFormat -> m () +recomposeBranchFormat :: (MonadPut m) => BranchFormat.SyncBranchFormat -> m () recomposeBranchFormat = \case BranchFormat.SyncFull li bs -> putWord8 0 *> putBranchLocalIds li *> putByteString bs BranchFormat.SyncDiff id li bs -> putWord8 1 *> putVarInt id *> putBranchLocalIds li *> putByteString bs -putTempEntity :: MonadPut m => TempEntity -> m () +putTempEntity :: (MonadPut m) => TempEntity -> m () putTempEntity = \case Entity.TC tc -> case tc of TermFormat.SyncTerm term -> @@ -790,10 +784,10 @@ putTempEntity = \case putLocalIdsWith putText putHash32 localIds putFramedByteString bytes -getHash32 :: MonadGet m => m Hash32 +getHash32 :: (MonadGet m) => m Hash32 getHash32 = Hash32.UnsafeFromBase32Hex . 
Base32Hex.UnsafeFromText <$> getText -getTempTermFormat :: MonadGet m => m TempEntity.TempTermFormat +getTempTermFormat :: (MonadGet m) => m TempEntity.TempTermFormat getTempTermFormat = getWord8 >>= \case 0 -> @@ -805,7 +799,7 @@ getTempTermFormat = ) tag -> unknownTag "getTempTermFormat" tag -getTempDeclFormat :: MonadGet m => m TempEntity.TempDeclFormat +getTempDeclFormat :: (MonadGet m) => m TempEntity.TempDeclFormat getTempDeclFormat = getWord8 >>= \case 0 -> @@ -817,7 +811,7 @@ getTempDeclFormat = ) tag -> unknownTag "getTempDeclFormat" tag -getTempPatchFormat :: MonadGet m => m TempEntity.TempPatchFormat +getTempPatchFormat :: (MonadGet m) => m TempEntity.TempPatchFormat getTempPatchFormat = getWord8 >>= \case 0 -> PatchFormat.SyncFull <$> getPatchLocalIds <*> getFramedByteString @@ -830,7 +824,7 @@ getTempPatchFormat = <*> getVector getHash32 <*> getVector getHash32 -getTempNamespaceFormat :: MonadGet m => m TempEntity.TempNamespaceFormat +getTempNamespaceFormat :: (MonadGet m) => m TempEntity.TempNamespaceFormat getTempNamespaceFormat = getWord8 >>= \case 0 -> BranchFormat.SyncFull <$> getBranchLocalIds <*> getFramedByteString @@ -844,16 +838,16 @@ getTempNamespaceFormat = <*> getVector getHash32 <*> getVector (getPair getHash32 getHash32) -getTempCausalFormat :: MonadGet m => m TempEntity.TempCausalFormat +getTempCausalFormat :: (MonadGet m) => m TempEntity.TempCausalFormat getTempCausalFormat = Causal.SyncCausalFormat <$> getHash32 <*> getVector getHash32 -getSymbol :: MonadGet m => m Symbol +getSymbol :: (MonadGet m) => m Symbol getSymbol = Symbol <$> getVarInt <*> getText -putSymbol :: MonadPut m => Symbol -> m () +putSymbol :: (MonadPut m) => Symbol -> m () putSymbol (Symbol n t) = putVarInt n >> putText t putReferent :: @@ -871,7 +865,7 @@ putReferent :: m () putReferent = putReferent' putReference putReference -putReferent' :: MonadPut m => (r1 -> m ()) -> (r2 -> m ()) -> Referent' r1 r2 -> m () +putReferent' :: (MonadPut m) => (r1 -> m ()) -> (r2 -> m ()) -> Referent' r1 r2 -> m () putReferent' putRefRef putConRef = \case Referent.Ref r -> do putWord8 0 @@ -891,7 +885,7 @@ putReference = \case ReferenceDerived (Reference.Id r index) -> putWord8 1 *> putVarInt r *> putVarInt index -getReferent' :: MonadGet m => m r1 -> m r2 -> m (Referent' r1 r2) +getReferent' :: (MonadGet m) => m r1 -> m r2 -> m (Referent' r1 r2) getReferent' getRefRef getConRef = getWord8 >>= \case 0 -> Referent.Ref <$> getRefRef @@ -940,39 +934,39 @@ getRecursiveReference = 1 -> ReferenceDerived <$> (Reference.Id <$> getMaybe getVarInt <*> getVarInt) x -> unknownTag "getRecursiveReference" x -putInt :: MonadPut m => Int64 -> m () +putInt :: (MonadPut m) => Int64 -> m () putInt = serializeBE -getInt :: MonadGet m => m Int64 +getInt :: (MonadGet m) => m Int64 getInt = deserializeBE -putNat :: MonadPut m => Word64 -> m () +putNat :: (MonadPut m) => Word64 -> m () putNat = serializeBE -getNat :: MonadGet m => m Word64 +getNat :: (MonadGet m) => m Word64 getNat = deserializeBE -putFloat :: MonadPut m => Double -> m () +putFloat :: (MonadPut m) => Double -> m () putFloat = serializeBE -getFloat :: MonadGet m => m Double +getFloat :: (MonadGet m) => m Double getFloat = deserializeBE -putBoolean :: MonadPut m => Bool -> m () +putBoolean :: (MonadPut m) => Bool -> m () putBoolean False = putWord8 0 putBoolean True = putWord8 1 -getBoolean :: MonadGet m => m Bool +getBoolean :: (MonadGet m) => m Bool getBoolean = getWord8 >>= \case 0 -> pure False 1 -> pure True x -> unknownTag "Boolean" x -putTType :: MonadPut m 
=> TermFormat.Type -> m () +putTType :: (MonadPut m) => TermFormat.Type -> m () putTType = putType putReference putSymbol -putDType :: MonadPut m => DeclFormat.Type Symbol -> m () +putDType :: (MonadPut m) => DeclFormat.Type Symbol -> m () putDType = putType putRecursiveReference putSymbol putType :: @@ -994,23 +988,23 @@ putType putReference putVar = putABT putVar putUnit go Type.Effects es -> putWord8 5 *> putFoldable putChild es Type.Forall body -> putWord8 6 *> putChild body Type.IntroOuter body -> putWord8 7 *> putChild body - putKind :: MonadPut m => Kind -> m () + putKind :: (MonadPut m) => Kind -> m () putKind k = case k of Kind.Star -> putWord8 0 Kind.Arrow i o -> putWord8 1 *> putKind i *> putKind o -putChar :: MonadPut m => Char -> m () +putChar :: (MonadPut m) => Char -> m () putChar = serialize . VarInt . fromEnum -getChar :: MonadGet m => m Char +getChar :: (MonadGet m) => m Char getChar = toEnum . unVarInt <$> deserialize -putMaybe :: MonadPut m => (a -> m ()) -> Maybe a -> m () +putMaybe :: (MonadPut m) => (a -> m ()) -> Maybe a -> m () putMaybe putA = \case Nothing -> putWord8 0 Just a -> putWord8 1 *> putA a -getMaybe :: MonadGet m => m a -> m (Maybe a) +getMaybe :: (MonadGet m) => m a -> m (Maybe a) getMaybe getA = getWord8 >>= \tag -> case tag of 0 -> pure Nothing diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Sync22.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Sync22.hs index 2b0ab3031..717827acf 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/Sync22.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/Sync22.hs @@ -195,7 +195,8 @@ trySync hh runSrc runDest tCache hCache oCache cCache = \case localIds' <- traverse syncLocalIds localIds -- reassemble and save the reindexed term let bytes' = - runPutS . S.recomposeDeclFormat + runPutS + . S.recomposeDeclFormat . DeclFormat.SyncDecl . 
DeclFormat.SyncLocallyIndexedComponent $ Vector.zip localIds' declBytes diff --git a/codebase2/codebase-sqlite/U/Codebase/Sqlite/TempEntity.hs b/codebase2/codebase-sqlite/U/Codebase/Sqlite/TempEntity.hs index be966233b..205396d78 100644 --- a/codebase2/codebase-sqlite/U/Codebase/Sqlite/TempEntity.hs +++ b/codebase2/codebase-sqlite/U/Codebase/Sqlite/TempEntity.hs @@ -7,7 +7,7 @@ import qualified U.Codebase.Sqlite.Entity as Entity import U.Codebase.Sqlite.LocalIds (LocalIds') import qualified U.Codebase.Sqlite.Patch.Format as Patch import qualified U.Codebase.Sqlite.Term.Format as Term -import U.Util.Hash32 (Hash32) +import Unison.Hash32 (Hash32) import Unison.Prelude -- | diff --git a/codebase2/codebase-sqlite/package.yaml b/codebase2/codebase-sqlite/package.yaml index de09e22b4..78a56ebce 100644 --- a/codebase2/codebase-sqlite/package.yaml +++ b/codebase2/codebase-sqlite/package.yaml @@ -29,10 +29,11 @@ dependencies: - unison-codebase - unison-codebase-sync - unison-core + - unison-hash + - unison-hash-orphans-sqlite - unison-prelude - unison-sqlite - unison-util-base32hex - - unison-util-base32hex-orphans-sqlite - unison-util-cache - unison-util-serialization - unison-util-term diff --git a/codebase2/codebase-sqlite/sql/004-fix-scoped-name-lookup-tables.sql b/codebase2/codebase-sqlite/sql/004-fix-scoped-name-lookup-tables.sql new file mode 100644 index 000000000..aeb4d6999 --- /dev/null +++ b/codebase2/codebase-sqlite/sql/004-fix-scoped-name-lookup-tables.sql @@ -0,0 +1,120 @@ +ALTER TABLE scoped_term_name_lookup +RENAME TO scoped_term_name_lookup_old; + +ALTER TABLE scoped_type_name_lookup +RENAME TO scoped_type_name_lookup_old; + +-- Drop all existing indexes because we'll re-create them on the new table. +DROP INDEX scoped_term_names_by_namespace_and_last_name_segment; +DROP INDEX scoped_term_name_by_referent_lookup; +DROP INDEX scoped_term_names_by_namespace; +DROP INDEX scoped_type_names_by_namespace_and_last_name_segment; +DROP INDEX scoped_type_name_by_reference_lookup; +DROP INDEX scoped_type_names_by_namespace; + +-- Create the new tables. +CREATE TABLE scoped_term_name_lookup ( + root_branch_hash_id INTEGER NOT NULL REFERENCES name_lookups(root_branch_hash_id) ON DELETE CASCADE, + + -- The name of the term in reversed form, with a trailing '.': + -- E.g. map.List.base. + -- + -- The trailing '.' is helpful when performing suffix queries where we may not know + -- whether the suffix is complete or not, e.g. we could suffix search using any of the + -- following globs and it would still find 'map.List.base.': + -- map.List.base.* + -- map.List.* + -- map.* + reversed_name TEXT NOT NULL, + + -- The last name segment of the name. This is used when looking up names for + -- suffixification when building PPEs. + -- E.g. for the name 'base.List.map' this would be 'map' + last_name_segment TEXT NOT NULL, + + -- The namespace containing this definition, not reversed, with a trailing '.' + -- The trailing '.' simplifies GLOB queries, so that 'base.*' matches both things in + -- 'base' and 'base.List', but not 'base1', which allows us to avoid an OR in our where + -- clauses which in turn helps the sqlite query planner use indexes more effectively. + -- + -- example value: 'base.List.' 
+ namespace TEXT NOT NULL, + referent_builtin TEXT NULL, + referent_component_hash TEXT NULL, + referent_component_index INTEGER NULL, + referent_constructor_index INTEGER NULL, + referent_constructor_type INTEGER NULL, + PRIMARY KEY (root_branch_hash_id, reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index) +); + + +-- This index allows finding all names we need to consider within a given namespace for +-- suffixification of a name. +-- It may seem strange to use last_name_segment rather than a suffix search over reversed_name here +-- but SQLite will only optimize for a single prefix-glob at once, so we can't glob search +-- over both namespace and reversed_name, but we can EXACT match on last_name_segment and +-- then glob search on the namespace prefix, and have SQLite do the final glob search on +-- reversed_name over rows with a matching last segment without using an index and should be plenty fast. +CREATE INDEX scoped_term_names_by_namespace_and_last_name_segment ON scoped_term_name_lookup(root_branch_hash_id, last_name_segment, namespace); + + -- This index allows us to find all names with a given ref within a specific namespace +CREATE INDEX scoped_term_name_by_referent_lookup ON scoped_term_name_lookup(root_branch_hash_id, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, namespace); + +-- Allows fetching ALL names within a specific namespace prefix. We currently use this to +-- pretty-print on share, but will be replaced with a more precise set of queries soon. +CREATE INDEX scoped_term_names_by_namespace ON scoped_term_name_lookup(root_branch_hash_id, namespace); + + +CREATE TABLE scoped_type_name_lookup ( + root_branch_hash_id INTEGER NOT NULL REFERENCES name_lookups(root_branch_hash_id) ON DELETE CASCADE, + + -- The name of the type: E.g. List.base + reversed_name TEXT NOT NULL, + + -- The last name segment of the name. This is used when looking up names for + -- suffixification when building PPEs. + -- E.g. for the name 'base.List.map' this would be 'map' + last_name_segment TEXT NOT NULL, + -- The namespace containing this definition, not reversed, with a trailing '.' + -- The trailing '.' simplifies GLOB queries, so that 'base.*' matches both things in + -- 'base' and 'base.List', but not 'base1', which allows us to avoid an OR in our where + -- clauses which in turn helps the sqlite query planner use indexes more effectively. + -- + -- example value: 'base.List.' + namespace TEXT NOT NULL, + reference_builtin TEXT NULL, + reference_component_hash INTEGER NULL, + reference_component_index INTEGER NULL, + PRIMARY KEY (root_branch_hash_id, reversed_name, reference_builtin, reference_component_hash, reference_component_index) +); + +-- This index allows finding all names we need to consider within a given namespace for +-- suffixification of a name. +-- It may seem strange to use last_name_segment rather than a suffix search over reversed_name here +-- but SQLite will only optimize for a single prefix-glob at once, so we can't glob search +-- over both namespace and reversed_name, but we can EXACT match on last_name_segment and +-- then glob search on the namespace prefix, and have SQLite do the final glob search on +-- reversed_name over rows with a matching last segment without using an index and should be plenty fast.
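+-- Purely illustrative (the exact queries UCM issues may differ, and 'Map' and 'base.*' are
+-- made-up example values): a suffixification lookup served by the index below could take
+-- roughly this shape, matching the last segment exactly and prefix-globbing the namespace.
+--
+--   SELECT reversed_name FROM scoped_type_name_lookup
+--   WHERE root_branch_hash_id = ?
+--     AND last_name_segment = 'Map'
+--     AND namespace GLOB 'base.*'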
+CREATE INDEX scoped_type_names_by_namespace_and_last_name_segment ON scoped_type_name_lookup(root_branch_hash_id, last_name_segment, namespace); + +-- This index allows us to find all names with a given ref within a specific namespace. +CREATE INDEX scoped_type_name_by_reference_lookup ON scoped_type_name_lookup(root_branch_hash_id, reference_builtin, reference_component_hash, reference_component_index, namespace); + +-- Allows fetching ALL names within a specific namespace prefix. We currently use this to +-- pretty-print on share, but will be replaced with a more precise set of queries soon. +CREATE INDEX scoped_type_names_by_namespace ON scoped_type_name_lookup(root_branch_hash_id, namespace); + +-- Copy the old tables over to the new ones +INSERT INTO scoped_term_name_lookup (root_branch_hash_id, reversed_name, last_name_segment, namespace, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type) + SELECT root_branch_hash_id, reversed_name, last_name_segment, namespace, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type + FROM scoped_term_name_lookup_old; + +INSERT INTO scoped_type_name_lookup (root_branch_hash_id, reversed_name, last_name_segment, namespace, reference_builtin, reference_component_hash, reference_component_index) + SELECT root_branch_hash_id, reversed_name, last_name_segment, namespace, reference_builtin, reference_component_hash, reference_component_index + FROM scoped_type_name_lookup_old; + +-- Remove the old tables +DROP TABLE scoped_term_name_lookup_old; +DROP TABLE scoped_type_name_lookup_old + +-- Semicolons intentionally omitted diff --git a/codebase2/codebase-sqlite/sql/create.sql b/codebase2/codebase-sqlite/sql/create.sql index 39491710b..7c1977d4f 100644 --- a/codebase2/codebase-sqlite/sql/create.sql +++ b/codebase2/codebase-sqlite/sql/create.sql @@ -229,7 +229,103 @@ CREATE INDEX dependents_by_dependency ON dependents_index ( CREATE INDEX dependencies_by_dependent ON dependents_index ( dependent_object_id, dependent_component_index -) +); + + +-- This table allows us to look up which branch hashes have a name lookup. +CREATE TABLE name_lookups ( + root_branch_hash_id INTEGER PRIMARY KEY REFERENCES hash(id) ON DELETE CASCADE +); + +CREATE TABLE scoped_term_name_lookup ( + root_branch_hash_id INTEGER NOT NULL REFERENCES hash(id) ON DELETE CASCADE, + + -- The name of the term in reversed form, with a trailing '.': + -- E.g. map.List.base. + -- + -- The trailing '.' is helpful when performing suffix queries where we may not know + -- whether the suffix is complete or not, e.g. we could suffix search using any of the + -- following globs and it would still find 'map.List.base.': + -- map.List.base.* + -- map.List.* + -- map.* + reversed_name TEXT NOT NULL, + + -- The last name segment of the name. This is used when looking up names for + -- suffixification when building PPEs. + -- E.g. for the name 'base.List.map' this would be 'map' + last_name_segment TEXT NOT NULL, + + -- The namespace containing this definition, not reversed, with a trailing '.' + -- The trailing '.' simplifies GLOB queries, so that 'base.*' matches both things in + -- 'base' and 'base.List', but not 'base1', which allows us to avoid an OR in our where + -- clauses which in turn helps the sqlite query planner use indexes more effectively. + -- + -- example value: 'base.List.' 
+ namespace TEXT NOT NULL, + referent_builtin TEXT NULL, + referent_component_hash TEXT NULL, + referent_component_index INTEGER NULL, + referent_constructor_index INTEGER NULL, + referent_constructor_type INTEGER NULL, + PRIMARY KEY (root_branch_hash_id, reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index) +); + +-- This index allows finding all names we need to consider within a given namespace for +-- suffixification of a name. +-- It may seem strange to use last_name_segment rather than a suffix search over reversed_name here +-- but SQLite will only optimize for a single prefix-glob at once, so we can't glob search +-- over both namespace and reversed_name, but we can EXACT match on last_name_segment and +-- then glob search on the namespace prefix, and have SQLite do the final glob search on +-- reversed_name over rows with a matching last segment without using an index and should be plenty fast. +CREATE INDEX scoped_term_names_by_namespace_and_last_name_segment ON scoped_term_name_lookup(root_branch_hash_id, last_name_segment, namespace); + +-- This index allows us to find all names with a given ref within a specific namespace +CREATE INDEX scoped_term_name_by_referent_lookup ON scoped_term_name_lookup(root_branch_hash_id, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, namespace); + +-- Allows fetching ALL names within a specific namespace prefix. We currently use this to +-- pretty-print on share, but will be replaced with a more precise set of queries soon. +CREATE INDEX scoped_term_names_by_namespace ON scoped_term_name_lookup(root_branch_hash_id, namespace); + + +CREATE TABLE scoped_type_name_lookup ( + root_branch_hash_id INTEGER NOT NULL REFERENCES hash(id), + -- The name of the type: E.g. List.base + reversed_name TEXT NOT NULL, + -- The last name segment of the name. This is used when looking up names for + -- suffixification when building PPEs. + -- E.g. for the name 'base.List.map' this would be 'map' + last_name_segment TEXT NOT NULL, + -- The namespace containing this definition, not reversed, with a trailing '.' + -- The trailing '.' simplifies GLOB queries, so that 'base.*' matches both things in + -- 'base' and 'base.List', but not 'base1', which allows us to avoid an OR in our where + -- clauses which in turn helps the sqlite query planner use indexes more effectively. + -- + -- example value: 'base.List.' + namespace TEXT NOT NULL, + reference_builtin TEXT NULL, + reference_component_hash INTEGER NULL, + reference_component_index INTEGER NULL, + PRIMARY KEY (reversed_name, reference_builtin, reference_component_hash, reference_component_index) +); + +-- This index allows finding all names we need to consider within a given namespace for +-- suffixification of a name. +-- It may seem strange to use last_name_segment rather than a suffix search over reversed_name here +-- but SQLite will only optimize for a single prefix-glob at once, so we can't glob search +-- over both namespace and reversed_name, but we can EXACT match on last_name_segment and +-- then glob search on the namespace prefix, and have SQLite do the final glob search on +-- reversed_name over rows with a matching last segment without using an index and should be plenty fast.
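+-- Purely illustrative (not a complete description of how these tables are queried): because the
+-- namespace column stores a trailing '.', a single prefix glob such as 'base.*' matches rows in
+-- 'base.' and 'base.List.' but not in 'base1.', so listing a namespace can be a query roughly like
+--
+--   SELECT reversed_name FROM scoped_type_name_lookup
+--   WHERE root_branch_hash_id = ? AND namespace GLOB 'base.*'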
+CREATE INDEX scoped_type_names_by_namespace_and_last_name_segment ON scoped_type_name_lookup(root_branch_hash_id, last_name_segment, namespace); + + +-- This index allows us to find all names with a given ref within a specific namespace. +CREATE INDEX scoped_type_name_by_reference_lookup ON scoped_type_name_lookup(root_branch_hash_id, reference_builtin, reference_component_hash, reference_component_index, namespace); + + +-- Allows fetching ALL names within a specific namespace prefix. We currently use this to +-- pretty-print on share, but will be replaced with a more precise set of queries soon. +CREATE INDEX scoped_type_names_by_namespace ON scoped_type_name_lookup(root_branch_hash_id, namespace) -- Semicolon intentionally omitted, for the same reason -- semicolons in comments will blow up codebase initialization. diff --git a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal index 275f675b8..26412e728 100644 --- a/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal +++ b/codebase2/codebase-sqlite/unison-codebase-sqlite.cabal @@ -1,6 +1,6 @@ cabal-version: 1.12 --- This file has been generated from package.yaml by hpack version 0.34.4. +-- This file has been generated from package.yaml by hpack version 0.35.0. -- -- see: https://github.com/sol/hpack @@ -13,6 +13,7 @@ extra-source-files: sql/001-temp-entity-tables.sql sql/002-reflog-table.sql sql/003-namespace-statistics.sql + sql/004-fix-scoped-name-lookup-tables.sql sql/create.sql source-repository head @@ -106,10 +107,11 @@ library , unison-codebase , unison-codebase-sync , unison-core + , unison-hash + , unison-hash-orphans-sqlite , unison-prelude , unison-sqlite , unison-util-base32hex - , unison-util-base32hex-orphans-sqlite , unison-util-cache , unison-util-serialization , unison-util-term diff --git a/codebase2/codebase/U/Codebase/Branch/Type.hs b/codebase2/codebase/U/Codebase/Branch/Type.hs index 1a98245c7..0f3fd246c 100644 --- a/codebase2/codebase/U/Codebase/Branch/Type.hs +++ b/codebase2/codebase/U/Codebase/Branch/Type.hs @@ -57,7 +57,8 @@ data Patch = Patch instance Show (Branch m) where show b = - "Branch { terms = " ++ show (fmap Map.keys (terms b)) + "Branch { terms = " + ++ show (fmap Map.keys (terms b)) ++ ", types = " ++ show (fmap Map.keys (types b)) ++ ", patches = " @@ -90,7 +91,7 @@ hasDefinitions (NamespaceStats numTerms numTypes _numPatches) = childAt :: NameSegment -> Branch m -> Maybe (CausalBranch m) childAt ns (Branch {children}) = Map.lookup ns children -hoist :: Functor n => (forall x. m x -> n x) -> Branch m -> Branch n +hoist :: (Functor n) => (forall x. m x -> n x) -> Branch m -> Branch n hoist f Branch {..} = Branch { terms = (fmap . fmap) f terms, @@ -100,7 +101,7 @@ hoist f Branch {..} = fmap (fmap (hoist f) . Causal.hoist f) children } -hoistCausalBranch :: Functor n => (forall x. m x -> n x) -> CausalBranch m -> CausalBranch n +hoistCausalBranch :: (Functor n) => (forall x. m x -> n x) -> CausalBranch m -> CausalBranch n hoistCausalBranch f cb = cb & Causal.hoist f @@ -110,14 +111,14 @@ hoistCausalBranch f cb = -- provided branch. -- -- If only name is specified, metadata will be returned for all terms at that name. 
-termMetadata :: Monad m => Branch m -> NameSegment -> Maybe Referent -> m [Map MetadataValue MetadataType] +termMetadata :: (Monad m) => Branch m -> NameSegment -> Maybe Referent -> m [Map MetadataValue MetadataType] termMetadata Branch {terms} = metadataHelper terms -- | Returns all the metadata value references that are attached to a type with the provided name in the -- provided branch. -- -- If only name is specified, metadata will be returned for all types at that name. -typeMetadata :: Monad m => Branch m -> NameSegment -> Maybe Reference -> m [Map MetadataValue MetadataType] +typeMetadata :: (Monad m) => Branch m -> NameSegment -> Maybe Reference -> m [Map MetadataValue MetadataType] typeMetadata Branch {types} = metadataHelper types metadataHelper :: (Monad m, Ord ref) => Map NameSegment (Map ref (m MdValues)) -> NameSegment -> Maybe ref -> m [Map MetadataValue MetadataType] diff --git a/codebase2/codebase/U/Codebase/Causal.hs b/codebase2/codebase/U/Codebase/Causal.hs index 26eb4c0c2..7cd6a07eb 100644 --- a/codebase2/codebase/U/Codebase/Causal.hs +++ b/codebase2/codebase/U/Codebase/Causal.hs @@ -18,7 +18,7 @@ data Causal m hc he e = Causal } deriving (Functor) -hoist :: Functor n => (forall x. m x -> n x) -> Causal m hc he e -> Causal n hc he e +hoist :: (Functor n) => (forall x. m x -> n x) -> Causal m hc he e -> Causal n hc he e hoist f (Causal {..}) = Causal { parents = parents & fmap f & (fmap . fmap) (hoist f), diff --git a/codebase2/codebase/U/Codebase/Decl.hs b/codebase2/codebase/U/Codebase/Decl.hs index 27f5b1a6e..ca07dbdbc 100644 --- a/codebase2/codebase/U/Codebase/Decl.hs +++ b/codebase2/codebase/U/Codebase/Decl.hs @@ -1,16 +1,10 @@ -{-# LANGUAGE DeriveFoldable #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE RecordWildCards #-} - module U.Codebase.Decl where -import Data.Set (Set) -import Data.Text (Text) -import Data.Word (Word64) import U.Codebase.Reference (Reference') import U.Codebase.Type (TypeR) import qualified U.Codebase.Type as Type -import U.Util.Hash (Hash) +import Unison.Hash (Hash) +import Unison.Prelude type ConstructorId = Word64 diff --git a/codebase2/codebase/U/Codebase/Reference.hs b/codebase2/codebase/U/Codebase/Reference.hs index ea43f44d8..5578a7173 100644 --- a/codebase2/codebase/U/Codebase/Reference.hs +++ b/codebase2/codebase/U/Codebase/Reference.hs @@ -1,21 +1,13 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE TypeSynonymInstances #-} - module U.Codebase.Reference where import Control.Lens (Bifunctor (..), Lens, Prism, Traversal, lens, prism) import Data.Bifoldable (Bifoldable (..)) import Data.Bitraversable (Bitraversable (..)) -import Data.Text (Text) -import Data.Word (Word64) import U.Codebase.ShortHash (ShortHash) import qualified U.Codebase.ShortHash as SH -import U.Util.Hash (Hash) -import qualified U.Util.Hash as Hash +import Unison.Hash (Hash) +import qualified Unison.Hash as Hash +import Unison.Prelude -- | This is the canonical representation of Reference type Reference = Reference' Text Hash diff --git a/codebase2/codebase/U/Codebase/Referent.hs b/codebase2/codebase/U/Codebase/Referent.hs index 52b3e0f71..79ee67e5c 100644 --- a/codebase2/codebase/U/Codebase/Referent.hs +++ b/codebase2/codebase/U/Codebase/Referent.hs @@ -1,9 +1,4 @@ {-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE TypeApplications #-} -{-# 
LANGUAGE TypeSynonymInstances #-} module U.Codebase.Referent where @@ -17,7 +12,7 @@ import U.Codebase.Reference (Reference, Reference') import qualified U.Codebase.Reference as Reference import U.Codebase.ShortHash (ShortHash) import qualified U.Codebase.ShortHash as SH -import U.Util.Hash (Hash) +import Unison.Hash (Hash) import Unison.Prelude data ConstructorType diff --git a/codebase2/codebase/U/Codebase/Term.hs b/codebase2/codebase/U/Codebase/Term.hs index 9b9617682..f2b90514e 100644 --- a/codebase2/codebase/U/Codebase/Term.hs +++ b/codebase2/codebase/U/Codebase/Term.hs @@ -1,35 +1,15 @@ -{-# LANGUAGE BlockArguments #-} -{-# LANGUAGE DeriveFoldable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE Rank2Types #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE UnicodeSyntax #-} -{-# LANGUAGE ViewPatterns #-} - module U.Codebase.Term where import qualified Control.Monad.Writer as Writer import qualified Data.Foldable as Foldable -import Data.Int (Int64) -import Data.Sequence (Seq) -import Data.Set (Set) import qualified Data.Set as Set -import Data.Text (Text) -import Data.Word (Word64) -import GHC.Generics (Generic, Generic1) import U.Codebase.Reference (Reference, Reference') import U.Codebase.Referent (Referent') import U.Codebase.Type (TypeR) import qualified U.Codebase.Type as Type import qualified U.Core.ABT as ABT -import U.Util.Hash (Hash) +import Unison.Hash (Hash) +import Unison.Prelude type ConstructorId = Word64 diff --git a/codebase2/codebase/U/Codebase/Type.hs b/codebase2/codebase/U/Codebase/Type.hs index c503ba1d5..4de17ec37 100644 --- a/codebase2/codebase/U/Codebase/Type.hs +++ b/codebase2/codebase/U/Codebase/Type.hs @@ -1,27 +1,14 @@ -{-# LANGUAGE BlockArguments #-} -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveFoldable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE Rank2Types #-} -{-# LANGUAGE ViewPatterns #-} - module U.Codebase.Type where import qualified Control.Monad.Writer.Strict as Writer import Data.Bifunctor (Bifunctor (bimap)) -import Data.Functor (($>)) import qualified Data.Maybe as Maybe -import Data.Set (Set) import qualified Data.Set as Set -import Data.Text (Text) import U.Codebase.Kind (Kind) import U.Codebase.Reference (Reference, Reference') import qualified U.Core.ABT as ABT -import U.Util.Hash (Hash) +import Unison.Hash (Hash) +import Unison.Prelude import Unsafe.Coerce (unsafeCoerce) -- | For standalone types, like those in Term.Ann @@ -51,17 +38,17 @@ type TypeD v = ABT.Term FD v () type TypeR r v = ABT.Term (F' r) v () -rmap :: Ord v => (r -> r') -> ABT.Term (F' r) v a -> ABT.Term (F' r') v a +rmap :: (Ord v) => (r -> r') -> ABT.Term (F' r) v a -> ABT.Term (F' r') v a rmap f = ABT.transform \case Ref r -> Ref (f r) x -> unsafeCoerce x -typeD2T :: Ord v => Hash -> TypeD v -> TypeT v +typeD2T :: (Ord v) => Hash -> TypeD v -> TypeT v typeD2T h = rmap $ bimap id $ Maybe.fromMaybe h dependencies :: (Ord v, Ord r) => ABT.Term (F' r) v a -> Set r dependencies = Writer.execWriter . 
ABT.visit' f where - f :: Ord r => F' r a -> Writer.Writer (Set r) (F' r a) + f :: (Ord r) => F' r a -> Writer.Writer (Set r) (F' r a) f t@(Ref r) = Writer.tell (Set.singleton r) $> t f t = pure t diff --git a/codebase2/codebase/package.yaml b/codebase2/codebase/package.yaml index af3a882c8..bb733a8be 100644 --- a/codebase2/codebase/package.yaml +++ b/codebase2/codebase/package.yaml @@ -4,8 +4,10 @@ github: unisonweb/unison default-extensions: - ApplicativeDo - BlockArguments + - DeriveFoldable - DeriveFunctor - DeriveGeneric + - DeriveTraversable - DerivingStrategies - DoAndIfThenElse - FlexibleContexts @@ -32,6 +34,7 @@ dependencies: - mtl - text - unison-core + - unison-hash - unison-util-base32hex - unison-prelude - time diff --git a/codebase2/codebase/unison-codebase.cabal b/codebase2/codebase/unison-codebase.cabal index 0dc8c0a8b..a0fe07cf6 100644 --- a/codebase2/codebase/unison-codebase.cabal +++ b/codebase2/codebase/unison-codebase.cabal @@ -34,8 +34,10 @@ library default-extensions: ApplicativeDo BlockArguments + DeriveFoldable DeriveFunctor DeriveGeneric + DeriveTraversable DerivingStrategies DoAndIfThenElse FlexibleContexts @@ -56,6 +58,7 @@ library , text , time , unison-core + , unison-hash , unison-prelude , unison-util-base32hex default-language: Haskell2010 diff --git a/codebase2/core/U/Codebase/HashTags.hs b/codebase2/core/U/Codebase/HashTags.hs index a78ed15ca..54531f0da 100644 --- a/codebase2/core/U/Codebase/HashTags.hs +++ b/codebase2/core/U/Codebase/HashTags.hs @@ -1,6 +1,6 @@ module U.Codebase.HashTags where -import U.Util.Hash (Hash) +import Unison.Hash (Hash) newtype BranchHash = BranchHash {unBranchHash :: Hash} deriving (Eq, Ord) diff --git a/codebase2/core/U/Core/ABT.hs b/codebase2/core/U/Core/ABT.hs index cb9dc011d..eaf7b9cdd 100644 --- a/codebase2/core/U/Core/ABT.hs +++ b/codebase2/core/U/Core/ABT.hs @@ -8,8 +8,10 @@ import Data.Functor.Identity (Identity (runIdentity)) import Data.Maybe (fromMaybe) import Data.Set (Set) import qualified Data.Set as Set +import qualified Debug.RecoverRTTI as RTTI import GHC.Generics (Generic) import U.Core.ABT.Var (Var (freshIn)) +import qualified Unison.Debug as Debug import Prelude hiding (abs, cycle) data ABT f v r @@ -24,7 +26,7 @@ data ABT f v r data Term f v a = Term {freeVars :: Set v, annotation :: a, out :: ABT f v (Term f v a)} deriving (Functor, Foldable, Generic, Traversable) -instance (Foldable f, Functor f, forall a. Eq a => Eq (f a), Var v) => Eq (Term f v a) where +instance (Foldable f, Functor f, forall a. (Eq a) => Eq (f a), Var v) => Eq (Term f v a) where -- alpha equivalence, works by renaming any aligned Abs ctors to use a common fresh variable t1 == t2 = go (out t1) (out t2) where @@ -41,10 +43,10 @@ instance (Foldable f, Functor f, forall a. Eq a => Eq (f a), Var v) => Eq (Term go _ _ = False instance - ( forall a. Eq a => Eq (f a), + ( forall a. (Eq a) => Eq (f a), Foldable f, Functor f, - forall a. Ord a => Ord (f a), + forall a. (Ord a) => Ord (f a), Var v ) => Ord (Term f v a) @@ -68,15 +70,19 @@ instance tag (Abs _ _) = 2 tag (Cycle _) = 3 -instance (forall a. Show a => Show (f a), Show v) => Show (Term f v a) where - -- annotations not shown - showsPrec p (Term _ _ out) = case out of - Var v -> showParen (p >= 9) $ \x -> "Var " ++ show v ++ x - Cycle body -> ("Cycle " ++) . showsPrec p body - Abs v body -> showParen True $ (show v ++) . showString ". " . showsPrec p body - Tm f -> showsPrec p f +instance (forall a. 
(Show a) => Show (f a), Show v) => Show (Term f v a) where + showsPrec p (Term _ ann out) = case out of + Var v -> showParen (p >= 9) $ \x -> showAnn ++ "Var " ++ show v ++ x + Cycle body -> (\s -> showAnn ++ "Cycle " ++ s) . showsPrec p body + Abs v body -> showParen True $ (\s -> showAnn ++ show v ++ s) . showString ". " . showsPrec p body + Tm f -> (showAnn ++) . showsPrec p f + where + showAnn = + if Debug.shouldDebug Debug.Annotations + then "(" ++ RTTI.anythingToString ann ++ ")" + else "" -amap :: Functor f => (a -> a') -> Term f v a -> Term f v a' +amap :: (Functor f) => (a -> a') -> Term f v a -> Term f v a' amap = fmap vmap :: (Functor f, Foldable f, Ord v') => (v -> v') -> Term f v a -> Term f v' a @@ -87,7 +93,7 @@ vmap f (Term _ a out) = case out of Abs v body -> abs a (f v) (vmap f body) cata :: - Functor f => + (Functor f) => (a -> ABT f v x -> x) -> Term f v a -> x @@ -96,7 +102,7 @@ cata abtAlg = in go para :: - Functor f => + (Functor f) => (a -> ABT f v (Term f v a, x) -> x) -> Term f v a -> x @@ -126,7 +132,7 @@ transformM f t = case out t of Tm subterms -> tm (annotation t) <$> (traverse (transformM f) =<< f subterms) Cycle body -> cycle (annotation t) <$> (transformM f body) -abs :: Ord v => a -> v -> Term f v a -> Term f v a +abs :: (Ord v) => a -> v -> Term f v a -> Term f v a abs a v body = Term (Set.delete v (freeVars body)) a (Abs v body) var :: a -> v -> Term f v a @@ -228,7 +234,7 @@ unabs (Term _ _ (Abs hd body)) = unabs t = ([], t) -- | Produce a variable which is free in both terms -freshInBoth :: Var v => Term f v a -> Term f v a -> v -> v +freshInBoth :: (Var v) => Term f v a -> Term f v a -> v -> v freshInBoth t1 t2 = freshIn $ Set.union (freeVars t1) (freeVars t2) substsInheritAnnotation :: diff --git a/codebase2/core/U/Core/ABT/Var.hs b/codebase2/core/U/Core/ABT/Var.hs index 3a70eb736..6a2d443cd 100644 --- a/codebase2/core/U/Core/ABT/Var.hs +++ b/codebase2/core/U/Core/ABT/Var.hs @@ -6,5 +6,5 @@ import Data.Set (Set) -- -- * `Set.notMember (freshIn vs v) vs`: -- `freshIn` returns a variable not used in the `Set` -class Ord v => Var v where +class (Ord v) => Var v where freshIn :: Set v -> v -> v diff --git a/codebase2/core/Unison/Util/Alphabetical.hs b/codebase2/core/Unison/Util/Alphabetical.hs index 7bcb549db..1dc6a2515 100644 --- a/codebase2/core/Unison/Util/Alphabetical.hs +++ b/codebase2/core/Unison/Util/Alphabetical.hs @@ -14,7 +14,7 @@ import Data.Text (Text) -- This need not coincide with the `Ord` instance for a type, which -- is often an efficient yet arbitrary ordering that's used for -- stashing the values in maps and sets. 
-class Eq n => Alphabetical n where +class (Eq n) => Alphabetical n where compareAlphabetical :: n -> n -> Ordering instance Alphabetical Text where @@ -26,11 +26,11 @@ newtype OrderAlphabetically a = OrderAlphabetically a deriving (Functor, Travers instance (Eq a, Alphabetical a) => Ord (OrderAlphabetically a) where compare (OrderAlphabetically a) (OrderAlphabetically b) = compareAlphabetical a b -instance Alphabetical a => Alphabetical [a] where +instance (Alphabetical a) => Alphabetical [a] where compareAlphabetical a1s a2s = compare (OrderAlphabetically <$> a1s) (OrderAlphabetically <$> a2s) -instance Alphabetical a => Alphabetical (List.NonEmpty a) where +instance (Alphabetical a) => Alphabetical (List.NonEmpty a) where compareAlphabetical a1s a2s = compare (OrderAlphabetically <$> a1s) (OrderAlphabetically <$> a2s) -instance Alphabetical a => Alphabetical (Maybe a) where +instance (Alphabetical a) => Alphabetical (Maybe a) where compareAlphabetical a1s a2s = compare (OrderAlphabetically <$> a1s) (OrderAlphabetically <$> a2s) diff --git a/codebase2/core/package.yaml b/codebase2/core/package.yaml index 9e10b266e..9fa017ef9 100644 --- a/codebase2/core/package.yaml +++ b/codebase2/core/package.yaml @@ -13,6 +13,8 @@ dependencies: - rfc5051 - text - vector + - recover-rtti + - unison-hash - unison-prelude - unison-util-base32hex diff --git a/codebase2/core/unison-core.cabal b/codebase2/core/unison-core.cabal index 46b0f402d..93d6455d4 100644 --- a/codebase2/core/unison-core.cabal +++ b/codebase2/core/unison-core.cabal @@ -52,8 +52,10 @@ library build-depends: base , containers + , recover-rtti , rfc5051 , text + , unison-hash , unison-prelude , unison-util-base32hex , vector diff --git a/codebase2/util-serialization/U/Util/Serialization.hs b/codebase2/util-serialization/U/Util/Serialization.hs index 773f4886f..3e249770a 100644 --- a/codebase2/util-serialization/U/Util/Serialization.hs +++ b/codebase2/util-serialization/U/Util/Serialization.hs @@ -40,9 +40,9 @@ import UnliftIO (MonadIO, liftIO) import UnliftIO.Directory (createDirectoryIfMissing, doesFileExist) import Prelude hiding (readFile, writeFile) -type Get a = forall m. MonadGet m => m a +type Get a = forall m. (MonadGet m) => m a -type Put a = forall m. MonadPut m => a -> m () +type Put a = forall m. (MonadPut m) => a -> m () -- todo: do we use this? data Format a = Format @@ -57,12 +57,12 @@ getFromBytes :: Get a -> ByteString -> Maybe a getFromBytes getA bytes = case runGetS getA bytes of Left _ -> Nothing; Right a -> Just a -getFromFile :: MonadIO m => Get a -> FilePath -> m (Maybe a) +getFromFile :: (MonadIO m) => Get a -> FilePath -> m (Maybe a) getFromFile getA file = do b <- doesFileExist file if b then getFromBytes getA <$> liftIO (readFile file) else pure Nothing -getFromFile' :: MonadIO m => Get a -> FilePath -> m (Either String a) +getFromFile' :: (MonadIO m) => Get a -> FilePath -> m (Either String a) getFromFile' getA file = do b <- doesFileExist file if b @@ -72,7 +72,7 @@ getFromFile' getA file = do putBytes :: Put a -> a -> ByteString putBytes put a = runPutS (put a) -putWithParentDirs :: MonadIO m => Put a -> FilePath -> a -> m () +putWithParentDirs :: (MonadIO m) => Put a -> FilePath -> a -> m () putWithParentDirs putA file a = do createDirectoryIfMissing True (takeDirectory file) liftIO . 
writeFile file $ putBytes putA a @@ -97,28 +97,28 @@ getVarInt = getWord8 >>= getVarInt {-# INLINE getVarInt #-} {-# INLINE getVarInt #-} -putText :: MonadPut m => Text -> m () +putText :: (MonadPut m) => Text -> m () putText text = do let bs = encodeUtf8 text putVarInt $ BS.length bs putByteString bs -getText :: MonadGet m => m Text +getText :: (MonadGet m) => m Text getText = do len <- getVarInt bs <- BS.copy <$> getBytes len pure $ decodeUtf8 bs -skipText :: MonadGet m => m () +skipText :: (MonadGet m) => m () skipText = skip =<< getVarInt -putShortText :: MonadPut m => ShortText -> m () +putShortText :: (MonadPut m) => ShortText -> m () putShortText text = do let sbs = TS.toShortByteString text putVarInt $ BSS.length sbs putShortByteString sbs -getShortText :: MonadGet m => m ShortText +getShortText :: (MonadGet m) => m ShortText getShortText = do len <- getVarInt sbs <- getShortByteString len @@ -126,12 +126,12 @@ getShortText = do -- | the `binary` package has a native version of this, -- which may be more efficient by a constant factor -putShortByteString :: MonadPut m => ShortByteString -> m () +putShortByteString :: (MonadPut m) => ShortByteString -> m () putShortByteString = putByteString . BSS.fromShort -- | the `binary` package has a native version of this, -- which may be more efficient by a constant factor -getShortByteString :: MonadGet m => Int -> m ShortByteString +getShortByteString :: (MonadGet m) => Int -> m ShortByteString getShortByteString len = BSS.toShort <$> getByteString len putFoldable :: @@ -140,17 +140,17 @@ putFoldable putA as = do putVarInt (length as) traverse_ putA as -getList :: MonadGet m => m a -> m [a] +getList :: (MonadGet m) => m a -> m [a] getList getA = do length <- getVarInt replicateM length getA -getVector :: MonadGet m => m a -> m (Vector a) +getVector :: (MonadGet m) => m a -> m (Vector a) getVector getA = do length <- getVarInt Vector.replicateM length getA -getSequence :: MonadGet m => m a -> m (Seq a) +getSequence :: (MonadGet m) => m a -> m (Seq a) getSequence getA = do length <- getVarInt Seq.replicateM length getA @@ -161,7 +161,7 @@ getSet getA = do -- avoid materializing intermediate list foldM (\s ma -> Set.insert <$> ma <*> pure s) mempty (replicate length getA) -putMap :: MonadPut m => (a -> m ()) -> (b -> m ()) -> Map a b -> m () +putMap :: (MonadPut m) => (a -> m ()) -> (b -> m ()) -> Map a b -> m () putMap putA putB m = putFoldable (putPair putA putB) (Map.toList m) addToExistingMap :: (MonadGet m, Ord a) => m a -> m b -> Map a b -> m (Map a b) @@ -176,22 +176,22 @@ addToExistingMap getA getB map = do getMap :: (MonadGet m, Ord a) => m a -> m b -> m (Map a b) getMap getA getB = addToExistingMap getA getB mempty -getFramedByteString :: MonadGet m => m ByteString +getFramedByteString :: (MonadGet m) => m ByteString getFramedByteString = getVarInt >>= getByteString -getRemainingByteString :: MonadGet m => m ByteString +getRemainingByteString :: (MonadGet m) => m ByteString getRemainingByteString = fromIntegral <$> remaining >>= getByteString -getFramed :: MonadGet m => Get a -> m a +getFramed :: (MonadGet m) => Get a -> m a getFramed get = getFramedByteString >>= either fail pure . runGetS get -putFramedByteString :: MonadPut m => ByteString -> m () +putFramedByteString :: (MonadPut m) => ByteString -> m () putFramedByteString bs = do putVarInt (BS.length bs) putByteString bs -putFramed :: MonadPut m => Put a -> a -> m () +putFramed :: (MonadPut m) => Put a -> a -> m () putFramed put a = do -- 1. 
figure out the length `len` of serialized `a` -- 2. Put the length `len` @@ -201,7 +201,7 @@ putFramed put a = do putVarInt (BS.length bs) putByteString bs -skipFramed :: MonadGet m => m () +skipFramed :: (MonadGet m) => m () skipFramed = do len <- getVarInt skip len @@ -214,7 +214,7 @@ putFramedArray put (toList -> as) = do putFoldable putVarInt offsets traverse_ putByteString bss -getFramedArray :: MonadGet m => m a -> m (Vector a) +getFramedArray :: (MonadGet m) => m a -> m (Vector a) getFramedArray getA = do offsets :: [Int] <- getList getVarInt let count = length offsets - 1 @@ -223,7 +223,7 @@ getFramedArray getA = do -- | Look up a 0-based index in a framed array, O(num array elements), -- because it reads the start indices for all elements first. -- This could be skipped if the indices had a fixed size instead of varint -lookupFramedArray :: MonadGet m => m a -> Int -> m (Maybe a) +lookupFramedArray :: (MonadGet m) => m a -> Int -> m (Maybe a) lookupFramedArray getA index = do offsets <- getVector getVarInt if index > Vector.length offsets - 1 @@ -232,20 +232,20 @@ lookupFramedArray getA index = do skip (Vector.unsafeIndex offsets index) Just <$> getA -lengthFramedArray :: MonadGet m => m Word64 +lengthFramedArray :: (MonadGet m) => m Word64 lengthFramedArray = (\offsetsLen -> offsetsLen - 1) <$> getVarInt -unsafeFramedArrayLookup :: MonadGet m => m a -> Int -> m a +unsafeFramedArrayLookup :: (MonadGet m) => m a -> Int -> m a unsafeFramedArrayLookup getA index = do offsets <- getVector getVarInt skip (Vector.unsafeIndex offsets index) getA -putPair :: MonadPut m => (a -> m ()) -> (b -> m ()) -> (a, b) -> m () +putPair :: (MonadPut m) => (a -> m ()) -> (b -> m ()) -> (a, b) -> m () putPair putA putB (a, b) = putA a *> putB b -getPair :: MonadGet m => m a -> m b -> m (a, b) +getPair :: (MonadGet m) => m a -> m b -> m (a, b) getPair = liftA2 (,) -getTuple3 :: MonadGet m => m a -> m b -> m c -> m (a, b, c) +getTuple3 :: (MonadGet m) => m a -> m b -> m c -> m (a, b, c) getTuple3 = liftA3 (,,) diff --git a/codebase2/util-term/U/Util/Term.hs b/codebase2/util-term/U/Util/Term.hs index d012ffd6c..f86f598e8 100644 --- a/codebase2/util-term/U/Util/Term.hs +++ b/codebase2/util-term/U/Util/Term.hs @@ -11,14 +11,14 @@ import U.Codebase.Term (F' (..), MatchCase (..), Pattern (..)) import qualified U.Codebase.Term as Term import qualified U.Core.ABT as ABT -text :: Ord v => ABT.Term (Term.F' text termRef typeRef termLink typeLink vt) v a -> [text] +text :: (Ord v) => ABT.Term (Term.F' text termRef typeRef termLink typeLink vt) v a -> [text] text = execWriter . 
ABT.visit_ \case Text t -> tell [t] _ -> pure () dependencies :: - Ord v => + (Ord v) => ABT.Term (Term.F' text termRef typeRef termLink typeLink vt) v a -> ([termRef], [typeRef], [termLink], [typeLink]) dependencies = diff --git a/codebase2/util-term/U/Util/Type.hs b/codebase2/util-term/U/Util/Type.hs index d6e52d0e0..7acf6a4c1 100644 --- a/codebase2/util-term/U/Util/Type.hs +++ b/codebase2/util-term/U/Util/Type.hs @@ -13,13 +13,13 @@ import qualified U.Core.ABT.Var as ABT -- * Constructors -effect :: Ord v => [TypeR r v] -> TypeR r v -> TypeR r v +effect :: (Ord v) => [TypeR r v] -> TypeR r v -> TypeR r v effect es (Effect1' fs t) = let es' = (es >>= flattenEffects) ++ flattenEffects fs in ABT.tm () (Effect (ABT.tm () (Effects es')) t) effect es t = ABT.tm () (Effect (ABT.tm () (Effects es)) t) -effects :: Ord v => [TypeR r v] -> TypeR r v +effects :: (Ord v) => [TypeR r v] -> TypeR r v effects es = ABT.tm () (Effects $ es >>= flattenEffects) -- * Modification @@ -28,7 +28,7 @@ effects es = ABT.tm () (Effects $ es >>= flattenEffects) -- Used for type-based search, we apply this transformation to both the -- indexed type and the query type, so the user can supply `a -> b` that will -- match `a ->{e} b` (but not `a ->{IO} b`). -removeAllEffectVars :: ABT.Var v => TypeR r v -> TypeR r v +removeAllEffectVars :: (ABT.Var v) => TypeR r v -> TypeR r v removeAllEffectVars t = let allEffectVars = foldMap go (ABT.subterms t) go (Effects' vs) = Set.fromList [v | Var' v <- vs] @@ -38,7 +38,7 @@ removeAllEffectVars t = in generalize vs (removeEffectVars allEffectVars tu) -- Remove free effect variables from the type that are in the set -removeEffectVars :: ABT.Var v => Set v -> TypeR r v -> TypeR r v +removeEffectVars :: (ABT.Var v) => Set v -> TypeR r v -> TypeR r v removeEffectVars removals t = let z = effects [] t' = ABT.substsInheritAnnotation ((,z) <$> Set.toList removals) t @@ -58,7 +58,7 @@ flattenEffects (Effects' es) = es >>= flattenEffects flattenEffects es = [es] -- | Bind the given variables with an outer `forall`, if they are used in `t`. -generalize :: Ord v => [v] -> TypeR r v -> TypeR r v +generalize :: (Ord v) => [v] -> TypeR r v -> TypeR r v generalize vs t = foldr f t vs where f v t = if Set.member v (ABT.freeVars t) then forall v t else t @@ -80,7 +80,7 @@ pattern Effect1' e t <- ABT.Tm' (Effect e t) pattern Ref' :: r -> TypeR r v pattern Ref' r <- ABT.Tm' (Ref r) -forall :: Ord v => v -> TypeR r v -> TypeR r v +forall :: (Ord v) => v -> TypeR r v -> TypeR r v forall v body = ABT.tm () (Forall (ABT.abs () v body)) unforall' :: TypeR r v -> ([v], TypeR r v) diff --git a/development.markdown b/development.markdown index f54066a3a..3b45c47d5 100644 --- a/development.markdown +++ b/development.markdown @@ -16,6 +16,37 @@ To get cracking with Unison: On startup, Unison prints a url for the codebase UI. If you did step 3 above, then visiting that URL in a browser will give you a nice interface to your codebase. +## Autoformatting your code with Ormolu + +We use 0.5.3.0 of Ormolu and CI will fail if your code isn't properly formatted. 
You can add the following to `.git/hooks/pre-commit` to make sure all your commits get formatted (this assumes you've got [`rg`](https://github.com/BurntSushi/ripgrep) installed and on your path): + +``` +#!/bin/bash + +set -e + +if [[ -z "${SKIP_FORMATTING}" ]]; then + git diff --cached --name-only | rg '.hs$' | xargs -n1 ormolu --ghc-opt '-XBangPatterns' --ghc-opt '-XCPP' --ghc-opt '-XPatternSynonyms' -i + git add $(git diff --cached --name-only) +fi +``` + +If you've got an existing PR that somehow hasn't been formatted correctly, you can install the correct version of Ormolu locally, then do: + +``` +ormolu --mode inplace $(find . -name '*.hs') +``` + +Also note that you can always wrap a comment around some code you don't want Ormolu to touch, using: + +``` +{- ORMOLU_DISABLE -} +dontFormatMe = do blah + blah + blah +{- ORMOLU_ENABLE -} +``` + ## Running Tests * `stack test --fast` builds and runs most test suites, see below for exceptions to this (e.g. transcript tests). diff --git a/docs/adding-builtins.markdown b/docs/adding-builtins.markdown index b3e52b1c4..56cea13e9 100644 --- a/docs/adding-builtins.markdown +++ b/docs/adding-builtins.markdown @@ -44,7 +44,6 @@ namespace. However, we will also add the value: ```haskell Rename' "MVar" "io2.MVar" ``` - because this is a type to be used with the new IO functions, which are currently nested under the `io2` namespace. With both of these added to the list, running `builtins.merge` should have a `builtin.io2.MVar` @@ -76,25 +75,28 @@ add declarations similar to: ```haskell B "MVar.new" $ forall1 "a" (\a -> a --> io (mvar a)) Rename "MVar.new" "io2.MVar.new" -B "MVar.take" $ forall1 "a" (\a -> mvar a --> ioe a) +B "MVar.take" $ forall1 "a" (\a -> mvar a --> iof a) Rename "MVar.take" "io2.MVar.take" ``` -The `forall1`, `io`, `ioe` and `-->` functions are local definitions -in `Unison.Builtin` for assistance in writing the types. `ioe` +The `forall1`, `io`, `iof` and `-->` functions are local definitions +in `Unison.Builtin` for assistance in writing the types. `iof` indicates that an error result may be returned, while `io` should -always succeed. `mvar` can be defined locally using some other +always succeed. Note that when the `{IO}` ability appears as a type +parameter rather than the return type of a function, you will need to +use `iot` instead. +`mvar` can be defined locally using some other helpers in scope: ```haskell -mvar :: Var v => Type v -> Type v +mvar :: Type -> Type mvar a = Type.ref () Type.mvarRef `app` a ``` For the actual `MVar` implementation, we'll be doing many definitions followed by renames, so it'll be factored into a list of the name and -type, together with a function that generates the declaration and the -rename. +type, and we can then call the `moveUnder` helper to generate the `B` +declaration and the `Rename`. ## Builtin function implementation -- new runtime @@ -111,41 +113,45 @@ in `Unison.Runtime.Builtin`, in a definition `declareForeigns`. We can declare our builtins there by adding: ```haskell - declareForeign "MVar.new" mvar'new + declareForeign Tracked "MVar.new" boxDirect . mkForeign $ \(c :: Closure) -> newMVar c - declareForeign "MVar.take" mvar'take - . mkForeignIOE $ \(mv :: MVar Closure) -> takeMVar mv + declareForeign Tracked "MVar.take" boxToEFBox + . mkForeignIOF $ \(mv :: MVar Closure) -> takeMVar mv ``` These lines do multiple things at once. The first argument to -`declareForeign` must match the name from `Unison.Builtin`, as this -is how they are associated. 
The second argument is wrapper code -that actually defines the unison function that will be called, and -the definitions for these two cases will be shown later. The last -argument is the actual Haskell implementation of the operation. -However, the format for foreign functions is somewhat more limited -than 'any Haskell function,' so the `mkForeign` and `mkForeignIOE` -helpers assist in wrapping Haskell functions correctly. The latter -will catch some exceptions and yield them as explicit results. +`declareForeign` determines whether the function should be explicitly +tracked by the Unison Cloud sandboxing functionality or not. As a +general guideline, functions in `{IO}` are `Tracked`, and pure +functions are `Untracked`. The second argument must match the name +from `Unison.Builtin`, as this is how they are associated. The third +argument is wrapper code that defines the conversion from the Haskell +runtim calling convention into Unison, and the definitions for these +two cases will be shown later. The last argument is the actual Haskell +implementation of the operation. However, the format for foreign +functions is somewhat more limited than 'any Haskell function,' so the +`mkForeign` and `mkForeignIOF` helpers assist in wrapping Haskell +functions correctly. The latter will catch some exceptions and yield +them as explicit results. The wrapper code for these two operations looks like: - ```haskell -mvar'new :: ForeignOp -mvar'new instr - = ([BX],) - . TAbs init - $ TFOp instr [init] +-- a -> b +boxDirect :: ForeignOp +boxDirect instr = + ([BX],) + . TAbs arg + $ TFOp instr [arg] where - [init] = freshes 1 + arg = fresh1 -mvar'take :: ForeignOp -mvar'take instr - = ([BX],) - . TAbs mv - $ io'error'result'direct instr [mv] ior e r +-- a -> Either Failure b +boxToEFBox :: ForeignOp +boxToEFBox = + inBx arg result $ + outIoFailBox stack1 stack2 stack3 any fail result where - [mv,ior,e,r] = freshes 4 + (arg, result, stack1, stack2, stack3, any, fail) = fresh ``` The breakdown of what is happening here is as follows: @@ -161,23 +167,25 @@ The breakdown of what is happening here is as follows: currently be taking all boxed arguments, because there is no way to talk about unboxed values in the surface syntax where they are called. -- `TAbs init` abstracts the argument variable, which we got from - `freshes'` at the bottom. Multiple arguments may be abstracted with - e.g. `TAbss [x,y,z]` -- `io'error'result'direct` is a helper function for calling the - instruction and wrapping up a possible error result. The first - argument is the identifier to call, the list is the arguments, - and the last three arguments are variables used in the common - result handling code. +- `TAbs arg` abstracts the argument variable, which we got from + `fresh1'` at the bottom. Multiple arguments may be abstracted with + e.g. `TAbss [x,y,z]`. You can call `fresh` to instantiate a tuple of + fresh variables of a certain arity. +- `inBx` and `outIoFailBox` are helper functions for calling the + instruction and wrapping up a possible error result. - `TFOp` simply calls the instruction with the assumption that the result value is acceptable for directly returning. `MVar` values will be represented directly by their Haskell values wrapped into - a closure, so the `mvar'new` code doesn't need to do any + a closure, so the `boxDirect` code doesn't need to do any processing of the results of its foreign function. 
-Other builtins use slightly different implementations, so looking at -other parts of the file may be instructive, depending on what is being -added. +The names of the helpers generally follow a form of Hungarian +notation, e.g. `boxToEFBox` means "boxed value to either a failure or +a boxed value", i.e. `a -> Either Failure b`. +However, not all helpers are named consistently at the moment, and +different builtins use slightly different implementations, so looking +at other parts of the file may be instructive, depending on what is +being added. At first, our declarations will cause an error, because some of the automatic machinery for creating builtin 'foreign' functions does not diff --git a/docs/configuration.md b/docs/configuration.md index 95f12b6b2..0dd770894 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -5,6 +5,7 @@ * [`UNISON_DEBUG`](#unison_debug) * [`UNISON_PAGER`](#unison_pager) * [`UNISON_LSP_PORT`](#unison_lsp_port) + * [`UNISON_LSP_ENABLED`](#unison_lsp_enabled) * [`UNISON_SHARE_HOST`](#unison_share_host) * [`UNISON_SHARE_ACCESS_TOKEN`](#unison_share_access_token) * [Local Codebase Server](#local-codebase-server) @@ -50,6 +51,31 @@ E.g. $ UNISON_LSP_PORT=8080 ucm ``` +### `UNISON_LSP_ENABLED` + +Allows explicitly enabling or disabling the LSP server. +Acceptable values are 'true' or 'false'. + +Note for Windows users: Due to an outstanding issue with GHC's IO manager on Windows, the LSP is **disabled by default** on Windows machines. +Enabling the LSP on Windows can cause UCM to hang on exit and may require the process to be killed by the operating system or via Ctrl-C. +Note that this doesn't pose any risk of codebase corruption or cause any known issues, it's simply an annoyance. + +If you accept this annoyance, you can enable the LSP server on Windows by exporting the `UNISON_LSP_ENABLED=true` environment variable. + +You can set this persistently in PowerShell using: + +```powershell +[System.Environment]::SetEnvironmentVariable('UNISON_LSP_ENABLED','true') +``` + +See [this issue](https://github.com/unisonweb/unison/issues/3487) for more details. + +E.g. + +```sh +$ UNISON_LSP_ENABLED=true ucm +``` + ### `UNISON_SHARE_HOST` Allows selecting the location for the default Share server. diff --git a/docs/language-server.markdown b/docs/language-server.markdown index d503d1780..00e3ed72f 100644 --- a/docs/language-server.markdown +++ b/docs/language-server.markdown @@ -20,9 +20,40 @@ Currently the only supported configuration is to connect to the LSP via a specif By default the LSP is hosted at `127.0.0.1:5757`, but you can change the port using `UNISON_LSP_PORT=1234`. +Note for Windows users: Due to an outstanding issue with GHC's IO manager on Windows, the LSP is **disabled by default** on Windows machines. +Enabling the LSP on Windows can cause UCM to hang on exit and may require the process to be killed by the operating system or via Ctrl-C. +Note that this doesn't pose any risk of codebase corruption or cause any known issues, it's simply an annoyance. + +If you accept this annoyance, you can enable the LSP server on Windows by exporting the `UNISON_LSP_ENABLED=true` environment variable. + +You can set this persistently in PowerShell using: + +```powershell +[System.Environment]::SetEnvironmentVariable('UNISON_LSP_ENABLED','true') +``` + +See [this issue](https://github.com/unisonweb/unison/issues/3487) for more details. ### NeoVim +Before configuring the LSP, install the Vim plugin for filetype detection and syntax highlighting.
+For [Packer](https://github.com/wbthomason/packer.nvim) you can install the package as follow: + +```lua +-- You may need to increase the git clone timeout setting in Packer! +use { + "unisonweb/unison", + branch = "trunk", + rtp = "/editor-support/vim" +} +``` + +or [Plug](https://github.com/junegunn/vim-plug): + +```vim +Plug 'unisonweb/unison', { 'branch': 'trunk', 'rtp': 'editor-support/vim' } +``` + Configuration for [coc-nvim](https://github.com/neoclide/coc.nvim), enter the following in the relevant place of your CocConfig ``` @@ -36,12 +67,93 @@ Configuration for [coc-nvim](https://github.com/neoclide/coc.nvim), enter the fo } ``` +For [lspconfig](https://github.com/neovim/nvim-lspconfig) with optional autocomplete [nvim-cmp](https://github.com/hrsh7th/nvim-cmp) for LSP +[cmp-nvim-lsp](https://github.com/hrsh7th/cmp-nvim-lsp), you can use the following setup function(s): + +```lua +-- This function is for configuring a buffer when an LSP is attached +local on_attach = function(client, bufnr) + -- Always show the signcolumn, otherwise it would shift the text each time + -- diagnostics appear/become resolved + vim.o.signcolumn = 'yes' + + -- Update the cursor hover location every 1/4 of a second + vim.o.updatetime = 250 + + -- Disable appending of the error text at the offending line + vim.diagnostic.config({virtual_text=false}) + + -- Enable a floating window containing the error text when hovering over an error + vim.api.nvim_create_autocmd("CursorHold", { + buffer = bufnr, + callback = function() + local opts = { + focusable = false, + close_events = { "BufLeave", "CursorMoved", "InsertEnter", "FocusLost" }, + border = 'rounded', + source = 'always', + prefix = ' ', + scope = 'cursor', + } + vim.diagnostic.open_float(nil, opts) + end + }) + + -- This setting is to display hover information about the symbol under the cursor + vim.keymap.set('n', 'K', vim.lsp.buf.hover) + +end + +-- Setup the Unison LSP +require('lspconfig')['unison'].setup{ + on_attach = on_attach, +} +``` + +```lua +-- This is NVim Autocompletion support +local cmp = require 'cmp' + +-- This function sets up autocompletion +cmp.setup { + + -- This mapping affects the autocompletion choices menu + mapping = cmp.mapping.preset.insert(), + + -- This table names the sources for autocompletion + sources = { + { name = 'nvim_lsp' }, + }, +} + +``` + Note that you'll need to start UCM _before_ you try connecting to it in your editor or your editor might give up. ### VSCode Simply install the [Unison Language VSCode extension](https://marketplace.visualstudio.com/items?itemName=unison-lang.unison). +### Helix Editor + +To `~/.config/helix/languages.toml` append this code: + +```toml +[[language]] +name = "unison" +scope = "source.unison" +injection-regex = "unison" +file-types = ["u"] +shebangs = [] +roots = [] +auto-format = false +comment-token = "--" +indent = { tab-width = 4, unit = " " } +language-server = { command = "ncat", args = ["localhost", "5757"] } +``` + +or follow the instructions for Unison in "[How to install the default language servers](https://github.com/helix-editor/helix/wiki/How-to-install-the-default-language-servers#unison)" wiki page. 
+ ### Other Editors diff --git a/docs/m1-mac-setup-tips.markdown b/docs/m1-mac-setup-tips.markdown new file mode 100644 index 000000000..6a24f3ad4 --- /dev/null +++ b/docs/m1-mac-setup-tips.markdown @@ -0,0 +1,164 @@ + +# M1 Mac Haskell toolchain setup + +If you are a newcomer to the Haskell ecosystem trying to set up your dev environment on a Mac M1 computer, welcome, you can do this! The tips in this document provide one way to get a working development setup, but are not the only path forward. If you haven't downloaded the Haskell toolchain before, our recommendation is to use GHCup. We've found that issues can arise if you mix ARM native binaries with x86 binaries to be run with Rosetta. If you're a veteran Haskell developer, much of this won't apply to you as it's likely you already have a working development environment. + +Here is a working set of versions you can use to build the Unison executable: + +GHC version: 8.10.7 +Stack version: 2.9.1 +Cabal version 3.6.2.0 +Haskell language server version: 1.7.0.0 + +The GHC version for the project can be confirmed by looking at the `resolver` key in this project's `stack.yaml`. + +## Newcomer setup tips + +[Install GHCup using the instructions on their website.](https://www.haskell.org/ghcup/) Once it's installed make sure `ghcup` is on your path. + +``` +export PATH="$HOME/.ghcup/bin:$PATH" +``` + +GHCup has a nice ui for setting Haskell toolchain versions for the project. Enter `ghcup tui` to open it up and follow the instructions for installing and setting the versions there. GHCup will try to download M1 native binaries for the versions given. + +Check your clang version. For [hand-wavey reasons](https://gitlab.haskell.org/haskell/ghcup-hs/-/issues/301) we recommend you use llvm version 12. See troubleshooting note below about changing your LLVM if your version is different. + +```shell +$ clang --version +Homebrew clang version 12.0.1 +Target: arm64-apple-darwin20.2.0 +Thread model: posix +InstalledDir: /opt/homebrew/opt/llvm@12/bin +``` + +At the end of the process you should see something like the following for executable locations and versions. + +```shell +$ which ghcup +~/.ghcup/bin/ghcup +$ ghcup --version +The GHCup Haskell installer, version 0.1.19.0 +``` + +```bash +$ which stack +~/.ghcup/bin/stack +$ stack --version +Version 2.9.1, Git revision 13c9c8772a6dce093dbeacc08bb5877bdb6cfc2e (dirty) (155 commits) aarch64 +``` + +```shell +$ which ghc +~/.ghcup/bin/ghc +$ ghc --version +The Glorious Glasgow Haskell Compilation System, version 8.10.7 +``` + +Check which GHC version Stack thinks it's using too, for good measure: + +```shell +$ stack ghc -- --version +The Glorious Glasgow Haskell Compilation System, version 8.10.7 +$ stack exec -- which ghc +~/.ghcup/ghc/8.10.7/bin/ghc +``` + +```shell +$ which haskell-language-server-wrapper +~/.ghcup/bin/haskell-language-server-wrapper +$ haskell-language-server-wrapper + +Found "...unison/hie.yaml" for "...unison/a" +Run entered for haskell-language-server-wrapper(haskell-language-server-wrapper) Version 1.7.0.0 aarch64 ghc-9.2.2 +Current directory: ...unison +Operating system: darwin +Arguments: [] +Cradle directory: ...unison +Cradle type: Stack + +Tool versions found on the $PATH +cabal: 3.6.2.0 +stack: 2.9.1 +ghc: 8.10.7 +``` + +If you're a VS Code user, you can download the Haskell extension for IDE support. You may need to configure it in `settings.json`. 
+ +```json + "haskell.manageHLS": "GHCup", + "haskell.toolchain": { + "stack": "2.9.1", + "ghc": "8.10.7", + "cabal": "recommended", + "hls": "1.7.0.0" + } +``` + +These setting blocks say that the VS Code extension will use GHCup for your Haskell language server distribution, and sets the versions for elements in the toolchain. + +## Troubleshooting: + +The VS Code extension has compiled a helpful list of troubleshooting steps here: https://github.com/haskell/vscode-haskell#troubleshooting + +### "Couldn't figure out LLVM version" or "failed to compile a sanity check" errors + +``` +: error: + Warning: Couldn't figure out LLVM version! + Make sure you have installed LLVM between [9 and 13) +ghc: could not execute: opt +``` + +Or + +``` +ld: symbol(s) not found for architecture x86_64 +clang: error: linker command failed with exit code 1 (use -v to see invocation) +`gcc' failed in phase `Linker'. (Exit code: 1) +``` + +Try installing llvm version 12 +`brew install llvm@12` + +and prepend it to your path +``` +export PATH="$(brew --prefix)/opt/llvm@12/bin:$PATH" +``` + +(The GHC version 8.10.7 mentions it supports LLVM versions up to 12. https://www.haskell.org/ghc/download_ghc_8_10_7.html) + +### "GHC ABIs don't match!" + +Follow the steps here: + +https://github.com/haskell/vscode-haskell#ghc-abis-dont-match + +We found some success telling Stack to use the system's GHC instead of managing its own version of GHC. You can try this by setting the following two configuration flags in ~/.stack/config.yaml + +``` +system-ghc: true +install-ghc: false +``` + +This is telling Stack to use the GHC executable that it finds on your $PATH. Make sure the ghc being provided is the proper version, 8.10.7, from ghcup. + +Note that you may need to clean the cache for the project after this failure with `stack clean --full` if you have previously built things with a different stack distribution. + +### "stack" commands like "stack build" cause a segfault: + +1. Make sure your stack state is clean. `stack clean --full` removes the project's stack work directories (things in .stack-work). +2. [Wait for this bug to be fixed (or help fix this bug!)](https://github.com/commercialhaskell/stack/issues/5607) +3. Or subshell out your stack commands `$(stack commandHere)` +4. Or use bash instead of zsh + +### Help! Everything is broken and I want to start over + +Warning, the following will remove ghcup, configuration files, cached packages, and versions of the toolchain. + +``` +ghcup nuke +rm -rf ~/.ghcup +rm -rf ~/.stack +rm -rf ~/.cabal +``` diff --git a/editor-support/vim/ftplugin/unison.vim b/editor-support/vim/ftplugin/unison.vim index ce410f091..1e8afb627 100644 --- a/editor-support/vim/ftplugin/unison.vim +++ b/editor-support/vim/ftplugin/unison.vim @@ -4,4 +4,9 @@ if exists("b:did_ftplugin") endif let b:did_ftplugin = 1 -call unison#SetBufferDefaults() +setlocal commentstring=--\ %s +setlocal iskeyword+=!,' +" setlocal tabstop=2 +" setlocal softtabstop=2 +" setlocal shiftwidth=2 +" setlocal completefunc=syntaxcomplete#Complete diff --git a/editor-support/vim/syntax/unison.vim b/editor-support/vim/syntax/unison.vim index ebf998528..ec193723a 100644 --- a/editor-support/vim/syntax/unison.vim +++ b/editor-support/vim/syntax/unison.vim @@ -17,7 +17,7 @@ " u_allow_hash_operator - Don't highlight seemingly incorrect C " preprocessor directives but assume them to be " operators -" +" 2023 Jan 6: Update for current syntax (dt) " 2018 Aug 23: Adapt Haskell highlighting to Unison, cleanup. 
" 2004 Feb 19: Added C preprocessor directive handling, corrected eol comments " cleaned away literate unison support (should be entirely in @@ -34,20 +34,12 @@ elseif exists("b:current_syntax") finish endif -" (Qualified) identifiers (no default highlighting) -syn match ConId "\(\<[A-Z][a-zA-Z0-9_']*\.\)\=\<[A-Z][a-zA-Z0-9_']*\>" -syn match VarId "\(\<[A-Z][a-zA-Z0-9_']*\.\)\=\<[a-z][a-zA-Z0-9_']*\>" +syntax include @markdown $VIMRUNTIME/syntax/markdown.vim -" Infix operators--most punctuation characters and any (qualified) identifier -" enclosed in `backquotes`. An operator starting with : is a constructor, -" others are variables (e.g. functions). -syn match uVarSym "\(\<[A-Z][a-zA-Z0-9_']*\.\)\=[-!#$%&\*\+/<=>\?@\\^|~.][-!#$%&\*\+/<=>\?@\\^|~:.]*" -syn match uConSym "\(\<[A-Z][a-zA-Z0-9_']*\.\)\=:[-!#$%&\*\+./<=>\?@\\^|~:]*" -syn match uVarSym "`\(\<[A-Z][a-zA-Z0-9_']*\.\)\=[a-z][a-zA-Z0-9_']*`" -syn match uConSym "`\(\<[A-Z][a-zA-Z0-9_']*\.\)\=[A-Z][a-zA-Z0-9_']*`" +syn cluster markdownLikeDocs contains=markdownBold,markdownItalic,markdownLinkText,markdownListMarker,markdownOrderedListMarker,markdownH1,markdownH2,markdownH3,markdownH4,markdownH5,markdownH6 -" Reserved symbols--cannot be overloaded. -syn match uDelimiter "(\|)\|\[\|\]\|,\|_\|{\|}" +syn match uOperator "[-!#$%&\*\+/<=>\?@\\^|~]" +syn match uDelimiter "[\[\](){},.]" " Strings and constants syn match uSpecialChar contained "\\\([0-9]\+\|o[0-7]\+\|x[0-9a-fA-F]\+\|[\"\\'&\\abfnrtv]\|^[A-Z^_\[\\\]]\)" @@ -62,45 +54,42 @@ syn match uFloat "\<[0-9]\+\.[0-9]\+\([eE][-+]\=[0-9]\+\)\=\>" " Keyword definitions. These must be patterns instead of keywords " because otherwise they would match as keywords at the start of a " "literate" comment (see lu.vim). -syn match uModule "\" +syn match uModule "\" syn match uImport "\" -syn match uInfix "\<\(infix\|infixl\|infixr\)\>" -syn match uTypedef "\<\(∀\|forall\)\>" -syn match uStatement "\<\(unique\|ability\|type\|where\|match\|cases\|;\|let\|with\|handle\)\>" +syn match uTypedef "\<\(unique\|structural\|∀\|forall\)\>" +syn match uStatement "\<\(ability\|do\|type\|where\|match\|cases\|;\|let\|with\|handle\)\>" syn match uConditional "\<\(if\|else\|then\)\>" -" Not real keywords, but close. -if exists("u_highlight_boolean") - " Boolean constants from the standard prelude. - syn match uBoolean "\<\(true\|false\)\>" -endif -if exists("u_highlight_types") - " Primitive types from the standard prelude and libraries. - syn match uType "\<\(Float\|Nat\|Int\|Boolean\|Remote\|Text\)\>" -endif -if exists("u_highlight_more_types") - " Types from the standard prelude libraries. - syn match uType "\<\(Optional\|Either\|Sequence\|Effect\)\>" - syn match uMaybe "\" - syn match uExitCode "\<\(ExitSuccess\)\>" - syn match uOrdering "\<\(GT\|LT\|EQ\)\>" -endif -if exists("u_highlight_debug") - " Debugging functions from the standard prelude. - syn match uDebug "\<\(undefined\|error\|trace\)\>" -endif +syn match uBoolean "\<\(true\|false\)\>" +syn match uType "\<\C[A-Z][0-9A-Za-z_'!]*\>" +syn match uName "\<\C[a-z_][0-9A-Za-z_'!]*\>" " Comments syn match uLineComment "---*\([^-!#$%&\*\+./<=>\?@\\^|~].*\)\?$" syn region uBlockComment start="{-" end="-}" contains=uBlockComment -syn region uPragma start="{-#" end="#-}" -syn region uBelowFold start="^---" skip="." end="." contains=uBelowFold +syn region uPragma start="{-#" end="#-}" +syn region uBelowFold start="^---" skip="." end="." 
contains=uBelowFold " Docs -syn region uDocBlock start="\[:" end=":]" contains=uLink,uDocDirective -syn match uLink contained "@\([A-Z][a-zA-Z0-9_']*\.\)\=\<[a-z][a-zA-Z0-9_'.]*\>" -syn match uDocDirective contained "@\[\([A-Z][a-zA-Z0-9_']*\.\)\=\<[a-z][a-zA-Z0-9_'.]*\>] \(\<[A-Z][a-zA-Z0-9_']*\.\)\=\<[a-z][a-zA-Z0-9_'.]*\>" +syn region uDocBlock matchgroup=unisonDoc start="{{" end="}}" contains=uDocTypecheck,uDocQuasiquote,uDocDirective,uDocCode,uDocCodeInline,uDocCodeRaw,uDocMono,@markdownLikeDocs +syn region uDocQuasiquote contained matchgroup=unisonDocQuote start="{{" end= "}}" contains=TOP +syn region uDocCode contained matchgroup=unisonDocCode start="^\s*```\s*$" end="^\s*```\s*$" contains=TOP +syn region uDocTypecheck contained matchgroup=unisonDocCode start="^\s*@typecheck\s*```\s*$" end="^\s*```\s*$" contains=TOP +syn region uDocCodeRaw contained matchgroup=unisonDocCode start="^\s*```\s*raw\s*$" end="^\s*```\s*$" contains=NoSyntax +syn region uDocCodeInline contained matchgroup=unisonDocCode start="`\@" + +" things like +" > my_func 1 3 +" test> Function.tap.tests.t1 = check let +" use Nat == + +" ( 99, 100 ) === (withInitialValue 0 do +" : : : +syn match uWatch "^[A-Za-z]*>" " Define the default highlighting. " For version 5.7 and earlier: only when not done already @@ -112,40 +101,41 @@ if version >= 508 || !exists("did_u_syntax_inits") else command -nargs=+ HiLink hi def link endif + + HiLink uWatch Debug + HiLink uDocMono Delimiter + HiLink unisonDocDirective Import + HiLink unisonDocQuote Delimiter + HiLink unisonDocCode Delimiter + HiLink unisonDoc String + HiLink uBelowFold Comment + HiLink uBlockComment Comment + HiLink uBoolean Boolean + HiLink uCharacter Character + HiLink uComment Comment + HiLink uConditional Conditional + HiLink uConditional Conditional + HiLink uDebug Debug + HiLink uDelimiter Delimiter + HiLink uDocBlock String + HiLink uDocDirective Import + HiLink uDocIncluded Import + HiLink uFloat Float + HiLink uImport Include + HiLink uLineComment Comment + HiLink uLink Type + HiLink uName Identifier + HiLink uNumber Number + HiLink uOperator Operator + HiLink uPragma SpecialComment + HiLink uSpecialChar SpecialChar + HiLink uSpecialCharError Error + HiLink uStatement Statement + HiLink uString String + HiLink uType Type + HiLink uTypedef Typedef - HiLink uImport Include - HiLink uInfix PreProc - HiLink uStatement Statement - HiLink uConditional Conditional - HiLink uSpecialChar SpecialChar - HiLink uTypedef Typedef - HiLink uVarSym uOperator - HiLink uConSym uOperator - HiLink uOperator Operator - HiLink uDelimiter Delimiter - HiLink uSpecialCharError Error - HiLink uString String - HiLink uCharacter Character - HiLink uNumber Number - HiLink uFloat Float - HiLink uConditional Conditional - HiLink uLiterateComment uComment - HiLink uBlockComment uComment - HiLink uLineComment uComment - HiLink uComment Comment - HiLink uBelowFold Comment - HiLink uDocBlock String - HiLink uLink uType - HiLink uDocDirective uImport - HiLink uPragma SpecialComment - HiLink uBoolean Boolean - HiLink uType Type - HiLink uMaybe uEnumConst - HiLink uOrdering uEnumConst - HiLink uEnumConst Constant - HiLink uDebug Debug - - delcommand HiLink + delcommand HiLink endif diff --git a/flake.lock b/flake.lock new file mode 100644 index 000000000..86a86d8b7 --- /dev/null +++ b/flake.lock @@ -0,0 +1,43 @@ +{ + "nodes": { + "flake-utils": { + "locked": { + "lastModified": 1667395993, + "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", + "owner": "numtide", + "repo": 
"flake-utils", + "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1678703398, + "narHash": "sha256-Y1mW3dBsoWLHpYm+UIHb5VZ7rx024NNHaF16oZBx++o=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "67f26c1cfc5d5783628231e776a81c1ade623e0b", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-22.11", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 000000000..23bd49214 --- /dev/null +++ b/flake.nix @@ -0,0 +1,149 @@ +{ + description = "A common environment for unison development"; + + inputs = { + flake-utils.url = "github:numtide/flake-utils"; + nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11"; + }; + + outputs = { self, flake-utils, nixpkgs }: + let + ghc-version = "8107"; + systemAttrs = flake-utils.lib.eachDefaultSystem (system: + let + pkgs = nixpkgs.legacyPackages."${system}".extend self.overlay; + ghc-version = "8107"; + ghc = pkgs.haskell.packages."ghc${ghc-version}"; + nativePackages = pkgs.lib.optionals pkgs.stdenv.isDarwin + (with pkgs.darwin.apple_sdk.frameworks; [ Cocoa ]); + + unison-env = pkgs.mkShell { + packages = let exports = self.packages."${system}"; + in with pkgs; + [ + exports.stack + exports.hls + exports.ormolu + exports.ghc + pkg-config + zlib + ] ++ nativePackages; + # workaround for https://gitlab.haskell.org/ghc/ghc/-/issues/11042 + shellHook = '' + export LD_LIBRARY_PATH=${pkgs.zlib}/lib:$LD_LIBRARY_PATH + ''; + }; + in { + + apps.repl = flake-utils.lib.mkApp { + drv = + nixpkgs.legacyPackages."${system}".writeShellScriptBin "repl" '' + confnix=$(mktemp) + echo "builtins.getFlake (toString $(git rev-parse --show-toplevel))" >$confnix + trap "rm $confnix" EXIT + nix repl $confnix + ''; + }; + + pkgs = pkgs; + + devShells.default = unison-env; + + packages = { + hls = pkgs.unison-hls; + hls-call-hierarchy-plugin = ghc.hls-call-hierarchy-plugin; + ormolu = pkgs.ormolu; + ghc = pkgs.haskell.compiler."ghc${ghc-version}".override { + useLLVM = pkgs.stdenv.isAarch64; + }; + stack = pkgs.unison-stack; + devShell = self.devShells."${system}".default; + + }; + + defaultPackage = self.packages."${system}".devShell; + }); + topLevelAttrs = { + overlay = final: prev: { + ormolu = prev.haskell.lib.justStaticExecutables + final.haskell.packages."ghc${ghc-version}".ormolu; + haskell = with prev.haskell.lib; + prev.haskell // { + packages = prev.haskell.packages // { + "ghc${ghc-version}" = prev.haskell.packages.ghc8107.extend + (hfinal: hprev: { + mkDerivation = drv: + hprev.mkDerivation (drv // { + doCheck = false; + doHaddock = false; + doBenchmark = false; + enableLibraryProfiling = false; + enableExecutableProfiling = false; + }); + aeson = hfinal.aeson_2_1_1_0; + lens-aeson = hfinal.lens-aeson_1_2_2; + Cabal = hfinal.Cabal_3_6_3_0; + ormolu = hfinal.ormolu_0_5_0_1; + ghc-lib-parser = hfinal.ghc-lib-parser_9_2_5_20221107; + # avoid deprecated version https://github.com/Avi-D-coder/implicit-hie/issues/50 + implicit-hie = hfinal.callHackageDirect { + pkg = "implicit-hie"; + ver = "0.1.4.0"; + sha256 = + "15qy9vwm8vbnyv47vh6kd50m09vc4vhqbbrhf8gdifrvlxhad69l"; + } { }; + haskell-language-server = let + p = prev.haskell.lib.overrideCabal + hprev.haskell-language-server (drv: { + # undo terrible nixpkgs 
hacks + buildDepends = + prev.lib.filter (x: x != hprev.hls-brittany-plugin) + drv.buildDepends; + configureFlags = drv.configureFlags ++ [ + "-f-brittany" + "-f-fourmolu" + "-f-floskell" + "-f-stylishhaskell" + "-f-hlint" + ]; + }); + in p.overrideScope (lfinal: lprev: { + # undo all of the horrible overrideScope in + # nixpkgs configuration files + ormolu = hfinal.ormolu; + ghc-lib-parser = hfinal.ghc-lib-parser; + ghc-lib-parser-ex = hfinal.ghc-lib-parser-ex; + ghc-paths = hfinal.ghc-paths; + aeson = hfinal.aeson; + lsp-types = hfinal.lsp-types; + # null out some dependencies that we drop with cabal flags + hls-fourmolu-plugin = null; + hls-floskell-plugin = null; + hls-brittany-plugin = hfinal.hls-brittany-plugin; + hls-stylish-haskell-plugin = null; + hls-hlint-plugin = null; + }); + }); + }; + }; + unison-hls = final.haskell-language-server.override { + haskellPackages = final.haskell.packages."ghc${ghc-version}"; + dynamic = true; + supportedGhcVersions = [ ghc-version ]; + }; + unison-stack = prev.symlinkJoin { + name = "stack"; + paths = [ final.stack ]; + buildInputs = [ final.makeWrapper ]; + postBuild = let + flags = [ "--no-nix" "--system-ghc" "--no-install-ghc" ]; + add-flags = + "--add-flags '${prev.lib.concatStringsSep " " flags}'"; + in '' + wrapProgram "$out/bin/stack" ${add-flags} + ''; + }; + }; + }; + in systemAttrs // topLevelAttrs; +} diff --git a/hie.yaml b/hie.yaml index 68d04e290..7df0c0a98 100644 --- a/hie.yaml +++ b/hie.yaml @@ -21,6 +21,18 @@ cradle: - path: "codebase2/util-term/./" component: "unison-util-term:lib" + - path: "lib/unison-hash/src" + component: "unison-hash:lib" + + - path: "lib/unison-hash-orphans-aeson/src" + component: "unison-hash-orphans-aeson:lib" + + - path: "lib/unison-hash-orphans-sqlite/src" + component: "unison-hash-orphans-sqlite:lib" + + - path: "lib/unison-hashing/src" + component: "unison-hashing:lib" + - path: "lib/unison-prelude/src" component: "unison-prelude:lib" @@ -39,12 +51,6 @@ cradle: - path: "lib/unison-util-base32hex/src" component: "unison-util-base32hex:lib" - - path: "lib/unison-util-base32hex-orphans-aeson/src" - component: "unison-util-base32hex-orphans-aeson:lib" - - - path: "lib/unison-util-base32hex-orphans-sqlite/src" - component: "unison-util-base32hex-orphans-sqlite:lib" - - path: "lib/unison-util-bytes/src" component: "unison-util-bytes:lib" diff --git a/lib/unison-util-base32hex-orphans-aeson/package.yaml b/lib/unison-hash-orphans-aeson/package.yaml similarity index 87% rename from lib/unison-util-base32hex-orphans-aeson/package.yaml rename to lib/unison-hash-orphans-aeson/package.yaml index da2b27afc..1bdba2625 100644 --- a/lib/unison-util-base32hex-orphans-aeson/package.yaml +++ b/lib/unison-hash-orphans-aeson/package.yaml @@ -1,17 +1,18 @@ -name: unison-util-base32hex-orphans-aeson +name: unison-hash-orphans-aeson github: unisonweb/unison copyright: Copyright (C) 2013-2021 Unison Computing, PBC and contributors library: when: - condition: false - other-modules: Paths_unison_util_base32hex_orphans_aeson + other-modules: Paths_unison_hash_orphans_aeson source-dirs: src dependencies: - aeson - base - text + - unison-hash - unison-util-base32hex ghc-options: diff --git a/lib/unison-util-base32hex-orphans-aeson/src/U/Util/Hash32/Orphans/Aeson.hs b/lib/unison-hash-orphans-aeson/src/Unison/Hash32/Orphans/Aeson.hs similarity index 81% rename from lib/unison-util-base32hex-orphans-aeson/src/U/Util/Hash32/Orphans/Aeson.hs rename to lib/unison-hash-orphans-aeson/src/Unison/Hash32/Orphans/Aeson.hs index 
4cc9efbbe..3d52a35b5 100644 --- a/lib/unison-util-base32hex-orphans-aeson/src/U/Util/Hash32/Orphans/Aeson.hs +++ b/lib/unison-hash-orphans-aeson/src/Unison/Hash32/Orphans/Aeson.hs @@ -1,11 +1,11 @@ {-# OPTIONS_GHC -fno-warn-orphans #-} -module U.Util.Hash32.Orphans.Aeson () where +module Unison.Hash32.Orphans.Aeson () where import Data.Aeson (FromJSON, FromJSONKey, ToJSON, ToJSONKey) import Data.Text (Text) import U.Util.Base32Hex (Base32Hex (..)) -import U.Util.Hash32 (Hash32 (..)) +import Unison.Hash32 (Hash32 (..)) deriving via Text instance FromJSON Hash32 diff --git a/lib/unison-util-base32hex-orphans-aeson/unison-util-base32hex-orphans-aeson.cabal b/lib/unison-hash-orphans-aeson/unison-hash-orphans-aeson.cabal similarity index 92% rename from lib/unison-util-base32hex-orphans-aeson/unison-util-base32hex-orphans-aeson.cabal rename to lib/unison-hash-orphans-aeson/unison-hash-orphans-aeson.cabal index b77f6d5be..d85db2665 100644 --- a/lib/unison-util-base32hex-orphans-aeson/unison-util-base32hex-orphans-aeson.cabal +++ b/lib/unison-hash-orphans-aeson/unison-hash-orphans-aeson.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -name: unison-util-base32hex-orphans-aeson +name: unison-hash-orphans-aeson version: 0.0.0 homepage: https://github.com/unisonweb/unison#readme bug-reports: https://github.com/unisonweb/unison/issues @@ -17,7 +17,7 @@ source-repository head library exposed-modules: - U.Util.Hash32.Orphans.Aeson + Unison.Hash32.Orphans.Aeson hs-source-dirs: src default-extensions: @@ -49,5 +49,6 @@ library aeson , base , text + , unison-hash , unison-util-base32hex default-language: Haskell2010 diff --git a/lib/unison-util-base32hex-orphans-sqlite/package.yaml b/lib/unison-hash-orphans-sqlite/package.yaml similarity index 87% rename from lib/unison-util-base32hex-orphans-sqlite/package.yaml rename to lib/unison-hash-orphans-sqlite/package.yaml index 7ed1fbaa8..c5e577660 100644 --- a/lib/unison-util-base32hex-orphans-sqlite/package.yaml +++ b/lib/unison-hash-orphans-sqlite/package.yaml @@ -1,17 +1,18 @@ -name: unison-util-base32hex-orphans-sqlite +name: unison-hash-orphans-sqlite github: unisonweb/unison copyright: Copyright (C) 2013-2021 Unison Computing, PBC and contributors library: when: - condition: false - other-modules: Paths_unison_util_base32hex_orphans_sqlite + other-modules: Paths_unison_hash_orphans_sqlite source-dirs: src dependencies: - base - sqlite-simple - text + - unison-hash - unison-util-base32hex ghc-options: diff --git a/lib/unison-util-base32hex-orphans-sqlite/src/U/Util/Hash32/Orphans/Sqlite.hs b/lib/unison-hash-orphans-sqlite/src/Unison/Hash32/Orphans/Sqlite.hs similarity index 78% rename from lib/unison-util-base32hex-orphans-sqlite/src/U/Util/Hash32/Orphans/Sqlite.hs rename to lib/unison-hash-orphans-sqlite/src/Unison/Hash32/Orphans/Sqlite.hs index 4602e8737..941e56480 100644 --- a/lib/unison-util-base32hex-orphans-sqlite/src/U/Util/Hash32/Orphans/Sqlite.hs +++ b/lib/unison-hash-orphans-sqlite/src/Unison/Hash32/Orphans/Sqlite.hs @@ -1,12 +1,12 @@ {-# OPTIONS_GHC -fno-warn-orphans #-} -module U.Util.Hash32.Orphans.Sqlite () where +module Unison.Hash32.Orphans.Sqlite () where import Data.Text (Text) import Database.SQLite.Simple.FromField (FromField) import Database.SQLite.Simple.ToField (ToField) import U.Util.Base32Hex (Base32Hex (..)) -import U.Util.Hash32 (Hash32 (..)) +import Unison.Hash32 (Hash32 (..)) deriving via Text instance ToField Hash32 diff --git 
a/lib/unison-util-base32hex-orphans-sqlite/unison-util-base32hex-orphans-sqlite.cabal b/lib/unison-hash-orphans-sqlite/unison-hash-orphans-sqlite.cabal similarity index 92% rename from lib/unison-util-base32hex-orphans-sqlite/unison-util-base32hex-orphans-sqlite.cabal rename to lib/unison-hash-orphans-sqlite/unison-hash-orphans-sqlite.cabal index f78c9eca9..2f14ce5fc 100644 --- a/lib/unison-util-base32hex-orphans-sqlite/unison-util-base32hex-orphans-sqlite.cabal +++ b/lib/unison-hash-orphans-sqlite/unison-hash-orphans-sqlite.cabal @@ -4,7 +4,7 @@ cabal-version: 1.12 -- -- see: https://github.com/sol/hpack -name: unison-util-base32hex-orphans-sqlite +name: unison-hash-orphans-sqlite version: 0.0.0 homepage: https://github.com/unisonweb/unison#readme bug-reports: https://github.com/unisonweb/unison/issues @@ -17,7 +17,7 @@ source-repository head library exposed-modules: - U.Util.Hash32.Orphans.Sqlite + Unison.Hash32.Orphans.Sqlite hs-source-dirs: src default-extensions: @@ -49,5 +49,6 @@ library base , sqlite-simple , text + , unison-hash , unison-util-base32hex default-language: Haskell2010 diff --git a/lib/unison-hash/package.yaml b/lib/unison-hash/package.yaml new file mode 100644 index 000000000..5731e2269 --- /dev/null +++ b/lib/unison-hash/package.yaml @@ -0,0 +1,46 @@ +name: unison-hash +github: unisonweb/unison +copyright: Copyright (C) 2013-2021 Unison Computing, PBC and contributors + +ghc-options: -Wall -O0 -fno-warn-name-shadowing -fno-warn-missing-pattern-synonym-signatures + +dependencies: + - base + - bytestring + - text + - unison-prelude + - unison-util-base32hex + +library: + source-dirs: src + when: + - condition: false + other-modules: Paths_unison_hash + +default-extensions: + - ApplicativeDo + - BangPatterns + - BlockArguments + - DeriveAnyClass + - DeriveFunctor + - DeriveGeneric + - DeriveTraversable + - DerivingStrategies + - DerivingVia + - DoAndIfThenElse + - DuplicateRecordFields + - FlexibleContexts + - FlexibleInstances + - GeneralizedNewtypeDeriving + - LambdaCase + - MultiParamTypeClasses + - NamedFieldPuns + - OverloadedStrings + - PatternSynonyms + - RankNTypes + - ScopedTypeVariables + - StandaloneDeriving + - TupleSections + - TypeApplications + - TypeFamilies + - ViewPatterns diff --git a/lib/unison-util-base32hex/src/U/Util/Hash.hs b/lib/unison-hash/src/Unison/Hash.hs similarity index 51% rename from lib/unison-util-base32hex/src/U/Util/Hash.hs rename to lib/unison-hash/src/Unison/Hash.hs index 05dfae334..60842ebe0 100644 --- a/lib/unison-util-base32hex/src/U/Util/Hash.hs +++ b/lib/unison-hash/src/Unison/Hash.hs @@ -1,59 +1,67 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE OverloadedStrings #-} - -module U.Util.Hash - ( Hash (Hash, toShort), - unsafeFromBase32HexText, - fromBase32HexText, - fromBase32Hex, - fromByteString, - toBase32Hex, - toBase32HexText, - toByteString, +module Unison.Hash + ( Hash (Hash), HashFor (..), + + -- ** ShortByteString conversions + toShort, + + -- ** ByteString conversions + fromByteString, + toByteString, + + -- ** Base32Hex conversions + fromBase32Hex, + toBase32Hex, + + -- ** Base32Hex Text conversions + fromBase32HexText, + unsafeFromBase32HexText, + toBase32HexText, ) where -import Data.ByteString (ByteString) -import Data.ByteString.Short (ShortByteString, fromShort) +import Data.ByteString.Short (ShortByteString) import qualified Data.ByteString.Short as B.Short -import Data.Text (Text) -import GHC.Generics (Generic) import U.Util.Base32Hex (Base32Hex) import qualified 
U.Util.Base32Hex as Base32Hex +import Unison.Prelude --- | Hash which uniquely identifies a Unison type or term -newtype Hash = Hash {toShort :: ShortByteString} deriving (Eq, Ord, Generic) +-- | A hash. +newtype Hash = Hash {toShort :: ShortByteString} + deriving stock (Eq, Ord, Generic) -toBase32Hex :: Hash -> Base32Hex -toBase32Hex = Base32Hex.fromByteString . toByteString +instance Show Hash where + show = show . toBase32HexText -toBase32HexText :: Hash -> Text -toBase32HexText = Base32Hex.toText . toBase32Hex - -fromBase32Hex :: Base32Hex -> Hash -fromBase32Hex = Hash . B.Short.toShort . Base32Hex.toByteString - --- | Constructs a hash from base32 checks without any validation. --- Note that this converts Text -> ByteString -> ShortByteString and so is slower than --- we'd prefer. -unsafeFromBase32HexText :: Text -> Hash -unsafeFromBase32HexText = fromBase32Hex . Base32Hex.UnsafeFromText - -fromBase32HexText :: Text -> Maybe Hash -fromBase32HexText = fmap fromBase32Hex . Base32Hex.fromText +-- | A hash tagged with the type it's a hash of, useful for maintaining type safety guarantees. +newtype HashFor t = HashFor {genericHash :: Hash} + deriving newtype (Show, Eq, Ord, Generic) +-- | Convert a hash to a byte string. toByteString :: Hash -> ByteString -toByteString = fromShort . toShort +toByteString = B.Short.fromShort . toShort +-- | Convert a byte string to a hash. fromByteString :: ByteString -> Hash fromByteString = Hash . B.Short.toShort -instance Show Hash where - show h = (show . toBase32HexText) h +-- | Convert base32 hex to a hash. +fromBase32Hex :: Base32Hex -> Hash +fromBase32Hex = fromByteString . Base32Hex.toByteString --- | A hash tagged with the type it's a hash of, useful for maintaining type safety --- guarantees. -newtype HashFor t = HashFor {genericHash :: Hash} - deriving newtype (Show, Eq, Ord, Generic) +-- | Convert a hash to base32 hex. +toBase32Hex :: Hash -> Base32Hex +toBase32Hex = Base32Hex.fromByteString . toByteString + +-- | Produce a 'Hash' from a base32hex-encoded version of its binary representation +fromBase32HexText :: Text -> Maybe Hash +fromBase32HexText = fmap fromBase32Hex . Base32Hex.fromText + +-- | Convert a hash from base32 hex without any validation. +unsafeFromBase32HexText :: Text -> Hash +unsafeFromBase32HexText = fromBase32Hex . Base32Hex.UnsafeFromText + +-- | Return the lowercase unpadded base32Hex encoding of this 'Hash'. +-- Multibase prefix would be 'v', see https://github.com/multiformats/multibase +toBase32HexText :: Hash -> Text +toBase32HexText = Base32Hex.toText . toBase32Hex diff --git a/lib/unison-util-base32hex/src/U/Util/Hash32.hs b/lib/unison-hash/src/Unison/Hash32.hs similarity index 92% rename from lib/unison-util-base32hex/src/U/Util/Hash32.hs rename to lib/unison-hash/src/Unison/Hash32.hs index 39f4205e0..25704f026 100644 --- a/lib/unison-util-base32hex/src/U/Util/Hash32.hs +++ b/lib/unison-hash/src/Unison/Hash32.hs @@ -1,5 +1,5 @@ -- | A 512-bit hash, internally represented as base32hex. -module U.Util.Hash32 +module Unison.Hash32 ( -- * Hash32 type Hash32 (..), @@ -19,8 +19,8 @@ module U.Util.Hash32 where import U.Util.Base32Hex (Base32Hex (..)) -import U.Util.Hash (Hash) -import qualified U.Util.Hash as Hash +import Unison.Hash (Hash) +import qualified Unison.Hash as Hash import Unison.Prelude -- | A 512-bit hash, internally represented as base32hex. 
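A minimal usage sketch of the relocated Unison.Hash API (illustration only, not part of the diff; the module name HashExample and the literal input are assumptions, while the imports and function signatures are taken from the hunks above):

{-# LANGUAGE OverloadedStrings #-}

module HashExample where

import Data.ByteString.Char8 () -- IsString instance for the ByteString literal below
import Data.Text (Text)
import Unison.Hash (Hash)
import qualified Unison.Hash as Hash

-- Render a Hash as lowercase unpadded base32hex Text and parse it back;
-- fromBase32HexText validates its input, so this should yield Just the original Hash.
roundTrip :: Hash -> Maybe Hash
roundTrip h = Hash.fromBase32HexText (Hash.toBase32HexText h)

-- Build a Hash from raw bytes and render it as base32hex Text.
exampleText :: Text
exampleText = Hash.toBase32HexText (Hash.fromByteString "example bytes")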
diff --git a/lib/unison-hash/unison-hash.cabal b/lib/unison-hash/unison-hash.cabal new file mode 100644 index 000000000..783c09a53 --- /dev/null +++ b/lib/unison-hash/unison-hash.cabal @@ -0,0 +1,58 @@ +cabal-version: 1.12 + +-- This file has been generated from package.yaml by hpack version 0.34.4. +-- +-- see: https://github.com/sol/hpack + +name: unison-hash +version: 0.0.0 +homepage: https://github.com/unisonweb/unison#readme +bug-reports: https://github.com/unisonweb/unison/issues +copyright: Copyright (C) 2013-2021 Unison Computing, PBC and contributors +build-type: Simple + +source-repository head + type: git + location: https://github.com/unisonweb/unison + +library + exposed-modules: + Unison.Hash + Unison.Hash32 + hs-source-dirs: + src + default-extensions: + ApplicativeDo + BangPatterns + BlockArguments + DeriveAnyClass + DeriveFunctor + DeriveGeneric + DeriveTraversable + DerivingStrategies + DerivingVia + DoAndIfThenElse + DuplicateRecordFields + FlexibleContexts + FlexibleInstances + GeneralizedNewtypeDeriving + LambdaCase + MultiParamTypeClasses + NamedFieldPuns + OverloadedStrings + PatternSynonyms + RankNTypes + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + TypeFamilies + ViewPatterns + ghc-options: -Wall -O0 -fno-warn-name-shadowing -fno-warn-missing-pattern-synonym-signatures + build-depends: + base + , bytestring + , text + , unison-prelude + , unison-util-base32hex + default-language: Haskell2010 diff --git a/lib/unison-hashing/package.yaml b/lib/unison-hashing/package.yaml new file mode 100644 index 000000000..118a08adc --- /dev/null +++ b/lib/unison-hashing/package.yaml @@ -0,0 +1,43 @@ +name: unison-hashing +github: unisonweb/unison +copyright: Copyright (C) 2013-2021 Unison Computing, PBC and contributors + +ghc-options: -Wall -O0 -fno-warn-name-shadowing -fno-warn-missing-pattern-synonym-signatures + +dependencies: + - base + - unison-hash + +library: + source-dirs: src + when: + - condition: false + other-modules: Paths_unison_hashing + +default-extensions: + - ApplicativeDo + - BangPatterns + - BlockArguments + - DeriveAnyClass + - DeriveFunctor + - DeriveGeneric + - DeriveTraversable + - DerivingStrategies + - DerivingVia + - DoAndIfThenElse + - DuplicateRecordFields + - FlexibleContexts + - FlexibleInstances + - GeneralizedNewtypeDeriving + - LambdaCase + - MultiParamTypeClasses + - NamedFieldPuns + - OverloadedStrings + - PatternSynonyms + - RankNTypes + - ScopedTypeVariables + - StandaloneDeriving + - TupleSections + - TypeApplications + - TypeFamilies + - ViewPatterns diff --git a/lib/unison-hashing/src/Unison/Hashing/ContentAddressable.hs b/lib/unison-hashing/src/Unison/Hashing/ContentAddressable.hs new file mode 100644 index 000000000..cc884d79d --- /dev/null +++ b/lib/unison-hashing/src/Unison/Hashing/ContentAddressable.hs @@ -0,0 +1,39 @@ +module Unison.Hashing.ContentAddressable + ( ContentAddressable (..), + ) +where + +import Unison.Hash (Hash) + +-- | A type class that is inhabited by types that can compute a hash of their content. +-- +-- The base instances of this class should only live in dedicated "hashing packages" such as @unison-hashing-v2@, whose +-- types and implementations should never change. +-- +-- Trivial wrapper instances can be written around these, but note these pipelines from +-- @MyType ==> SomeHashingType ==> Hash@ must take care not to change the @MyType ==> SomeHashingType@ conversion, once +-- written. 
+-- +-- For example, we might have a representation of some namespace in memory +-- +-- @ +-- data Namespace = Namespace Terms Types OtherStuff CachesAndWhatnot +-- @ +-- +-- with a somewhat equivalent "hashing" type in some "hashing package", with a ContentAddressable instance +-- +-- @ +-- data HashingNamespace = Namespace Terms Types +-- @ +-- +-- We can of course make our own convenience instance +-- +-- @ +-- instance ContentAddressable Namespace where +-- contentHash = contentHash . namespaceToHashingNamespace +-- @ +-- +-- But we must make sure that the implementation of @namespaceToHashingNamespace@ never changes the fields in the +-- corresponding @HashingNamespace@, even as features are added to or removed from @Namespace@. +class ContentAddressable a where + contentHash :: a -> Hash diff --git a/lib/unison-hashing/unison-hashing.cabal b/lib/unison-hashing/unison-hashing.cabal new file mode 100644 index 000000000..0c5e5c97e --- /dev/null +++ b/lib/unison-hashing/unison-hashing.cabal @@ -0,0 +1,54 @@ +cabal-version: 1.12 + +-- This file has been generated from package.yaml by hpack version 0.34.4. +-- +-- see: https://github.com/sol/hpack + +name: unison-hashing +version: 0.0.0 +homepage: https://github.com/unisonweb/unison#readme +bug-reports: https://github.com/unisonweb/unison/issues +copyright: Copyright (C) 2013-2021 Unison Computing, PBC and contributors +build-type: Simple + +source-repository head + type: git + location: https://github.com/unisonweb/unison + +library + exposed-modules: + Unison.Hashing.ContentAddressable + hs-source-dirs: + src + default-extensions: + ApplicativeDo + BangPatterns + BlockArguments + DeriveAnyClass + DeriveFunctor + DeriveGeneric + DeriveTraversable + DerivingStrategies + DerivingVia + DoAndIfThenElse + DuplicateRecordFields + FlexibleContexts + FlexibleInstances + GeneralizedNewtypeDeriving + LambdaCase + MultiParamTypeClasses + NamedFieldPuns + OverloadedStrings + PatternSynonyms + RankNTypes + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + TypeFamilies + ViewPatterns + ghc-options: -Wall -O0 -fno-warn-name-shadowing -fno-warn-missing-pattern-synonym-signatures + build-depends: + base + , unison-hash + default-language: Haskell2010 diff --git a/lib/unison-prelude/src/U/Util/Text.hs b/lib/unison-prelude/src/U/Util/Text.hs index 69664c836..c16e33b72 100644 --- a/lib/unison-prelude/src/U/Util/Text.hs +++ b/lib/unison-prelude/src/U/Util/Text.hs @@ -21,7 +21,8 @@ import Safe.Foldable (minimumMay) stripMargin :: Text -> Text stripMargin str = let stripLen = - Data.Maybe.fromMaybe 0 . minimumMay + Data.Maybe.fromMaybe 0 + . minimumMay . map (Text.length . fst . Text.span (== ' ')) . filter (not . Text.all (Char.isSpace)) $ Text.lines str diff --git a/lib/unison-prelude/src/Unison/Debug.hs b/lib/unison-prelude/src/Unison/Debug.hs index 9c7e03c00..f646b48d7 100644 --- a/lib/unison-prelude/src/Unison/Debug.hs +++ b/lib/unison-prelude/src/Unison/Debug.hs @@ -37,6 +37,10 @@ data DebugFlag | -- | Useful for adding temporary debugging statements during development. 
-- Remove uses of Debug.Temp before merging to keep things clean for the next person :) Temp + | -- | Shows Annotations when printing terms + Annotations + | PatternCoverage + | PatternCoverageConstraintSolver deriving (Eq, Ord, Show, Bounded, Enum) debugFlags :: Set DebugFlag @@ -58,6 +62,9 @@ debugFlags = case (unsafePerformIO (lookupEnv "UNISON_DEBUG")) of "LSP" -> pure LSP "TIMING" -> pure Timing "TEMP" -> pure Temp + "ANNOTATIONS" -> pure Annotations + "PATTERN_COVERAGE" -> pure PatternCoverage + "PATTERN_COVERAGE_CONSTRAINT_SOLVER" -> pure PatternCoverageConstraintSolver _ -> empty {-# NOINLINE debugFlags #-} @@ -101,13 +108,25 @@ debugTemp :: Bool debugTemp = Temp `Set.member` debugFlags {-# NOINLINE debugTemp #-} +debugAnnotations :: Bool +debugAnnotations = Annotations `Set.member` debugFlags +{-# NOINLINE debugAnnotations #-} + +debugPatternCoverage :: Bool +debugPatternCoverage = PatternCoverage `Set.member` debugFlags +{-# NOINLINE debugPatternCoverage #-} + +debugPatternCoverageConstraintSolver :: Bool +debugPatternCoverageConstraintSolver = PatternCoverageConstraintSolver `Set.member` debugFlags +{-# NOINLINE debugPatternCoverageConstraintSolver #-} + -- | Use for trace-style selective debugging. -- E.g. 1 + (debug Git "The second number" 2) -- -- Or, use in pattern matching to view arguments. -- E.g. -- myFunc (debug Git "argA" -> argA) = ... -debug :: Show a => DebugFlag -> String -> a -> a +debug :: (Show a) => DebugFlag -> String -> a -> a debug flag msg a = if shouldDebug flag then pTraceShowId (pTrace (msg <> ":\n") a) @@ -135,7 +154,7 @@ debugLogM flag msg = whenDebug flag $ pTraceM msg -- | A 'when' block which is triggered if the given flag is being debugged. -whenDebug :: Monad m => DebugFlag -> m () -> m () +whenDebug :: (Monad m) => DebugFlag -> m () -> m () whenDebug flag action = do when (shouldDebug flag) action @@ -151,3 +170,6 @@ shouldDebug = \case LSP -> debugLSP Timing -> debugTiming Temp -> debugTemp + Annotations -> debugAnnotations + PatternCoverage -> debugPatternCoverage + PatternCoverageConstraintSolver -> debugPatternCoverageConstraintSolver diff --git a/lib/unison-prelude/src/Unison/Prelude.hs b/lib/unison-prelude/src/Unison/Prelude.hs index 6ed914f63..b7f3bb779 100644 --- a/lib/unison-prelude/src/Unison/Prelude.hs +++ b/lib/unison-prelude/src/Unison/Prelude.hs @@ -89,43 +89,43 @@ altMap f = altSum . fmap f . toList -- @@ -- onNothing (throwIO MissingPerson) $ mayThing -- @@ -onNothing :: Applicative m => m a -> Maybe a -> m a +onNothing :: (Applicative m) => m a -> Maybe a -> m a onNothing m may = maybe m pure may -onNothingM :: Monad m => m a -> m (Maybe a) -> m a +onNothingM :: (Monad m) => m a -> m (Maybe a) -> m a onNothingM = flip whenNothingM -- | E.g. 
@maybePerson `whenNothing` throwIO MissingPerson@ -whenNothing :: Applicative m => Maybe a -> m a -> m a +whenNothing :: (Applicative m) => Maybe a -> m a -> m a whenNothing may m = maybe m pure may -whenNothingM :: Monad m => m (Maybe a) -> m a -> m a +whenNothingM :: (Monad m) => m (Maybe a) -> m a -> m a whenNothingM mx my = mx >>= maybe my pure -whenJust :: Applicative m => Maybe a -> (a -> m ()) -> m () +whenJust :: (Applicative m) => Maybe a -> (a -> m ()) -> m () whenJust mx f = maybe (pure ()) f mx -whenJustM :: Monad m => m (Maybe a) -> (a -> m ()) -> m () +whenJustM :: (Monad m) => m (Maybe a) -> (a -> m ()) -> m () whenJustM mx f = do mx >>= maybe (pure ()) f -onLeft :: Applicative m => (a -> m b) -> Either a b -> m b +onLeft :: (Applicative m) => (a -> m b) -> Either a b -> m b onLeft = flip whenLeft -onLeftM :: Monad m => (a -> m b) -> m (Either a b) -> m b +onLeftM :: (Monad m) => (a -> m b) -> m (Either a b) -> m b onLeftM = flip whenLeftM -whenLeft :: Applicative m => Either a b -> (a -> m b) -> m b +whenLeft :: (Applicative m) => Either a b -> (a -> m b) -> m b whenLeft = \case Left a -> \f -> f a Right b -> \_ -> pure b -whenLeftM :: Monad m => m (Either a b) -> (a -> m b) -> m b +whenLeftM :: (Monad m) => m (Either a b) -> (a -> m b) -> m b whenLeftM m f = m >>= \case Left x -> f x @@ -146,7 +146,7 @@ throwEitherM = throwEitherMWith id throwEitherMWith :: forall e e' m a. (MonadIO m, Exception e') => (e -> e') -> m (Either e a) -> m a throwEitherMWith f action = throwExceptT . withExceptT f $ (ExceptT action) -tShow :: Show a => a -> Text +tShow :: (Show a) => a -> Text tShow = Text.pack . show -- | Strictly read an entire file decoding UTF8. @@ -206,5 +206,5 @@ reportBug bugId msg = ] {-# WARNING wundefined "You left this wundefined." #-} -wundefined :: HasCallStack => a +wundefined :: (HasCallStack) => a wundefined = undefined diff --git a/lib/unison-prelude/src/Unison/Util/Alternative.hs b/lib/unison-prelude/src/Unison/Util/Alternative.hs index b97f3df9a..785ffb497 100644 --- a/lib/unison-prelude/src/Unison/Util/Alternative.hs +++ b/lib/unison-prelude/src/Unison/Util/Alternative.hs @@ -1,6 +1,7 @@ module Unison.Util.Alternative - ( whenM - ) where + ( whenM, + ) +where import Control.Applicative (Alternative (empty)) @@ -8,4 +9,3 @@ whenM :: (Monad m, Alternative m) => m Bool -> a -> m a whenM m a = do b <- m if b then pure a else empty - diff --git a/lib/unison-prelude/src/Unison/Util/Map.hs b/lib/unison-prelude/src/Unison/Util/Map.hs index be3af46cb..d682a3fe1 100644 --- a/lib/unison-prelude/src/Unison/Util/Map.hs +++ b/lib/unison-prelude/src/Unison/Util/Map.hs @@ -25,7 +25,7 @@ import Data.Vector (Vector) import qualified Data.Vector as Vector import Unison.Prelude -bimap :: Ord a' => (a -> a') -> (b -> b') -> Map a b -> Map a' b' +bimap :: (Ord a') => (a -> a') -> (b -> b') -> Map a b -> Map a' b' bimap fa fb = Map.fromList . map (B.bimap fa fb) . Map.toList bitraverse :: (Applicative f, Ord a') => (a -> f a') -> (b -> f b') -> Map a b -> f (Map a' b') @@ -36,12 +36,12 @@ bitraversed keyT valT f m = bitraverse (keyT f) (valT f) m -- | 'swap' throws away data if the input contains duplicate values -swap :: Ord b => Map a b -> Map b a +swap :: (Ord b) => Map a b -> Map b a swap = Map.foldlWithKey' (\z a b -> Map.insert b a z) mempty -- | Upsert an element into a map. -upsert :: Ord k => (Maybe v -> v) -> k -> Map k v -> Map k v +upsert :: (Ord k) => (Maybe v -> v) -> k -> Map k v -> Map k v upsert f = Map.alter (Just . 
f) @@ -50,7 +50,7 @@ valuesVector = Vector.fromList . Map.elems -- | Like 'Map.delete', but returns the value as well. -deleteLookup :: Ord k => k -> Map k v -> (Maybe v, Map k v) +deleteLookup :: (Ord k) => k -> Map k v -> (Maybe v, Map k v) deleteLookup = Map.alterF (,Nothing) @@ -85,7 +85,7 @@ unionWithM f m1 m2 = -- @ -- remap f = Map.fromList . map f . Map.toList -- @ -remap :: Ord k1 => ((k0, v0) -> (k1, v1)) -> Map k0 v0 -> Map k1 v1 +remap :: (Ord k1) => ((k0, v0) -> (k1, v1)) -> Map k0 v0 -> Map k1 v1 remap f = Map.fromList . map f . Map.toList diff --git a/lib/unison-prelude/src/Unison/Util/Monoid.hs b/lib/unison-prelude/src/Unison/Util/Monoid.hs index 82cf96264..239d91159 100644 --- a/lib/unison-prelude/src/Unison/Util/Monoid.hs +++ b/lib/unison-prelude/src/Unison/Util/Monoid.hs @@ -22,11 +22,11 @@ intercalateMap separator renderer elements = intercalateMapM :: (Traversable t, Monad m, Monoid a) => a -> (b -> m a) -> t b -> m a intercalateMapM separator renderer = traverse renderer >=> return . intercalateMap separator id -fromMaybe :: Monoid a => Maybe a -> a +fromMaybe :: (Monoid a) => Maybe a -> a fromMaybe Nothing = mempty fromMaybe (Just a) = a -whenM, unlessM :: Monoid a => Bool -> a -> a +whenM, unlessM :: (Monoid a) => Bool -> a -> a whenM True a = a whenM False _ = mempty unlessM = whenM . not diff --git a/lib/unison-prelude/src/Unison/Util/Set.hs b/lib/unison-prelude/src/Unison/Util/Set.hs index fe7392f0e..9d18673a8 100644 --- a/lib/unison-prelude/src/Unison/Util/Set.hs +++ b/lib/unison-prelude/src/Unison/Util/Set.hs @@ -21,22 +21,22 @@ asSingleton xs = if Set.size xs == 1 then Just (Set.findMin xs) else Nothing -- | Set difference, but return @Nothing@ if the difference is empty. -difference1 :: Ord a => Set a -> Set a -> Maybe (Set a) +difference1 :: (Ord a) => Set a -> Set a -> Maybe (Set a) difference1 xs ys = if null zs then Nothing else Just zs where zs = Set.difference xs ys -symmetricDifference :: Ord a => Set a -> Set a -> Set a +symmetricDifference :: (Ord a) => Set a -> Set a -> Set a symmetricDifference a b = (a `Set.difference` b) `Set.union` (b `Set.difference` a) -mapMaybe :: Ord b => (a -> Maybe b) -> Set a -> Set b +mapMaybe :: (Ord b) => (a -> Maybe b) -> Set a -> Set b mapMaybe f = Set.fromList . Maybe.mapMaybe f . Set.toList traverse :: (Applicative f, Ord b) => (a -> f b) -> Set a -> f (Set b) traverse f = fmap Set.fromList . Prelude.traverse f . Set.toList -flatMap :: Ord b => (a -> Set b) -> Set a -> Set b +flatMap :: (Ord b) => (a -> Set b) -> Set a -> Set b flatMap f = Set.unions . fmap f . 
Set.toList filterM :: (Ord a, Monad m) => (a -> m Bool) -> Set a -> m (Set a) diff --git a/lib/unison-prelude/src/Unison/Util/Timing.hs b/lib/unison-prelude/src/Unison/Util/Timing.hs index a3702ee67..16ffed52c 100644 --- a/lib/unison-prelude/src/Unison/Util/Timing.hs +++ b/lib/unison-prelude/src/Unison/Util/Timing.hs @@ -12,7 +12,7 @@ import System.IO.Unsafe (unsafePerformIO) import qualified Unison.Debug as Debug import UnliftIO (MonadIO, liftIO) -time :: MonadIO m => String -> m a -> m a +time :: (MonadIO m) => String -> m a -> m a time label ma = if Debug.shouldDebug Debug.Timing then do @@ -29,7 +29,7 @@ time label ma = else ma -- Mitchell says: this function doesn't look like it would work at all; let's just delete it -unsafeTime :: Monad m => String -> m a -> m a +unsafeTime :: (Monad m) => String -> m a -> m a unsafeTime label ma = if Debug.shouldDebug Debug.Timing then do diff --git a/lib/unison-pretty-printer/src/Unison/Util/AnnotatedText.hs b/lib/unison-pretty-printer/src/Unison/Util/AnnotatedText.hs index 49bcb16cb..cfd343b5b 100644 --- a/lib/unison-pretty-printer/src/Unison/Util/AnnotatedText.hs +++ b/lib/unison-pretty-printer/src/Unison/Util/AnnotatedText.hs @@ -182,10 +182,10 @@ snipWithContext margin source = text', text2' :: [String] (text', text2') = splitAt takeLineCount (drop dropLineCount (lines (text source))) - in AnnotatedExcerpt startLine' (unlines text') group' : - snipWithContext - margin - (AnnotatedExcerpt (endLine' + 1) (unlines text2') rest') + in AnnotatedExcerpt startLine' (unlines text') group' + : snipWithContext + margin + (AnnotatedExcerpt (endLine' + 1) (unlines text2') rest') where withinMargin :: Range -> Range -> Bool withinMargin (Range _start1 (Pos end1 _)) (Range (Pos start2 _) _end2) = diff --git a/lib/unison-pretty-printer/src/Unison/Util/Less.hs b/lib/unison-pretty-printer/src/Unison/Util/Less.hs index 98c7c5a3a..c4c658edc 100644 --- a/lib/unison-pretty-printer/src/Unison/Util/Less.hs +++ b/lib/unison-pretty-printer/src/Unison/Util/Less.hs @@ -48,7 +48,7 @@ less str = do lessArgs :: [String] lessArgs = [ "--no-init", -- don't clear the screen on exit - "--raw-control-chars", -- pass through colors and stuff + "--RAW-CONTROL-CHARS", -- pass through colors and stuff "--prompt=[less] Use space/arrow keys to navigate, or 'q' to return to ucm:", "--quit-if-one-screen" -- self-explanatory ] diff --git a/lib/unison-pretty-printer/src/Unison/Util/Pretty.hs b/lib/unison-pretty-printer/src/Unison/Util/Pretty.hs index b7dde7ad4..ac252c7e3 100644 --- a/lib/unison-pretty-printer/src/Unison/Util/Pretty.hs +++ b/lib/unison-pretty-printer/src/Unison/Util/Pretty.hs @@ -187,7 +187,7 @@ data F s r | Append (Seq r) deriving (Eq, Show, Foldable, Traversable, Functor) -isEmpty :: Eq s => IsString s => Pretty s -> Bool +isEmpty :: (Eq s) => (IsString s) => Pretty s -> Bool isEmpty s = out s == Empty || out s == Lit "" mapLit :: (s -> t) -> F s r -> F t r @@ -211,7 +211,7 @@ orElses :: [Pretty s] -> Pretty s orElses [] = mempty orElses ps = foldr1 orElse ps -wrapImpl :: IsString s => [Pretty s] -> Pretty s +wrapImpl :: (IsString s) => [Pretty s] -> Pretty s wrapImpl [] = mempty wrapImpl (p : ps) = wrap_ . Seq.fromList $ @@ -241,7 +241,7 @@ wrapString s = wrap (lit $ fromString s) paragraphyText :: (LL.ListLike s Char, IsString s) => Text -> Pretty s paragraphyText = sep "\n" . fmap (wrapPreserveSpaces . text) . 
Text.splitOn "\n" -wrap' :: IsString s => (s -> [Pretty s]) -> Pretty s -> Pretty s +wrap' :: (IsString s) => (s -> [Pretty s]) -> Pretty s -> Pretty s wrap' wordify p = wrapImpl (toLeaves [p]) where toLeaves [] = [] @@ -383,22 +383,22 @@ render availableWidth p = go mempty [Right p] SingleLine c -> SingleLine (min c (availableWidth - 1)) MultiLine fc lc mc -> MultiLine fc lc (min mc (availableWidth - 1)) -newline :: IsString s => Pretty s +newline :: (IsString s) => Pretty s newline = "\n" -lineSkip :: IsString s => Pretty s +lineSkip :: (IsString s) => Pretty s lineSkip = newline <> newline -spaceIfNeeded :: Eq s => IsString s => Pretty s -> Pretty s -> Pretty s +spaceIfNeeded :: (Eq s) => (IsString s) => Pretty s -> Pretty s -> Pretty s spaceIfNeeded a b = if isEmpty a then b else a <> " " <> b -spaceIfBreak :: IsString s => Pretty s +spaceIfBreak :: (IsString s) => Pretty s spaceIfBreak = "" `orElse` " " -spacesIfBreak :: IsString s => Int -> Pretty s +spacesIfBreak :: (IsString s) => Int -> Pretty s spacesIfBreak n = "" `orElse` fromString (replicate n ' ') -softbreak :: IsString s => Pretty s +softbreak :: (IsString s) => Pretty s softbreak = " " `orElse` newline spaced :: (Foldable f, IsString s) => f (Pretty s) -> Pretty s @@ -460,12 +460,12 @@ sepNonEmpty :: (Foldable f, IsString s) => Pretty s -> f (Pretty s) -> Pretty s sepNonEmpty between ps = sep between (nonEmpty ps) -- if list is too long, adds `... 22 more` to the end -excerptSep :: IsString s => Maybe Int -> Pretty s -> [Pretty s] -> Pretty s +excerptSep :: (IsString s) => Maybe Int -> Pretty s -> [Pretty s] -> Pretty s excerptSep maxCount = excerptSep' maxCount (\i -> group ("... " <> shown i <> " more")) excerptSep' :: - IsString s => + (IsString s) => Maybe Int -> (Int -> Pretty s) -> Pretty s -> @@ -474,7 +474,7 @@ excerptSep' :: excerptSep' maxCount summarize s ps = case maxCount of Just max | length ps > max -> - sep s (take max ps) <> summarize (length ps - max) + sep s (take max ps) <> summarize (length ps - max) _ -> sep s ps nonEmpty :: (Foldable f, IsString s) => f (Pretty s) -> [Pretty s] @@ -483,10 +483,10 @@ nonEmpty (toList -> l) = case l of h : t -> h : nonEmpty t [] -> [] -parenthesize :: IsString s => Pretty s -> Pretty s +parenthesize :: (IsString s) => Pretty s -> Pretty s parenthesize p = group $ "(" <> p <> ")" -parenthesizeIf :: IsString s => Bool -> Pretty s -> Pretty s +parenthesizeIf :: (IsString s) => Bool -> Pretty s -> Pretty s parenthesizeIf False s = s parenthesizeIf True s = parenthesize s @@ -564,10 +564,10 @@ numberedColumnNHeader headers rows = in columnNHeader ("" : headers) (zipWith (:) numbers rows) -- Opinionated `numbered` that uses bold numbers in front -numberedList :: Foldable f => f (Pretty ColorText) -> Pretty ColorText +numberedList :: (Foldable f) => f (Pretty ColorText) -> Pretty ColorText numberedList = numbered (\i -> hiBlack . fromString $ show i <> ".") -leftPad, rightPad :: IsString s => Width -> Pretty s -> Pretty s +leftPad, rightPad :: (IsString s) => Width -> Pretty s -> Pretty s leftPad n p = let rem = n - preferredWidth p in if rem > 0 then fromString (replicate (widthToInt rem) ' ') <> p else p @@ -584,7 +584,7 @@ excerptColumn2Headed :: excerptColumn2Headed max hd cols = case max of Just max | len > max -> - lines [column2 (hd : take max cols), "... " <> shown (len - max) <> " more"] + lines [column2 (hd : take max cols), "... 
" <> shown (len - max) <> " more"] _ -> column2 (hd : cols) where len = length cols @@ -771,13 +771,13 @@ align' rows = alignedRows | (col0, col1) <- rows ] -text :: IsString s => Text -> Pretty s +text :: (IsString s) => Text -> Pretty s text t = fromString (Text.unpack t) num :: (Show n, Num n, IsString s) => n -> Pretty s num n = fromString (show n) -string :: IsString s => String -> Pretty s +string :: (IsString s) => String -> Pretty s string = fromString shown :: (Show a, IsString s) => a -> Pretty s @@ -912,7 +912,7 @@ indentAfterNewline by = flatMap f -- or other extra info attached to the original `s` lit (LL.take (LL.length hd) s0) <> "\n" <> by <> f (LL.drop 1 s) -instance IsString s => IsString (Pretty s) where +instance (IsString s) => IsString (Pretty s) where fromString s = lit' (foldMap chDelta s) (fromString s) instance Semigroup (Pretty s) where @@ -1022,7 +1022,7 @@ background f p = _ -> p plural :: - Foldable f => + (Foldable f) => f a -> Pretty ColorText -> Pretty ColorText @@ -1068,7 +1068,7 @@ type BoxStyle s = (Pretty s, Pretty s) -- singleton ) -lBoxStyle1, lBoxStyle2, rBoxStyle2 :: IsString s => BoxStyle s +lBoxStyle1, lBoxStyle2, rBoxStyle2 :: (IsString s) => BoxStyle s lBoxStyle1 = ( ("┌ ", "│ "), -- first ("├ ", "│ "), -- middle @@ -1143,23 +1143,23 @@ fatalCallout = callout "❗️" okCallout = callout "✅" blockedCallout = callout "🚫" -backticked :: IsString s => Pretty s -> Pretty s +backticked :: (IsString s) => Pretty s -> Pretty s backticked p = group ("`" <> p <> "`") -- | Attach some punctuation after the closing backtick. -backticked' :: IsString s => Pretty s -> Pretty s -> Pretty s +backticked' :: (IsString s) => Pretty s -> Pretty s -> Pretty s backticked' p end = group ("`" <> p <> "`" <> end) -singleQuoted :: IsString s => Pretty s -> Pretty s +singleQuoted :: (IsString s) => Pretty s -> Pretty s singleQuoted p = "'" <> p <> "'" -singleQuoted' :: IsString s => Pretty s -> Pretty s -> Pretty s +singleQuoted' :: (IsString s) => Pretty s -> Pretty s -> Pretty s singleQuoted' p end = "'" <> p <> "'" <> end -instance Show s => Show (Pretty s) where +instance (Show s) => Show (Pretty s) where show p = render 80 (metaPretty p) -metaPretty :: Show s => Pretty s -> Pretty String +metaPretty :: (Show s) => Pretty s -> Pretty String metaPretty = go (0 :: Int) where go prec p = case out p of @@ -1175,7 +1175,7 @@ metaPretty = go (0 :: Int) "OrElse" `hang` spaced [go 1 a, go 1 b] Append s -> surroundCommas "[" "]" (go 1 <$> s) -map :: LL.ListLike s2 Char => (s -> s2) -> Pretty s -> Pretty s2 +map :: (LL.ListLike s2 Char) => (s -> s2) -> Pretty s -> Pretty s2 map f p = case out p of Append ps -> foldMap (map f) ps Empty -> mempty diff --git a/lib/unison-pretty-printer/tests/Unison/Test/ColorText.hs b/lib/unison-pretty-printer/tests/Unison/Test/ColorText.hs index f34dd762c..42a213d86 100644 --- a/lib/unison-pretty-printer/tests/Unison/Test/ColorText.hs +++ b/lib/unison-pretty-printer/tests/Unison/Test/ColorText.hs @@ -65,7 +65,7 @@ ex4 = (Range (Pos 1 1) (Pos 1 5), Green) ] -ex :: Ord a => AnnotatedExcerpt a +ex :: (Ord a) => AnnotatedExcerpt a ex = [r|The Tempest | Act 1, Scene 1 diff --git a/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs b/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs index 48712ce2b..2d1b9a387 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite/Connection.hs @@ -101,7 +101,7 @@ import UnliftIO.Exception -- does not automatically enforce foreign key integrity, because it elected to 
maintain backwards compatibility with -- code that was written before the foreign key integrity feature was implemented. withConnection :: - MonadUnliftIO m => + (MonadUnliftIO m) => -- | Connection name, for debugging. String -> -- | Path to SQLite database file. @@ -168,7 +168,7 @@ logQuery sql params result = -- Without results, with parameters -execute :: Sqlite.ToRow a => Connection -> Sql -> a -> IO () +execute :: (Sqlite.ToRow a) => Connection -> Sql -> a -> IO () execute conn@(Connection _ _ conn0) s params = do logQuery s (Just params) Nothing Sqlite.execute conn0 (coerce s) params `catch` \(exception :: Sqlite.SQLError) -> @@ -180,7 +180,7 @@ execute conn@(Connection _ _ conn0) s params = do sql = s } -executeMany :: Sqlite.ToRow a => Connection -> Sql -> [a] -> IO () +executeMany :: (Sqlite.ToRow a) => Connection -> Sql -> [a] -> IO () executeMany conn@(Connection _ _ conn0) s = \case [] -> pure () params -> do @@ -397,7 +397,7 @@ queryOneColCheck conn s params check = -- With results, without parameters, without checks -queryListRow_ :: Sqlite.FromRow a => Connection -> Sql -> IO [a] +queryListRow_ :: (Sqlite.FromRow a) => Connection -> Sql -> IO [a] queryListRow_ conn@(Connection _ _ conn0) s = do result <- Sqlite.query_ conn0 (coerce s) @@ -412,28 +412,28 @@ queryListRow_ conn@(Connection _ _ conn0) s = do logQuery s Nothing (Just result) pure result -queryListCol_ :: forall a. Sqlite.FromField a => Connection -> Sql -> IO [a] +queryListCol_ :: forall a. (Sqlite.FromField a) => Connection -> Sql -> IO [a] queryListCol_ conn s = coerce @(IO [Sqlite.Only a]) @(IO [a]) (queryListRow_ conn s) -queryMaybeRow_ :: Sqlite.FromRow a => Connection -> Sql -> IO (Maybe a) +queryMaybeRow_ :: (Sqlite.FromRow a) => Connection -> Sql -> IO (Maybe a) queryMaybeRow_ conn s = queryListRowCheck_ conn s \case [] -> Right Nothing [x] -> Right (Just x) xs -> Left (SomeSqliteExceptionReason (ExpectedAtMostOneRowException (anythingToString xs))) -queryMaybeCol_ :: forall a. Sqlite.FromField a => Connection -> Sql -> IO (Maybe a) +queryMaybeCol_ :: forall a. (Sqlite.FromField a) => Connection -> Sql -> IO (Maybe a) queryMaybeCol_ conn s = coerce @(IO (Maybe (Sqlite.Only a))) @(IO (Maybe a)) (queryMaybeRow_ conn s) -queryOneRow_ :: Sqlite.FromRow a => Connection -> Sql -> IO a +queryOneRow_ :: (Sqlite.FromRow a) => Connection -> Sql -> IO a queryOneRow_ conn s = queryListRowCheck_ conn s \case [x] -> Right x xs -> Left (SomeSqliteExceptionReason (ExpectedExactlyOneRowException (anythingToString xs))) -queryOneCol_ :: forall a. Sqlite.FromField a => Connection -> Sql -> IO a +queryOneCol_ :: forall a. (Sqlite.FromField a) => Connection -> Sql -> IO a queryOneCol_ conn s = coerce @(IO (Sqlite.Only a)) @(IO a) (queryOneRow_ conn s) @@ -443,7 +443,7 @@ queryListRowCheck_ :: (Sqlite.FromRow a, SqliteExceptionReason e) => Connection queryListRowCheck_ conn s check = gqueryListCheck_ conn s (mapLeft SomeSqliteExceptionReason . check) -gqueryListCheck_ :: Sqlite.FromRow a => Connection -> Sql -> ([a] -> Either SomeSqliteExceptionReason r) -> IO r +gqueryListCheck_ :: (Sqlite.FromRow a) => Connection -> Sql -> ([a] -> Either SomeSqliteExceptionReason r) -> IO r gqueryListCheck_ conn s check = do xs <- queryListRow_ conn s case check xs of @@ -523,7 +523,7 @@ vacuum conn = Right () -> pure True -- | @VACUUM INTO@ -vacuumInto :: Connection -> Text -> IO () +vacuumInto :: Connection -> FilePath -> IO () vacuumInto conn file = execute conn "VACUUM INTO ?" 
(Sqlite.Only file) @@ -550,7 +550,7 @@ rollback conn = execute_ conn "ROLLBACK" -- | Perform an action within a named savepoint. The action is provided a rollback action. -withSavepoint :: MonadUnliftIO m => Connection -> Text -> (m () -> m a) -> m a +withSavepoint :: (MonadUnliftIO m) => Connection -> Text -> (m () -> m a) -> m a withSavepoint conn name action = withRunInIO \runInIO -> withSavepointIO conn name \rollback -> diff --git a/lib/unison-sqlite/src/Unison/Sqlite/Exception.hs b/lib/unison-sqlite/src/Unison/Sqlite/Exception.hs index 931cb24f5..7d8a56522 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite/Exception.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite/Exception.hs @@ -52,7 +52,7 @@ import UnliftIO.Exception -- When actions are run on an untrusted codebase, e.g. one downloaded from a remote server, it is sufficient to catch -- just one exception type, @SomeSqliteException@. data SomeSqliteException - = forall e. Exception e => SomeSqliteException e + = forall e. (Exception e) => SomeSqliteException e deriving anyclass (Exception) instance Show SomeSqliteException where @@ -155,7 +155,7 @@ throwSqliteQueryException SqliteQueryExceptionInfo {connection, exception, param } data SomeSqliteExceptionReason - = forall e. SqliteExceptionReason e => SomeSqliteExceptionReason e + = forall e. (SqliteExceptionReason e) => SomeSqliteExceptionReason e deriving anyclass (SqliteExceptionReason) instance Show SomeSqliteExceptionReason where diff --git a/lib/unison-sqlite/src/Unison/Sqlite/JournalMode.hs b/lib/unison-sqlite/src/Unison/Sqlite/JournalMode.hs index 67537eff6..8ac4d80df 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite/JournalMode.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite/JournalMode.hs @@ -32,7 +32,7 @@ journalModeFromText = \case "off" -> Just JournalMode'OFF _ -> Nothing -unsafeJournalModeFromText :: HasCallStack => Text -> JournalMode +unsafeJournalModeFromText :: (HasCallStack) => Text -> JournalMode unsafeJournalModeFromText s = fromMaybe (error ("Unknown journal mode: " ++ Text.unpack s)) (journalModeFromText s) @@ -45,7 +45,7 @@ journalModeToText = \case JournalMode'WAL -> "wal" JournalMode'OFF -> "off" -trySetJournalMode :: MonadIO m => Connection -> JournalMode -> m () +trySetJournalMode :: (MonadIO m) => Connection -> JournalMode -> m () trySetJournalMode conn mode0 = liftIO do queryOneRowCheck_ conn diff --git a/lib/unison-sqlite/src/Unison/Sqlite/Transaction.hs b/lib/unison-sqlite/src/Unison/Sqlite/Transaction.hs index 0977625b3..9f4c0d20f 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite/Transaction.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite/Transaction.hs @@ -86,7 +86,7 @@ unsafeGetConnection :: Transaction Connection unsafeGetConnection = Transaction pure -- | Run a transaction on the given connection. -runTransaction :: MonadIO m => Connection -> Transaction a -> m a +runTransaction :: (MonadIO m) => Connection -> Transaction a -> m a runTransaction conn (Transaction f) = liftIO do uninterruptibleMask \restore -> do Connection.begin conn @@ -111,7 +111,7 @@ runTransaction conn (Transaction f) = liftIO do -- -- The transaction is never retried, so it is (more) safe to interleave arbitrary IO actions. If the transaction does -- attempt a write and gets SQLITE_BUSY, it's your fault! -runReadOnlyTransaction :: MonadUnliftIO m => Connection -> ((forall x. Transaction x -> m x) -> m a) -> m a +runReadOnlyTransaction :: (MonadUnliftIO m) => Connection -> ((forall x. 
Transaction x -> m x) -> m a) -> m a runReadOnlyTransaction conn f = withRunInIO \runInIO -> runReadOnlyTransaction_ conn (runInIO (f (\transaction -> liftIO (unsafeUnTransaction transaction conn)))) @@ -134,7 +134,7 @@ runReadOnlyTransaction_ conn action = do -- BEGIN/COMMIT statements. -- -- The transaction is never retried, so it is (more) safe to interleave arbitrary IO actions. -runWriteTransaction :: MonadUnliftIO m => Connection -> ((forall x. Transaction x -> m x) -> m a) -> m a +runWriteTransaction :: (MonadUnliftIO m) => Connection -> ((forall x. Transaction x -> m x) -> m a) -> m a runWriteTransaction conn f = withRunInIO \runInIO -> uninterruptibleMask \restore -> @@ -197,11 +197,11 @@ unsafeIO action = -- Without results, with parameters -execute :: Sqlite.ToRow a => Sql -> a -> Transaction () +execute :: (Sqlite.ToRow a) => Sql -> a -> Transaction () execute s params = do Transaction \conn -> Connection.execute conn s params -executeMany :: Sqlite.ToRow a => Sql -> [a] -> Transaction () +executeMany :: (Sqlite.ToRow a) => Sql -> [a] -> Transaction () executeMany s params = Transaction \conn -> Connection.executeMany conn s params @@ -324,27 +324,27 @@ queryOneColCheck s params check = -- With results, without parameters, without checks -queryListRow_ :: Sqlite.FromRow a => Sql -> Transaction [a] +queryListRow_ :: (Sqlite.FromRow a) => Sql -> Transaction [a] queryListRow_ s = Transaction \conn -> Connection.queryListRow_ conn s -queryListCol_ :: Sqlite.FromField a => Sql -> Transaction [a] +queryListCol_ :: (Sqlite.FromField a) => Sql -> Transaction [a] queryListCol_ s = Transaction \conn -> Connection.queryListCol_ conn s -queryMaybeRow_ :: Sqlite.FromRow a => Sql -> Transaction (Maybe a) +queryMaybeRow_ :: (Sqlite.FromRow a) => Sql -> Transaction (Maybe a) queryMaybeRow_ s = Transaction \conn -> Connection.queryMaybeRow_ conn s -queryMaybeCol_ :: Sqlite.FromField a => Sql -> Transaction (Maybe a) +queryMaybeCol_ :: (Sqlite.FromField a) => Sql -> Transaction (Maybe a) queryMaybeCol_ s = Transaction \conn -> Connection.queryMaybeCol_ conn s -queryOneRow_ :: Sqlite.FromRow a => Sql -> Transaction a +queryOneRow_ :: (Sqlite.FromRow a) => Sql -> Transaction a queryOneRow_ s = Transaction \conn -> Connection.queryOneRow_ conn s -queryOneCol_ :: Sqlite.FromField a => Sql -> Transaction a +queryOneCol_ :: (Sqlite.FromField a) => Sql -> Transaction a queryOneCol_ s = Transaction \conn -> Connection.queryOneCol_ conn s diff --git a/lib/unison-sqlite/src/Unison/Sqlite/Values.hs b/lib/unison-sqlite/src/Unison/Sqlite/Values.hs index e38c4df05..8b3730730 100644 --- a/lib/unison-sqlite/src/Unison/Sqlite/Values.hs +++ b/lib/unison-sqlite/src/Unison/Sqlite/Values.hs @@ -16,7 +16,7 @@ newtype Values a = Values (List.NonEmpty a) deriving stock (Show) -instance Sqlite.Simple.ToRow a => Sqlite.Simple.ToRow (Values a) where +instance (Sqlite.Simple.ToRow a) => Sqlite.Simple.ToRow (Values a) where toRow (Values values) = foldMap Sqlite.Simple.toRow values @@ -26,7 +26,7 @@ instance Sqlite.Simple.ToRow a => Sqlite.Simple.ToRow (Values a) where -- @ -- VALUES (?, ?), (?, ?), (?, ?) 
-- @ -valuesSql :: Sqlite.Simple.ToRow a => Values a -> Sql +valuesSql :: (Sqlite.Simple.ToRow a) => Values a -> Sql valuesSql (Values values) = Sql ("VALUES " <> Text.intercalate "," (replicate (length values) (valueSql columns))) where diff --git a/lib/unison-util-base32hex/unison-util-base32hex.cabal b/lib/unison-util-base32hex/unison-util-base32hex.cabal index 57b7d69cb..d4364532a 100644 --- a/lib/unison-util-base32hex/unison-util-base32hex.cabal +++ b/lib/unison-util-base32hex/unison-util-base32hex.cabal @@ -18,8 +18,6 @@ source-repository head library exposed-modules: U.Util.Base32Hex - U.Util.Hash - U.Util.Hash32 hs-source-dirs: src default-extensions: diff --git a/lib/unison-util-bytes/src/Unison/Util/Bytes.hs b/lib/unison-util-bytes/src/Unison/Util/Bytes.hs index a8e2d5b9a..29bdd7a00 100644 --- a/lib/unison-util-bytes/src/Unison/Util/Bytes.hs +++ b/lib/unison-util-bytes/src/Unison/Util/Bytes.hs @@ -115,10 +115,10 @@ fromByteString b = snoc empty (byteStringToChunk b) toByteString :: Bytes -> B.ByteString toByteString b = B.concat (map chunkToByteString (chunks b)) -toArray :: BA.ByteArray b => Bytes -> b +toArray :: (BA.ByteArray b) => Bytes -> b toArray b = chunkToArray $ V.concat (chunks b) -fromArray :: BA.ByteArrayAccess b => b -> Bytes +fromArray :: (BA.ByteArrayAccess b) => b -> Bytes fromArray b = snoc empty (arrayToChunk b) byteStringToChunk, chunkFromByteString :: B.ByteString -> Chunk @@ -341,7 +341,7 @@ toBase16 bs = foldl' step empty (chunks bs) BE.convertToBase BE.Base16 (chunkToArray @BA.Bytes b) ) -chunkToArray, arrayFromChunk :: BA.ByteArray b => Chunk -> b +chunkToArray, arrayFromChunk :: (BA.ByteArray b) => Chunk -> b chunkToArray bs = BA.allocAndFreeze (V.length bs) $ \ptr -> let go !ind = if ind < V.length bs @@ -350,7 +350,7 @@ chunkToArray bs = BA.allocAndFreeze (V.length bs) $ \ptr -> in go 0 arrayFromChunk = chunkToArray -arrayToChunk, chunkFromArray :: BA.ByteArrayAccess b => b -> Chunk +arrayToChunk, chunkFromArray :: (BA.ByteArrayAccess b) => b -> Chunk arrayToChunk bs = case BA.convert bs :: Block Word8 of Block bs -> V.Vector 0 n (ByteArray bs) where diff --git a/lib/unison-util-bytes/test/Main.hs b/lib/unison-util-bytes/test/Main.hs index a97686bf1..cc56c309a 100644 --- a/lib/unison-util-bytes/test/Main.hs +++ b/lib/unison-util-bytes/test/Main.hs @@ -42,8 +42,10 @@ test = scope "<>" . expect' $ Bytes.toArray (b1s <> b2s <> b3s) == b1 <> b2 <> b3 scope "Ord" . expect' $ - (b1 <> b2 <> b3) `compare` b3 - == (b1s <> b2s <> b3s) `compare` b3s + (b1 <> b2 <> b3) + `compare` b3 + == (b1s <> b2s <> b3s) + `compare` b3s scope "take" . expect' $ Bytes.toArray (Bytes.take k (b1s <> b2s)) == BS.take k (b1 <> b2) scope "drop" . 
expect' $ diff --git a/lib/unison-util-cache/src/Unison/Util/Cache.hs b/lib/unison-util-cache/src/Unison/Util/Cache.hs index 84f824955..9dcfd7448 100644 --- a/lib/unison-util-cache/src/Unison/Util/Cache.hs +++ b/lib/unison-util-cache/src/Unison/Util/Cache.hs @@ -1,13 +1,14 @@ -module Unison.Util.Cache - ( Cache - , cache - , nullCache - , semispaceCache - , lookup - , insert - , apply - , applyDefined - ) where +module Unison.Util.Cache + ( Cache, + cache, + nullCache, + semispaceCache, + lookup, + insert, + apply, + applyDefined, + ) +where import Control.Monad (when) import Control.Monad.IO.Class (liftIO) @@ -22,10 +23,10 @@ data Cache k v = Cache insert_ :: k -> v -> IO () } -lookup :: MonadIO m => Cache k v -> k -> m (Maybe v) +lookup :: (MonadIO m) => Cache k v -> k -> m (Maybe v) lookup c k = liftIO (lookup_ c k) -insert :: MonadIO m => Cache k v -> k -> v -> m () +insert :: (MonadIO m) => Cache k v -> k -> v -> m () insert c k v = liftIO (insert_ c k v) -- Create a cache of unbounded size. @@ -80,7 +81,7 @@ semispaceCache maxSize = do -- Cached function application: if a key `k` is not in the cache, -- calls `f` and inserts `f k` results in the cache. -apply :: MonadIO m => Cache k v -> (k -> m v) -> k -> m v +apply :: (MonadIO m) => Cache k v -> (k -> m v) -> k -> m v apply c f k = lookup c k >>= \case Just v -> pure v diff --git a/lib/unison-util-relation/src/Unison/Util/Relation.hs b/lib/unison-util-relation/src/Unison/Util/Relation.hs index dae52b0bc..f5fa40bdd 100644 --- a/lib/unison-util-relation/src/Unison/Util/Relation.hs +++ b/lib/unison-util-relation/src/Unison/Util/Relation.hs @@ -360,19 +360,19 @@ delete x y r = r {domain = domain', range = range'} erase e s = if S.singleton e == s then Nothing else Just $ S.delete e s -- | The Set of values associated with a value in the domain. -lookupDom' :: Ord a => a -> Relation a b -> Maybe (Set b) +lookupDom' :: (Ord a) => a -> Relation a b -> Maybe (Set b) lookupDom' x r = M.lookup x (domain r) -- | The Set of values associated with a value in the range. -lookupRan' :: Ord b => b -> Relation a b -> Maybe (Set a) +lookupRan' :: (Ord b) => b -> Relation a b -> Maybe (Set a) lookupRan' y r = M.lookup y (range r) -- | True if the element @ x @ exists in the domain of @ r @. -memberDom :: Ord a => a -> Relation a b -> Bool +memberDom :: (Ord a) => a -> Relation a b -> Bool memberDom x r = isJust $ lookupDom' x r -- | True if the element exists in the range. -memberRan :: Ord b => b -> Relation a b -> Bool +memberRan :: (Ord b) => b -> Relation a b -> Bool memberRan y r = isJust $ lookupRan' y r filterDom :: (Ord a, Ord b) => (a -> Bool) -> Relation a b -> Relation a b @@ -417,10 +417,10 @@ notMember :: (Ord a, Ord b) => a -> b -> Relation a b -> Bool notMember x y r = not $ member x y r -- | True if a value appears more than one time in the relation. -manyDom :: Ord a => a -> Relation a b -> Bool +manyDom :: (Ord a) => a -> Relation a b -> Bool manyDom a = (> 1) . S.size . lookupDom a -manyRan :: Ord b => b -> Relation a b -> Bool +manyRan :: (Ord b) => b -> Relation a b -> Bool manyRan b = (> 1) . S.size . lookupRan b -- | Returns the domain in the relation, as a Set, in its entirety. @@ -441,7 +441,7 @@ ran r = M.keysSet (range r) -- The cases of 'Nothing' are purged. -- -- It is similar to 'concat'. -compactSet :: Ord a => Set (Maybe (Set a)) -> Set a +compactSet :: (Ord a) => Set (Maybe (Set a)) -> Set a compactSet = S.fold (S.union . 
fromMaybe S.empty) S.empty -- $selectops @@ -566,10 +566,10 @@ insertManyDom :: (Foldable f, Ord a, Ord b) => f a -> b -> Relation a b -> Relation a b insertManyDom as b r = foldl' (flip $ flip insert b) r as -lookupRan :: Ord b => b -> Relation a b -> Set a +lookupRan :: (Ord b) => b -> Relation a b -> Set a lookupRan b r = fromMaybe S.empty $ lookupRan' b r -lookupDom :: Ord a => a -> Relation a b -> Set b +lookupDom :: (Ord a) => a -> Relation a b -> Set b lookupDom a r = fromMaybe S.empty $ lookupDom' a r -- Efficiently locate the `Set b` for which the corresponding `a` tests @@ -708,7 +708,7 @@ toMultimap :: Relation a b -> Map a (Set b) toMultimap = domain -- Returns Nothing if Relation isn't one-to-one. -toMap :: Ord a => Relation a b -> Maybe (Map a b) +toMap :: (Ord a) => Relation a b -> Maybe (Map a b) toMap r = let mm = toMultimap r in if all (\s -> S.size s == 1) mm @@ -752,12 +752,12 @@ instance (Ord a, Ord b) => Semigroup (Relation a b) where (<>) = union toUnzippedMultimap :: - Ord a => Ord b => Ord c => Relation a (b, c) -> Map a (Set b, Set c) + (Ord a) => (Ord b) => (Ord c) => Relation a (b, c) -> Map a (Set b, Set c) toUnzippedMultimap r = (\s -> (S.map fst s, S.map snd s)) <$> toMultimap r collectRan :: - Ord a => - Ord c => + (Ord a) => + (Ord c) => (b -> Maybe c) -> Relation a b -> Relation a c diff --git a/lib/unison-util-relation/src/Unison/Util/Relation3.hs b/lib/unison-util-relation/src/Unison/Util/Relation3.hs index 9a564505d..358bbdc02 100644 --- a/lib/unison-util-relation/src/Unison/Util/Relation3.hs +++ b/lib/unison-util-relation/src/Unison/Util/Relation3.hs @@ -94,6 +94,10 @@ mapD2Monotonic f Relation3 {d1, d2, d3} = member :: (Ord a, Ord b, Ord c) => a -> b -> c -> Relation3 a b c -> Bool member a b c = R.member b c . lookupD1 a +memberD2 :: (Ord b) => b -> Relation3 a b c -> Bool +memberD2 b = + Map.member b . d2 + lookupD1 :: (Ord a, Ord b, Ord c) => a -> Relation3 a b c -> Relation b c lookupD1 a = fromMaybe mempty . Map.lookup a . d1 @@ -150,10 +154,10 @@ insert a b c Relation3 {..} = insertAll, deleteAll :: - Foldable f => - Ord a => - Ord b => - Ord c => + (Foldable f) => + (Ord a) => + (Ord b) => + (Ord c) => f (a, b, c) -> Relation3 a b c -> Relation3 a b c diff --git a/lib/unison-util-relation/src/Unison/Util/Relation4.hs b/lib/unison-util-relation/src/Unison/Util/Relation4.hs index 9e5251fa0..10eebf81e 100644 --- a/lib/unison-util-relation/src/Unison/Util/Relation4.hs +++ b/lib/unison-util-relation/src/Unison/Util/Relation4.hs @@ -55,6 +55,12 @@ fromList xs = insertAll xs empty filter :: (Ord a, Ord b, Ord c, Ord d) => ((a, b, c, d) -> Bool) -> Relation4 a b c d -> Relation4 a b c d filter f = fromList . Prelude.filter f . toList +memberD13 :: (Ord a, Ord c) => a -> c -> Relation4 a b c d -> Bool +memberD13 a c r4 = + case Map.lookup a (d1 r4) of + Nothing -> False + Just r3 -> R3.memberD2 c r3 + selectD3 :: (Ord a, Ord b, Ord c, Ord d) => c -> @@ -93,12 +99,21 @@ keys :: Relation4 a b c d -> (Set a, Set b, Set c, Set d) keys Relation4 {d1, d2, d3, d4} = (Map.keysSet d1, Map.keysSet d2, Map.keysSet d3, Map.keysSet d4) -d1set :: Ord a => Relation4 a b c d -> Set a +lookupD1 :: (Ord a, Ord b, Ord c, Ord d) => a -> Relation4 a b c d -> Relation3 b c d +lookupD1 a = fromMaybe mempty . Map.lookup a . d1 + +lookupD2 :: (Ord a, Ord b, Ord c, Ord d) => b -> Relation4 a b c d -> Relation3 a c d +lookupD2 b = fromMaybe mempty . Map.lookup b . d2 + +d1set :: (Ord a) => Relation4 a b c d -> Set a d1set = Map.keysSet . 
d1 d12 :: (Ord a, Ord b) => Relation4 a b c d -> Relation a b d12 = R.fromMultimap . fmap (Map.keysSet . R3.d1) . d1 +d13 :: (Ord a, Ord c) => Relation4 a b c d -> Relation a c +d13 = R.fromMultimap . fmap (Map.keysSet . R3.d2) . d1 + d34 :: (Ord c, Ord d) => Relation4 a b c d -> Relation c d d34 = R.fromMultimap . fmap (Map.keysSet . R3.d3) . d3 @@ -124,6 +139,12 @@ d234 Relation4 {d2, d3, d4} = d12s :: (Ord a, Ord b) => Relation4 a b c d -> [(a, b)] d12s = nubOrd . fmap (\(a, (b, _)) -> (a, b)) . toNestedList +d3s :: Relation4 a b c d -> Set c +d3s = Map.keysSet . d3 + +d4s :: Relation4 a b c d -> Set d +d4s = Map.keysSet . d4 + -- e.g. Map.toList (d1 r) >>= \(a, r3) -> (a,) <$> Map.keys (R3.d1 r3) insert, @@ -175,11 +196,11 @@ mapD2Monotonic f Relation4 {d1, d2, d3, d4} = } insertAll :: - Foldable f => - Ord a => - Ord b => - Ord c => - Ord d => + (Foldable f) => + (Ord a) => + (Ord b) => + (Ord c) => + (Ord d) => f (a, b, c, d) -> Relation4 a b c d -> Relation4 a b c d diff --git a/lib/unison-util-rope/src/Unison/Util/Rope.hs b/lib/unison-util-rope/src/Unison/Util/Rope.hs index b635b36b6..e0d2c6475 100644 --- a/lib/unison-util-rope/src/Unison/Util/Rope.hs +++ b/lib/unison-util-rope/src/Unison/Util/Rope.hs @@ -40,7 +40,7 @@ data Rope a chunks :: Rope a -> [a] chunks = toList -singleton, one :: Sized a => a -> Rope a +singleton, one :: (Sized a) => a -> Rope a one a | size a == 0 = Empty one a = One a singleton = one @@ -49,7 +49,7 @@ singleton = one -- be used unless the function is "roughly" size-preserving. -- So converting from text to utf-8 encoded text chunks is okay, -- wherease filtering out 95% of the chunks will lead to a size-unbalanced tree -map :: Sized b => (a -> b) -> Rope a -> Rope b +map :: (Sized b) => (a -> b) -> Rope a -> Rope b map f = \case Empty -> Empty One a -> one (f a) @@ -73,16 +73,16 @@ class Index a elem where unsafeIndex :: Int -> a -> elem class Reverse a where reverse :: a -> a -instance Sized a => Sized (Rope a) where +instance (Sized a) => Sized (Rope a) where size = \case Empty -> 0 One a -> size a Two n _ _ -> n -null :: Sized a => Rope a -> Bool +null :: (Sized a) => Rope a -> Bool null r = size r == 0 -flatten :: Monoid a => Rope a -> a +flatten :: (Monoid a) => Rope a -> a flatten = mconcat . 
toList instance (Sized a, Semigroup a) => Semigroup (Rope a) where @@ -100,7 +100,7 @@ instance (Sized a, Semigroup a) => Monoid (Rope a) where mempty = Empty -- size-balanced append, leaving the left tree as is -appendL :: Sized a => Int -> Rope a -> Rope a -> Rope a +appendL :: (Sized a) => Int -> Rope a -> Rope a -> Rope a appendL 0 _ a = a appendL _ l Empty = l appendL szl l r@(One a) = Two (szl + size a) l r @@ -109,7 +109,7 @@ appendL szl l r@(Two szr r1 r2) | otherwise = Two (szl + szr) (appendL szl l r1) r2 -- size-balanced append, leaving the right tree as is -appendR :: Sized a => Rope a -> Int -> Rope a -> Rope a +appendR :: (Sized a) => Rope a -> Int -> Rope a -> Rope a appendR a 0 _ = a appendR Empty _ r = r appendR l@(One a) szr r = Two (size a + szr) l r @@ -151,13 +151,13 @@ snoc' as szN aN = go as | szN >= sz -> Two (sz + szN) as (One aN) | otherwise -> appendL (size l) l (go r) -instance Reverse a => Reverse (Rope a) where +instance (Reverse a) => Reverse (Rope a) where reverse = \case One a -> One (reverse a) Two sz l r -> Two sz (reverse r) (reverse l) Empty -> Empty -two :: Sized a => Rope a -> Rope a -> Rope a +two :: (Sized a) => Rope a -> Rope a -> Rope a two r1 r2 = Two (size r1 + size r2) r1 r2 -- Cutoff for when `snoc` or `cons` will create a new subtree @@ -204,7 +204,7 @@ instance (Sized a, Semigroup a, Drop a) => Drop (Rope a) where | otherwise -> two (drop n l) r -- don't rebalance Empty -> Empty -uncons :: Sized a => Rope a -> Maybe (a, Rope a) +uncons :: (Sized a) => Rope a -> Maybe (a, Rope a) uncons = \case Empty -> Nothing One a -> Just (a, Empty) @@ -212,7 +212,7 @@ uncons = \case Nothing -> uncons r Just (hd, tl) -> Just (hd, two tl r) -unsnoc :: Sized a => Rope a -> Maybe (Rope a, a) +unsnoc :: (Sized a) => Rope a -> Maybe (Rope a, a) unsnoc = \case Empty -> Nothing One a -> Just (Empty, a) @@ -249,7 +249,7 @@ instance (Sized a, Take a, Drop a, Eq a) => Eq (Rope a) where instance (Sized a, Take a, Drop a, Ord a) => Ord (Rope a) where b1 `compare` b2 = uncurry compare (alignChunks (chunks b1) (chunks b2)) -instance NFData a => NFData (Rope a) where +instance (NFData a) => NFData (Rope a) where rnf Empty = () rnf (One a) = rnf a rnf (Two _ l r) = rnf l `seq` rnf r diff --git a/parser-typechecker/package.yaml b/parser-typechecker/package.yaml index 0225d7dd2..f4bf2e6f7 100644 --- a/parser-typechecker/package.yaml +++ b/parser-typechecker/package.yaml @@ -8,10 +8,15 @@ flags: optimized: manual: true default: true + arraychecks: + manual: true + default: false when: - condition: flag(optimized) ghc-options: -funbox-strict-fields -O2 + - condition: flag(arraychecks) + cpp-options: -DARRAY_CHECK dependencies: - ListLike @@ -19,6 +24,7 @@ dependencies: - aeson - ansi-terminal - async + - atomic-primops - base - base16 >= 0.2.1.0 - base64-bytestring @@ -64,6 +70,7 @@ dependencies: - monad-validate - mtl - mutable-containers + - murmur-hash - mwc-random - natural-transformation - network @@ -108,6 +115,7 @@ dependencies: - unison-codebase-sync - unison-core - unison-core1 + - unison-hash - unison-hashing-v2 - unison-prelude - unison-pretty-printer diff --git a/parser-typechecker/src/U/Codebase/Branch/Diff.hs b/parser-typechecker/src/U/Codebase/Branch/Diff.hs index 614942e08..ad8e17496 100644 --- a/parser-typechecker/src/U/Codebase/Branch/Diff.hs +++ b/parser-typechecker/src/U/Codebase/Branch/Diff.hs @@ -113,7 +113,7 @@ instance Semigroup NameBasedDiff where NameBasedDiff (terms0 <> terms1) (types0 <> types1) -- | Diff two Branches, returning a tree containing 
all of the changes -diffBranches :: forall m. Monad m => Branch m -> Branch m -> m TreeDiff +diffBranches :: forall m. (Monad m) => Branch m -> Branch m -> m TreeDiff diffBranches from to = do let termDiffs = diffMap (Branch.terms from) (Branch.terms to) let typeDiffs = diffMap (Branch.types from) (Branch.types to) @@ -142,7 +142,7 @@ diffBranches from to = do TreeDiff cfr -> pure . Just $ cfr pure $ TreeDiff (defDiff :< childDiff) where - diffMap :: forall ref. Ord ref => Map NameSegment (Map ref (m MdValues)) -> Map NameSegment (Map ref (m MdValues)) -> Map NameSegment (Diff ref) + diffMap :: forall ref. (Ord ref) => Map NameSegment (Map ref (m MdValues)) -> Map NameSegment (Map ref (m MdValues)) -> Map NameSegment (Diff ref) diffMap l r = Align.align l r & fmap \case diff --git a/parser-typechecker/src/Unison/Builtin.hs b/parser-typechecker/src/Unison/Builtin.hs index fc7ff80ac..a7a901a3a 100644 --- a/parser-typechecker/src/Unison/Builtin.hs +++ b/parser-typechecker/src/Unison/Builtin.hs @@ -110,7 +110,7 @@ builtinDataDecls = builtinEffectDecls :: [(Symbol, (R.Id, EffectDeclaration))] builtinEffectDecls = [(v, (r, Intrinsic <$ d)) | (v, r, d) <- DD.builtinEffectDecls] -codeLookup :: Applicative m => CodeLookup Symbol m Ann +codeLookup :: (Applicative m) => CodeLookup Symbol m Ann codeLookup = CodeLookup (const $ pure Nothing) $ \r -> pure $ lookup r [(r, Right x) | (r, x) <- snd <$> builtinDataDecls] @@ -165,7 +165,11 @@ builtinTypes = Rename' r name -> case Map.lookup name m of Just _ -> error . Text.unpack $ - "tried to rename `" <> r <> "` to `" <> name <> "`, " + "tried to rename `" + <> r + <> "` to `" + <> name + <> "`, " <> "which already exists." Nothing -> case Map.lookup r m of Nothing -> @@ -175,7 +179,11 @@ builtinTypes = Alias' r name -> case Map.lookup name m of Just _ -> error . Text.unpack $ - "tried to alias `" <> r <> "` to `" <> name <> "`, " + "tried to alias `" + <> r + <> "` to `" + <> name + <> "`, " <> "which already exists." Nothing -> case Map.lookup r m of Nothing -> @@ -204,6 +212,8 @@ builtinTypesSrc = Rename' "IO" "io2.IO", B' "Handle" CT.Data, Rename' "Handle" "io2.Handle", + B' "ProcessHandle" CT.Data, + Rename' "ProcessHandle" "io2.ProcessHandle", B' "Socket" CT.Data, Rename' "Socket" "io2.Socket", B' "ThreadId" CT.Data, @@ -234,12 +244,17 @@ builtinTypesSrc = Rename' "STM" "io2.STM", B' "Ref" CT.Data, B' "Scope" CT.Effect, + B' "Ref.Ticket" CT.Data, + Rename' "Ref.Ticket" "io2.Ref.Ticket", + B' "Promise" CT.Data, + Rename' "Promise" "io2.Promise", B' "TimeSpec" CT.Data, Rename' "TimeSpec" "io2.Clock.internals.TimeSpec", B' "ImmutableArray" CT.Data, B' "MutableArray" CT.Data, B' "ImmutableByteArray" CT.Data, - B' "MutableByteArray" CT.Data + B' "MutableByteArray" CT.Data, + B' "Char.Class" CT.Data ] -- rename these to "builtin" later, when builtin means intrinsic as opposed to @@ -288,7 +303,11 @@ termNameRefs = Map.mapKeys Name.unsafeFromText $ foldl' go mempty (stripVersion Rename r name -> case Map.lookup name m of Just _ -> error . Text.unpack $ - "tried to rename `" <> r <> "` to `" <> name <> "`, " + "tried to rename `" + <> r + <> "` to `" + <> name + <> "`, " <> "which already exists." Nothing -> case Map.lookup r m of Nothing -> @@ -298,7 +317,11 @@ termNameRefs = Map.mapKeys Name.unsafeFromText $ foldl' go mempty (stripVersion Alias r name -> case Map.lookup name m of Just _ -> error . Text.unpack $ - "tried to alias `" <> r <> "` to `" <> name <> "`, " + "tried to alias `" + <> r + <> "` to `" + <> name + <> "`, " <> "which already exists." 
Nothing -> case Map.lookup r m of Nothing -> @@ -449,6 +472,7 @@ builtinsSrc = B "Universal.<" $ forall1 "a" (\a -> a --> a --> boolean), B "Universal.>=" $ forall1 "a" (\a -> a --> a --> boolean), B "Universal.<=" $ forall1 "a" (\a -> a --> a --> boolean), + B "Universal.murmurHash" $ forall1 "a" (\a -> a --> nat), B "bug" $ forall1 "a" (\a -> forall1 "b" (\b -> a --> b)), B "todo" $ forall1 "a" (\a -> forall1 "b" (\b -> a --> b)), B "Any.Any" $ forall1 "a" (\a -> a --> anyt), @@ -544,6 +568,8 @@ builtinsSrc = B "ThreadId.toText" $ threadId --> text, B "Debug.watch" $ forall1 "a" (\a -> text --> a --> a), B "Debug.trace" $ forall1 "a" (\a -> text --> a --> unit), + B "Debug.toText" $ + forall1 "a" (\a -> a --> optionalt (eithert text text)), B "unsafe.coerceAbilities" $ forall4 "a" "b" "e1" "e2" $ \a b e1 e2 -> (a --> Type.effect1 () e1 b) --> (a --> Type.effect1 () e2 b), @@ -560,10 +586,18 @@ builtinsSrc = B "ImmutableArray.size" . forall1 "a" $ \a -> iarrayt a --> nat, B "ImmutableByteArray.size" $ ibytearrayt --> nat, B "MutableArray.copyTo!" . forall2 "g" "a" $ \g a -> - marrayt g a --> nat --> marrayt g a --> nat --> nat + marrayt g a + --> nat + --> marrayt g a + --> nat + --> nat --> Type.effect () [g, DD.exceptionType ()] unit, B "MutableByteArray.copyTo!" . forall1 "g" $ \g -> - mbytearrayt g --> nat --> mbytearrayt g --> nat --> nat + mbytearrayt g + --> nat + --> mbytearrayt g + --> nat + --> nat --> Type.effect () [g, DD.exceptionType ()] unit, B "MutableArray.read" . forall2 "g" "a" $ \g a -> marrayt g a --> nat --> Type.effect () [g, DD.exceptionType ()] a, @@ -590,10 +624,18 @@ builtinsSrc = B "MutableByteArray.write64be" . forall1 "g" $ \g -> mbytearrayt g --> nat --> nat --> Type.effect () [g, DD.exceptionType ()] unit, B "ImmutableArray.copyTo!" . forall2 "g" "a" $ \g a -> - marrayt g a --> nat --> iarrayt a --> nat --> nat + marrayt g a + --> nat + --> iarrayt a + --> nat + --> nat --> Type.effect () [g, DD.exceptionType ()] unit, B "ImmutableByteArray.copyTo!" . forall1 "g" $ \g -> - mbytearrayt g --> nat --> ibytearrayt --> nat --> nat + mbytearrayt g + --> nat + --> ibytearrayt + --> nat + --> nat --> Type.effect () [g, DD.exceptionType ()] unit, B "ImmutableArray.read" . forall1 "a" $ \a -> iarrayt a --> nat --> Type.effect1 () (DD.exceptionType ()) a, @@ -624,7 +666,32 @@ builtinsSrc = B "Scope.bytearray" . forall1 "s" $ \s -> nat --> Type.effect1 () (scopet s) (mbytearrayt (scopet s)), B "Scope.bytearrayOf" . 
forall1 "s" $ \s -> - nat --> nat --> Type.effect1 () (scopet s) (mbytearrayt (scopet s)) + nat --> nat --> Type.effect1 () (scopet s) (mbytearrayt (scopet s)), + B "Char.Class.any" charClass, + B "Char.Class.not" $ charClass --> charClass, + B "Char.Class.and" $ charClass --> charClass --> charClass, + B "Char.Class.or" $ charClass --> charClass --> charClass, + B "Char.Class.range" $ char --> char --> charClass, + B "Char.Class.anyOf" $ list char --> charClass, + B "Char.Class.alphanumeric" charClass, + B "Char.Class.upper" charClass, + B "Char.Class.lower" charClass, + B "Char.Class.whitespace" charClass, + B "Char.Class.control" charClass, + B "Char.Class.printable" charClass, + B "Char.Class.mark" charClass, + B "Char.Class.number" charClass, + B "Char.Class.punctuation" charClass, + B "Char.Class.symbol" charClass, + B "Char.Class.separator" charClass, + B "Char.Class.letter" charClass, + B "Char.Class.is" $ + charClass + --> char + --> boolean, + B + "Text.patterns.char" + $ charClass --> pat text ] ++ -- avoid name conflicts with Universal == < > <= >= @@ -641,6 +708,7 @@ builtinsSrc = ++ moveUnder "io2" ioBuiltins ++ moveUnder "io2" mvarBuiltins ++ moveUnder "io2" stmBuiltins + ++ moveUnder "io2" refPromiseBuiltins ++ hashBuiltins ++ fmap (uncurry B) codeBuiltins @@ -694,7 +762,7 @@ hashBuiltins = B "crypto.hmac" $ forall1 "a" (\a -> hashAlgo --> bytes --> a --> bytes), B "crypto.hmacBytes" $ hashAlgo --> bytes --> bytes --> bytes ] - ++ map h ["Sha3_512", "Sha3_256", "Sha2_512", "Sha2_256", "Sha1", "Blake2b_512", "Blake2b_256", "Blake2s_256"] + ++ map h ["Sha3_512", "Sha3_256", "Sha2_512", "Sha2_256", "Sha1", "Blake2b_512", "Blake2b_256", "Blake2s_256", "Md5"] where hashAlgo = Type.ref () Type.hashAlgorithmRef h name = B ("crypto.HashAlgorithm." 
<> name) hashAlgo @@ -750,8 +818,17 @@ ioBuiltins = ("IO.kill.impl.v3", threadId --> iof unit), ( "IO.ref", forall1 "a" $ \a -> - a --> io (reft (Type.effects () [Type.builtinIO ()]) a) + a --> io (reft iot a) ), + ("IO.process.call", text --> list text --> io nat), + ( "IO.process.start", + text + --> list text + --> io (tuple [handle, handle, handle, phandle]) + ), + ("IO.process.kill", phandle --> io unit), + ("IO.process.wait", phandle --> io nat), + ("IO.process.exitCode", phandle --> io (optionalt nat)), ( "validateSandboxed", forall1 "a" $ \a -> list termLink --> a --> boolean ), @@ -781,17 +858,17 @@ ioBuiltins = ("Clock.internals.nsec.v1", timeSpec --> nat), ( "IO.array", forall1 "a" $ \a -> - nat --> io (marrayt (Type.effects () [Type.builtinIO ()]) a) + nat --> io (marrayt iot a) ), ( "IO.arrayOf", forall1 "a" $ \a -> - a --> nat --> io (marrayt (Type.effects () [Type.builtinIO ()]) a) + a --> nat --> io (marrayt iot a) ), ( "IO.bytearray", - nat --> io (mbytearrayt (Type.effects () [Type.builtinIO ()])) + nat --> io (mbytearrayt iot) ), ( "IO.bytearrayOf", - nat --> nat --> io (mbytearrayt (Type.effects () [Type.builtinIO ()])) + nat --> nat --> io (mbytearrayt iot) ), ( "IO.tryEval", forall1 "a" $ \a -> @@ -848,6 +925,22 @@ stmBuiltins = ("STM.atomically", forall1 "a" $ \a -> (unit --> stm a) --> io a) ] +refPromiseBuiltins :: [(Text, Type)] +refPromiseBuiltins = + [ ("Ref.Ticket.read", forall1 "a" $ \a -> ticket a --> a), + ("Ref.readForCas", forall1 "a" $ \a -> reft iot a --> io (ticket a)), + ("Ref.cas", forall1 "a" $ \a -> reft iot a --> ticket a --> a --> io boolean), + ("Promise.new", forall1 "a" $ \a -> unit --> io (promise a)), + ("Promise.read", forall1 "a" $ \a -> promise a --> io a), + ("Promise.tryRead", forall1 "a" $ \a -> promise a --> io (optionalt a)), + ("Promise.write", forall1 "a" $ \a -> promise a --> a --> io boolean) + ] + where + ticket :: Type -> Type + ticket a = Type.ref () Type.ticketRef `app` a + promise :: Type -> Type + promise a = Type.ref () Type.promiseRef `app` a + forall1 :: Text -> (Type -> Type) -> Type forall1 name body = let a = Var.named name @@ -905,6 +998,9 @@ io, iof :: Type -> Type io = Type.effect1 () (Type.builtinIO ()) iof = io . 
eithert failure +iot :: Type +iot = (Type.effects () [Type.builtinIO ()]) + failure :: Type failure = DD.failureType () @@ -929,10 +1025,11 @@ iarrayt a = Type.iarrayType () `app` a marrayt :: Type -> Type -> Type marrayt g a = Type.marrayType () `app` g `app` a -socket, threadId, handle, unit :: Type +socket, threadId, handle, phandle, unit :: Type socket = Type.socket () threadId = Type.threadId () handle = Type.fileHandle () +phandle = Type.processHandle () unit = DD.unitType () tls, tlsClientConfig, tlsServerConfig, tlsSignedCert, tlsPrivateKey, tlsVersion, tlsCipher :: Type @@ -970,5 +1067,8 @@ stm = Type.effect1 () (Type.ref () Type.stmRef) tvar a = Type.ref () Type.tvarRef `app` a pat a = Type.ref () Type.patternRef `app` a +charClass :: Type +charClass = Type.ref () Type.charClassRef + timeSpec :: Type timeSpec = Type.ref () Type.timeSpecRef diff --git a/parser-typechecker/src/Unison/Builtin/Decls.hs b/parser-typechecker/src/Unison/Builtin/Decls.hs index 59191c429..f7fae7f48 100644 --- a/parser-typechecker/src/Unison/Builtin/Decls.hs +++ b/parser-typechecker/src/Unison/Builtin/Decls.hs @@ -78,11 +78,12 @@ tlsSignedCertRef = lookupDeclRef "io2.Tls.SignedCert" tlsPrivateKeyRef = lookupDeclRef "io2.Tls.PrivateKey" -runtimeFailureRef, arithmeticFailureRef, miscFailureRef, stmFailureRef :: Reference +runtimeFailureRef, arithmeticFailureRef, miscFailureRef, stmFailureRef, threadKilledFailureRef :: Reference runtimeFailureRef = lookupDeclRef "io2.RuntimeFailure" arithmeticFailureRef = lookupDeclRef "io2.ArithmeticFailure" miscFailureRef = lookupDeclRef "io2.MiscFailure" stmFailureRef = lookupDeclRef "io2.STMFailure" +threadKilledFailureRef = lookupDeclRef "io2.ThreadKilledFailure" fileModeRef, filePathRef, bufferModeRef, seekModeRef, seqViewRef :: Reference fileModeRef = lookupDeclRef "io2.FileMode" @@ -184,7 +185,8 @@ builtinDataDecls = rs1 ++ rs (v "io2.RuntimeFailure", runtimeFailure), (v "io2.ArithmeticFailure", arithmeticFailure), (v "io2.MiscFailure", miscFailure), - (v "io2.STMFailure", stmFailure) + (v "io2.STMFailure", stmFailure), + (v "io2.ThreadKilledFailure", threadKilledFailure) ] of Right a -> a Left e -> error $ "builtinDataDecls: " <> show e @@ -363,6 +365,13 @@ builtinDataDecls = rs1 ++ rs [] [] + threadKilledFailure = + DataDeclaration + (Unique "e7e479ebb757edcd5acff958b00aa228ac75b0c53638d44cf9d62fca045c33cf") + () + [] + [] + stdhnd = DataDeclaration (Unique "67bf7a8e517cbb1e9f42bc078e35498212d3be3c") @@ -460,7 +469,7 @@ pattern OptionalSome' :: ABT.Term (Term.F typeVar typeAnn patternAnn) v a pattern OptionalSome' d <- Term.App' (Term.Constructor' (ConstructorReference OptionalRef ((==) someId -> True))) d -pattern TupleType' :: Var v => [Type v a] -> Type v a +pattern TupleType' :: (Var v) => [Type v a] -> Type v a pattern TupleType' ts <- (unTupleType -> Just ts) pattern TupleTerm' :: [Term2 vt at ap v a] -> Term2 vt at ap v a @@ -577,7 +586,7 @@ unitType, stdHandleType, failureType, exceptionType :: - Ord v => a -> Type v a + (Ord v) => a -> Type v a unitType a = Type.ref a unitRef pairType a = Type.ref a pairRef testResultType a = Type.app a (Type.list a) (Type.ref a testResultRef) @@ -592,10 +601,10 @@ stdHandleType a = Type.ref a stdHandleRef failureType a = Type.ref a failureRef exceptionType a = Type.ref a exceptionRef -tlsSignedCertType :: Var v => a -> Type v a +tlsSignedCertType :: (Var v) => a -> Type v a tlsSignedCertType a = Type.ref a tlsSignedCertRef -unitTerm :: Var v => a -> Term v a +unitTerm :: (Var v) => a -> Term v a unitTerm ann = 
Term.constructor ann (ConstructorReference unitRef 0) tupleConsTerm :: @@ -611,10 +620,10 @@ tupleTerm = foldr tupleConsTerm (unitTerm mempty) -- delayed terms are just lambdas that take a single `()` arg -- `force` calls the function -forceTerm :: Var v => a -> a -> Term v a -> Term v a +forceTerm :: (Var v) => a -> a -> Term v a -> Term v a forceTerm a au e = Term.app a e (unitTerm au) -delayTerm :: Var v => a -> Term v a -> Term v a +delayTerm :: (Var v) => a -> Term v a -> Term v a delayTerm a = Term.lam a $ Var.named "()" unTupleTerm :: @@ -626,7 +635,7 @@ unTupleTerm t = case t of Term.Constructor' (ConstructorReference UnitRef 0) -> Just [] _ -> Nothing -unTupleType :: Var v => Type v a -> Maybe [Type v a] +unTupleType :: (Var v) => Type v a -> Maybe [Type v a] unTupleType t = case t of Type.Apps' (Type.Ref' PairRef) [fst, snd] -> (fst :) <$> unTupleType snd Type.Ref' UnitRef -> Just [] diff --git a/parser-typechecker/src/Unison/Builtin/Terms.hs b/parser-typechecker/src/Unison/Builtin/Terms.hs index f9748bf5d..7f322a28b 100644 --- a/parser-typechecker/src/Unison/Builtin/Terms.hs +++ b/parser-typechecker/src/Unison/Builtin/Terms.hs @@ -36,7 +36,7 @@ builtinTermsSrc ann = ) ] -v :: Var v => Text -> v +v :: (Var v) => Text -> v v = Var.named builtinTermsRef :: Map Symbol Reference.Id diff --git a/parser-typechecker/src/Unison/Codebase.hs b/parser-typechecker/src/Unison/Codebase.hs index 09e6bb97c..32b7a8c75 100644 --- a/parser-typechecker/src/Unison/Codebase.hs +++ b/parser-typechecker/src/Unison/Codebase.hs @@ -161,6 +161,7 @@ import Unison.Symbol (Symbol) import Unison.Term (Term) import qualified Unison.Term as Term import Unison.Type (Type) +import qualified Unison.Type as Type import Unison.Typechecker.TypeLookup (TypeLookup (TypeLookup)) import qualified Unison.Typechecker.TypeLookup as TL import qualified Unison.UnisonFile as UF @@ -170,7 +171,7 @@ import Unison.Var (Var) import qualified Unison.WatchKind as WK -- | Run a transaction on a codebase. -runTransaction :: MonadIO m => Codebase m v a -> Sqlite.Transaction b -> m b +runTransaction :: (MonadIO m) => Codebase m v a -> Sqlite.Transaction b -> m b runTransaction Codebase {withConnection} action = withConnection \conn -> Sqlite.runTransaction conn action @@ -232,7 +233,7 @@ getShallowBranchAtPath path mayBranch = do getShallowBranchAtPath p (Just childBranch) -- | Get a branch from the codebase. -getBranchForHash :: Monad m => Codebase m v a -> CausalHash -> m (Maybe (Branch m)) +getBranchForHash :: (Monad m) => Codebase m v a -> CausalHash -> m (Maybe (Branch m)) getBranchForHash codebase h = -- Attempt to find the Branch in the current codebase cache and root up to 3 levels deep -- If not found, attempt to find it in the Codebase (sqlite) @@ -263,7 +264,7 @@ termMetadata mayBranch (path, nameSeg) ref = do V2Branch.termMetadata b (coerce @NameSegment.NameSegment nameSeg) ref -- | Get the lowest common ancestor of two branches, i.e. the most recent branch that is an ancestor of both branches. 
-lca :: MonadIO m => Codebase m v a -> Branch m -> Branch m -> m (Maybe (Branch m)) +lca :: (MonadIO m) => Codebase m v a -> Branch m -> Branch m -> m (Maybe (Branch m)) lca code b1@(Branch.headHash -> h1) b2@(Branch.headHash -> h2) = do action <- runTransaction code do @@ -310,11 +311,11 @@ addDefsToCodebase c uf = do goTerm (_, r, Nothing, tm, tp) = putTerm c r tm tp goTerm (_, r, Just WK.TestWatch, tm, tp) = putTerm c r tm tp goTerm _ = pure () - goType :: Show t => (t -> Decl v a) -> (Reference.Id, t) -> Sqlite.Transaction () + goType :: (Show t) => (t -> Decl v a) -> (Reference.Id, t) -> Sqlite.Transaction () goType _f pair | debug && trace ("Codebase.addDefsToCodebase.goType " ++ show pair) False = undefined goType f (ref, decl) = putTypeDeclaration c ref (f decl) -getTypeOfConstructor :: Ord v => Codebase m v a -> ConstructorReference -> Sqlite.Transaction (Maybe (Type v a)) +getTypeOfConstructor :: (Ord v) => Codebase m v a -> ConstructorReference -> Sqlite.Transaction (Maybe (Type v a)) getTypeOfConstructor codebase (ConstructorReference r0 cid) = case r0 of Reference.DerivedId r -> do @@ -338,28 +339,47 @@ lookupWatchCache codebase h = do maybe (getWatch codebase WK.TestWatch h) (pure . Just) m1 typeLookupForDependencies :: - BuiltinAnnotation a => + forall m a. + (BuiltinAnnotation a) => Codebase m Symbol a -> Set Reference -> Sqlite.Transaction (TL.TypeLookup Symbol a) typeLookupForDependencies codebase s = do when debug $ traceM $ "typeLookupForDependencies " ++ show s - foldM go mempty s + depthFirstAccum mempty s where - go tl ref@(Reference.DerivedId id) = - fmap (tl <>) $ - getTypeOfTerm codebase ref >>= \case - Just typ -> pure $ TypeLookup (Map.singleton ref typ) mempty mempty - Nothing -> - getTypeDeclaration codebase id >>= \case - Just (Left ed) -> - pure $ TypeLookup mempty mempty (Map.singleton ref ed) - Just (Right dd) -> - pure $ TypeLookup mempty (Map.singleton ref dd) mempty - Nothing -> pure mempty - go tl Reference.Builtin {} = pure tl -- codebase isn't consulted for builtins + depthFirstAccum :: TL.TypeLookup Symbol a -> Set Reference -> Sqlite.Transaction (TL.TypeLookup Symbol a) + depthFirstAccum tl refs = foldM go tl (Set.filter (unseen tl) refs) -toCodeLookup :: MonadIO m => Codebase m Symbol Parser.Ann -> CL.CodeLookup Symbol m Parser.Ann + -- We need the transitive dependencies of data decls + -- that are scrutinized in a match expression for + -- pattern match coverage checking (specifically for + -- the inhabitation check). We ensure these are found + -- by collecting all transitive type dependencies. 
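The comment block just above motivates the new shape of the traversal: each type or decl that gets loaded feeds its own dependencies back into the search, and anything already present in the accumulated `TypeLookup` is skipped. A minimal, self-contained sketch of that depth-first accumulation over a plain dependency graph (stand-in types; the real code threads a `TypeLookup` through `Sqlite.Transaction`):

```haskell
-- Sketch only: depth-first accumulation of transitive dependencies,
-- skipping references that have already been collected.
import Data.List (foldl')
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set

type Ref = String

-- Stand-in for loading a definition and asking for its direct dependencies.
depsOf :: Map Ref (Set Ref) -> Ref -> Set Ref
depsOf graph r = Map.findWithDefault Set.empty r graph

transitiveDeps :: Map Ref (Set Ref) -> Set Ref -> Set Ref
transitiveDeps graph = go Set.empty
  where
    go seen refs = foldl' step seen (Set.toList refs)
    step seen r
      | r `Set.member` seen = seen -- plays the role of the `unseen` filter
      | otherwise = go (Set.insert r seen) (depsOf graph r)
```

The `Set.member` check here does the same job as `unseen` in the real code: it is what keeps the recursion from revisiting decls that are already in the lookup.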
+ go tl ref@(Reference.DerivedId id) = + getTypeOfTerm codebase ref >>= \case + Just typ -> + let z = tl <> TypeLookup (Map.singleton ref typ) mempty mempty + in depthFirstAccum z (Type.dependencies typ) + Nothing -> + getTypeDeclaration codebase id >>= \case + Just (Left ed) -> + let z = tl <> TypeLookup mempty mempty (Map.singleton ref ed) + in depthFirstAccum z (DD.dependencies $ DD.toDataDecl ed) + Just (Right dd) -> + let z = tl <> TypeLookup mempty (Map.singleton ref dd) mempty + in depthFirstAccum z (DD.dependencies dd) + Nothing -> pure tl + go tl Reference.Builtin {} = pure tl -- codebase isn't consulted for builtins + unseen :: TL.TypeLookup Symbol a -> Reference -> Bool + unseen tl r = + isNothing + ( Map.lookup r (TL.dataDecls tl) $> () + <|> Map.lookup r (TL.typeOfTerms tl) $> () + <|> Map.lookup r (TL.effectDecls tl) $> () + ) + +toCodeLookup :: (MonadIO m) => Codebase m Symbol Parser.Ann -> CL.CodeLookup Symbol m Parser.Ann toCodeLookup c = CL.CodeLookup (runTransaction c . getTerm c) (runTransaction c . getTypeDeclaration c) <> Builtin.codeLookup @@ -370,7 +390,7 @@ toCodeLookup c = -- Note that it is possible to call 'putTerm', then 'getTypeOfTerm', and receive @Nothing@, per the semantics of -- 'putTerm'. getTypeOfTerm :: - BuiltinAnnotation a => + (BuiltinAnnotation a) => Codebase m Symbol a -> Reference -> Sqlite.Transaction (Maybe (Type Symbol a)) @@ -384,7 +404,7 @@ getTypeOfTerm c r = case r of -- | Get the type of a referent. getTypeOfReferent :: - BuiltinAnnotation a => + (BuiltinAnnotation a) => Codebase m Symbol a -> Referent.Referent -> Sqlite.Transaction (Maybe (Type Symbol a)) @@ -413,18 +433,18 @@ dependentsOfComponent h = <$> SqliteCodebase.Operations.dependentsOfComponentImpl h -- | Get the set of terms-or-constructors that have the given type. -termsOfType :: Var v => Codebase m v a -> Type v a -> Sqlite.Transaction (Set Referent.Referent) +termsOfType :: (Var v) => Codebase m v a -> Type v a -> Sqlite.Transaction (Set Referent.Referent) termsOfType c ty = termsOfTypeByReference c $ Hashing.typeToReference ty -- | Get all terms which match the exact type the provided reference points to. -termsOfTypeByReference :: Var v => Codebase m v a -> Reference -> Sqlite.Transaction (Set Referent.Referent) +termsOfTypeByReference :: (Var v) => Codebase m v a -> Reference -> Sqlite.Transaction (Set Referent.Referent) termsOfTypeByReference c r = Set.union (Rel.lookupDom r Builtin.builtinTermsByType) . Set.map (fmap Reference.DerivedId) <$> termsOfTypeImpl c r -- | Get the set of terms-or-constructors mention the given type anywhere in their signature. -termsMentioningType :: Var v => Codebase m v a -> Type v a -> Sqlite.Transaction (Set Referent.Referent) +termsMentioningType :: (Var v) => Codebase m v a -> Type v a -> Sqlite.Transaction (Set Referent.Referent) termsMentioningType c ty = Set.union (Rel.lookupDom r Builtin.builtinTermsByTypeMention) . Set.map (fmap Reference.DerivedId) @@ -434,7 +454,7 @@ termsMentioningType c ty = -- | Check whether a reference is a term. isTerm :: - BuiltinAnnotation a => + (BuiltinAnnotation a) => Codebase m Symbol a -> Reference -> Sqlite.Transaction Bool @@ -458,7 +478,7 @@ data Preprocessing m -- otherwise we try to load the root branch. importRemoteBranch :: forall m v a. 
- MonadUnliftIO m => + (MonadUnliftIO m) => Codebase m v a -> ReadGitRemoteNamespace -> SyncMode -> @@ -484,7 +504,7 @@ importRemoteBranch codebase ns mode preprocess = runExceptT $ do -- | Pull a git branch and view it from the cache, without syncing into the -- local codebase. viewRemoteBranch :: - MonadIO m => + (MonadIO m) => Codebase m v a -> ReadGitRemoteNamespace -> Git.GitBranchBehavior -> @@ -493,35 +513,35 @@ viewRemoteBranch :: viewRemoteBranch codebase ns gitBranchBehavior action = viewRemoteBranch' codebase ns gitBranchBehavior (\(b, _dir) -> action b) -unsafeGetComponentLength :: HasCallStack => Hash -> Sqlite.Transaction Reference.CycleSize +unsafeGetComponentLength :: (HasCallStack) => Hash -> Sqlite.Transaction Reference.CycleSize unsafeGetComponentLength h = Operations.getCycleLen h >>= \case Nothing -> error (reportBug "E713350" ("component with hash " ++ show h ++ " not found")) Just size -> pure size -- | Like 'getTerm', for when the term is known to exist in the codebase. -unsafeGetTerm :: HasCallStack => Codebase m v a -> Reference.Id -> Sqlite.Transaction (Term v a) +unsafeGetTerm :: (HasCallStack) => Codebase m v a -> Reference.Id -> Sqlite.Transaction (Term v a) unsafeGetTerm codebase rid = getTerm codebase rid >>= \case Nothing -> error (reportBug "E520818" ("term " ++ show rid ++ " not found")) Just term -> pure term -- | Like 'getTypeDeclaration', for when the type declaration is known to exist in the codebase. -unsafeGetTypeDeclaration :: HasCallStack => Codebase m v a -> Reference.Id -> Sqlite.Transaction (Decl v a) +unsafeGetTypeDeclaration :: (HasCallStack) => Codebase m v a -> Reference.Id -> Sqlite.Transaction (Decl v a) unsafeGetTypeDeclaration codebase rid = getTypeDeclaration codebase rid >>= \case Nothing -> error (reportBug "E129043" ("type decl " ++ show rid ++ " not found")) Just decl -> pure decl -- | Like 'getTypeOfTerm', but for when the term is known to exist in the codebase. -unsafeGetTypeOfTermById :: HasCallStack => Codebase m v a -> Reference.Id -> Sqlite.Transaction (Type v a) +unsafeGetTypeOfTermById :: (HasCallStack) => Codebase m v a -> Reference.Id -> Sqlite.Transaction (Type v a) unsafeGetTypeOfTermById codebase rid = getTypeOfTermImpl codebase rid >>= \case Nothing -> error (reportBug "E377910" ("type of term " ++ show rid ++ " not found")) Just ty -> pure ty -- | Like 'unsafeGetTerm', but returns the type of the term, too. -unsafeGetTermWithType :: HasCallStack => Codebase m v a -> Reference.Id -> Sqlite.Transaction (Term v a, Type v a) +unsafeGetTermWithType :: (HasCallStack) => Codebase m v a -> Reference.Id -> Sqlite.Transaction (Term v a, Type v a) unsafeGetTermWithType codebase rid = do term <- unsafeGetTerm codebase rid ty <- @@ -534,7 +554,7 @@ unsafeGetTermWithType codebase rid = do -- | Like 'getTermComponentWithTypes', for when the term component is known to exist in the codebase. 
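The `unsafeGet*` family above all follow one convention: run the `Maybe`-returning lookup and fail with a tagged `reportBug` message when the caller has already established that the value exists. A small sketch of that convention with hypothetical names (the `reportBug` below is a stand-in, not the codebase's own helper):

```haskell
-- Sketch only: a "the caller knows this exists" lookup that fails with a
-- tagged internal-error message when the assumption is violated.
reportBug :: String -> String -> String
reportBug code msg = "internal bug (" <> code <> "): " <> msg

unsafeGet :: (Show k) => String -> String -> (k -> Maybe v) -> k -> v
unsafeGet bugCode what lookupFn k =
  case lookupFn k of
    Just v -> v
    Nothing -> error (reportBug bugCode (what <> " " <> show k <> " not found"))
```

Each `unsafeGetTerm`-style wrapper above is essentially this pattern specialized to one lookup and one error code.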
unsafeGetTermComponent :: - HasCallStack => + (HasCallStack) => Codebase m v a -> Hash -> Sqlite.Transaction [(Term v a, Type v a)] diff --git a/parser-typechecker/src/Unison/Codebase/Branch.hs b/parser-typechecker/src/Unison/Codebase/Branch.hs index 1298bf209..375698e48 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch.hs @@ -113,8 +113,8 @@ import Unison.Codebase.Patch (Patch) import qualified Unison.Codebase.Patch as Patch import Unison.Codebase.Path (Path (..)) import qualified Unison.Codebase.Path as Path +import qualified Unison.Hashing.V2 as Hashing (ContentAddressable (contentHash)) import qualified Unison.Hashing.V2.Convert as H -import qualified Unison.Hashing.V2.Hashable as H import Unison.Name (Name) import qualified Unison.Name as Name import Unison.NameSegment (NameSegment) @@ -136,8 +136,8 @@ instance AsEmpty (Branch m) where | b0 == empty = Just () | otherwise = Nothing -instance H.Hashable (Branch0 m) where - hash = H.hashBranch0 +instance Hashing.ContentAddressable (Branch0 m) where + contentHash = H.hashBranch0 deepReferents :: Branch0 m -> Set Referent deepReferents = R.dom . deepTerms @@ -388,18 +388,18 @@ deepEdits' = go id f (c, b) = go (addPrefix . Name.cons c) (head b) -- | Discards the history of a Branch0's children, recursively -discardHistory0 :: Applicative m => Branch0 m -> Branch0 m +discardHistory0 :: (Applicative m) => Branch0 m -> Branch0 m discardHistory0 = over children (fmap tweak) where tweak b = one (discardHistory0 (head b)) -- | Discards the history of a Branch and its children, recursively -discardHistory :: Applicative m => Branch m -> Branch m +discardHistory :: (Applicative m) => Branch m -> Branch m discardHistory b = one (discardHistory0 (head b)) -- `before b1 b2` is true if `b2` incorporates all of `b1` -before :: Monad m => Branch m -> Branch m -> m Bool +before :: (Monad m) => Branch m -> Branch m -> m Bool before (Branch b1) (Branch b2) = Causal.before b1 b2 -- | what does this do? —AI @@ -407,12 +407,12 @@ toList0 :: Branch0 m -> [(Path, Branch0 m)] toList0 = go Path.empty where go p b = - (p, b) : - ( Map.toList (_children b) - >>= ( \(seg, cb) -> - go (Path.snoc p seg) (head cb) - ) - ) + (p, b) + : ( Map.toList (_children b) + >>= ( \(seg, cb) -> + go (Path.snoc p seg) (head cb) + ) + ) -- returns `Nothing` if no Branch at `path` or if Branch is empty at `path` getAt :: @@ -450,7 +450,7 @@ isEmpty :: Branch m -> Bool isEmpty = (== empty) -- | Perform an update over the current branch and create a new causal step. -step :: Applicative m => (Branch0 m -> Branch0 m) -> Branch m -> Branch m +step :: (Applicative m) => (Branch0 m -> Branch0 m) -> Branch m -> Branch m step f = runIdentity . stepM (Identity . f) -- | Perform an update over the current branch and create a new causal step. @@ -459,14 +459,14 @@ stepM f = \case Branch (Causal.One _h _eh e) | e == empty0 -> Branch . Causal.one <$> f empty0 b -> mapMOf history (Causal.stepDistinctM f) b -cons :: Applicative m => Branch0 m -> Branch m -> Branch m +cons :: (Applicative m) => Branch0 m -> Branch m -> Branch m cons = step . const isOne :: Branch m -> Bool isOne (Branch Causal.One {}) = True isOne _ = False -uncons :: Applicative m => Branch m -> m (Maybe (Branch0 m, Branch m)) +uncons :: (Applicative m) => Branch m -> m (Maybe (Branch0 m, Branch m)) uncons (Branch b) = go <$> Causal.uncons b where go = over (_Just . 
_2) Branch @@ -518,7 +518,7 @@ stepManyAtM actions startBranch = do -- starting at the leaves, apply `f` to every level of the branch. stepEverywhere :: - Applicative m => (Branch0 m -> Branch0 m) -> (Branch0 m -> Branch0 m) + (Applicative m) => (Branch0 m -> Branch0 m) -> (Branch0 m -> Branch0 m) stepEverywhere f Branch0 {..} = f (branch0 _terms _types children _edits) where children = fmap (step $ stepEverywhere f) _children @@ -533,18 +533,18 @@ getChildBranch seg b = fromMaybe empty $ Map.lookup seg (_children b) setChildBranch :: NameSegment -> Branch m -> Branch0 m -> Branch0 m setChildBranch seg b = over children (updateChildren seg b) -getPatch :: Applicative m => NameSegment -> Branch0 m -> m Patch +getPatch :: (Applicative m) => NameSegment -> Branch0 m -> m Patch getPatch seg b = case Map.lookup seg (_edits b) of Nothing -> pure Patch.empty Just (_, p) -> p -getMaybePatch :: Applicative m => NameSegment -> Branch0 m -> m (Maybe Patch) +getMaybePatch :: (Applicative m) => NameSegment -> Branch0 m -> m (Maybe Patch) getMaybePatch seg b = case Map.lookup seg (_edits b) of Nothing -> pure Nothing Just (_, p) -> Just <$> p modifyPatches :: - Monad m => NameSegment -> (Patch -> Patch) -> Branch0 m -> m (Branch0 m) + (Monad m) => NameSegment -> (Patch -> Patch) -> Branch0 m -> m (Branch0 m) modifyPatches seg f = mapMOf edits update where update m = do @@ -554,7 +554,7 @@ modifyPatches seg f = mapMOf edits update let h = H.hashPatch p' pure $ Map.insert seg (PatchHash h, pure p') m -replacePatch :: Applicative m => NameSegment -> Patch -> Branch0 m -> Branch0 m +replacePatch :: (Applicative m) => NameSegment -> Patch -> Branch0 m -> Branch0 m replacePatch n p = over edits (Map.insert n (PatchHash (H.hashPatch p), pure p)) deletePatch :: NameSegment -> Branch0 m -> Branch0 m @@ -573,7 +573,7 @@ updateChildren seg updatedChild = -- Modify the Branch at `path` with `f`, after creating it if necessary. -- Because it's a `Branch`, it overwrites the history at `path`. modifyAt :: - Applicative m => + (Applicative m) => Path -> (Branch m -> Branch m) -> Branch m -> @@ -584,8 +584,8 @@ modifyAt path f = runIdentity . modifyAtM path (pure . f) -- Because it's a `Branch`, it overwrites the history at `path`. modifyAtM :: forall n m. - Functor n => - Applicative m => -- because `Causal.cons` uses `pure` + (Functor n) => + (Applicative m) => -- because `Causal.cons` uses `pure` Path -> (Branch m -> n (Branch m)) -> Branch m -> @@ -700,14 +700,14 @@ deleteTypeName r n b over types (Star3.deletePrimaryD1 (r, n)) b deleteTypeName _ _ b = b -lca :: Monad m => Branch m -> Branch m -> m (Maybe (Branch m)) +lca :: (Monad m) => Branch m -> Branch m -> m (Maybe (Branch m)) lca (Branch a) (Branch b) = fmap Branch <$> Causal.lca a b -transform :: Functor m => (forall a. m a -> n a) -> Branch m -> Branch n +transform :: (Functor m) => (forall a. m a -> n a) -> Branch m -> Branch n transform f b = case _history b of causal -> Branch . Causal.transform f $ transformB0s f causal where - transformB0 :: Functor m => (forall a. m a -> n a) -> Branch0 m -> Branch0 n + transformB0 :: (Functor m) => (forall a. m a -> n a) -> Branch0 m -> Branch0 n transformB0 f b = b { _children = transform f <$> _children b, @@ -715,7 +715,7 @@ transform f b = case _history b of } transformB0s :: - Functor m => + (Functor m) => (forall a. m a -> n a) -> Causal m (Branch0 m) -> Causal m (Branch0 n) @@ -733,7 +733,7 @@ children0 = children .> itraversed <. (history . 
Causal.head_) -- the existing base if there are no) consBranchSnapshot :: forall m. - Monad m => + (Monad m) => Branch m -> Branch m -> Branch m diff --git a/parser-typechecker/src/Unison/Codebase/Branch/BranchDiff.hs b/parser-typechecker/src/Unison/Codebase/Branch/BranchDiff.hs index a7e2e81f9..b53128cde 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch/BranchDiff.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch/BranchDiff.hs @@ -25,7 +25,7 @@ data BranchDiff = BranchDiff } deriving (Eq, Ord, Show) -diff0 :: Monad m => Branch0 m -> Branch0 m -> m BranchDiff +diff0 :: (Monad m) => Branch0 m -> Branch0 m -> m BranchDiff diff0 old new = do newEdits <- sequenceA $ snd <$> _edits new oldEdits <- sequenceA $ snd <$> _edits old diff --git a/parser-typechecker/src/Unison/Codebase/Branch/Merge.hs b/parser-typechecker/src/Unison/Codebase/Branch/Merge.hs index a6a4c73da..096601a8b 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch/Merge.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch/Merge.hs @@ -40,7 +40,7 @@ data MergeMode = RegularMerge | SquashMerge deriving (Eq, Ord, Show) merge'' :: forall m. - Monad m => + (Monad m) => (Branch m -> Branch m -> m (Maybe (Branch m))) -> -- lca calculator MergeMode -> Branch m -> @@ -111,7 +111,7 @@ merge'' lca mode (Branch x) (Branch y) = merge0 :: forall m. - Monad m => + (Monad m) => (Branch m -> Branch m -> m (Maybe (Branch m))) -> MergeMode -> Branch0 m -> diff --git a/parser-typechecker/src/Unison/Codebase/Branch/Names.hs b/parser-typechecker/src/Unison/Codebase/Branch/Names.hs index 2b15f3147..7bd594693 100644 --- a/parser-typechecker/src/Unison/Codebase/Branch/Names.hs +++ b/parser-typechecker/src/Unison/Codebase/Branch/Names.hs @@ -40,7 +40,7 @@ toNames b = -- This stops searching for a given HashQualified once it encounters -- any term or type in any Branch0 that satisfies that HashQualified. findHistoricalHQs :: - Monad m => + (Monad m) => Set (HashQualified Name) -> Branch m -> m (Set (HashQualified Name), Names) @@ -50,7 +50,7 @@ findHistoricalHQs = (\hq r n -> HQ.matchesNamedReference n r hq) findHistoricalRefs :: - Monad m => + (Monad m) => Set LabeledDependency -> Branch m -> m (Set LabeledDependency, Names) @@ -60,7 +60,7 @@ findHistoricalRefs = (\query r _n -> LD.fold (== r) (const False) query) findHistoricalRefs' :: - Monad m => + (Monad m) => Set Reference -> Branch m -> m (Set Reference, Names) diff --git a/parser-typechecker/src/Unison/Codebase/BranchDiff.hs b/parser-typechecker/src/Unison/Codebase/BranchDiff.hs index 4cf9f5616..2812b2455 100644 --- a/parser-typechecker/src/Unison/Codebase/BranchDiff.hs +++ b/parser-typechecker/src/Unison/Codebase/BranchDiff.hs @@ -46,7 +46,7 @@ data BranchDiff = BranchDiff } deriving stock (Generic, Show) -diff0 :: forall m. Monad m => Branch0 m -> Branch0 m -> m BranchDiff +diff0 :: forall m. (Monad m) => Branch0 m -> Branch0 m -> m BranchDiff diff0 old new = BranchDiff terms types <$> patchDiff old new where (terms, types) = @@ -56,7 +56,7 @@ diff0 old new = BranchDiff terms types <$> patchDiff old new (deepr4ToSlice (Branch.deepTypes old) (Branch.deepTypeMetadata old)) (deepr4ToSlice (Branch.deepTypes new) (Branch.deepTypeMetadata new)) -patchDiff :: forall m. Monad m => Branch0 m -> Branch0 m -> m (Map Name (DiffType PatchDiff)) +patchDiff :: forall m. 
(Monad m) => Branch0 m -> Branch0 m -> m (Map Name (DiffType PatchDiff)) patchDiff old new = do let oldDeepEdits, newDeepEdits :: Map Name (PatchHash, m Patch) oldDeepEdits = Branch.deepEdits' old @@ -80,7 +80,7 @@ patchDiff old new = do pure $ added <> removed <> modified deepr4ToSlice :: - Ord r => + (Ord r) => R.Relation r Name -> Metadata.R4 r Name -> NamespaceSlice r @@ -118,13 +118,13 @@ computeSlices oldTerms newTerms oldTypes newTypes = (termsOut, typesOut) tremovedMetadata = removedMetadata oldTypes newTypes } - allNames :: Ord r => NamespaceSlice r -> NamespaceSlice r -> Map r (Set Name, Set Name) + allNames :: (Ord r) => NamespaceSlice r -> NamespaceSlice r -> Map r (Set Name, Set Name) allNames old new = R.outerJoinDomMultimaps (names old) (names new) allAdds, allRemoves :: forall r. - Ord r => + (Ord r) => Map r (Set Name, Set Name) -> Map Name (Set r, Set r) -> Relation r Name @@ -147,26 +147,26 @@ computeSlices oldTerms newTerms oldTypes newTypes = (termsOut, typesOut) -- renames and stuff, name changes without a reference change remainingNameChanges :: forall r. - Ord r => + (Ord r) => Map r (Set Name, Set Name) -> Map r (Set Name, Set Name) remainingNameChanges = Map.filter (\(old, new) -> not (null old) && not (null new) && old /= new) - allNamespaceUpdates :: Ord r => NamespaceSlice r -> NamespaceSlice r -> Map Name (Set r, Set r) + allNamespaceUpdates :: (Ord r) => NamespaceSlice r -> NamespaceSlice r -> Map Name (Set r, Set r) allNamespaceUpdates old new = Map.filter f $ R.innerJoinRanMultimaps (names old) (names new) where f (old, new) = old /= new - addedMetadata :: Ord r => NamespaceSlice r -> NamespaceSlice r -> Relation3 r Name Metadata.Value + addedMetadata :: (Ord r) => NamespaceSlice r -> NamespaceSlice r -> Relation3 r Name Metadata.Value addedMetadata old new = metadata new `R3.difference` metadata old - removedMetadata :: Ord r => NamespaceSlice r -> NamespaceSlice r -> Relation3 r Name Metadata.Value + removedMetadata :: (Ord r) => NamespaceSlice r -> NamespaceSlice r -> Relation3 r Name Metadata.Value removedMetadata old new = metadata old `R3.difference` metadata new -- the namespace updates that aren't propagated -namespaceUpdates :: Ord r => DiffSlice r -> Map Name (Set r, Set r) +namespaceUpdates :: (Ord r) => DiffSlice r -> Map Name (Set r, Set r) namespaceUpdates s = Map.mapMaybeWithKey f (tallnamespaceUpdates s) where f name (olds, news) = @@ -174,7 +174,7 @@ namespaceUpdates s = Map.mapMaybeWithKey f (tallnamespaceUpdates s) in if null news' then Nothing else Just (olds, news') propagated = propagatedUpdates s -propagatedUpdates :: Ord r => DiffSlice r -> Map Name (Set r) +propagatedUpdates :: (Ord r) => DiffSlice r -> Map Name (Set r) propagatedUpdates s = Map.fromList [ (name, news) diff --git a/parser-typechecker/src/Unison/Codebase/BranchUtil.hs b/parser-typechecker/src/Unison/Codebase/BranchUtil.hs index 33c160503..ded59dc2a 100644 --- a/parser-typechecker/src/Unison/Codebase/BranchUtil.hs +++ b/parser-typechecker/src/Unison/Codebase/BranchUtil.hs @@ -13,8 +13,6 @@ module Unison.Codebase.BranchUtil -- * Branch modifications makeSetBranch, - makeDeleteBranch, - makeObliterateBranch, makeAddTypeName, makeDeleteTypeName, makeAddTermName, @@ -24,7 +22,6 @@ module Unison.Codebase.BranchUtil ) where -import Control.Lens import qualified Data.Map as Map import qualified Data.Set as Set import Unison.Codebase.Branch (Branch, Branch0) @@ -51,7 +48,7 @@ import qualified Unison.Util.Relation4 as R4 import qualified Unison.Util.Star3 as Star3 -- | Creates 
a branch containing all of the given names, with a single history node. -fromNames :: Monad m => Names -> Branch m +fromNames :: (Monad m) => Names -> Branch m fromNames names0 = Branch.stepManyAt (typeActions <> termActions) Branch.empty where typeActions = map doType . R.toList $ Names.types names0 @@ -122,7 +119,7 @@ makeAddTermName (p, name) r md = (p, Branch.addTermName r name md) makeDeleteTermName :: Path.Split -> Referent -> (Path, Branch0 m -> Branch0 m) makeDeleteTermName (p, name) r = (p, Branch.deleteTermName r name) -makeReplacePatch :: Applicative m => Path.Split -> Patch -> (Path, Branch0 m -> Branch0 m) +makeReplacePatch :: (Applicative m) => Path.Split -> Patch -> (Path, Branch0 m -> Branch0 m) makeReplacePatch (p, name) patch = (p, Branch.replacePatch name patch) makeDeletePatch :: Path.Split -> (Path, Branch0 m -> Branch0 m) @@ -137,18 +134,3 @@ makeDeleteTypeName (p, name) r = (p, Branch.deleteTypeName r name) makeSetBranch :: Path.Split -> Branch m -> (Path, Branch0 m -> Branch0 m) makeSetBranch (p, name) b = (p, Branch.setChildBranch name b) - --- | "delete"s a branch by cons'ing an empty Branch0 onto the history at that location. --- See also 'makeObliterateBranch'. -makeDeleteBranch :: - Applicative m => - Path.Split -> - (Path, Branch0 m -> Branch0 m) -makeDeleteBranch (p, name) = (p, Branch.children . ix name %~ Branch.cons Branch.empty0) - --- | Erase a branch and its history --- See also 'makeDeleteBranch'. --- Note that this requires a AllowRewritingHistory update strategy to behave correctly. -makeObliterateBranch :: - Path.Split -> (Path, Branch0 m -> Branch0 m) -makeObliterateBranch p = makeSetBranch p Branch.empty diff --git a/parser-typechecker/src/Unison/Codebase/Causal.hs b/parser-typechecker/src/Unison/Codebase/Causal.hs index 3f318c5de..ae2764038 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal.hs @@ -51,13 +51,14 @@ import Unison.Codebase.Causal.Type pattern Merge, pattern One, ) +import Unison.Hash (HashFor (HashFor)) +import qualified Unison.Hashing.V2 as Hashing (ContentAddressable) import qualified Unison.Hashing.V2.Convert as Hashing -import Unison.Hashing.V2.Hashable (HashFor (HashFor), Hashable) import Unison.Prelude import Prelude hiding (head, read, tail) -- | Focus the current head, keeping the hash up to date. -head_ :: Hashable e => Lens.Lens' (Causal m e) e +head_ :: (Hashing.ContentAddressable e) => Lens.Lens' (Causal m e) e head_ = Lens.lens getter setter where getter = head @@ -73,7 +74,7 @@ head_ = Lens.lens getter setter -- (or is equal to `c2` if `c1` changes nothing). squashMerge' :: forall m e. - (Monad m, Hashable e, Eq e) => + (Monad m, Hashing.ContentAddressable e, Eq e) => (Causal m e -> Causal m e -> m (Maybe (Causal m e))) -> (e -> m e) -> (Maybe e -> e -> e -> m e) -> @@ -92,7 +93,7 @@ squashMerge' lca discardHistory combine c1 c2 = do threeWayMerge :: forall m e. - (Monad m, Hashable e) => + (Monad m, Hashing.ContentAddressable e) => (Maybe e -> e -> e -> m e) -> Causal m e -> Causal m e -> @@ -101,7 +102,7 @@ threeWayMerge = threeWayMerge' lca threeWayMerge' :: forall m e. - (Monad m, Hashable e) => + (Monad m, Hashing.ContentAddressable e) => (Causal m e -> Causal m e -> m (Maybe (Causal m e))) -> (Maybe e -> e -> e -> m e) -> Causal m e -> @@ -121,7 +122,7 @@ threeWayMerge' lca combine c1 c2 = do -- `True` if `h` is found in the history of `c` within `maxDepth` path length -- from the tip of `c` -beforeHash :: forall m e. 
Monad m => Word -> CausalHash -> Causal m e -> m Bool +beforeHash :: forall m e. (Monad m) => Word -> CausalHash -> Causal m e -> m Bool beforeHash maxDepth h c = Reader.runReaderT (State.evalStateT (go c) Set.empty) (0 :: Word) where @@ -137,11 +138,11 @@ beforeHash maxDepth h c = State.modify' (<> Set.fromList cs) Monad.anyM (Reader.local (1 +) . go) unseens -stepDistinct :: (Applicative m, Eq e, Hashable e) => (e -> e) -> Causal m e -> Causal m e +stepDistinct :: (Applicative m, Eq e, Hashing.ContentAddressable e) => (e -> e) -> Causal m e -> Causal m e stepDistinct f c = f (head c) `consDistinct` c stepDistinctM :: - (Applicative m, Functor n, Eq e, Hashable e) => + (Applicative m, Functor n, Eq e, Hashing.ContentAddressable e) => (e -> n e) -> Causal m e -> n (Causal m e) @@ -149,12 +150,12 @@ stepDistinctM f c = (`consDistinct` c) <$> f (head c) -- | Causal construction should go through here for uniformity; -- with an exception for `one`, which avoids an Applicative constraint. -fromList :: (Applicative m, Hashable e) => e -> [Causal m e] -> Causal m e +fromList :: (Applicative m, Hashing.ContentAddressable e) => e -> [Causal m e] -> Causal m e fromList e cs = fromListM e (map (\c -> (currentHash c, pure c)) cs) -- | Construct a causal from a list of predecessors. The predecessors may be given in any order. -fromListM :: Hashable e => e -> [(CausalHash, m (Causal m e))] -> Causal m e +fromListM :: (Hashing.ContentAddressable e) => e -> [(CausalHash, m (Causal m e))] -> Causal m e fromListM e ts = case ts of [] -> UnsafeOne ch eh e @@ -164,41 +165,41 @@ fromListM e ts = (ch, eh) = (Hashing.hashCausal e (Set.fromList (map fst ts))) -- | An optimized variant of 'fromListM' for when it is known we have 2+ predecessors (merge node). -mergeNode :: Hashable e => e -> Map (CausalHash) (m (Causal m e)) -> Causal m e +mergeNode :: (Hashing.ContentAddressable e) => e -> Map (CausalHash) (m (Causal m e)) -> Causal m e mergeNode newHead predecessors = let (ch, eh) = Hashing.hashCausal newHead (Map.keysSet predecessors) in UnsafeMerge ch eh newHead predecessors -- duplicated logic here instead of delegating to `fromList` to avoid `Applicative m` constraint. -one :: Hashable e => e -> Causal m e +one :: (Hashing.ContentAddressable e) => e -> Causal m e one e = UnsafeOne ch eh e where (ch, eh) = Hashing.hashCausal e mempty -cons :: (Applicative m, Hashable e) => e -> Causal m e -> Causal m e +cons :: (Applicative m, Hashing.ContentAddressable e) => e -> Causal m e -> Causal m e cons e tail = fromList e [tail] -consDistinct :: (Applicative m, Eq e, Hashable e) => e -> Causal m e -> Causal m e +consDistinct :: (Applicative m, Eq e, Hashing.ContentAddressable e) => e -> Causal m e -> Causal m e consDistinct e tl = if head tl == e then tl else cons e tl -uncons :: Applicative m => Causal m e -> m (Maybe (e, Causal m e)) +uncons :: (Applicative m) => Causal m e -> m (Maybe (e, Causal m e)) uncons c = case c of Cons _ _ e (_, tl) -> fmap (e,) . Just <$> tl _ -> pure Nothing -- it's okay to call "Unsafe"* here with the existing hashes because `nt` can't -- affect `e`. -transform :: Functor m => (forall a. m a -> n a) -> Causal m e -> Causal n e +transform :: (Functor m) => (forall a. 
m a -> n a) -> Causal m e -> Causal n e transform nt c = case c of One h eh e -> UnsafeOne h eh e Cons h eh e (ht, tl) -> UnsafeCons h eh e (ht, nt (transform nt <$> tl)) Merge h eh e tls -> UnsafeMerge h eh e $ Map.map (\mc -> nt (transform nt <$> mc)) tls -- "unsafe" because the hashes will be wrong if `f` affects aspects of `e` that impact hashing -unsafeMapHashPreserving :: forall m e e2. Functor m => (e -> e2) -> Causal m e -> Causal m e2 +unsafeMapHashPreserving :: forall m e e2. (Functor m) => (e -> e2) -> Causal m e -> Causal m e2 unsafeMapHashPreserving f c = case c of One h eh e -> UnsafeOne h (retagValueHash eh) (f e) Cons h eh e (ht, tl) -> UnsafeCons h (retagValueHash eh) (f e) (ht, unsafeMapHashPreserving f <$> tl) diff --git a/parser-typechecker/src/Unison/Codebase/Causal/Type.hs b/parser-typechecker/src/Unison/Codebase/Causal/Type.hs index d3ba902e8..d3f0aebc5 100644 --- a/parser-typechecker/src/Unison/Codebase/Causal/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Causal/Type.hs @@ -24,18 +24,18 @@ import Prelude hiding (head, read, tail) {- `Causal a` has 5 operations, specified algebraically here: -* `before : Causal m a -> Causal m a -> m Bool` defines a partial order on +\* `before : Causal m a -> Causal m a -> m Bool` defines a partial order on `Causal`. -* `head : Causal m a -> a`, which represents the "latest" `a` value in a causal +\* `head : Causal m a -> a`, which represents the "latest" `a` value in a causal chain. -* `one : a -> Causal m a`, satisfying `head (one hd) == hd` -* `cons : a -> Causal a -> Causal a`, satisfying `head (cons hd tl) == hd` and +\* `one : a -> Causal m a`, satisfying `head (one hd) == hd` +\* `cons : a -> Causal a -> Causal a`, satisfying `head (cons hd tl) == hd` and also `before tl (cons hd tl)`. -* `merge : CommutativeSemigroup a => Causal a -> Causal a -> Causal a`, which is +\* `merge : CommutativeSemigroup a => Causal a -> Causal a -> Causal a`, which is commutative (but not associative) and satisfies: * `before c1 (merge c1 c2)` * `before c2 (merge c1 c2)` -* `sequence : Causal a -> Causal a -> Causal a`, which is defined as +\* `sequence : Causal a -> Causal a -> Causal a`, which is defined as `sequence c1 c2 = cons (head c2) (merge c1 c2)`. * `before c1 (sequence c1 c2)` * `head (sequence c1 c2) == head c2` @@ -85,11 +85,11 @@ predecessors (UnsafeOne _ _ _) = Seq.empty predecessors (UnsafeCons _ _ _ (_, t)) = Seq.singleton t predecessors (UnsafeMerge _ _ _ ts) = Seq.fromList $ Map.elems ts -before :: Monad m => Causal m e -> Causal m e -> m Bool +before :: (Monad m) => Causal m e -> Causal m e -> m Bool before a b = (== Just a) <$> lca a b -- Find the lowest common ancestor of two causals. -lca :: Monad m => Causal m e -> Causal m e -> m (Maybe (Causal m e)) +lca :: (Monad m) => Causal m e -> Causal m e -> m (Maybe (Causal m e)) lca a b = lca' (Seq.singleton $ pure a) (Seq.singleton $ pure b) @@ -97,7 +97,7 @@ lca a b = -- element of `ys`. -- This is a breadth-first search used in the implementation of `lca a b`. 
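For the `lca'` search described just above: both causals' frontiers are expanded breadth-first, and the first node reached from one side that has already been seen from the other is taken as the common ancestor. A pure, self-contained sketch of that idea over a plain parent relation (the real version is monadic, works over `Causal m e`, and keys on causal hashes):

```haskell
-- Sketch only: breadth-first common-ancestor search over a parent relation.
import qualified Data.Set as Set

lcaBfs :: (Ord h) => (h -> [h]) -> h -> h -> Maybe h
lcaBfs parents a b = go (Set.singleton a) (Set.singleton b) [a] [b]
  where
    -- A node on one frontier that the other side has already seen is a
    -- common ancestor; by hop count it is (roughly) the lowest one.
    go seenA seenB frontierA frontierB
      | null frontierA && null frontierB = Nothing
      | (h : _) <- hits = Just h
      | otherwise =
          go (seenA <> Set.fromList nextA) (seenB <> Set.fromList nextB) nextA nextB
      where
        hits =
          filter (`Set.member` seenB) frontierA
            ++ filter (`Set.member` seenA) frontierB
        expand seen frontier =
          filter (`Set.notMember` seen) (concatMap parents frontier)
        nextA = expand seenA frontierA
        nextB = expand seenB frontierB
```

This only captures the search shape; the real `lca'` interleaves effects because each predecessor is itself an `m (Causal m e)`, as its signature below shows.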
lca' :: - Monad m => + (Monad m) => Seq (m (Causal m e)) -> Seq (m (Causal m e)) -> m (Maybe (Causal m e)) diff --git a/parser-typechecker/src/Unison/Codebase/CodeLookup.hs b/parser-typechecker/src/Unison/Codebase/CodeLookup.hs index 8c33ee1d5..a4f500a35 100644 --- a/parser-typechecker/src/Unison/Codebase/CodeLookup.hs +++ b/parser-typechecker/src/Unison/Codebase/CodeLookup.hs @@ -27,7 +27,7 @@ instance (Ord v, Functor m) => Functor (CodeLookup v m) where md (Left e) = Left (f <$> e) md (Right d) = Right (f <$> d) -instance Monad m => Semigroup (CodeLookup v m a) where +instance (Monad m) => Semigroup (CodeLookup v m a) where c1 <> c2 = CodeLookup tm ty where tm id = do @@ -37,7 +37,7 @@ instance Monad m => Semigroup (CodeLookup v m a) where o <- getTypeDeclaration c1 id case o of Nothing -> getTypeDeclaration c2 id; Just _ -> pure o -instance Monad m => Monoid (CodeLookup v m a) where +instance (Monad m) => Monoid (CodeLookup v m a) where mempty = CodeLookup (const $ pure Nothing) (const $ pure Nothing) -- todo: can this be implemented in terms of TransitiveClosure.transitiveClosure? diff --git a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs index da6089a46..9df7d7fea 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/Git.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/Git.hs @@ -54,26 +54,28 @@ encodeFileName s = go ('$' : rem) = "$$" <> go rem go (c : rem) | elem @[] c "/\\:*?\"<>|" || not (Char.isPrint c && Char.isAscii c) = - "$x" <> encodeHex [c] <> "$" <> go rem + "$x" <> encodeHex [c] <> "$" <> go rem | otherwise = c : go rem go [] = [] encodeHex :: String -> String encodeHex = - Text.unpack . Text.toUpper . ByteString.encodeBase16 + Text.unpack + . Text.toUpper + . ByteString.encodeBase16 . encodeUtf8 . Text.pack in -- 'bare' suffix is to avoid clashes with non-bare repos initialized by earlier versions -- of ucm. go s <> "-bare" -gitCacheDir :: MonadIO m => Text -> m FilePath +gitCacheDir :: (MonadIO m) => Text -> m FilePath gitCacheDir url = getXdgDirectory XdgCache $ "unisonlanguage" "gitfiles" encodeFileName (Text.unpack url) -withStatus :: MonadIO m => String -> m a -> m a +withStatus :: (MonadIO m) => String -> m a -> m a withStatus str ma = do flushStr str a <- ma @@ -233,19 +235,19 @@ cloneIfMissing repo@(ReadGitRepo {url = uri}) localPath = do unless isGitDir . throwError $ GitError.UnrecognizableCheckoutDir repo localPath -- | See if `git` is on the system path. -checkForGit :: MonadIO m => MonadError GitProtocolError m => m () +checkForGit :: (MonadIO m) => (MonadError GitProtocolError m) => m () checkForGit = do gitPath <- liftIO $ findExecutable "git" when (isNothing gitPath) $ throwError GitError.NoGit -- | Returns the name of the default branch of a repository, if one exists. -getDefaultBranch :: MonadIO m => GitRepo -> m (Maybe Text) +getDefaultBranch :: (MonadIO m) => GitRepo -> m (Maybe Text) getDefaultBranch dir = liftIO $ do (Text.stripPrefix "refs/heads/" <$> gitTextIn dir ["symbolic-ref", "HEAD"]) $? pure Nothing -- | Does `git` recognize this directory as being managed by git? -isGitRepo :: MonadIO m => GitRepo -> m Bool +isGitRepo :: (MonadIO m) => GitRepo -> m Bool isGitRepo dir = liftIO $ (True <$ gitIn dir (["rev-parse"] ++ gitVerbosity)) $? pure False @@ -257,7 +259,7 @@ isEmptyGitRepo dir = liftIO do (gitTextIn dir (["rev-parse", "HEAD"] ++ gitVerbosity) $> False) $? 
pure True -- | Perform an IO action, passing any IO exception to `handler` -withIOError :: MonadIO m => IO a -> (IOException -> m a) -> m a +withIOError :: (MonadIO m) => IO a -> (IOException -> m a) -> m a withIOError action handler = liftIO (fmap Right action `Control.Exception.catch` (pure . Left)) >>= either handler pure @@ -289,27 +291,27 @@ setupGitDir dir = -- | Run a git command in the current work directory. -- Note: this should only be used for commands like 'clone' which don't interact with an -- existing repository. -gitGlobal :: MonadIO m => [Text] -> m () +gitGlobal :: (MonadIO m) => [Text] -> m () gitGlobal args = do when debugGit $ traceM (Text.unpack . Text.unwords $ ["$ git"] <> args) liftIO $ "git" $^ (args ++ gitVerbosity) -- | Run a git command in the repository at localPath -gitIn :: MonadIO m => GitRepo -> [Text] -> m () +gitIn :: (MonadIO m) => GitRepo -> [Text] -> m () gitIn localPath args = do when debugGit $ traceM (Text.unpack . Text.unwords $ ["$ git"] <> setupGitDir localPath <> args) liftIO $ "git" $^ (setupGitDir localPath <> args) -- | like 'gitIn', but silences all output from the command and returns whether the command -- succeeded. -gitInCaptured :: MonadIO m => GitRepo -> [Text] -> m (Bool, Text, Text) +gitInCaptured :: (MonadIO m) => GitRepo -> [Text] -> m (Bool, Text, Text) gitInCaptured localPath args = do when debugGit $ traceM (Text.unpack . Text.unwords $ ["$ git"] <> setupGitDir localPath <> args) (exitCode, stdout, stderr) <- UnliftIO.readProcessWithExitCode "git" (Text.unpack <$> setupGitDir localPath <> args) "" pure (exitCode == ExitSuccess, Text.pack stdout, Text.pack stderr) -- | Run a git command in the repository at localPath and capture stdout -gitTextIn :: MonadIO m => GitRepo -> [Text] -> m Text +gitTextIn :: (MonadIO m) => GitRepo -> [Text] -> m Text gitTextIn localPath args = do when debugGit $ traceM (Text.unpack . Text.unwords $ ["$ git"] <> setupGitDir localPath <> args) liftIO $ "git" $| setupGitDir localPath <> args diff --git a/parser-typechecker/src/Unison/Codebase/Editor/RemoteRepo.hs b/parser-typechecker/src/Unison/Codebase/Editor/RemoteRepo.hs index 7b1d13973..77b771614 100644 --- a/parser-typechecker/src/Unison/Codebase/Editor/RemoteRepo.hs +++ b/parser-typechecker/src/Unison/Codebase/Editor/RemoteRepo.hs @@ -1,6 +1,3 @@ -{-# LANGUAGE DuplicateRecordFields #-} -{-# LANGUAGE OverloadedLabels #-} -{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} module Unison.Codebase.Editor.RemoteRepo where @@ -26,6 +23,9 @@ data ShareCodeserver | CustomCodeserver CodeserverURI deriving stock (Eq, Ord, Show) +newtype ShareUserHandle = ShareUserHandle {shareUserHandleToText :: Text} + deriving stock (Eq, Ord, Show) + -- | -- >>> :set -XOverloadedLists -- >>> import Data.Maybe (fromJust) @@ -36,12 +36,12 @@ data ShareCodeserver -- "share" -- >>> displayShareCodeserver (CustomCodeserver . fromJust $ parseURI "https://share-next.unison-lang.org/api" >>= codeserverFromURI ) "unison" ["base", "List"] -- "share(https://share-next.unison-lang.org:443/api).unison.base.List" -displayShareCodeserver :: ShareCodeserver -> Text -> Path -> Text -displayShareCodeserver cs repo path = +displayShareCodeserver :: ShareCodeserver -> ShareUserHandle -> Path -> Text +displayShareCodeserver cs shareUser path = let shareServer = case cs of DefaultCodeserver -> "" CustomCodeserver cu -> "share(" <> tShow cu <> ")." 
- in shareServer <> repo <> maybePrintPath path + in shareServer <> shareUserHandleToText shareUser <> maybePrintPath path data ReadGitRepo = ReadGitRepo {url :: Text, ref :: Maybe Text} deriving stock (Eq, Ord, Show) @@ -117,7 +117,7 @@ data ReadGitRemoteNamespace = ReadGitRemoteNamespace data ReadShareRemoteNamespace = ReadShareRemoteNamespace { server :: ShareCodeserver, - repo :: Text, + repo :: ShareUserHandle, -- sch :: Maybe ShortCausalHash, -- maybe later path :: Path } @@ -153,7 +153,7 @@ data WriteGitRemotePath = WriteGitRemotePath data WriteShareRemotePath = WriteShareRemotePath { server :: ShareCodeserver, - repo :: Text, + repo :: ShareUserHandle, path :: Path } deriving stock (Eq, Show) diff --git a/parser-typechecker/src/Unison/Codebase/FileCodebase.hs b/parser-typechecker/src/Unison/Codebase/FileCodebase.hs index 03a5346ae..b5efd4e48 100644 --- a/parser-typechecker/src/Unison/Codebase/FileCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/FileCodebase.hs @@ -6,7 +6,7 @@ import Unison.Prelude (MonadIO) import UnliftIO.Directory (doesDirectoryExist) -- checks if a minimal codebase structure exists at `path` -codebaseExists :: MonadIO m => CodebasePath -> m Bool +codebaseExists :: (MonadIO m) => CodebasePath -> m Bool codebaseExists root = and <$> traverse doesDirectoryExist (minimalCodebaseStructure root) where diff --git a/parser-typechecker/src/Unison/Codebase/Init.hs b/parser-typechecker/src/Unison/Codebase/Init.hs index f2de5de2a..97708e248 100644 --- a/parser-typechecker/src/Unison/Codebase/Init.hs +++ b/parser-typechecker/src/Unison/Codebase/Init.hs @@ -10,6 +10,7 @@ module Unison.Codebase.Init InitResult (..), SpecifiedCodebase (..), MigrationStrategy (..), + BackupStrategy (..), Pretty, createCodebase, initCodebaseAndExit, @@ -47,11 +48,20 @@ data CodebaseLockOption = DoLock | DontLock +data BackupStrategy + = -- Create a backup of the codebase in the same directory as the codebase, + -- see 'backupCodebasePath'. + Backup + | -- Don't create a backup when migrating, this might be used if the caller has + -- already created a copy of the codebase for instance. + NoBackup + deriving stock (Show, Eq, Ord) + data MigrationStrategy = -- | Perform a migration immediately if one is required. 
- MigrateAutomatically + MigrateAutomatically BackupStrategy | -- | Prompt the user that a migration is about to occur, continue after acknownledgment - MigrateAfterPrompt + MigrateAfterPrompt BackupStrategy | -- | Triggers an 'OpenCodebaseRequiresMigration' error instead of migrating DontMigrate deriving stock (Show, Eq, Ord) @@ -86,7 +96,7 @@ data InitResult deriving (Show, Eq) createCodebaseWithResult :: - MonadIO m => + (MonadIO m) => Init m v a -> DebugName -> CodebasePath -> @@ -98,7 +108,7 @@ createCodebaseWithResult cbInit debugName dir lockOption action = errorMessage -> (dir, (CouldntCreateCodebase errorMessage)) withOpenOrCreateCodebase :: - MonadIO m => + (MonadIO m) => Init m v a -> DebugName -> CodebaseInitOptions -> @@ -136,7 +146,7 @@ withOpenOrCreateCodebase cbInit debugName initOptions lockOption migrationStrate OpenCodebaseRequiresMigration {} -> pure (Left (resolvedPath, InitErrorOpen err)) OpenCodebaseFileLockFailed {} -> pure (Left (resolvedPath, InitErrorOpen err)) -createCodebase :: MonadIO m => Init m v a -> DebugName -> CodebasePath -> CodebaseLockOption -> (Codebase m v a -> m r) -> m (Either Pretty r) +createCodebase :: (MonadIO m) => Init m v a -> DebugName -> CodebasePath -> CodebaseLockOption -> (Codebase m v a -> m r) -> m (Either Pretty r) createCodebase cbInit debugName path lockOption action = do prettyDir <- P.string <$> canonicalizePath path withCreatedCodebase cbInit debugName path lockOption action <&> mapLeft \case @@ -149,7 +159,7 @@ createCodebase cbInit debugName path lockOption action = do -- previously: initCodebaseOrExit :: CodebasePath -> m (m (), Codebase m v a) -- previously: FileCodebase.initCodebase :: CodebasePath -> m (m (), Codebase m v a) -withNewUcmCodebaseOrExit :: MonadIO m => Init m Symbol Ann -> DebugName -> CodebasePath -> CodebaseLockOption -> (Codebase m Symbol Ann -> m r) -> m r +withNewUcmCodebaseOrExit :: (MonadIO m) => Init m Symbol Ann -> DebugName -> CodebasePath -> CodebaseLockOption -> (Codebase m Symbol Ann -> m r) -> m r withNewUcmCodebaseOrExit cbInit debugName path lockOption action = do prettyDir <- P.string <$> canonicalizePath path let codebaseSetup codebase = do @@ -161,13 +171,13 @@ withNewUcmCodebaseOrExit cbInit debugName path lockOption action = do Right result -> pure result -- | try to init a codebase where none exists and then exit regardless (i.e. 
`ucm --codebase dir init`) -initCodebaseAndExit :: MonadIO m => Init m Symbol Ann -> DebugName -> Maybe CodebasePath -> CodebaseLockOption -> m () +initCodebaseAndExit :: (MonadIO m) => Init m Symbol Ann -> DebugName -> Maybe CodebasePath -> CodebaseLockOption -> m () initCodebaseAndExit i debugName mdir lockOption = do codebaseDir <- Codebase.getCodebaseDir mdir withNewUcmCodebaseOrExit i debugName codebaseDir lockOption (const $ pure ()) withTemporaryUcmCodebase :: - MonadUnliftIO m => + (MonadUnliftIO m) => Init m Symbol Ann -> DebugName -> CodebaseLockOption -> diff --git a/parser-typechecker/src/Unison/Codebase/IntegrityCheck.hs b/parser-typechecker/src/Unison/Codebase/IntegrityCheck.hs index 2fcb8feac..008fb344b 100644 --- a/parser-typechecker/src/Unison/Codebase/IntegrityCheck.hs +++ b/parser-typechecker/src/Unison/Codebase/IntegrityCheck.hs @@ -28,10 +28,10 @@ import qualified U.Codebase.Sqlite.Branch.Full as DBBranch import qualified U.Codebase.Sqlite.DbId as DB import qualified U.Codebase.Sqlite.Operations as Ops import qualified U.Codebase.Sqlite.Queries as Q -import qualified U.Util.Hash as Hash import qualified Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema1To2.DbHelpers as Helpers import qualified Unison.Debug as Debug import Unison.Hash (Hash) +import qualified Unison.Hash as Hash import Unison.Prelude import qualified Unison.Sqlite as Sqlite import Unison.Util.Monoid (foldMapM) @@ -203,7 +203,7 @@ integrityCheckAllBranches = do pure (Set.singleton $ MismatchedObjectForChild ch branchObjId foundBranchId) | otherwise -> pure mempty -prettyPrintIntegrityErrors :: Foldable f => f IntegrityError -> P.Pretty P.ColorText +prettyPrintIntegrityErrors :: (Foldable f) => f IntegrityError -> P.Pretty P.ColorText prettyPrintIntegrityErrors xs | null xs = mempty | otherwise = diff --git a/parser-typechecker/src/Unison/Codebase/MainTerm.hs b/parser-typechecker/src/Unison/Codebase/MainTerm.hs index 1564b8020..d62df18b3 100644 --- a/parser-typechecker/src/Unison/Codebase/MainTerm.hs +++ b/parser-typechecker/src/Unison/Codebase/MainTerm.hs @@ -56,25 +56,25 @@ getMainTerm loadTypeOfTerm parseNames mainName mainType = _ -> pure (error "multiple matching refs") -- TODO: make a real exception -- forall x. 
'{ io2.IO, Exception } x -builtinMain :: Var v => a -> Type.Type v a +builtinMain :: (Var v) => a -> Type.Type v a builtinMain a = let result = Var.named "result" in Type.forall a result (builtinMainWithResultType a (Type.var a result)) -- '{io2.IO, Exception} res -builtinMainWithResultType :: Var v => a -> Type.Type v a -> Type.Type v a +builtinMainWithResultType :: (Var v) => a -> Type.Type v a -> Type.Type v a builtinMainWithResultType a res = Type.arrow a (Type.ref a DD.unitRef) io where io = Type.effect a [Type.builtinIO a, DD.exceptionType a] res -- [Result] -resultArr :: Ord v => a -> Type.Type v a +resultArr :: (Ord v) => a -> Type.Type v a resultArr a = Type.app a (Type.ref a Type.listRef) (Type.ref a DD.testResultRef) -builtinResultArr :: Ord v => a -> Type.Type v a +builtinResultArr :: (Ord v) => a -> Type.Type v a builtinResultArr a = Type.effect a [Type.builtinIO a, DD.exceptionType a] (resultArr a) -- '{io2.IO} [Result] -builtinTest :: Ord v => a -> Type.Type v a +builtinTest :: (Ord v) => a -> Type.Type v a builtinTest a = Type.arrow a (Type.ref a DD.unitRef) (builtinResultArr a) diff --git a/parser-typechecker/src/Unison/Codebase/Metadata.hs b/parser-typechecker/src/Unison/Codebase/Metadata.hs index d91677afb..4ca5c0bf6 100644 --- a/parser-typechecker/src/Unison/Codebase/Metadata.hs +++ b/parser-typechecker/src/Unison/Codebase/Metadata.hs @@ -7,7 +7,6 @@ import Unison.Reference (Reference) import qualified Unison.Util.List as List import Unison.Util.Relation (Relation) import qualified Unison.Util.Relation as R -import qualified Unison.Util.Relation3 as R3 import Unison.Util.Relation4 (Relation4) import qualified Unison.Util.Relation4 as R4 import Unison.Util.Star3 (Star3) @@ -32,7 +31,7 @@ starToR4 :: (Ord r, Ord n) => Star r n -> Relation4 r n Type Value starToR4 = R4.fromList . starToR4List -- | Flattens a Metadata.Star into a 4-tuple. -starToR4List :: Ord r => Star r n -> [(r, n, Type, Value)] +starToR4List :: (Ord r) => Star r n -> [(r, n, Type, Value)] starToR4List s = [ (f, x, y, z) | f <- Set.toList (Star3.fact s), @@ -40,14 +39,14 @@ starToR4List s = (y, z) <- Set.toList (R.lookupDom f (Star3.d3 s)) ] -hasMetadata :: Ord a => a -> Type -> Value -> Star a n -> Bool +hasMetadata :: (Ord a) => a -> Type -> Value -> Star a n -> Bool hasMetadata a t v = Set.member (t, v) . R.lookupDom a . Star3.d3 -hasMetadataWithType' :: Ord a => a -> Type -> R4 a n -> Bool -hasMetadataWithType' a t r = - fromMaybe False $ Set.member t . R3.d2s <$> (Map.lookup a $ R4.d1 r) +hasMetadataWithType' :: (Ord a) => a -> Type -> R4 a n -> Bool +hasMetadataWithType' = + R4.memberD13 -hasMetadataWithType :: Ord a => a -> Type -> Star a n -> Bool +hasMetadataWithType :: (Ord a) => a -> Type -> Star a n -> Bool hasMetadataWithType a t = Set.member t . R.lookupDom a . 
Star3.d2 inserts :: (Ord a, Ord n) => [(a, Type, Value)] -> Star a n -> Star a n diff --git a/parser-typechecker/src/Unison/Codebase/Path.hs b/parser-typechecker/src/Unison/Codebase/Path.hs index 49484c0d9..18f75f64f 100644 --- a/parser-typechecker/src/Unison/Codebase/Path.hs +++ b/parser-typechecker/src/Unison/Codebase/Path.hs @@ -527,7 +527,7 @@ instance Convert (path, NameSegment) (path, HQ'.HQSegment) where convert (path, name) = (path, HQ'.fromName name) -instance Convert path0 path1 => Convert (path0, name) (path1, name) where +instance (Convert path0 path1) => Convert (path0, name) (path1, name) where convert = over _1 convert diff --git a/parser-typechecker/src/Unison/Codebase/RootBranchCache.hs b/parser-typechecker/src/Unison/Codebase/RootBranchCache.hs index 6fed979dc..1c7bd0eab 100644 --- a/parser-typechecker/src/Unison/Codebase/RootBranchCache.hs +++ b/parser-typechecker/src/Unison/Codebase/RootBranchCache.hs @@ -36,7 +36,7 @@ data RootBranchCacheVal -- This is isomorphic to @TMVar (Maybe (Branch Sqlite.Transaction))@ newtype RootBranchCache = RootBranchCache (TVar RootBranchCacheVal) -newEmptyRootBranchCacheIO :: MonadIO m => m RootBranchCache +newEmptyRootBranchCacheIO :: (MonadIO m) => m RootBranchCache newEmptyRootBranchCacheIO = liftIO (coerce $ newTVarIO Empty) newEmptyRootBranchCache :: STM RootBranchCache @@ -57,7 +57,7 @@ readRootBranchCache v = ConcurrentModification -> retrySTM Full x -> pure (Just x) -fetchRootBranch :: forall m. MonadUnliftIO m => RootBranchCache -> m (Branch Sqlite.Transaction) -> m (Branch Sqlite.Transaction) +fetchRootBranch :: forall m. (MonadUnliftIO m) => RootBranchCache -> m (Branch Sqlite.Transaction) -> m (Branch Sqlite.Transaction) fetchRootBranch rbc getFromDb = mask \restore -> do join (atomically (fetch restore)) where @@ -78,7 +78,7 @@ fetchRootBranch rbc getFromDb = mask \restore -> do -- the cache to Empty or Full withLock :: forall m r. - MonadUnliftIO m => + (MonadUnliftIO m) => RootBranchCache -> -- | Perform an action with the cached value ( -- restore masking state diff --git a/parser-typechecker/src/Unison/Codebase/Runtime.hs b/parser-typechecker/src/Unison/Codebase/Runtime.hs index 2b79285b6..03e4c21fb 100644 --- a/parser-typechecker/src/Unison/Codebase/Runtime.hs +++ b/parser-typechecker/src/Unison/Codebase/Runtime.hs @@ -70,7 +70,7 @@ type WatchResults v a = -- can be skipped. evaluateWatches :: forall v a. - Var v => + (Var v) => CL.CodeLookup v IO a -> PPE.PrettyPrintEnv -> (Reference.Id -> IO (Maybe (Term v))) -> diff --git a/parser-typechecker/src/Unison/Codebase/Serialization.hs b/parser-typechecker/src/Unison/Codebase/Serialization.hs index 85deaf6c4..565f0d7f4 100644 --- a/parser-typechecker/src/Unison/Codebase/Serialization.hs +++ b/parser-typechecker/src/Unison/Codebase/Serialization.hs @@ -10,9 +10,9 @@ import UnliftIO (MonadIO, liftIO) import UnliftIO.Directory (createDirectoryIfMissing, doesFileExist) import Prelude hiding (readFile, writeFile) -type Get a = forall m. MonadGet m => m a +type Get a = forall m. (MonadGet m) => m a -type Put a = forall m. MonadPut m => a -> m () +type Put a = forall m. (MonadPut m) => a -> m () -- todo: do we use this? 
data Format a = Format @@ -24,12 +24,12 @@ getFromBytes :: Get a -> ByteString -> Maybe a getFromBytes getA bytes = case runGetS getA bytes of Left _ -> Nothing; Right a -> Just a -getFromFile :: MonadIO m => Get a -> FilePath -> m (Maybe a) +getFromFile :: (MonadIO m) => Get a -> FilePath -> m (Maybe a) getFromFile getA file = do b <- doesFileExist file if b then getFromBytes getA <$> liftIO (readFile file) else pure Nothing -getFromFile' :: MonadIO m => Get a -> FilePath -> m (Either String a) +getFromFile' :: (MonadIO m) => Get a -> FilePath -> m (Either String a) getFromFile' getA file = do b <- doesFileExist file if b @@ -39,7 +39,7 @@ getFromFile' getA file = do putBytes :: Put a -> a -> ByteString putBytes put a = runPutS (put a) -putWithParentDirs :: MonadIO m => Put a -> FilePath -> a -> m () +putWithParentDirs :: (MonadIO m) => Put a -> FilePath -> a -> m () putWithParentDirs putA file a = do createDirectoryIfMissing True (takeDirectory file) liftIO . writeFile file $ putBytes putA a diff --git a/parser-typechecker/src/Unison/Codebase/ShortCausalHash.hs b/parser-typechecker/src/Unison/Codebase/ShortCausalHash.hs index cfb801fa2..75f8095d8 100644 --- a/parser-typechecker/src/Unison/Codebase/ShortCausalHash.hs +++ b/parser-typechecker/src/Unison/Codebase/ShortCausalHash.hs @@ -9,6 +9,7 @@ where import qualified Data.Set as Set import qualified Data.Text as Text +import qualified U.Util.Base32Hex as Base32Hex import qualified Unison.Hash as Hash import Unison.Prelude @@ -19,18 +20,18 @@ newtype ShortCausalHash = ShortCausalHash {toText :: Text} -- base32hex characte toString :: ShortCausalHash -> String toString = Text.unpack . toText -toHash :: Coercible Hash.Hash h => ShortCausalHash -> Maybe h -toHash = fmap coerce . Hash.fromBase32Hex . toText +toHash :: (Coercible Hash.Hash h) => ShortCausalHash -> Maybe h +toHash = fmap coerce . Hash.fromBase32HexText . toText -fromHash :: Coercible h Hash.Hash => Int -> h -> ShortCausalHash +fromHash :: (Coercible h Hash.Hash) => Int -> h -> ShortCausalHash fromHash len = - ShortCausalHash . Text.take len . Hash.base32Hex . coerce + ShortCausalHash . Text.take len . Hash.toBase32HexText . 
coerce -- abc -> SCH abc -- #abc -> SCH abc fromText :: Text -> Maybe ShortCausalHash fromText (Text.dropWhile (== '#') -> t) - | Text.all (`Set.member` Hash.validBase32HexChars) t = + | Text.all (`Set.member` Base32Hex.validChars) t = Just $ ShortCausalHash t fromText _ = Nothing diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs index 1e9ca9dcd..1ee0e2de6 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase.hs @@ -9,7 +9,9 @@ module Unison.Codebase.SqliteCodebase ( Unison.Codebase.SqliteCodebase.init, MigrationStrategy (..), + BackupStrategy (..), CodebaseLockOption (..), + copyCodebase, ) where @@ -27,7 +29,7 @@ import qualified System.Console.ANSI as ANSI import System.FileLock (SharedExclusive (Exclusive), withTryFileLock) import qualified System.FilePath as FilePath import qualified System.FilePath.Posix as FilePath.Posix -import U.Codebase.HashTags (BranchHash, CausalHash, PatchHash (..)) +import U.Codebase.HashTags (CausalHash, PatchHash (..)) import qualified U.Codebase.Reflog as Reflog import qualified U.Codebase.Sqlite.Operations as Ops import qualified U.Codebase.Sqlite.Queries as Q @@ -47,12 +49,11 @@ import Unison.Codebase.Editor.RemoteRepo writeToReadGit, ) import qualified Unison.Codebase.GitError as GitError -import Unison.Codebase.Init (CodebaseLockOption (..), MigrationStrategy (..)) +import Unison.Codebase.Init (BackupStrategy (..), CodebaseLockOption (..), MigrationStrategy (..)) import qualified Unison.Codebase.Init as Codebase import qualified Unison.Codebase.Init.CreateCodebaseError as Codebase1 import Unison.Codebase.Init.OpenCodebaseError (OpenCodebaseError (..)) import qualified Unison.Codebase.Init.OpenCodebaseError as Codebase1 -import Unison.Codebase.Path (Path) import Unison.Codebase.RootBranchCache import Unison.Codebase.SqliteCodebase.Branch.Cache (newBranchCache) import qualified Unison.Codebase.SqliteCodebase.Branch.Dependencies as BD @@ -88,7 +89,7 @@ debug, debugProcessBranches :: Bool debug = False debugProcessBranches = False -init :: HasCallStack => (MonadUnliftIO m) => Codebase.Init m Symbol Ann +init :: (HasCallStack) => (MonadUnliftIO m) => Codebase.Init m Symbol Ann init = Codebase.Init { withOpenCodebase = withCodebaseOrError, @@ -103,7 +104,7 @@ data CodebaseStatus -- | Open the codebase at the given location, or create it if one doesn't already exist. withOpenOrCreateCodebase :: - MonadUnliftIO m => + (MonadUnliftIO m) => Codebase.DebugName -> CodebasePath -> LocalOrRemote -> @@ -159,7 +160,7 @@ withCodebaseOrError debugName dir lockOption migrationStrategy action = do False -> pure (Left Codebase1.OpenCodebaseDoesntExist) True -> sqliteCodebase debugName dir Local lockOption migrationStrategy action -initSchemaIfNotExist :: MonadIO m => FilePath -> m () +initSchemaIfNotExist :: (MonadIO m) => FilePath -> m () initSchemaIfNotExist path = liftIO do unlessM (doesDirectoryExist $ makeCodebaseDirPath path) $ createDirectoryIfMissing True (makeCodebaseDirPath path) @@ -176,7 +177,7 @@ initSchemaIfNotExist path = liftIO do -- | Run an action with a connection to the codebase, closing the connection on completion or -- failure. withConnection :: - MonadUnliftIO m => + (MonadUnliftIO m) => Codebase.DebugName -> CodebasePath -> (Sqlite.Connection -> m a) -> @@ -186,7 +187,7 @@ withConnection name root action = sqliteCodebase :: forall m r. 
- MonadUnliftIO m => + (MonadUnliftIO m) => Codebase.DebugName -> CodebasePath -> -- | When local, back up the existing codebase before migrating, in case there's a catastrophic bug in the migration. @@ -212,17 +213,17 @@ sqliteCodebase debugName root localOrRemote lockOption migrationStrategy action Migrations.CodebaseRequiresMigration fromSv toSv -> case migrationStrategy of DontMigrate -> pure $ Left (OpenCodebaseRequiresMigration fromSv toSv) - MigrateAfterPrompt -> do + MigrateAfterPrompt backupStrategy -> do let shouldPrompt = True - Migrations.ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer shouldPrompt conn - MigrateAutomatically -> do + Migrations.ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer shouldPrompt backupStrategy conn + MigrateAutomatically backupStrategy -> do let shouldPrompt = False - Migrations.ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer shouldPrompt conn + Migrations.ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer shouldPrompt backupStrategy conn case result of Left err -> pure $ Left err Right () -> do - let finalizer :: MonadIO m => m () + let finalizer :: (MonadIO m) => m () finalizer = do decls <- readTVarIO declBuffer terms <- readTVarIO termBuffer @@ -344,19 +345,12 @@ sqliteCodebase debugName root localOrRemote lockOption migrationStrategy action referentsByPrefix = CodebaseOps.referentsByPrefix getDeclType - updateNameLookup :: Path -> Maybe BranchHash -> BranchHash -> Sqlite.Transaction () - updateNameLookup = - CodebaseOps.updateNameLookupIndex getDeclType - let codebase = C.Codebase { getTerm, getTypeOfTermImpl, getTypeDeclaration, - getDeclType = - \r -> - withConn \conn -> - Sqlite.runReadOnlyTransaction conn \run -> run (getDeclType r), + getDeclType, putTerm, putTermComponent, putTypeDeclaration, @@ -374,7 +368,6 @@ sqliteCodebase debugName root localOrRemote lockOption migrationStrategy action termsOfTypeImpl, termsMentioningTypeImpl, termReferentsByPrefix = referentsByPrefix, - updateNameLookup, withConnection = withConn, withConnectionIO = withConnection debugName root } @@ -397,7 +390,7 @@ sqliteCodebase debugName root localOrRemote lockOption migrationStrategy action syncInternal :: forall m. - MonadUnliftIO m => + (MonadUnliftIO m) => Sync.Progress m Sync22.Entity -> (forall a. Sqlite.Transaction a -> m a) -> (forall a. Sqlite.Transaction a -> m a) -> @@ -485,7 +478,7 @@ data SyncProgressState = SyncProgressState emptySyncProgressState :: SyncProgressState emptySyncProgressState = SyncProgressState (Just mempty) (Right mempty) (Right mempty) -syncProgress :: forall m. MonadIO m => IORef SyncProgressState -> Sync.Progress m Sync22.Entity +syncProgress :: forall m. (MonadIO m) => IORef SyncProgressState -> Sync.Progress m Sync22.Entity syncProgress progressStateRef = Sync.Progress (liftIO . need) (liftIO . done) (liftIO . warn) (liftIO allDone) where quiet = False @@ -546,7 +539,9 @@ syncProgress progressStateRef = Sync.Progress (liftIO . need) (liftIO . done) (l SyncProgressState Nothing (Left done) (Left warn) -> "\r" ++ prefix ++ show done ++ " entities" ++ if warn > 0 then " with " ++ show warn ++ " warnings." else "." SyncProgressState (Just _need) (Right done) (Right warn) -> - "\r" ++ prefix ++ show (Set.size done + Set.size warn) + "\r" + ++ prefix + ++ show (Set.size done + Set.size warn) ++ " entities" ++ if Set.size warn > 0 then " with " ++ show (Set.size warn) ++ " warnings." 
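-- Illustrative sketch only: how a caller of 'sqliteCodebase' above might choose the
-- 'MigrationStrategy' it passes in, now that both migrating constructors carry a
-- 'BackupStrategy' (see the data declarations in Unison.Codebase.Init above).
-- 'interactiveSession' is a hypothetical flag, not something defined in this changeset;
-- assumes 'MigrationStrategy (..)' and 'BackupStrategy (..)' are in scope.
chooseMigrationStrategy :: Bool -> MigrationStrategy
chooseMigrationStrategy interactiveSession
  | interactiveSession = MigrateAfterPrompt Backup -- prompt first, and back up before migrating
  | otherwise = MigrateAutomatically NoBackup -- e.g. when the caller has already made its own copy of the codebase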
@@ -586,7 +581,7 @@ viewRemoteBranch' ReadGitRemoteNamespace {repo, sch, path} gitBranchBehavior act then throwIO (C.GitSqliteCodebaseError (GitError.NoDatabaseFile repo remotePath)) else throwIO exception - result <- sqliteCodebase "viewRemoteBranch.gitCache" remotePath Remote DoLock MigrateAfterPrompt \codebase -> do + result <- sqliteCodebase "viewRemoteBranch.gitCache" remotePath Remote DoLock (MigrateAfterPrompt Codebase.Backup) \codebase -> do -- try to load the requested branch from it branch <- time "Git fetch (sch)" $ case sch of -- no sub-branch was specified, so use the root. @@ -612,7 +607,7 @@ viewRemoteBranch' ReadGitRemoteNamespace {repo, sch, path} gitBranchBehavior act -- the existing root. pushGitBranch :: forall m e. - MonadUnliftIO m => + (MonadUnliftIO m) => Sqlite.Connection -> WriteGitRepo -> PushGitBranchOpts -> @@ -636,7 +631,7 @@ pushGitBranch srcConn repo (PushGitBranchOpts behavior _syncMode) action = Unlif -- set up the cache dir throwEitherMWith C.GitProtocolError . withRepo readRepo Git.CreateBranchIfMissing $ \pushStaging -> do newBranchOrErr <- throwEitherMWith (C.GitSqliteCodebaseError . C.gitErrorFromOpenCodebaseError (Git.gitDirToPath pushStaging) readRepo) - . withOpenOrCreateCodebase "push.dest" (Git.gitDirToPath pushStaging) Remote DoLock MigrateAfterPrompt + . withOpenOrCreateCodebase "push.dest" (Git.gitDirToPath pushStaging) Remote DoLock (MigrateAfterPrompt Codebase.Backup) $ \(codebaseStatus, destCodebase) -> do currentRootBranch <- Codebase1.runTransaction destCodebase CodebaseOps.getRootBranchExists >>= \case @@ -728,7 +723,7 @@ pushGitBranch srcConn repo (PushGitBranchOpts behavior _syncMode) action = Unlif hasDeleteShm = any isShmDelete statusLines -- Commit our changes - push :: forall n. MonadIO n => Git.GitRepo -> WriteGitRepo -> Branch m -> n Bool -- withIOError needs IO + push :: forall n. (MonadIO n) => Git.GitRepo -> WriteGitRepo -> Branch m -> n Bool -- withIOError needs IO push remotePath repo@(WriteGitRepo {url, branch = mayGitBranch}) newRootBranch = time "SqliteCodebase.pushGitRootBranch.push" $ do -- has anything changed? -- note: -uall recursively shows status for all files in untracked directories @@ -764,3 +759,12 @@ pushGitBranch srcConn repo (PushGitBranchOpts behavior _syncMode) action = Unlif (successful, _stdout, stderr) <- gitInCaptured remotePath $ ["push", url] ++ Git.gitVerbosity ++ maybe [] (pure @[]) mayGitBranch when (not successful) . throwIO $ GitError.PushException repo (Text.unpack stderr) pure True + +-- | Given two codebase roots (e.g. "./mycodebase"), safely copy the codebase +-- at the source to the destination. +-- Note: this does not copy the .unisonConfig file. +copyCodebase :: (MonadIO m) => CodebasePath -> CodebasePath -> m () +copyCodebase src dest = liftIO $ do + createDirectoryIfMissing True (makeCodebaseDirPath dest) + withConnection ("copy-from:" <> src) src $ \srcConn -> do + Sqlite.vacuumInto srcConn (makeCodebasePath dest) diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Cache.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Cache.hs index 43ef4701b..b5f792ef9 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Cache.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Cache.hs @@ -18,7 +18,7 @@ data BranchCache m = BranchCache -- as long as they're reachable by something else in the app. -- -- This means you don't need to worry about a Branch not being GC'd because it's in the cache. -newBranchCache :: forall m. 
MonadIO m => m (BranchCache Sqlite.Transaction) +newBranchCache :: forall m. (MonadIO m) => m (BranchCache Sqlite.Transaction) newBranchCache = do var <- newTVarIO mempty pure $ diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Dependencies.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Dependencies.hs index 323738fb4..3dc0941fd 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Dependencies.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Branch/Dependencies.hs @@ -50,7 +50,7 @@ data Dependencies' = Dependencies' to' :: Dependencies -> Dependencies' to' Dependencies {..} = Dependencies' (toList patches) (toList terms) (toList decls) -fromBranch :: Applicative m => Branch m -> (Branches m, Dependencies) +fromBranch :: (Applicative m) => Branch m -> (Branches m, Dependencies) fromBranch (Branch c) = case c of Causal.One _hh _eh e -> fromBranch0 e Causal.Cons _hh _eh e (h, m) -> fromBranch0 e <> fromTails (Map.singleton h m) @@ -58,7 +58,7 @@ fromBranch (Branch c) = case c of where fromTails m = ([(h, Branch <$> mc) | (h, mc) <- Map.toList m], mempty) -fromBranch0 :: Applicative m => Branch0 m -> (Branches m, Dependencies) +fromBranch0 :: (Applicative m) => Branch0 m -> (Branches m, Dependencies) fromBranch0 b = ( fromChildren (Branch._children b), fromTermsStar (Branch._terms b) @@ -66,7 +66,7 @@ fromBranch0 b = <> fromEdits (Branch._edits b) ) where - fromChildren :: Applicative m => Map NameSegment (Branch m) -> Branches m + fromChildren :: (Applicative m) => Map NameSegment (Branch m) -> Branches m fromChildren m = [(Branch.headHash b, pure b) | b <- toList m] references :: Branch.Star r NameSegment -> [r] references = toList . R.dom . Star3.d1 diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs index 1e2d59dc6..7de8e842f 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Conversions.hs @@ -1,11 +1,9 @@ -{-# LANGUAGE ViewPatterns #-} - module Unison.Codebase.SqliteCodebase.Conversions where import Data.Bifunctor (Bifunctor (bimap)) import qualified Data.Map as Map import qualified Data.Set as Set -import Data.Text (pack, unpack) +import Data.Text (pack) import qualified U.Codebase.Branch as V2.Branch import qualified U.Codebase.Causal as V2 import qualified U.Codebase.Decl as V2.Decl @@ -35,7 +33,8 @@ import qualified Unison.Codebase.TypeEdit as V1.TypeEdit import qualified Unison.ConstructorReference as V1 (GConstructorReference (..)) import qualified Unison.ConstructorType as CT import qualified Unison.DataDeclaration as V1.Decl -import Unison.Hash (Hash, base32Hex) +import Unison.Hash (Hash) +import qualified Unison.Hash as Hash import qualified Unison.Hash as V1 import qualified Unison.Kind as V1.Kind import Unison.NameSegment (NameSegment) @@ -114,7 +113,7 @@ term1to2 h = V1.Term.Match e cases -> V2.Term.Match e (goCase <$> cases) V1.Term.TermLink r -> V2.Term.TermLink (rreferent1to2 h r) V1.Term.TypeLink r -> V2.Term.TypeLink (reference1to2 r) - V1.Term.Blank _ -> error ("can't serialize term with blanks (" ++ unpack (base32Hex h) ++ ")") + V1.Term.Blank _ -> error ("can't serialize term with blanks (" ++ show h ++ ")") goCase (V1.Term.MatchCase p g b) = V2.Term.MatchCase (goPat p) g b @@ -143,13 +142,13 @@ term1to2 h = V1.Pattern.Snoc -> V2.Term.PSnoc V1.Pattern.Concat -> V2.Term.PConcat -term2to1 :: forall m. 
Monad m => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.Term V2.Symbol -> m (V1.Term.Term V1.Symbol Ann) +term2to1 :: forall m. (Monad m) => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.Term V2.Symbol -> m (V1.Term.Term V1.Symbol Ann) term2to1 h lookupCT = ABT.transformM (termF2to1 h lookupCT) . ABT.vmap symbol2to1 . ABT.amap (const Ann.External) where - termF2to1 :: forall m a. Monad m => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.F V2.Symbol a -> m (V1.Term.F V1.Symbol Ann Ann a) + termF2to1 :: forall m a. (Monad m) => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.Term.F V2.Symbol a -> m (V1.Term.F V1.Symbol Ann Ann a) termF2to1 h lookupCT = go where go :: V2.Term.F V2.Symbol a -> m (V1.Term.F V1.Symbol Ann Ann a) @@ -295,7 +294,7 @@ referenceid1to2 (V1.Reference.Id h i) = V2.Reference.Id h i referenceid2to1 :: V2.Reference.Id -> V1.Reference.Id referenceid2to1 (V2.Reference.Id h i) = V1.Reference.Id h i -rreferent2to1 :: Applicative m => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.ReferentH -> m V1.Referent +rreferent2to1 :: (Applicative m) => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.ReferentH -> m V1.Referent rreferent2to1 h lookupCT = \case V2.Ref r -> pure . V1.Ref $ rreference2to1 h r V2.Con r i -> V1.Con (V1.ConstructorReference (reference2to1 r) (fromIntegral i)) <$> lookupCT r @@ -305,7 +304,7 @@ rreferent1to2 h = \case V1.Ref r -> V2.Ref (rreference1to2 h r) V1.Con (V1.ConstructorReference r i) _ct -> V2.Con (reference1to2 r) (fromIntegral i) -referent2to1 :: Applicative m => (V2.Reference -> m CT.ConstructorType) -> V2.Referent -> m V1.Referent +referent2to1 :: (Applicative m) => (V2.Reference -> m CT.ConstructorType) -> V2.Referent -> m V1.Referent referent2to1 lookupCT = \case V2.Ref r -> pure $ V1.Ref (reference2to1 r) V2.Con r i -> V1.Con (V1.ConstructorReference (reference2to1 r) (fromIntegral i)) <$> lookupCT r @@ -315,7 +314,7 @@ referent1to2 = \case V1.Ref r -> V2.Ref $ reference1to2 r V1.Con (V1.ConstructorReference r i) _ct -> V2.Con (reference1to2 r) (fromIntegral i) -referentid2to1 :: Applicative m => (V2.Reference -> m CT.ConstructorType) -> V2.Referent.Id -> m V1.Referent.Id +referentid2to1 :: (Applicative m) => (V2.Reference -> m CT.ConstructorType) -> V2.Referent.Id -> m V1.Referent.Id referentid2to1 lookupCT = \case V2.RefId r -> pure $ V1.RefId (referenceid2to1 r) V2.ConId r i -> @@ -386,7 +385,7 @@ type1to2' convertRef = V1.Kind.Arrow i o -> V2.Kind.Arrow (convertKind i) (convertKind o) -- | forces loading v1 branches even if they may not exist -causalbranch2to1 :: Monad m => BranchCache m -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.CausalBranch m -> m (V1.Branch.Branch m) +causalbranch2to1 :: (Monad m) => BranchCache m -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.CausalBranch m -> m (V1.Branch.Branch m) causalbranch2to1 branchCache lookupCT cb = do let ch = V2.causalHash cb lookupCachedBranch branchCache ch >>= \case @@ -396,7 +395,7 @@ causalbranch2to1 branchCache lookupCT cb = do insertCachedBranch branchCache ch b pure b -causalbranch2to1' :: Monad m => BranchCache m -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.CausalBranch m -> m (V1.Branch.UnwrappedBranch m) +causalbranch2to1' :: (Monad m) => BranchCache m -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.CausalBranch m -> m (V1.Branch.UnwrappedBranch m) causalbranch2to1' branchCache lookupCT (V2.Causal currentHash eh (Map.toList -> parents) me) = do let branchHash = branchHash2to1 eh case parents of 
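-- Illustrative sketch only: why a single 'BranchCache' is threaded through repeated
-- conversions. With one shared cache, a causal that is reached more than once is only
-- converted the first time (via the 'lookupCachedBranch'/'insertCachedBranch' flow shown
-- in 'causalbranch2to1' above). The types are taken from the 'causalbranch2to1' signature
-- in this changeset; 'convertMany' itself is hypothetical.
convertMany ::
  (Monad m) =>
  BranchCache m ->
  (V2.Reference -> m CT.ConstructorType) ->
  [V2.Branch.CausalBranch m] ->
  m [V1.Branch.Branch m]
convertMany branchCache lookupCT =
  traverse (causalbranch2to1 branchCache lookupCT)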
@@ -410,18 +409,18 @@ causalbranch2to1' branchCache lookupCT (V2.Causal currentHash eh (Map.toList -> e <- me V1.Causal.UnsafeMerge currentHash branchHash <$> branch2to1 branchCache lookupCT e <*> pure (Map.fromList tailsList) -causalbranch1to2 :: forall m. Monad m => V1.Branch.Branch m -> V2.Branch.CausalBranch m +causalbranch1to2 :: forall m. (Monad m) => V1.Branch.Branch m -> V2.Branch.CausalBranch m causalbranch1to2 (V1.Branch.Branch c) = causal1to2 branchHash1to2 branch1to2 c where - causal1to2 :: forall m h2e e e2. Monad m => (V1.HashFor e -> h2e) -> (e -> m e2) -> V1.Causal.Causal m e -> V2.Causal m CausalHash h2e e2 + causal1to2 :: forall m h2e e e2. (Monad m) => (V1.HashFor e -> h2e) -> (e -> m e2) -> V1.Causal.Causal m e -> V2.Causal m CausalHash h2e e2 causal1to2 eh1to2 e1to2 = \case V1.Causal.One hc eh e -> V2.Causal hc (eh1to2 eh) Map.empty (e1to2 e) V1.Causal.Cons hc eh e (ht, mt) -> V2.Causal hc (eh1to2 eh) (Map.singleton ht (causal1to2 eh1to2 e1to2 <$> mt)) (e1to2 e) V1.Causal.Merge hc eh e parents -> V2.Causal hc (eh1to2 eh) (Map.map (causal1to2 eh1to2 e1to2 <$>) parents) (e1to2 e) -- todo: this could be a pure function - branch1to2 :: forall m. Monad m => V1.Branch.Branch0 m -> m (V2.Branch.Branch m) + branch1to2 :: forall m. (Monad m) => V1.Branch.Branch0 m -> m (V2.Branch.Branch m) branch1to2 b = pure $ V2.Branch.Branch @@ -509,7 +508,7 @@ patch1to2 (V1.Patch v1termedits v1typeedits) = V2.Branch.Patch v2termedits v2typ V1.TermEdit.Different -> V2.TermEdit.Different branch2to1 :: - Monad m => + (Monad m) => BranchCache m -> (V2.Reference -> m CT.ConstructorType) -> V2.Branch.Branch m -> @@ -553,7 +552,7 @@ referent2toshorthash1 hashLength ref = reference2toshorthash1 :: Maybe Int -> V2.Reference.Reference -> V1.ShortHash.ShortHash reference2toshorthash1 hashLength ref = maybe id V1.ShortHash.take hashLength $ case ref of (V2.Reference.ReferenceBuiltin b) -> V1.ShortHash.Builtin b - (V2.Reference.ReferenceDerived (V2.Reference.Id h i)) -> V1.ShortHash.ShortHash (base32Hex h) (showComponentPos i) Nothing + (V2.Reference.ReferenceDerived (V2.Reference.Id h i)) -> V1.ShortHash.ShortHash (Hash.toBase32HexText h) (showComponentPos i) Nothing where showComponentPos :: V2.Reference.Pos -> Maybe Text showComponentPos 0 = Nothing diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations.hs index 368fd415d..d56c78637 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations.hs @@ -9,13 +9,13 @@ import qualified Data.Map as Map import qualified Data.Text as Text import Data.Time.Clock.POSIX (getPOSIXTime) import qualified System.Console.Regions as Region -import System.Directory (copyFile) import System.FilePath (()) import Text.Printf (printf) import qualified U.Codebase.Reference as C.Reference import U.Codebase.Sqlite.DbId (SchemaVersion (..)) import qualified U.Codebase.Sqlite.Queries as Q import Unison.Codebase (CodebasePath) +import Unison.Codebase.Init (BackupStrategy (..)) import Unison.Codebase.Init.OpenCodebaseError (OpenCodebaseError (OpenCodebaseUnknownSchemaVersion)) import qualified Unison.Codebase.Init.OpenCodebaseError as Codebase import Unison.Codebase.IntegrityCheck (IntegrityResult (..), integrityCheckAllBranches, integrityCheckAllCausals, prettyPrintIntegrityErrors) @@ -26,8 +26,10 @@ import Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema3To4 (migrateSchem import 
Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema4To5 (migrateSchema4To5) import Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema5To6 (migrateSchema5To6) import Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema6To7 (migrateSchema6To7) +import Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema7To8 (migrateSchema7To8) +import Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema8To9 (migrateSchema8To9) import qualified Unison.Codebase.SqliteCodebase.Operations as Ops2 -import Unison.Codebase.SqliteCodebase.Paths (backupCodebasePath, codebasePath) +import Unison.Codebase.SqliteCodebase.Paths (backupCodebasePath) import Unison.Codebase.Type (LocalOrRemote (..)) import qualified Unison.ConstructorType as CT import Unison.Hash (Hash) @@ -35,11 +37,11 @@ import Unison.Prelude import qualified Unison.Sqlite as Sqlite import qualified Unison.Sqlite.Connection as Sqlite.Connection import Unison.Util.Monoid (foldMapM) +import qualified Unison.Util.Monoid as Monoid import qualified Unison.Util.Pretty as Pretty import qualified UnliftIO -- | Mapping from schema version to the migration required to get there. --- Each migration may only be run on a schema of its immediate predecessor, -- E.g. The migration at index 2 must be run on a codebase at version 1. migrations :: -- | A 'getDeclType'-like lookup, possibly backed by a cache. @@ -55,7 +57,9 @@ migrations getDeclType termBuffer declBuffer rootCodebasePath = (4, migrateSchema3To4), (5, migrateSchema4To5), (6, migrateSchema5To6 rootCodebasePath), - (7, migrateSchema6To7) + (7, migrateSchema6To7), + (8, migrateSchema7To8), + (9, migrateSchema8To9) ] data CodebaseVersionStatus @@ -82,7 +86,7 @@ checkCodebaseIsUpToDate = do -- This is a No-op if it's up to date -- Returns an error if the schema version is newer than this ucm knows about. ensureCodebaseIsUpToDate :: - MonadIO m => + (MonadIO m) => LocalOrRemote -> CodebasePath -> -- | A 'getDeclType'-like lookup, possibly backed by a cache. @@ -90,9 +94,10 @@ ensureCodebaseIsUpToDate :: TVar (Map Hash Ops2.TermBufferEntry) -> TVar (Map Hash Ops2.DeclBufferEntry) -> Bool -> + BackupStrategy -> Sqlite.Connection -> m (Either Codebase.OpenCodebaseError ()) -ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer shouldPrompt conn = +ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer shouldPrompt backupStrategy conn = (liftIO . UnliftIO.try) do regionVar <- newEmptyMVar let finalizeRegion :: IO () @@ -103,15 +108,20 @@ ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer sh Region.displayConsoleRegions do (`UnliftIO.finally` finalizeRegion) do + let migs = migrations getDeclType termBuffer declBuffer root + -- The highest schema that this ucm knows how to migrate to. + let highestKnownSchemaVersion = fst . head $ Map.toDescList migs + currentSchemaVersion <- Sqlite.runTransaction conn Q.schemaVersion + when (currentSchemaVersion > highestKnownSchemaVersion) $ UnliftIO.throwIO $ OpenCodebaseUnknownSchemaVersion (fromIntegral currentSchemaVersion) + backupCodebaseIfNecessary backupStrategy localOrRemote conn currentSchemaVersion highestKnownSchemaVersion root + when shouldPrompt do + putStrLn "Press <enter> to start the migration once all other ucm processes are shutdown..." + void $ liftIO getLine ranMigrations <- Sqlite.runWriteTransaction conn \run -> do - schemaVersion <- run Q.schemaVersion - let migs = migrations getDeclType termBuffer declBuffer root -- The highest schema that this ucm knows how to migrate to.
- let currentSchemaVersion = fst . head $ Map.toDescList migs - when (schemaVersion > currentSchemaVersion) $ UnliftIO.throwIO $ OpenCodebaseUnknownSchemaVersion (fromIntegral schemaVersion) - let migrationsToRun = Map.filterWithKey (\v _ -> v > schemaVersion) migs - when (localOrRemote == Local && (not . null) migrationsToRun) $ backupCodebase root shouldPrompt + -- Get the schema version again now that we're in a transaction. + currentSchemaVersion <- run Q.schemaVersion + let migrationsToRun = Map.filterWithKey (\v _ -> v > currentSchemaVersion) migs -- This is a bit of a hack, hopefully we can remove this when we have a more -- reliable way to freeze old migration code in time. -- The problem is that 'saveObject' has been changed to flush temp entity tables, @@ -121,8 +131,8 @@ ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer sh -- -- Hopefully we can remove this once we've got better methods of freezing migration -- code in time. - when (schemaVersion < 5) $ run Q.addTempEntityTables - when (schemaVersion < 6) $ run Q.addNamespaceStatsTables + when (currentSchemaVersion < 5) $ run Q.addTempEntityTables + when (currentSchemaVersion < 6) $ run Q.addNamespaceStatsTables for_ (Map.toAscList migrationsToRun) $ \(SchemaVersion v, migration) -> do putStrLn $ "🔨 Migrating codebase to version " <> show v <> "..." run migration @@ -134,13 +144,16 @@ ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer sh putMVar regionVar region pure region result <- do + -- Ideally we'd check everything here, but certain codebases are known to have objects + -- with missing Hash Objects, we'll want to clean that up in a future migration. + -- integrityCheckAllHashObjects, let checks = - [ -- Ideally we'd check everything here, but certain codebases are known to have objects - -- with missing Hash Objects, we'll want to clean that up in a future migration. - -- integrityCheckAllHashObjects, - integrityCheckAllBranches, - integrityCheckAllCausals - ] + Monoid.whenM + (currentSchemaVersion < 7) -- Only certain migrations actually make changes which reasonably need to be checked + [ integrityCheckAllBranches, + integrityCheckAllCausals + ] + zip [(1 :: Int) ..] checks & foldMapM \(i, check) -> do Region.setConsoleRegion region @@ -161,13 +174,16 @@ ensureCodebaseIsUpToDate localOrRemote root getDeclType termBuffer declBuffer sh _success <- Sqlite.Connection.vacuum conn Region.setConsoleRegion region ("🏁 Migrations complete 🏁" :: Text) --- | Copy the sqlite database to a new file with a unique name based on current time. -backupCodebase :: CodebasePath -> Bool -> IO () -backupCodebase root shouldPrompt = do - backupPath <- backupCodebasePath <$> getPOSIXTime - copyFile (root codebasePath) (root backupPath) - putStrLn ("📋 I backed up your codebase to " ++ (root backupPath)) - putStrLn "⚠️ Please close all other ucm processes and wait for the migration to complete before interacting with your codebase." - when shouldPrompt do - putStrLn "Press to start the migration once all other ucm processes are shutdown..." - void $ liftIO getLine +-- | If we need to make a backup, then copy the sqlite database to a new file with a unique name based on current time. 
+backupCodebaseIfNecessary :: BackupStrategy -> LocalOrRemote -> Sqlite.Connection -> SchemaVersion -> SchemaVersion -> CodebasePath -> IO () +backupCodebaseIfNecessary backupStrategy localOrRemote conn currentSchemaVersion highestKnownSchemaVersion root = do + case (backupStrategy, localOrRemote) of + (NoBackup, _) -> pure () + (_, Remote) -> pure () + (Backup, Local) + | (currentSchemaVersion >= highestKnownSchemaVersion) -> pure () + | otherwise -> do + backupPath <- getPOSIXTime <&> (\t -> root backupCodebasePath currentSchemaVersion t) + Sqlite.vacuumInto conn backupPath + putStrLn ("📋 I backed up your codebase to " ++ (root backupPath)) + putStrLn "⚠️ Please close all other ucm processes and wait for the migration to complete before interacting with your codebase." diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema1To2.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema1To2.hs index 2df3d9a5e..14a7e8e68 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema1To2.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema1To2.hs @@ -1,9 +1,6 @@ {-# LANGUAGE ApplicativeDo #-} {-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ViewPatterns #-} module Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema1To2 ( migrateSchema1To2, @@ -66,7 +63,7 @@ import qualified Unison.DataDeclaration as DD import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Hash (Hash) import qualified Unison.Hash as Unison -import qualified Unison.Hashing.V2.Causal as Hashing +import qualified Unison.Hashing.V2 as Hashing import qualified Unison.Hashing.V2.Convert as Convert import Unison.Parser.Ann (Ann) import Unison.Pattern (Pattern) @@ -236,20 +233,18 @@ migrateCausal oldCausalHashId = fmap (either id id) . runExceptT $ do let (newParentHashes, newParentHashIds) = oldCausalParentHashIds - & fmap - (\oldParentHashId -> migratedCausals ^?! ix oldParentHashId) + & fmap (\oldParentHashId -> migratedCausals ^?! ix oldParentHashId) & unzip & bimap (Set.fromList . map unCausalHash) Set.fromList let newCausalHash :: CausalHash newCausalHash = CausalHash $ - Hashing.hashCausal - ( Hashing.Causal - { branchHash = unBranchHash newBranchHash, - parents = newParentHashes - } - ) + Hashing.contentHash + Hashing.Causal + { branchHash = unBranchHash newBranchHash, + parents = newParentHashes + } newCausalHashId <- lift . lift $ Q.saveCausalHash newCausalHash let newCausal = DbCausal @@ -317,7 +312,8 @@ migrateBranch oldObjectId = fmap (either id id) . runExceptT $ do ++ allMissingChildCausals when (not . null $ allMissingReferences) $ - throwE $ Sync.Missing allMissingReferences + throwE $ + Sync.Missing allMissingReferences let remapPatchObjectId patchObjId = case Map.lookup (unPatchObjectId patchObjId) migratedObjects of Nothing -> error $ "Expected patch: " <> show patchObjId <> " to be migrated" @@ -559,7 +555,9 @@ migrateTermComponent getDeclType termBuffer declBuffer oldHash = fmap (either id in filter (`Map.notMember` referencesMap) (missingTermRefs <> missingTypeRefs) when (not . null $ allMissingReferences) $ - throwE $ Sync.Missing . nubOrd $ (someReferenceIdToEntity <$> allMissingReferences) + throwE $ + Sync.Missing . 
nubOrd $ + (someReferenceIdToEntity <$> allMissingReferences) let getMigratedReference :: Old SomeReferenceId -> New SomeReferenceId getMigratedReference ref = @@ -654,7 +652,7 @@ migrateDeclComponent termBuffer declBuffer oldHash = fmap (either id id) . runEx . DD.constructors_ -- Get the data constructors . traversed -- traverse the list of them . _3 -- Select the Type term. - %~ remapTerm + %~ remapTerm let declNameToOldReference :: Map (DeclName v) (Old Reference.Id) declNameToOldReference = Map.fromList . fmap swap . Map.toList . fmap fst $ remappedReferences diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema1To2/DbHelpers.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema1To2/DbHelpers.hs index a9d8e6838..806415489 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema1To2/DbHelpers.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema1To2/DbHelpers.hs @@ -26,19 +26,7 @@ import qualified U.Codebase.Sqlite.Queries as Q import qualified U.Codebase.Sqlite.Reference as S import qualified U.Codebase.Sqlite.Referent as S import Unison.Hash (Hash) -import Unison.Hashing.V2.Branch (NameSegment (..)) -import qualified Unison.Hashing.V2.Branch as Hashing.Branch -import qualified Unison.Hashing.V2.Causal as Hashing.Causal -import qualified Unison.Hashing.V2.Patch as Hashing (Patch (..)) -import qualified Unison.Hashing.V2.Patch as Hashing.Patch -import qualified Unison.Hashing.V2.Reference as Hashing (Reference) -import qualified Unison.Hashing.V2.Reference as Hashing.Reference -import qualified Unison.Hashing.V2.Referent as Hashing (Referent) -import qualified Unison.Hashing.V2.Referent as Hashing.Referent -import qualified Unison.Hashing.V2.TermEdit as Hashing (TermEdit) -import qualified Unison.Hashing.V2.TermEdit as Hashing.TermEdit -import qualified Unison.Hashing.V2.TypeEdit as Hashing (TypeEdit) -import qualified Unison.Hashing.V2.TypeEdit as Hashing.TypeEdit +import qualified Unison.Hashing.V2 as Hashing import Unison.Prelude import Unison.Sqlite (Transaction) import qualified Unison.Util.Map as Map @@ -46,15 +34,15 @@ import qualified Unison.Util.Set as Set syncCausalHash :: S.SyncCausalFormat -> Transaction CausalHash syncCausalHash S.SyncCausalFormat {valueHash = valueHashId, parents = parentChIds} = do - fmap (CausalHash . Hashing.Causal.hashCausal) $ - Hashing.Causal.Causal + fmap (CausalHash . Hashing.contentHash) $ + Hashing.Causal <$> coerce @(Transaction BranchHash) @(Transaction Hash) (Q.expectBranchHash valueHashId) <*> fmap (Set.fromList . coerce @[CausalHash] @[Hash] . Vector.toList) (traverse Q.expectCausalHash parentChIds) dbBranchHash :: S.DbBranch -> Transaction BranchHash dbBranchHash (S.Branch.Full.Branch tms tps patches children) = - fmap (BranchHash . Hashing.Branch.hashBranch) $ - Hashing.Branch.Raw + fmap (BranchHash . 
Hashing.contentHash) $ + Hashing.Branch <$> doTerms tms <*> doTypes tps <*> doPatches patches @@ -62,7 +50,7 @@ dbBranchHash (S.Branch.Full.Branch tms tps patches children) = where doTerms :: Map Db.TextId (Map S.Referent S.DbMetadataSet) -> - Transaction (Map NameSegment (Map Hashing.Referent Hashing.Branch.MdValues)) + Transaction (Map Hashing.NameSegment (Map Hashing.Referent Hashing.MdValues)) doTerms = Map.bitraverse s2hNameSegment @@ -70,23 +58,23 @@ dbBranchHash (S.Branch.Full.Branch tms tps patches children) = doTypes :: Map Db.TextId (Map S.Reference S.DbMetadataSet) -> - Transaction (Map NameSegment (Map Hashing.Reference Hashing.Branch.MdValues)) + Transaction (Map Hashing.NameSegment (Map Hashing.Reference Hashing.MdValues)) doTypes = Map.bitraverse s2hNameSegment (Map.bitraverse s2hReference s2hMetadataSet) - doPatches :: Map Db.TextId Db.PatchObjectId -> Transaction (Map NameSegment Hash) + doPatches :: Map Db.TextId Db.PatchObjectId -> Transaction (Map Hashing.NameSegment Hash) doPatches = Map.bitraverse s2hNameSegment (Q.expectPrimaryHashByObjectId . Db.unPatchObjectId) - doChildren :: Map Db.TextId (Db.BranchObjectId, Db.CausalHashId) -> Transaction (Map NameSegment Hash) + doChildren :: Map Db.TextId (Db.BranchObjectId, Db.CausalHashId) -> Transaction (Map Hashing.NameSegment Hash) doChildren = Map.bitraverse s2hNameSegment \(_boId, chId) -> Q.expectHash (Db.unCausalHashId chId) dbPatchHash :: S.Patch -> Transaction PatchHash dbPatchHash S.Patch {S.termEdits, S.typeEdits} = - fmap (PatchHash . Hashing.Patch.hashPatch) $ + fmap (PatchHash . Hashing.contentHash) $ Hashing.Patch <$> doTermEdits termEdits <*> doTypeEdits typeEdits @@ -99,40 +87,40 @@ dbPatchHash S.Patch {S.termEdits, S.typeEdits} = doTypeEdits = Map.bitraverse s2hReferenceH (Set.traverse s2hTypeEdit) -s2hMetadataSet :: DbMetadataSet -> Transaction Hashing.Branch.MdValues +s2hMetadataSet :: DbMetadataSet -> Transaction Hashing.MdValues s2hMetadataSet = \case - S.MetadataSet.Inline rs -> Hashing.Branch.MdValues <$> Set.traverse s2hReference rs + S.MetadataSet.Inline rs -> Hashing.MdValues <$> Set.traverse s2hReference rs -s2hNameSegment :: Db.TextId -> Transaction NameSegment +s2hNameSegment :: Db.TextId -> Transaction Hashing.NameSegment s2hNameSegment = - fmap NameSegment . Q.expectText + fmap Hashing.NameSegment . 
Q.expectText s2hReferent :: S.Referent -> Transaction Hashing.Referent s2hReferent = \case - S.Referent.Ref r -> Hashing.Referent.Ref <$> s2hReference r - S.Referent.Con r cid -> Hashing.Referent.Con <$> s2hReference r <*> pure (fromIntegral cid) + S.Referent.Ref r -> Hashing.ReferentRef <$> s2hReference r + S.Referent.Con r cid -> Hashing.ReferentCon <$> s2hReference r <*> pure (fromIntegral cid) s2hReferentH :: S.ReferentH -> Transaction Hashing.Referent s2hReferentH = \case - S.Referent.Ref r -> Hashing.Referent.Ref <$> s2hReferenceH r - S.Referent.Con r cid -> Hashing.Referent.Con <$> s2hReferenceH r <*> pure (fromIntegral cid) + S.Referent.Ref r -> Hashing.ReferentRef <$> s2hReferenceH r + S.Referent.Con r cid -> Hashing.ReferentCon <$> s2hReferenceH r <*> pure (fromIntegral cid) s2hReference :: S.Reference -> Transaction Hashing.Reference s2hReference = \case - S.ReferenceBuiltin t -> Hashing.Reference.Builtin <$> Q.expectText t - S.Reference.Derived h i -> Hashing.Reference.Derived <$> Q.expectPrimaryHashByObjectId h <*> pure i + S.ReferenceBuiltin t -> Hashing.ReferenceBuiltin <$> Q.expectText t + S.Reference.Derived h i -> Hashing.ReferenceDerived <$> Q.expectPrimaryHashByObjectId h <*> pure i s2hReferenceH :: S.ReferenceH -> Transaction Hashing.Reference s2hReferenceH = \case - S.ReferenceBuiltin t -> Hashing.Reference.Builtin <$> Q.expectText t - S.Reference.Derived h i -> Hashing.Reference.Derived <$> Q.expectHash h <*> pure i + S.ReferenceBuiltin t -> Hashing.ReferenceBuiltin <$> Q.expectText t + S.Reference.Derived h i -> Hashing.ReferenceDerived <$> Q.expectHash h <*> pure i s2hTermEdit :: S.TermEdit -> Transaction Hashing.TermEdit s2hTermEdit = \case - S.TermEdit.Replace r _typing -> Hashing.TermEdit.Replace <$> s2hReferent r - S.TermEdit.Deprecate -> pure Hashing.TermEdit.Deprecate + S.TermEdit.Replace r _typing -> Hashing.TermEditReplace <$> s2hReferent r + S.TermEdit.Deprecate -> pure Hashing.TermEditDeprecate s2hTypeEdit :: S.TypeEdit -> Transaction Hashing.TypeEdit s2hTypeEdit = \case - S.TypeEdit.Replace r -> Hashing.TypeEdit.Replace <$> s2hReference r - S.TypeEdit.Deprecate -> pure Hashing.TypeEdit.Deprecate + S.TypeEdit.Replace r -> Hashing.TypeEditReplace <$> s2hReference r + S.TypeEdit.Deprecate -> pure Hashing.TypeEditDeprecate diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema3To4.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema3To4.hs index b83aa8af0..6bfe6a6f1 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema3To4.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema3To4.hs @@ -218,12 +218,13 @@ rehashAndCanonicalizeNamespace causalHashId possiblyIncorrectNamespaceHashId obj -- get a list of any unmigrated children, and also track whether any re-mappings actually -- occurred, so we don't do extra work when nothing changed. 
let ((unmigratedChildren, Any changes), remappedBranch) = - dbBranch & DBBranch.childrenHashes_ %%~ \(ids@(childBranchObjId, childCausalHashId)) -> do - case Map.lookup childCausalHashId canonicalBranchForCausalMap of - Nothing -> (([childCausalHashId], Any False), ids) - Just (_, canonicalObjId) -> - let changed = canonicalObjId /= childBranchObjId - in (([], Any changed), (canonicalObjId, childCausalHashId)) + dbBranch + & DBBranch.childrenHashes_ %%~ \(ids@(childBranchObjId, childCausalHashId)) -> do + case Map.lookup childCausalHashId canonicalBranchForCausalMap of + Nothing -> (([childCausalHashId], Any False), ids) + Just (_, canonicalObjId) -> + let changed = canonicalObjId /= childBranchObjId + in (([], Any changed), (canonicalObjId, childCausalHashId)) when (not . null $ unmigratedChildren) $ throwError (Sync.Missing unmigratedChildren) when changes $ do liftT $ replaceBranch objId remappedBranch diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema5To6.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema5To6.hs index a5faf5f6b..25fd28058 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema5To6.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema5To6.hs @@ -53,7 +53,7 @@ oldReflogEntries reflogPath now = -- least puts them in the correct order chronologically. let offsetTime = addUTCTime (negate $ fromInteger @NominalDiffTime n) now in case Text.words txt of - (Hash.fromBase32Hex -> Just old) : (Hash.fromBase32Hex -> Just new) : (Text.unwords -> reason) -> + (Hash.fromBase32HexText -> Just old) : (Hash.fromBase32HexText -> Just new) : (Text.unwords -> reason) -> Just $ Reflog.Entry { time = offsetTime, diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema7To8.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema7To8.hs new file mode 100644 index 000000000..b4fe19d0f --- /dev/null +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema7To8.hs @@ -0,0 +1,135 @@ +{-# LANGUAGE QuasiQuotes #-} + +module Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema7To8 (migrateSchema7To8) where + +import Data.String.Here.Uninterpolated (here) +import qualified U.Codebase.Sqlite.Queries as Q +import qualified Unison.Sqlite as Sqlite + +-- | Adds a table for tracking namespace statistics +-- Adds stats for all existing namespaces, even though missing stats are computed on-demand if missing. +migrateSchema7To8 :: Sqlite.Transaction () +migrateSchema7To8 = do + Q.expectSchemaVersion 7 + createScopedNameLookupTables + Q.setSchemaVersion 8 + +-- | Create the scoped name lookup tables. +createScopedNameLookupTables :: Sqlite.Transaction () +createScopedNameLookupTables = do + -- This table allows us to look up which causal hashes have a name lookup. + Sqlite.execute_ + [here| + CREATE TABLE name_lookups ( + root_branch_hash_id INTEGER PRIMARY KEY REFERENCES hash(id) ON DELETE CASCADE + ) + |] + + Sqlite.execute_ + [here| + CREATE TABLE scoped_term_name_lookup ( + root_branch_hash_id INTEGER NOT NULL REFERENCES hash(id) ON DELETE CASCADE, + + -- The name of the term in reversed form, with a trailing '.': + -- E.g. map.List.base. + -- + -- The trailing '.' is helpful when performing suffix queries where we may not know + -- whether the suffix is complete or not, e.g. 
we could suffix search using any of the + -- following globs and it would still find 'map.List.base.': + -- map.List.base.* + -- map.List.* + -- map.* + reversed_name TEXT NOT NULL, + + -- The last name segment of the name. This is used when looking up names for + -- suffixification when building PPEs. + -- E.g. for the name 'base.List.map' this would be 'map' + last_name_segment TEXT NOT NULL, + + -- The namespace containing this definition, not reversed, with a trailing '.' + -- The trailing '.' simplifies GLOB queries, so that 'base.*' matches both things in + -- 'base' and 'base.List', but not 'base1', which allows us to avoid an OR in our where + -- clauses which in turn helps the sqlite query planner use indexes more effectively. + -- + -- example value: 'base.List.' + namespace TEXT NOT NULL, + referent_builtin TEXT NULL, + referent_component_hash TEXT NULL, + referent_component_index INTEGER NULL, + referent_constructor_index INTEGER NULL, + referent_constructor_type INTEGER NULL, + PRIMARY KEY (root_branch_hash_id, reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index) + ) + |] + + -- This index allows finding all names we need to consider within a given namespace for + -- suffixification of a name. + -- It may seem strange to use last_name_segment rather than a suffix search over reversed_name name here; + -- but SQLite will only optimize for a single prefix-glob at once, so we can't glob search + -- over both namespace and reversed_name, but we can EXACT match on last_name_segment and + -- then glob search on the namespace prefix, and have SQLite do the final glob search on + -- reversed_name over rows with a matching last segment without using an index and should be plenty fast. + Sqlite.execute_ + [here| + CREATE INDEX scoped_term_names_by_namespace_and_last_name_segment ON scoped_term_name_lookup(root_branch_hash_id, last_name_segment, namespace) + |] + -- This index allows us to find all names with a given ref within a specific namespace + Sqlite.execute_ + [here| + CREATE INDEX scoped_term_name_by_referent_lookup ON scoped_term_name_lookup(root_branch_hash_id, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, namespace) + |] + + -- Allows fetching ALL names within a specific namespace prefix. We currently use this to + -- pretty-print on share, but will be replaced with a more precise set of queries soon. + Sqlite.execute_ + [here| + CREATE INDEX scoped_term_names_by_namespace ON scoped_term_name_lookup(root_branch_hash_id, namespace) + |] + Sqlite.execute_ + [here| + CREATE TABLE scoped_type_name_lookup ( + root_branch_hash_id INTEGER NOT NULL REFERENCES hash(id), + -- The name of the term: E.g. List.base + reversed_name TEXT NOT NULL, + -- The last name segment of the name. This is used when looking up names for + -- suffixification when building PPEs. + -- E.g. for the name 'base.List.map' this would be 'map' + last_name_segment TEXT NOT NULL, + -- The namespace containing this definition, not reversed, with a trailing '.' + -- The trailing '.' simplifies GLOB queries, so that 'base.*' matches both things in + -- 'base' and 'base.List', but not 'base1', which allows us to avoid an OR in our where + -- clauses which in turn helps the sqlite query planner use indexes more effectively. + -- + -- example value: 'base.List.' 
+ namespace TEXT NOT NULL, + reference_builtin TEXT NULL, + reference_component_hash INTEGER NULL, + reference_component_index INTEGER NULL, + PRIMARY KEY (reversed_name, reference_builtin, reference_component_hash, reference_component_index) + ); + |] + + -- This index allows finding all names we need to consider within a given namespace for + -- suffixification of a name. + -- It may seem strange to use last_name_segment rather than a suffix search over reversed_name name here; + -- but SQLite will only optimize for a single prefix-glob at once, so we can't glob search + -- over both namespace and reversed_name, but we can EXACT match on last_name_segment and + -- then glob search on the namespace prefix, and have SQLite do the final glob search on + -- reversed_name over rows with a matching last segment without using an index and should be plenty fast. + Sqlite.execute_ + [here| + CREATE INDEX scoped_type_names_by_namespace_and_last_name_segment ON scoped_type_name_lookup(root_branch_hash_id, last_name_segment, namespace) + |] + + -- This index allows us to find all names with a given ref within a specific namespace. + Sqlite.execute_ + [here| + CREATE INDEX scoped_type_name_by_reference_lookup ON scoped_type_name_lookup(root_branch_hash_id, reference_builtin, reference_component_hash, reference_component_index, namespace) + |] + + -- Allows fetching ALL names within a specific namespace prefix. We currently use this to + -- pretty-print on share, but will be replaced with a more precise set of queries soon. + Sqlite.execute_ + [here| + CREATE INDEX scoped_type_names_by_namespace ON scoped_type_name_lookup(root_branch_hash_id, namespace) + |] diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema8To9.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema8To9.hs new file mode 100644 index 000000000..b2b74f1bc --- /dev/null +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Migrations/MigrateSchema8To9.hs @@ -0,0 +1,11 @@ +module Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema8To9 (migrateSchema8To9) where + +import qualified U.Codebase.Sqlite.Queries as Q +import qualified Unison.Sqlite as Sqlite + +-- | Recreates the name lookup tables because the primary key was missing the root hash id. +migrateSchema8To9 :: Sqlite.Transaction () +migrateSchema8To9 = do + Q.expectSchemaVersion 8 + Q.fixScopedNameLookupTables + Q.setSchemaVersion 9 diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Operations.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Operations.hs index 2c54b7841..eaa2e173e 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Operations.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Operations.hs @@ -7,11 +7,9 @@ -- are unified with non-sqlite operations in the Codebase interface, like 'appendReflog'. 
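As an aside on the schema just added in MigrateSchema7To8 above: the comments on scoped_term_name_lookup spell out the access pattern its indexes are tuned for (an exact match on last_name_segment, a prefix GLOB on namespace, and a final GLOB over reversed_name). The standalone sketch below is not part of this diff; the module name, parameter names, and selected columns are illustrative assumptions.

    {-# LANGUAGE QuasiQuotes #-}

    module ScopedNameLookupQuerySketch where

    import Data.String.Here.Uninterpolated (here)

    -- Suffix search for a (possibly partial) name: exact match on the last segment,
    -- prefix GLOB on the namespace, and a final GLOB over reversed_name, so the
    -- (root_branch_hash_id, last_name_segment, namespace) index can do most of the work.
    suffixSearchSql :: String
    suffixSearchSql =
      [here|
        SELECT reversed_name, referent_builtin, referent_component_hash,
               referent_component_index, referent_constructor_index
        FROM scoped_term_name_lookup
        WHERE root_branch_hash_id = :root_branch_hash_id
          AND last_name_segment = :last_name_segment
          AND namespace GLOB :namespace_prefix_glob
          AND reversed_name GLOB :reversed_name_glob
      |]

For example, a lookup for the suffix 'List.map' under 'base' would bind :last_name_segment to 'map', :namespace_prefix_glob to 'base.*', and :reversed_name_glob to 'map.List.*', which matches the stored reversed name 'map.List.base.'.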
module Unison.Codebase.SqliteCodebase.Operations where -import Control.Lens (ifor) import Data.Bitraversable (bitraverse) import Data.Either.Extra () import qualified Data.List as List -import qualified Data.List.NonEmpty as NEList import Data.List.NonEmpty.Extra (NonEmpty ((:|)), maximum1) import qualified Data.Map as Map import Data.Maybe (fromJust) @@ -19,7 +17,6 @@ import qualified Data.Set as Set import qualified Data.Text as Text import qualified U.Codebase.Branch as V2Branch import qualified U.Codebase.Branch.Diff as BranchDiff -import qualified U.Codebase.Causal as V2Causal import U.Codebase.HashTags (BranchHash, CausalHash (unCausalHash), PatchHash) import qualified U.Codebase.Reference as C.Reference import qualified U.Codebase.Referent as C.Referent @@ -30,11 +27,8 @@ import U.Codebase.Sqlite.Operations (NamesByPath (..)) import qualified U.Codebase.Sqlite.Operations as Ops import qualified U.Codebase.Sqlite.Queries as Q import U.Codebase.Sqlite.V2.HashHandle (v2HashHandle) -import qualified U.Util.Hash as H2 import qualified Unison.Builtin as Builtins import Unison.Codebase.Branch (Branch (..)) -import qualified Unison.Codebase.Branch as Branch -import qualified Unison.Codebase.Branch.Names as V1Branch import Unison.Codebase.Patch (Patch) import Unison.Codebase.Path (Path) import qualified Unison.Codebase.Path as Path @@ -97,9 +91,11 @@ data BufferEntry a = BufferEntry } deriving (Eq, Show) -prettyBufferEntry :: Show a => Hash -> BufferEntry a -> String +prettyBufferEntry :: (Show a) => Hash -> BufferEntry a -> String prettyBufferEntry (h :: Hash) BufferEntry {..} = - "BufferEntry " ++ show h ++ "\n" + "BufferEntry " + ++ show h + ++ "\n" ++ " { beComponentTargetSize = " ++ show beComponentTargetSize ++ "\n" @@ -151,7 +147,7 @@ tryFlushBuffer :: forall a. (Show a) => TVar (Map Hash (BufferEntry a)) -> - (H2.Hash -> [a] -> Transaction ()) -> + (Hash -> [a] -> Transaction ()) -> (Hash -> Transaction ()) -> Hash -> Transaction () @@ -621,31 +617,38 @@ namesAtPath namesRootPath relativeToPath = do Nothing -> Nothing Just stripped -> Just (Name.makeRelative stripped, ref) --- | Update the root namespace names index which is used by the share server for serving api --- requests. -updateNameLookupIndex :: +-- | Add an index for the provided branch hash if one doesn't already exist. +ensureNameLookupForBranchHash :: (C.Reference.Reference -> Sqlite.Transaction CT.ConstructorType) -> - Path -> - -- | "from" branch, if 'Nothing' use the empty branch + -- | An optional branch which we may already have an index for. + -- This should be a branch which is relatively similar to the branch we're creating a name + -- lookup for, e.g. a recent ancestor of the new branch. The more similar it is, the faster + -- the less work we'll need to do. 
Maybe BranchHash -> - -- | "to" branch BranchHash -> Sqlite.Transaction () -updateNameLookupIndex getDeclType pathPrefix mayFromBranchHash toBranchHash = do - fromBranch <- case mayFromBranchHash of - Nothing -> pure V2Branch.empty - Just fromBH -> Ops.expectBranchByBranchHash fromBH - toBranch <- Ops.expectBranchByBranchHash toBranchHash - treeDiff <- BranchDiff.diffBranches fromBranch toBranch - let namePrefix = case pathPrefix of - Path.Empty -> Nothing - (p Path.:< ps) -> Just $ Name.fromSegments (p :| Path.toList ps) - let BranchDiff.NameChanges {termNameAdds, termNameRemovals, typeNameAdds, typeNameRemovals} = BranchDiff.nameChanges namePrefix treeDiff - termNameAddsWithCT <- do - for termNameAdds \(name, ref) -> do - refWithCT <- addReferentCT ref - pure $ toNamedRef (name, refWithCT) - Ops.updateNameIndex (termNameAddsWithCT, toNamedRef <$> termNameRemovals) (toNamedRef <$> typeNameAdds, toNamedRef <$> typeNameRemovals) +ensureNameLookupForBranchHash getDeclType mayFromBranchHash toBranchHash = do + Ops.checkBranchHashNameLookupExists toBranchHash >>= \case + True -> pure () + False -> do + (fromBranch, mayExistingLookupBH) <- case mayFromBranchHash of + Nothing -> pure (V2Branch.empty, Nothing) + Just fromBH -> do + Ops.checkBranchHashNameLookupExists fromBH >>= \case + True -> (,Just fromBH) <$> Ops.expectBranchByBranchHash fromBH + False -> do + -- TODO: We can probably infer a good starting branch by crawling through + -- history looking for a Branch Hash we already have an index for. + pure (V2Branch.empty, Nothing) + toBranch <- Ops.expectBranchByBranchHash toBranchHash + treeDiff <- BranchDiff.diffBranches fromBranch toBranch + let namePrefix = Nothing + let BranchDiff.NameChanges {termNameAdds, termNameRemovals, typeNameAdds, typeNameRemovals} = BranchDiff.nameChanges namePrefix treeDiff + termNameAddsWithCT <- do + for termNameAdds \(name, ref) -> do + refWithCT <- addReferentCT ref + pure $ toNamedRef (name, refWithCT) + Ops.buildNameLookupForBranchHash mayExistingLookupBH toBranchHash (termNameAddsWithCT, toNamedRef <$> termNameRemovals) (toNamedRef <$> typeNameAdds, toNamedRef <$> typeNameRemovals) where toNamedRef :: (Name, ref) -> S.NamedRef ref toNamedRef (name, ref) = S.NamedRef {reversedSegments = coerce $ Name.reverseSegments name, ref = ref} @@ -656,80 +659,6 @@ updateNameLookupIndex getDeclType pathPrefix mayFromBranchHash toBranchHash = do ct <- getDeclType ref pure (referent, Just $ Cv.constructorType1to2 ct) --- | Compute the root namespace names index which is used by the share server for serving api --- requests. Using 'updateNameLookupIndex' is preferred whenever possible, since it's --- considerably faster. This can be used to reset the index if it ever gets out of sync due to --- a bug. --- --- This version can be used if you've already got the root Branch pre-loaded, otherwise --- it's faster to use 'initializeNameLookupIndexFromV2Root' -initializeNameLookupIndexFromV1Branch :: Branch Transaction -> Sqlite.Transaction () -initializeNameLookupIndexFromV1Branch root = do - Q.dropNameLookupTables - saveRootNamesIndexV1 (V1Branch.toNames . 
Branch.head $ root) - where - saveRootNamesIndexV1 :: Names -> Transaction () - saveRootNamesIndexV1 Names {Names.terms, Names.types} = do - let termNames :: [(S.NamedRef (C.Referent.Referent, Maybe C.Referent.ConstructorType))] - termNames = Rel.toList terms <&> \(name, ref) -> S.NamedRef {reversedSegments = nameSegments name, ref = splitReferent ref} - let typeNames :: [(S.NamedRef C.Reference.Reference)] - typeNames = - Rel.toList types - <&> ( \(name, ref) -> - S.NamedRef {reversedSegments = nameSegments name, ref = Cv.reference1to2 ref} - ) - Ops.updateNameIndex (termNames, []) (typeNames, []) - where - nameSegments :: Name -> NonEmpty Text - nameSegments = coerce @(NonEmpty NameSegment) @(NonEmpty Text) . Name.reverseSegments - splitReferent :: Referent.Referent -> (C.Referent.Referent, Maybe C.Referent.ConstructorType) - splitReferent referent = case referent of - Referent.Ref {} -> (Cv.referent1to2 referent, Nothing) - Referent.Con _ref ct -> (Cv.referent1to2 referent, Just (Cv.constructorType1to2 ct)) - --- | Compute the root namespace names index which is used by the share server for serving api --- requests. Using 'updateNameLookupIndex' is preferred whenever possible, since it's --- considerably faster. This can be used to reset the index if it ever gets out of sync due to --- a bug. --- --- This version should be used if you don't already have the root Branch pre-loaded, --- If you do, use 'initializeNameLookupIndexFromV1Branch' instead. -initializeNameLookupIndexFromV2Root :: (C.Reference.Reference -> Sqlite.Transaction CT.ConstructorType) -> Sqlite.Transaction () -initializeNameLookupIndexFromV2Root getDeclType = do - Q.dropNameLookupTables - rootHash <- Ops.expectRootCausalHash - causalBranch <- Ops.expectCausalBranchByCausalHash rootHash - (termNameMap, typeNameMap) <- nameMapsFromV2Branch [] causalBranch - let expandedTermNames = Map.toList termNameMap >>= (\(name, refs) -> (name,) <$> Set.toList refs) - termNameList <- do - for expandedTermNames \(name, ref) -> do - refWithCT <- addReferentCT ref - pure S.NamedRef {S.reversedSegments = coerce name, S.ref = refWithCT} - let typeNameList = do - (name, refs) <- Map.toList typeNameMap - ref <- Set.toList refs - pure $ S.NamedRef {S.reversedSegments = coerce name, S.ref = ref} - Ops.updateNameIndex (termNameList, []) (typeNameList, []) - where - addReferentCT :: C.Referent.Referent -> Transaction (C.Referent.Referent, Maybe C.Referent.ConstructorType) - addReferentCT referent = case referent of - C.Referent.Ref {} -> pure (referent, Nothing) - C.Referent.Con ref _conId -> do - ct <- getDeclType ref - pure (referent, Just $ Cv.constructorType1to2 ct) - - -- Traverse a v2 branch - -- Collects two maps, one with all term names and one with all type names. - -- Note that unlike the `Name` type in `unison-core1`, this list of name segments is - -- in reverse order, e.g. 
`["map", "List", "base"]` - nameMapsFromV2Branch :: Monad m => [NameSegment] -> V2Branch.CausalBranch m -> m (Map (NonEmpty NameSegment) (Set C.Referent.Referent), Map (NonEmpty NameSegment) (Set C.Reference.Reference)) - nameMapsFromV2Branch reversedNamePrefix cb = do - b <- V2Causal.value cb - let (shallowTermNames, shallowTypeNames) = (Map.keysSet <$> V2Branch.terms b, Map.keysSet <$> V2Branch.types b) - (prefixedChildTerms, prefixedChildTypes) <- - fold <$> (ifor (V2Branch.children b) $ \nameSegment cb -> (nameMapsFromV2Branch (nameSegment : reversedNamePrefix) cb)) - pure (Map.mapKeys (NEList.:| reversedNamePrefix) shallowTermNames <> prefixedChildTerms, Map.mapKeys (NEList.:| reversedNamePrefix) shallowTypeNames <> prefixedChildTypes) - -- | Given a transaction, return a transaction that first checks a semispace cache of the given size. -- -- The transaction should probably be read-only, as we (of course) don't hit SQLite on a cache hit. diff --git a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Paths.hs b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Paths.hs index d0e6871ef..cc65bd7e7 100644 --- a/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Paths.hs +++ b/parser-typechecker/src/Unison/Codebase/SqliteCodebase/Paths.hs @@ -9,6 +9,7 @@ where import Data.Time (NominalDiffTime) import System.FilePath (()) +import U.Codebase.Sqlite.DbId (SchemaVersion (SchemaVersion)) import Unison.Codebase (CodebasePath) -- | Prefer makeCodebasePath or makeCodebaseDirPath when possible. @@ -27,6 +28,6 @@ makeCodebaseDirPath :: CodebasePath -> FilePath makeCodebaseDirPath root = root ".unison" "v2" -- | Makes a path to store a backup of a sqlite database given the current time. -backupCodebasePath :: NominalDiffTime -> FilePath -backupCodebasePath now = - codebasePath ++ "." ++ show @Int (floor now) +backupCodebasePath :: SchemaVersion -> NominalDiffTime -> FilePath +backupCodebasePath (SchemaVersion schemaVersion) now = + codebasePath ++ ".v" ++ show schemaVersion ++ "." 
++ show @Int (floor now) diff --git a/parser-typechecker/src/Unison/Codebase/TermEdit/Typing.hs b/parser-typechecker/src/Unison/Codebase/TermEdit/Typing.hs index 1d9db07a0..c2ca1d4ef 100644 --- a/parser-typechecker/src/Unison/Codebase/TermEdit/Typing.hs +++ b/parser-typechecker/src/Unison/Codebase/TermEdit/Typing.hs @@ -5,7 +5,7 @@ import Unison.Type (Type) import qualified Unison.Typechecker as Typechecker import Unison.Var (Var) -typing :: Var v => Type v loc -> Type v loc -> Typing +typing :: (Var v) => Type v loc -> Type v loc -> Typing typing newType oldType | Typechecker.isEqual newType oldType = Same | Typechecker.isSubtype newType oldType = Subtype diff --git a/parser-typechecker/src/Unison/Codebase/Type.hs b/parser-typechecker/src/Unison/Codebase/Type.hs index 9e932d09d..dcaa675c1 100644 --- a/parser-typechecker/src/Unison/Codebase/Type.hs +++ b/parser-typechecker/src/Unison/Codebase/Type.hs @@ -13,14 +13,13 @@ module Unison.Codebase.Type ) where -import U.Codebase.HashTags (BranchHash, CausalHash) +import U.Codebase.HashTags (CausalHash) import qualified U.Codebase.Reference as V2 import Unison.Codebase.Branch (Branch) import qualified Unison.Codebase.Editor.Git as Git import Unison.Codebase.Editor.RemoteRepo (ReadGitRemoteNamespace, ReadGitRepo, WriteGitRepo) import Unison.Codebase.GitError (GitCodebaseError, GitProtocolError) import Unison.Codebase.Init.OpenCodebaseError (OpenCodebaseError (..)) -import Unison.Codebase.Path (Path) import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError (..)) import Unison.Codebase.SyncMode (SyncMode) import Unison.CodebasePath (CodebasePath) @@ -61,7 +60,7 @@ data Codebase m v a = Codebase -- semantics of 'putTypeDeclaration'. getTypeDeclaration :: Reference.Id -> Sqlite.Transaction (Maybe (Decl v a)), -- | Get the type of a given decl. - getDeclType :: V2.Reference -> m CT.ConstructorType, + getDeclType :: V2.Reference -> Sqlite.Transaction CT.ConstructorType, -- | Enqueue the put of a user-defined term (with its type) into the codebase, if it doesn't already exist. The -- implementation may choose to delay the put until all of the term's (and its type's) references are stored as -- well. @@ -101,21 +100,6 @@ data Codebase m v a = Codebase termsMentioningTypeImpl :: Reference -> Sqlite.Transaction (Set Referent.Id), -- | Get the set of user-defined terms-or-constructors whose hash matches the given prefix. termReferentsByPrefix :: ShortHash -> Sqlite.Transaction (Set Referent.Id), - -- Updates the root namespace names index from an old BranchHash to a new one. - -- This isn't run automatically because it can be a bit slow. - updateNameLookup :: - -- Path to the root of the _changes_. - -- E.g. if you know that all the changes occur at "base.List", you can pass "base.List" - -- here, and pass the old and new branch hashes for the branch as "base.List". - -- This allows us to avoid searching for changes in areas where it's impossible for it - -- to have occurred. - Path -> - -- The branch hash at 'Path' which the existing index was built from. - -- Pass 'Nothing' to build the index from scratch (i.e. compute a diff from an empty branch). - Maybe BranchHash -> - -- The new branch - BranchHash -> - Sqlite.Transaction (), -- | Acquire a new connection to the same underlying database file this codebase object connects to. withConnection :: forall x. (Sqlite.Connection -> m x) -> m x, -- | Acquire a new connection to the same underlying database file this codebase object connects to. 
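With updateNameLookup removed from the Codebase record here, name lookup indexes are no longer pushed incrementally; a caller asks for one right before it is needed via ensureNameLookupForBranchHash from the Operations.hs hunk above. A minimal sketch of that calling pattern follows, written against the imports already present in Unison.Codebase.SqliteCodebase.Operations; the helper name withNameLookup and its use site are assumptions, not code from this PR.

    -- Ensure a name lookup exists for the branch we are about to query, seeding the
    -- build from an ancestor that may already be indexed, then run the query.
    withNameLookup ::
      (C.Reference.Reference -> Sqlite.Transaction CT.ConstructorType) ->
      -- | a recent ancestor which may already have a name lookup
      Maybe BranchHash ->
      -- | the branch we want to serve names for
      BranchHash ->
      Sqlite.Transaction r ->
      Sqlite.Transaction r
    withNameLookup getDeclType mayAncestor branchHash action = do
      ensureNameLookupForBranchHash getDeclType mayAncestor branchHash
      action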
diff --git a/parser-typechecker/src/Unison/Codebase/Verbosity.hs b/parser-typechecker/src/Unison/Codebase/Verbosity.hs index 2182dae0d..fdbe6d3b4 100644 --- a/parser-typechecker/src/Unison/Codebase/Verbosity.hs +++ b/parser-typechecker/src/Unison/Codebase/Verbosity.hs @@ -1,8 +1,8 @@ module Unison.Codebase.Verbosity where -data Verbosity = Default | Silent deriving (Eq, Show) +data Verbosity = Verbose | Silent deriving (Eq, Show) isSilent :: Verbosity -> Bool isSilent v = case v of - Default -> False + Verbose -> False Silent -> True diff --git a/parser-typechecker/src/Unison/Codebase/Watch.hs b/parser-typechecker/src/Unison/Codebase/Watch.hs index 063f55549..609de3b8b 100644 --- a/parser-typechecker/src/Unison/Codebase/Watch.hs +++ b/parser-typechecker/src/Unison/Codebase/Watch.hs @@ -35,11 +35,11 @@ import UnliftIO.MVar ) import UnliftIO.STM (atomically) -untilJust :: Monad m => m (Maybe a) -> m a +untilJust :: (Monad m) => m (Maybe a) -> m a untilJust act = act >>= maybe (untilJust act) return watchDirectory' :: - forall m. MonadIO m => FilePath -> m (IO (), IO (FilePath, UTCTime)) + forall m. (MonadIO m) => FilePath -> m (IO (), IO (FilePath, UTCTime)) watchDirectory' d = do mvar <- newEmptyMVar let handler :: Event -> IO () @@ -85,7 +85,7 @@ collectUntilPause queue minPauseµsec = do watchDirectory :: forall m. - MonadIO m => + (MonadIO m) => FilePath -> (FilePath -> Bool) -> m (IO (), IO (FilePath, Text)) @@ -126,7 +126,8 @@ watchDirectory dir allow = do forever $ do event@(file, _) <- watcher when (allow file) $ - STM.atomically $ TQueue.enqueue queue event + STM.atomically $ + TQueue.enqueue queue event pending <- newIORef [] let await :: IO (FilePath, Text) await = diff --git a/parser-typechecker/src/Unison/CodebasePath.hs b/parser-typechecker/src/Unison/CodebasePath.hs index 49554b478..2a1d9ecaf 100644 --- a/parser-typechecker/src/Unison/CodebasePath.hs +++ b/parser-typechecker/src/Unison/CodebasePath.hs @@ -10,5 +10,5 @@ import UnliftIO.Directory (getHomeDirectory) -- | A directory that contains a codebase. type CodebasePath = FilePath -getCodebaseDir :: MonadIO m => Maybe CodebasePath -> m CodebasePath +getCodebaseDir :: (MonadIO m) => Maybe CodebasePath -> m CodebasePath getCodebaseDir = maybe getHomeDirectory pure diff --git a/parser-typechecker/src/Unison/FileParsers.hs b/parser-typechecker/src/Unison/FileParsers.hs index 44e940cb1..7bfbeaecf 100644 --- a/parser-typechecker/src/Unison/FileParsers.hs +++ b/parser-typechecker/src/Unison/FileParsers.hs @@ -96,7 +96,6 @@ resolveNames :: (Term v, TDNRMap v, TL.TypeLookup v Ann) resolveNames typeLookupf preexistingNames uf = do let tm = UF.typecheckingTerm uf - deps = Term.dependencies tm possibleDeps = [ (Name.toText name, Var.name v, r) | (name, r) <- Rel.toList (Names.terms preexistingNames), @@ -104,9 +103,7 @@ resolveNames typeLookupf preexistingNames uf = do name `Name.endsWithReverseSegments` List.NonEmpty.toList (Name.reverseSegments (Name.unsafeFromVar v)) ] possibleRefs = Referent.toReference . view _3 <$> possibleDeps - tl <- - lift . lift . fmap (UF.declsToTypeLookup uf <>) $ - typeLookupf (deps <> Set.fromList possibleRefs) + tl <- lift . lift $ fmap (UF.declsToTypeLookup uf <>) (typeLookupf (UF.dependencies uf <> Set.fromList possibleRefs)) -- For populating the TDNR environment, we pick definitions -- from the namespace and from the local file whose full name -- has a suffix that equals one of the free variables in the file. 
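The comment that closes the FileParsers.hs hunk above describes how TDNR candidates are picked: a definition qualifies when its full name ends, segment-wise, with the free variable's name. The small standalone illustration below uses helpers that appear elsewhere in this diff (Name.fromSegments, Name.endsWithReverseSegments, the NameSegment constructor); the module name and the concrete names are made up for the example, and the usual Unison.Name and Unison.NameSegment modules are assumed.

    {-# LANGUAGE OverloadedStrings #-}

    module TdnrSuffixSketch where

    import Data.List.NonEmpty (NonEmpty ((:|)))
    import qualified Unison.Name as Name
    import Unison.NameSegment (NameSegment (..))

    -- 'base.Nat.toText' is a TDNR candidate for a free variable named 'toText':
    -- its reversed segments ("toText", "Nat", "base") begin with the variable's
    -- single segment "toText".
    candidateMatches :: Bool
    candidateMatches =
      Name.fromSegments (NameSegment "base" :| [NameSegment "Nat", NameSegment "toText"])
        `Name.endsWithReverseSegments` [NameSegment "toText"]
    -- evaluates to True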
diff --git a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs index c56743dcc..ea0f249ff 100644 --- a/parser-typechecker/src/Unison/Hashing/V2/Convert.hs +++ b/parser-typechecker/src/Unison/Hashing/V2/Convert.hs @@ -39,21 +39,8 @@ import qualified Unison.ConstructorReference as Memory.ConstructorReference import qualified Unison.ConstructorType as CT import qualified Unison.ConstructorType as Memory.ConstructorType import qualified Unison.DataDeclaration as Memory.DD -import Unison.Hash (Hash) -import qualified Unison.Hashing.V2.Branch as Hashing.Branch -import qualified Unison.Hashing.V2.Causal as Hashing.Causal -import qualified Unison.Hashing.V2.DataDeclaration as Hashing.DD -import Unison.Hashing.V2.Hashable (HashFor (HashFor), Hashable) -import qualified Unison.Hashing.V2.Hashable as Hashable -import qualified Unison.Hashing.V2.Kind as Hashing.Kind -import qualified Unison.Hashing.V2.Patch as Hashing.Patch -import qualified Unison.Hashing.V2.Pattern as Hashing.Pattern -import qualified Unison.Hashing.V2.Reference as Hashing.Reference -import qualified Unison.Hashing.V2.Referent as Hashing.Referent -import qualified Unison.Hashing.V2.Term as Hashing.Term -import qualified Unison.Hashing.V2.TermEdit as Hashing.TermEdit -import qualified Unison.Hashing.V2.Type as Hashing.Type -import qualified Unison.Hashing.V2.TypeEdit as Hashing.TypeEdit +import Unison.Hash (Hash, HashFor (HashFor)) +import qualified Unison.Hashing.V2 as Hashing import qualified Unison.Kind as Memory.Kind import qualified Unison.NameSegment as Memory.NameSegment import Unison.Names.ResolutionResult (ResolutionResult) @@ -68,11 +55,12 @@ import qualified Unison.Util.Relation as Relation import qualified Unison.Util.Star3 as Memory.Star3 import Unison.Var (Var) -typeToReference :: Var v => Memory.Type.Type v a -> Memory.Reference.Reference -typeToReference = h2mReference . Hashing.Type.toReference . m2hType . Memory.Type.removeAllEffectVars +typeToReference :: (Var v) => Memory.Type.Type v a -> Memory.Reference.Reference +typeToReference = h2mReference . Hashing.typeToReference . m2hType . Memory.Type.removeAllEffectVars -typeToReferenceMentions :: Var v => Memory.Type.Type v a -> Set Memory.Reference.Reference -typeToReferenceMentions = Set.map h2mReference . Hashing.Type.toReferenceMentions . m2hType . Memory.Type.removeAllEffectVars +typeToReferenceMentions :: (Var v) => Memory.Type.Type v a -> Set Memory.Reference.Reference +typeToReferenceMentions = + Set.map h2mReference . Hashing.typeToReferenceMentions . m2hType . Memory.Type.removeAllEffectVars -- TODO: remove non-prime version -- include type in hash @@ -83,18 +71,18 @@ hashTermComponents :: Map v (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a, extra) hashTermComponents mTerms = case h2mTermMap mTerms of - (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.Term.hashComponents hTerms + (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) 
<$> Hashing.hashTermComponents hTerms where h2mTermMap m = m & traverse (\(trm, typ, extra) -> liftA3 (,,) (m2hTerm trm) (pure $ m2hType typ) (pure extra)) & Writer.runWriter h2mTermResult :: - Ord v => + (Ord v) => ( Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType ) -> - (Hashing.Reference.Id, Hashing.Term.Term v a, Hashing.Type.Type v a, extra) -> + (Hashing.ReferenceId, Hashing.Term v a, Hashing.Type v a, extra) -> (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a, extra) h2mTermResult getCtorType (id, tm, typ, extra) = (h2mReferenceId id, h2mTerm getCtorType tm, h2mType typ, extra) @@ -103,155 +91,155 @@ hashTermComponents mTerms = -- saving them. hashTermComponentsWithoutTypes :: forall v a. - Var v => + (Var v) => Map v (Memory.Term.Term v a) -> Map v (Memory.Reference.Id, Memory.Term.Term v a) hashTermComponentsWithoutTypes mTerms = case Writer.runWriter (traverse m2hTerm mTerms) of - (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.Term.hashComponentsWithoutTypes hTerms + (hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.hashTermComponentsWithoutTypes hTerms where - h2mTermResult :: Ord v => (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> (Hashing.Reference.Id, Hashing.Term.Term v a) -> (Memory.Reference.Id, Memory.Term.Term v a) + h2mTermResult :: (Ord v) => (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> (Hashing.ReferenceId, Hashing.Term v a) -> (Memory.Reference.Id, Memory.Term.Term v a) h2mTermResult getCtorType (id, tm) = (h2mReferenceId id, h2mTerm getCtorType tm) -hashClosedTerm :: Var v => Memory.Term.Term v a -> Memory.Reference.Id -hashClosedTerm = h2mReferenceId . Hashing.Term.hashClosedTerm . fst . Writer.runWriter . m2hTerm +hashClosedTerm :: (Var v) => Memory.Term.Term v a -> Memory.Reference.Id +hashClosedTerm = h2mReferenceId . Hashing.hashClosedTerm . fst . Writer.runWriter . 
m2hTerm -m2hTerm :: Ord v => Memory.Term.Term v a -> Writer (Map Memory.Reference.Reference Memory.ConstructorType.ConstructorType) (Hashing.Term.Term v a) +m2hTerm :: (Ord v) => Memory.Term.Term v a -> Writer (Map Memory.Reference.Reference Memory.ConstructorType.ConstructorType) (Hashing.Term v a) m2hTerm = ABT.transformM \case - Memory.Term.Int i -> pure (Hashing.Term.Int i) - Memory.Term.Nat n -> pure (Hashing.Term.Nat n) - Memory.Term.Float d -> pure (Hashing.Term.Float d) - Memory.Term.Boolean b -> pure (Hashing.Term.Boolean b) - Memory.Term.Text t -> pure (Hashing.Term.Text t) - Memory.Term.Char c -> pure (Hashing.Term.Char c) - Memory.Term.Blank b -> pure (Hashing.Term.Blank b) - Memory.Term.Ref r -> pure (Hashing.Term.Ref (m2hReference r)) - Memory.Term.Constructor (Memory.ConstructorReference.ConstructorReference r i) -> pure (Hashing.Term.Constructor (m2hReference r) i) - Memory.Term.Request (Memory.ConstructorReference.ConstructorReference r i) -> pure (Hashing.Term.Request (m2hReference r) i) - Memory.Term.Handle x y -> pure (Hashing.Term.Handle x y) - Memory.Term.App f x -> pure (Hashing.Term.App f x) - Memory.Term.Ann e t -> pure (Hashing.Term.Ann e (m2hType t)) - Memory.Term.List as -> pure (Hashing.Term.List as) - Memory.Term.And p q -> pure (Hashing.Term.And p q) - Memory.Term.If c t f -> pure (Hashing.Term.If c t f) - Memory.Term.Or p q -> pure (Hashing.Term.Or p q) - Memory.Term.Lam a -> pure (Hashing.Term.Lam a) - Memory.Term.LetRec _isTop bs body -> pure (Hashing.Term.LetRec bs body) - Memory.Term.Let _isTop b body -> pure (Hashing.Term.Let b body) - Memory.Term.Match scr cases -> pure (Hashing.Term.Match scr (fmap m2hMatchCase cases)) - Memory.Term.TermLink r -> Hashing.Term.TermLink <$> m2hReferent r - Memory.Term.TypeLink r -> pure (Hashing.Term.TypeLink (m2hReference r)) + Memory.Term.Int i -> pure (Hashing.TermInt i) + Memory.Term.Nat n -> pure (Hashing.TermNat n) + Memory.Term.Float d -> pure (Hashing.TermFloat d) + Memory.Term.Boolean b -> pure (Hashing.TermBoolean b) + Memory.Term.Text t -> pure (Hashing.TermText t) + Memory.Term.Char c -> pure (Hashing.TermChar c) + Memory.Term.Blank b -> pure (Hashing.TermBlank b) + Memory.Term.Ref r -> pure (Hashing.TermRef (m2hReference r)) + Memory.Term.Constructor (Memory.ConstructorReference.ConstructorReference r i) -> pure (Hashing.TermConstructor (m2hReference r) i) + Memory.Term.Request (Memory.ConstructorReference.ConstructorReference r i) -> pure (Hashing.TermRequest (m2hReference r) i) + Memory.Term.Handle x y -> pure (Hashing.TermHandle x y) + Memory.Term.App f x -> pure (Hashing.TermApp f x) + Memory.Term.Ann e t -> pure (Hashing.TermAnn e (m2hType t)) + Memory.Term.List as -> pure (Hashing.TermList as) + Memory.Term.And p q -> pure (Hashing.TermAnd p q) + Memory.Term.If c t f -> pure (Hashing.TermIf c t f) + Memory.Term.Or p q -> pure (Hashing.TermOr p q) + Memory.Term.Lam a -> pure (Hashing.TermLam a) + Memory.Term.LetRec _isTop bs body -> pure (Hashing.TermLetRec bs body) + Memory.Term.Let _isTop b body -> pure (Hashing.TermLet b body) + Memory.Term.Match scr cases -> pure (Hashing.TermMatch scr (fmap m2hMatchCase cases)) + Memory.Term.TermLink r -> Hashing.TermTermLink <$> m2hReferent r + Memory.Term.TypeLink r -> pure (Hashing.TermTypeLink (m2hReference r)) -m2hMatchCase :: Memory.Term.MatchCase a a1 -> Hashing.Term.MatchCase a a1 -m2hMatchCase (Memory.Term.MatchCase pat m_a1 a1) = Hashing.Term.MatchCase (m2hPattern pat) m_a1 a1 +m2hMatchCase :: Memory.Term.MatchCase a a1 -> Hashing.MatchCase a a1 
+m2hMatchCase (Memory.Term.MatchCase pat m_a1 a1) = Hashing.MatchCase (m2hPattern pat) m_a1 a1 -m2hPattern :: Memory.Pattern.Pattern a -> Hashing.Pattern.Pattern a +m2hPattern :: Memory.Pattern.Pattern a -> Hashing.Pattern a m2hPattern = \case - Memory.Pattern.Unbound loc -> Hashing.Pattern.Unbound loc - Memory.Pattern.Var loc -> Hashing.Pattern.Var loc - Memory.Pattern.Boolean loc b -> Hashing.Pattern.Boolean loc b - Memory.Pattern.Int loc i -> Hashing.Pattern.Int loc i - Memory.Pattern.Nat loc n -> Hashing.Pattern.Nat loc n - Memory.Pattern.Float loc f -> Hashing.Pattern.Float loc f - Memory.Pattern.Text loc t -> Hashing.Pattern.Text loc t - Memory.Pattern.Char loc c -> Hashing.Pattern.Char loc c + Memory.Pattern.Unbound loc -> Hashing.PatternUnbound loc + Memory.Pattern.Var loc -> Hashing.PatternVar loc + Memory.Pattern.Boolean loc b -> Hashing.PatternBoolean loc b + Memory.Pattern.Int loc i -> Hashing.PatternInt loc i + Memory.Pattern.Nat loc n -> Hashing.PatternNat loc n + Memory.Pattern.Float loc f -> Hashing.PatternFloat loc f + Memory.Pattern.Text loc t -> Hashing.PatternText loc t + Memory.Pattern.Char loc c -> Hashing.PatternChar loc c Memory.Pattern.Constructor loc (Memory.ConstructorReference.ConstructorReference r i) ps -> - Hashing.Pattern.Constructor loc (m2hReference r) i (fmap m2hPattern ps) - Memory.Pattern.As loc p -> Hashing.Pattern.As loc (m2hPattern p) - Memory.Pattern.EffectPure loc p -> Hashing.Pattern.EffectPure loc (m2hPattern p) + Hashing.PatternConstructor loc (m2hReference r) i (fmap m2hPattern ps) + Memory.Pattern.As loc p -> Hashing.PatternAs loc (m2hPattern p) + Memory.Pattern.EffectPure loc p -> Hashing.PatternEffectPure loc (m2hPattern p) Memory.Pattern.EffectBind loc (Memory.ConstructorReference.ConstructorReference r i) ps k -> - Hashing.Pattern.EffectBind loc (m2hReference r) i (fmap m2hPattern ps) (m2hPattern k) - Memory.Pattern.SequenceLiteral loc ps -> Hashing.Pattern.SequenceLiteral loc (fmap m2hPattern ps) - Memory.Pattern.SequenceOp loc l op r -> Hashing.Pattern.SequenceOp loc (m2hPattern l) (m2hSequenceOp op) (m2hPattern r) + Hashing.PatternEffectBind loc (m2hReference r) i (fmap m2hPattern ps) (m2hPattern k) + Memory.Pattern.SequenceLiteral loc ps -> Hashing.PatternSequenceLiteral loc (fmap m2hPattern ps) + Memory.Pattern.SequenceOp loc l op r -> Hashing.PatternSequenceOp loc (m2hPattern l) (m2hSequenceOp op) (m2hPattern r) -m2hSequenceOp :: Memory.Pattern.SeqOp -> Hashing.Pattern.SeqOp +m2hSequenceOp :: Memory.Pattern.SeqOp -> Hashing.SeqOp m2hSequenceOp = \case - Memory.Pattern.Cons -> Hashing.Pattern.Cons - Memory.Pattern.Snoc -> Hashing.Pattern.Snoc - Memory.Pattern.Concat -> Hashing.Pattern.Concat + Memory.Pattern.Cons -> Hashing.Cons + Memory.Pattern.Snoc -> Hashing.Snoc + Memory.Pattern.Concat -> Hashing.Concat -m2hReferent :: Memory.Referent.Referent -> Writer (Map Memory.Reference.Reference Memory.ConstructorType.ConstructorType) Hashing.Referent.Referent +m2hReferent :: Memory.Referent.Referent -> Writer (Map Memory.Reference.Reference Memory.ConstructorType.ConstructorType) Hashing.Referent m2hReferent = \case - Memory.Referent.Ref ref -> pure (Hashing.Referent.Ref (m2hReference ref)) + Memory.Referent.Ref ref -> pure (Hashing.ReferentRef (m2hReference ref)) Memory.Referent.Con (Memory.ConstructorReference.ConstructorReference ref n) ct -> do Writer.tell (Map.singleton ref ct) - pure (Hashing.Referent.Con (m2hReference ref) n) + pure (Hashing.ReferentCon (m2hReference ref) n) -h2mTerm :: Ord v => (Memory.Reference.Reference -> 
Memory.ConstructorType.ConstructorType) -> Hashing.Term.Term v a -> Memory.Term.Term v a +h2mTerm :: (Ord v) => (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> Hashing.Term v a -> Memory.Term.Term v a h2mTerm getCT = ABT.transform \case - Hashing.Term.Int i -> Memory.Term.Int i - Hashing.Term.Nat n -> Memory.Term.Nat n - Hashing.Term.Float d -> Memory.Term.Float d - Hashing.Term.Boolean b -> Memory.Term.Boolean b - Hashing.Term.Text t -> Memory.Term.Text t - Hashing.Term.Char c -> Memory.Term.Char c - Hashing.Term.Blank b -> Memory.Term.Blank b - Hashing.Term.Ref r -> Memory.Term.Ref (h2mReference r) - Hashing.Term.Constructor r i -> Memory.Term.Constructor (Memory.ConstructorReference.ConstructorReference (h2mReference r) i) - Hashing.Term.Request r i -> Memory.Term.Request (Memory.ConstructorReference.ConstructorReference (h2mReference r) i) - Hashing.Term.Handle x y -> Memory.Term.Handle x y - Hashing.Term.App f x -> Memory.Term.App f x - Hashing.Term.Ann e t -> Memory.Term.Ann e (h2mType t) - Hashing.Term.List as -> Memory.Term.List as - Hashing.Term.If c t f -> Memory.Term.If c t f - Hashing.Term.And p q -> Memory.Term.And p q - Hashing.Term.Or p q -> Memory.Term.Or p q - Hashing.Term.Lam a -> Memory.Term.Lam a - Hashing.Term.LetRec bs body -> Memory.Term.LetRec False bs body - Hashing.Term.Let b body -> Memory.Term.Let False b body - Hashing.Term.Match scr cases -> Memory.Term.Match scr (h2mMatchCase <$> cases) - Hashing.Term.TermLink r -> Memory.Term.TermLink (h2mReferent getCT r) - Hashing.Term.TypeLink r -> Memory.Term.TypeLink (h2mReference r) + Hashing.TermInt i -> Memory.Term.Int i + Hashing.TermNat n -> Memory.Term.Nat n + Hashing.TermFloat d -> Memory.Term.Float d + Hashing.TermBoolean b -> Memory.Term.Boolean b + Hashing.TermText t -> Memory.Term.Text t + Hashing.TermChar c -> Memory.Term.Char c + Hashing.TermBlank b -> Memory.Term.Blank b + Hashing.TermRef r -> Memory.Term.Ref (h2mReference r) + Hashing.TermConstructor r i -> Memory.Term.Constructor (Memory.ConstructorReference.ConstructorReference (h2mReference r) i) + Hashing.TermRequest r i -> Memory.Term.Request (Memory.ConstructorReference.ConstructorReference (h2mReference r) i) + Hashing.TermHandle x y -> Memory.Term.Handle x y + Hashing.TermApp f x -> Memory.Term.App f x + Hashing.TermAnn e t -> Memory.Term.Ann e (h2mType t) + Hashing.TermList as -> Memory.Term.List as + Hashing.TermIf c t f -> Memory.Term.If c t f + Hashing.TermAnd p q -> Memory.Term.And p q + Hashing.TermOr p q -> Memory.Term.Or p q + Hashing.TermLam a -> Memory.Term.Lam a + Hashing.TermLetRec bs body -> Memory.Term.LetRec False bs body + Hashing.TermLet b body -> Memory.Term.Let False b body + Hashing.TermMatch scr cases -> Memory.Term.Match scr (h2mMatchCase <$> cases) + Hashing.TermTermLink r -> Memory.Term.TermLink (h2mReferent getCT r) + Hashing.TermTypeLink r -> Memory.Term.TypeLink (h2mReference r) -h2mMatchCase :: Hashing.Term.MatchCase a b -> Memory.Term.MatchCase a b -h2mMatchCase (Hashing.Term.MatchCase pat m_b b) = Memory.Term.MatchCase (h2mPattern pat) m_b b +h2mMatchCase :: Hashing.MatchCase a b -> Memory.Term.MatchCase a b +h2mMatchCase (Hashing.MatchCase pat m_b b) = Memory.Term.MatchCase (h2mPattern pat) m_b b -h2mPattern :: Hashing.Pattern.Pattern a -> Memory.Pattern.Pattern a +h2mPattern :: Hashing.Pattern a -> Memory.Pattern.Pattern a h2mPattern = \case - Hashing.Pattern.Unbound loc -> Memory.Pattern.Unbound loc - Hashing.Pattern.Var loc -> Memory.Pattern.Var loc - Hashing.Pattern.Boolean loc b -> 
Memory.Pattern.Boolean loc b - Hashing.Pattern.Int loc i -> Memory.Pattern.Int loc i - Hashing.Pattern.Nat loc n -> Memory.Pattern.Nat loc n - Hashing.Pattern.Float loc f -> Memory.Pattern.Float loc f - Hashing.Pattern.Text loc t -> Memory.Pattern.Text loc t - Hashing.Pattern.Char loc c -> Memory.Pattern.Char loc c - Hashing.Pattern.Constructor loc r i ps -> + Hashing.PatternUnbound loc -> Memory.Pattern.Unbound loc + Hashing.PatternVar loc -> Memory.Pattern.Var loc + Hashing.PatternBoolean loc b -> Memory.Pattern.Boolean loc b + Hashing.PatternInt loc i -> Memory.Pattern.Int loc i + Hashing.PatternNat loc n -> Memory.Pattern.Nat loc n + Hashing.PatternFloat loc f -> Memory.Pattern.Float loc f + Hashing.PatternText loc t -> Memory.Pattern.Text loc t + Hashing.PatternChar loc c -> Memory.Pattern.Char loc c + Hashing.PatternConstructor loc r i ps -> Memory.Pattern.Constructor loc (Memory.ConstructorReference.ConstructorReference (h2mReference r) i) (h2mPattern <$> ps) - Hashing.Pattern.As loc p -> Memory.Pattern.As loc (h2mPattern p) - Hashing.Pattern.EffectPure loc p -> Memory.Pattern.EffectPure loc (h2mPattern p) - Hashing.Pattern.EffectBind loc r i ps k -> + Hashing.PatternAs loc p -> Memory.Pattern.As loc (h2mPattern p) + Hashing.PatternEffectPure loc p -> Memory.Pattern.EffectPure loc (h2mPattern p) + Hashing.PatternEffectBind loc r i ps k -> Memory.Pattern.EffectBind loc (Memory.ConstructorReference.ConstructorReference (h2mReference r) i) (h2mPattern <$> ps) (h2mPattern k) - Hashing.Pattern.SequenceLiteral loc ps -> Memory.Pattern.SequenceLiteral loc (h2mPattern <$> ps) - Hashing.Pattern.SequenceOp loc l op r -> Memory.Pattern.SequenceOp loc (h2mPattern l) (h2mSequenceOp op) (h2mPattern r) + Hashing.PatternSequenceLiteral loc ps -> Memory.Pattern.SequenceLiteral loc (h2mPattern <$> ps) + Hashing.PatternSequenceOp loc l op r -> Memory.Pattern.SequenceOp loc (h2mPattern l) (h2mSequenceOp op) (h2mPattern r) -h2mSequenceOp :: Hashing.Pattern.SeqOp -> Memory.Pattern.SeqOp +h2mSequenceOp :: Hashing.SeqOp -> Memory.Pattern.SeqOp h2mSequenceOp = \case - Hashing.Pattern.Cons -> Memory.Pattern.Cons - Hashing.Pattern.Snoc -> Memory.Pattern.Snoc - Hashing.Pattern.Concat -> Memory.Pattern.Concat + Hashing.Cons -> Memory.Pattern.Cons + Hashing.Snoc -> Memory.Pattern.Snoc + Hashing.Concat -> Memory.Pattern.Concat -h2mReferent :: (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> Hashing.Referent.Referent -> Memory.Referent.Referent +h2mReferent :: (Memory.Reference.Reference -> Memory.ConstructorType.ConstructorType) -> Hashing.Referent -> Memory.Referent.Referent h2mReferent getCT = \case - Hashing.Referent.Ref ref -> Memory.Referent.Ref (h2mReference ref) - Hashing.Referent.Con ref n -> + Hashing.ReferentRef ref -> Memory.Referent.Ref (h2mReference ref) + Hashing.ReferentCon ref n -> let mRef = h2mReference ref in Memory.Referent.Con (Memory.ConstructorReference.ConstructorReference mRef n) (getCT mRef) hashDataDecls :: - Var v => + (Var v) => Map v (Memory.DD.DataDeclaration v a) -> ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.DataDeclaration v a)] hashDataDecls memDecls = do let hashingDecls = fmap m2hDecl memDecls - hashingResult <- Hashing.DD.hashDecls Name.unsafeFromVar hashingDecls + hashingResult <- Hashing.hashDecls Name.unsafeFromVar hashingDecls pure $ map h2mDeclResult hashingResult where - h2mDeclResult :: Ord v => (v, Hashing.Reference.Id, Hashing.DD.DataDeclaration v a) -> (v, Memory.Reference.Id, Memory.DD.DataDeclaration v a) + h2mDeclResult :: 
(Ord v) => (v, Hashing.ReferenceId, Hashing.DataDeclaration v a) -> (v, Memory.Reference.Id, Memory.DD.DataDeclaration v a) h2mDeclResult (v, id, dd) = (v, h2mReferenceId id, h2mDecl dd) hashDecls :: - Var v => + (Var v) => Map v (Memory.DD.Decl v a) -> ResolutionResult v a [(v, Memory.Reference.Id, Memory.DD.Decl v a)] hashDecls memDecls = do @@ -272,75 +260,75 @@ hashDecls memDecls = do retag CT.Effect = Left . Memory.DD.EffectDeclaration retag CT.Data = Right -m2hDecl :: Ord v => Memory.DD.DataDeclaration v a -> Hashing.DD.DataDeclaration v a +m2hDecl :: (Ord v) => Memory.DD.DataDeclaration v a -> Hashing.DataDeclaration v a m2hDecl (Memory.DD.DataDeclaration mod ann bound ctors) = - Hashing.DD.DataDeclaration (m2hModifier mod) ann bound $ fmap (Lens.over _3 m2hType) ctors + Hashing.DataDeclaration (m2hModifier mod) ann bound $ fmap (Lens.over _3 m2hType) ctors -m2hType :: Ord v => Memory.Type.Type v a -> Hashing.Type.Type v a +m2hType :: (Ord v) => Memory.Type.Type v a -> Hashing.Type v a m2hType = ABT.transform \case - Memory.Type.Ref ref -> Hashing.Type.Ref (m2hReference ref) - Memory.Type.Arrow a1 a1' -> Hashing.Type.Arrow a1 a1' - Memory.Type.Ann a1 ki -> Hashing.Type.Ann a1 (m2hKind ki) - Memory.Type.App a1 a1' -> Hashing.Type.App a1 a1' - Memory.Type.Effect a1 a1' -> Hashing.Type.Effect a1 a1' - Memory.Type.Effects a1s -> Hashing.Type.Effects a1s - Memory.Type.Forall a1 -> Hashing.Type.Forall a1 - Memory.Type.IntroOuter a1 -> Hashing.Type.IntroOuter a1 + Memory.Type.Ref ref -> Hashing.TypeRef (m2hReference ref) + Memory.Type.Arrow a1 a1' -> Hashing.TypeArrow a1 a1' + Memory.Type.Ann a1 ki -> Hashing.TypeAnn a1 (m2hKind ki) + Memory.Type.App a1 a1' -> Hashing.TypeApp a1 a1' + Memory.Type.Effect a1 a1' -> Hashing.TypeEffect a1 a1' + Memory.Type.Effects a1s -> Hashing.TypeEffects a1s + Memory.Type.Forall a1 -> Hashing.TypeForall a1 + Memory.Type.IntroOuter a1 -> Hashing.TypeIntroOuter a1 -m2hKind :: Memory.Kind.Kind -> Hashing.Kind.Kind +m2hKind :: Memory.Kind.Kind -> Hashing.Kind m2hKind = \case - Memory.Kind.Star -> Hashing.Kind.Star - Memory.Kind.Arrow k1 k2 -> Hashing.Kind.Arrow (m2hKind k1) (m2hKind k2) + Memory.Kind.Star -> Hashing.KindStar + Memory.Kind.Arrow k1 k2 -> Hashing.KindArrow (m2hKind k1) (m2hKind k2) -m2hReference :: Memory.Reference.Reference -> Hashing.Reference.Reference +m2hReference :: Memory.Reference.Reference -> Hashing.Reference m2hReference = \case - Memory.Reference.Builtin t -> Hashing.Reference.Builtin t - Memory.Reference.DerivedId d -> Hashing.Reference.DerivedId (m2hReferenceId d) + Memory.Reference.Builtin t -> Hashing.ReferenceBuiltin t + Memory.Reference.DerivedId d -> Hashing.ReferenceDerivedId (m2hReferenceId d) -m2hReferenceId :: Memory.Reference.Id -> Hashing.Reference.Id -m2hReferenceId (Memory.Reference.Id h i) = Hashing.Reference.Id h i +m2hReferenceId :: Memory.Reference.Id -> Hashing.ReferenceId +m2hReferenceId (Memory.Reference.Id h i) = Hashing.ReferenceId h i -h2mModifier :: Hashing.DD.Modifier -> Memory.DD.Modifier +h2mModifier :: Hashing.Modifier -> Memory.DD.Modifier h2mModifier = \case - Hashing.DD.Structural -> Memory.DD.Structural - Hashing.DD.Unique text -> Memory.DD.Unique text + Hashing.Structural -> Memory.DD.Structural + Hashing.Unique text -> Memory.DD.Unique text -m2hModifier :: Memory.DD.Modifier -> Hashing.DD.Modifier +m2hModifier :: Memory.DD.Modifier -> Hashing.Modifier m2hModifier = \case - Memory.DD.Structural -> Hashing.DD.Structural - Memory.DD.Unique text -> Hashing.DD.Unique text + Memory.DD.Structural -> 
Hashing.Structural + Memory.DD.Unique text -> Hashing.Unique text -h2mDecl :: Ord v => Hashing.DD.DataDeclaration v a -> Memory.DD.DataDeclaration v a -h2mDecl (Hashing.DD.DataDeclaration mod ann bound ctors) = +h2mDecl :: (Ord v) => Hashing.DataDeclaration v a -> Memory.DD.DataDeclaration v a +h2mDecl (Hashing.DataDeclaration mod ann bound ctors) = Memory.DD.DataDeclaration (h2mModifier mod) ann bound (over _3 h2mType <$> ctors) -h2mType :: Ord v => Hashing.Type.Type v a -> Memory.Type.Type v a +h2mType :: (Ord v) => Hashing.Type v a -> Memory.Type.Type v a h2mType = ABT.transform \case - Hashing.Type.Ref ref -> Memory.Type.Ref (h2mReference ref) - Hashing.Type.Arrow a1 a1' -> Memory.Type.Arrow a1 a1' - Hashing.Type.Ann a1 ki -> Memory.Type.Ann a1 (h2mKind ki) - Hashing.Type.App a1 a1' -> Memory.Type.App a1 a1' - Hashing.Type.Effect a1 a1' -> Memory.Type.Effect a1 a1' - Hashing.Type.Effects a1s -> Memory.Type.Effects a1s - Hashing.Type.Forall a1 -> Memory.Type.Forall a1 - Hashing.Type.IntroOuter a1 -> Memory.Type.IntroOuter a1 + Hashing.TypeRef ref -> Memory.Type.Ref (h2mReference ref) + Hashing.TypeArrow a1 a1' -> Memory.Type.Arrow a1 a1' + Hashing.TypeAnn a1 ki -> Memory.Type.Ann a1 (h2mKind ki) + Hashing.TypeApp a1 a1' -> Memory.Type.App a1 a1' + Hashing.TypeEffect a1 a1' -> Memory.Type.Effect a1 a1' + Hashing.TypeEffects a1s -> Memory.Type.Effects a1s + Hashing.TypeForall a1 -> Memory.Type.Forall a1 + Hashing.TypeIntroOuter a1 -> Memory.Type.IntroOuter a1 -h2mKind :: Hashing.Kind.Kind -> Memory.Kind.Kind +h2mKind :: Hashing.Kind -> Memory.Kind.Kind h2mKind = \case - Hashing.Kind.Star -> Memory.Kind.Star - Hashing.Kind.Arrow k1 k2 -> Memory.Kind.Arrow (h2mKind k1) (h2mKind k2) + Hashing.KindStar -> Memory.Kind.Star + Hashing.KindArrow k1 k2 -> Memory.Kind.Arrow (h2mKind k1) (h2mKind k2) -h2mReference :: Hashing.Reference.Reference -> Memory.Reference.Reference +h2mReference :: Hashing.Reference -> Memory.Reference.Reference h2mReference = \case - Hashing.Reference.Builtin t -> Memory.Reference.Builtin t - Hashing.Reference.DerivedId d -> Memory.Reference.DerivedId (h2mReferenceId d) + Hashing.ReferenceBuiltin t -> Memory.Reference.Builtin t + Hashing.ReferenceDerivedId d -> Memory.Reference.DerivedId (h2mReferenceId d) -h2mReferenceId :: Hashing.Reference.Id -> Memory.Reference.Id -h2mReferenceId (Hashing.Reference.Id h i) = Memory.Reference.Id h i +h2mReferenceId :: Hashing.ReferenceId -> Memory.Reference.Id +h2mReferenceId (Hashing.ReferenceId h i) = Memory.Reference.Id h i -m2hPatch :: Memory.Patch.Patch -> Hashing.Patch.Patch +m2hPatch :: Memory.Patch.Patch -> Hashing.Patch m2hPatch (Memory.Patch.Patch termEdits typeEdits) = - Hashing.Patch.Patch termEdits' typeEdits' + Hashing.Patch termEdits' typeEdits' where typeEdits' = Map.fromList @@ -349,33 +337,33 @@ m2hPatch (Memory.Patch.Patch termEdits typeEdits) = $ Relation.toMultimap typeEdits termEdits' = Map.fromList - . map (bimap (Hashing.Referent.Ref . m2hReference) (Set.map m2hTermEdit)) + . map (bimap (Hashing.ReferentRef . m2hReference) (Set.map m2hTermEdit)) . 
Map.toList $ Relation.toMultimap termEdits m2hTermEdit = \case - Memory.TermEdit.Replace r _ -> Hashing.TermEdit.Replace (Hashing.Referent.Ref $ m2hReference r) - Memory.TermEdit.Deprecate -> Hashing.TermEdit.Deprecate + Memory.TermEdit.Replace r _ -> Hashing.TermEditReplace (Hashing.ReferentRef $ m2hReference r) + Memory.TermEdit.Deprecate -> Hashing.TermEditDeprecate m2hTypeEdit = \case - Memory.TypeEdit.Replace r -> Hashing.TypeEdit.Replace (m2hReference r) - Memory.TypeEdit.Deprecate -> Hashing.TypeEdit.Deprecate + Memory.TypeEdit.Replace r -> Hashing.TypeEditReplace (m2hReference r) + Memory.TypeEdit.Deprecate -> Hashing.TypeEditDeprecate hashPatch :: Memory.Patch.Patch -> Hash -hashPatch = Hashing.Patch.hashPatch . m2hPatch +hashPatch = Hashing.contentHash . m2hPatch hashBranch0 :: Memory.Branch.Branch0 m -> Hash -hashBranch0 = Hashing.Branch.hashBranch . m2hBranch0 +hashBranch0 = Hashing.contentHash . m2hBranch0 -hashCausal :: Hashable e => e -> Set CausalHash -> (CausalHash, HashFor e) +hashCausal :: (Hashing.ContentAddressable e) => e -> Set CausalHash -> (CausalHash, HashFor e) hashCausal e tails = - let valueHash@(HashFor vh) = (Hashable.hashFor e) + let valueHash = Hashing.contentHash e causalHash = - CausalHash . Hashing.Causal.hashCausal $ - Hashing.Causal.Causal vh (Set.map unCausalHash tails) - in (causalHash, valueHash) + CausalHash . Hashing.contentHash $ + Hashing.Causal valueHash (Set.map unCausalHash tails) + in (causalHash, HashFor valueHash) -m2hBranch0 :: Memory.Branch.Branch0 m -> Hashing.Branch.Raw +m2hBranch0 :: Memory.Branch.Branch0 m -> Hashing.Branch m2hBranch0 b = - Hashing.Branch.Raw + Hashing.Branch (doTerms (Memory.Branch._terms b)) (doTypes (Memory.Branch._types b)) (doPatches (Memory.Branch._edits b)) @@ -384,7 +372,7 @@ m2hBranch0 b = -- is there a more readable way to structure these that's also linear? doTerms :: Memory.Branch.Star Memory.Referent.Referent Memory.NameSegment.NameSegment -> - Map Hashing.Branch.NameSegment (Map Hashing.Referent.Referent Hashing.Branch.MdValues) + Map Hashing.NameSegment (Map Hashing.Referent Hashing.MdValues) doTerms s = Map.fromList [ (m2hNameSegment ns, m2) @@ -394,13 +382,13 @@ m2hBranch0 b = [ (fst (Writer.runWriter (m2hReferent r)), md) | r <- toList . Relation.lookupRan ns $ Memory.Star3.d1 s, let mdrefs1to2 (_typeR1, valR1) = m2hReference valR1 - md = Hashing.Branch.MdValues . Set.map mdrefs1to2 . Relation.lookupDom r $ Memory.Star3.d3 s + md = Hashing.MdValues . Set.map mdrefs1to2 . Relation.lookupDom r $ Memory.Star3.d3 s ] ] doTypes :: Memory.Branch.Star Memory.Reference.Reference Memory.NameSegment.NameSegment -> - Map Hashing.Branch.NameSegment (Map Hashing.Reference.Reference Hashing.Branch.MdValues) + Map Hashing.NameSegment (Map Hashing.Reference Hashing.MdValues) doTypes s = Map.fromList [ (m2hNameSegment ns, m2) @@ -410,20 +398,20 @@ m2hBranch0 b = [ (m2hReference r, md) | r <- toList . Relation.lookupRan ns $ Memory.Star3.d1 s, let mdrefs1to2 (_typeR1, valR1) = m2hReference valR1 - md :: Hashing.Branch.MdValues - md = Hashing.Branch.MdValues . Set.map mdrefs1to2 . Relation.lookupDom r $ Memory.Star3.d3 s + md :: Hashing.MdValues + md = Hashing.MdValues . Set.map mdrefs1to2 . Relation.lookupDom r $ Memory.Star3.d3 s ] ] doPatches :: Map Memory.NameSegment.NameSegment (PatchHash, m Memory.Patch.Patch) -> - Map Hashing.Branch.NameSegment Hash + Map Hashing.NameSegment Hash doPatches = Map.bimap m2hNameSegment (unPatchHash . 
fst) doChildren :: Map Memory.NameSegment.NameSegment (Memory.Branch.Branch m) -> - Map Hashing.Branch.NameSegment Hash + Map Hashing.NameSegment Hash doChildren = Map.bimap m2hNameSegment (unCausalHash . Memory.Branch.headHash) -m2hNameSegment :: Memory.NameSegment.NameSegment -> Hashing.Branch.NameSegment -m2hNameSegment (Memory.NameSegment.NameSegment s) = Hashing.Branch.NameSegment s +m2hNameSegment :: Memory.NameSegment.NameSegment -> Hashing.NameSegment +m2hNameSegment (Memory.NameSegment.NameSegment s) = Hashing.NameSegment s diff --git a/parser-typechecker/src/Unison/Parsers.hs b/parser-typechecker/src/Unison/Parsers.hs index 7ccb530db..3a21170c7 100644 --- a/parser-typechecker/src/Unison/Parsers.hs +++ b/parser-typechecker/src/Unison/Parsers.hs @@ -25,7 +25,7 @@ unsafeGetRightFrom src = either (error . Pr.toANSI defaultWidth . prettyParseError src) id parse :: - Var v => + (Var v) => Parser.P v a -> String -> Parser.ParsingEnv -> @@ -33,21 +33,21 @@ parse :: parse p = Parser.run (Parser.root p) parseTerm :: - Var v => + (Var v) => String -> Parser.ParsingEnv -> Either (Parser.Err v) (Term v Ann) parseTerm = parse TermParser.term parseType :: - Var v => + (Var v) => String -> Parser.ParsingEnv -> Either (Parser.Err v) (Type v Ann) parseType = Parser.run (Parser.root TypeParser.valueType) parseFile :: - Var v => + (Var v) => FilePath -> String -> Parser.ParsingEnv -> @@ -55,7 +55,7 @@ parseFile :: parseFile filename s = Parser.run' (Parser.rootFile FileParser.file) s filename readAndParseFile :: - Var v => + (Var v) => Parser.ParsingEnv -> FilePath -> IO (Either (Parser.Err v) (UnisonFile v Ann)) @@ -64,7 +64,7 @@ readAndParseFile penv fileName = do let src = Text.unpack txt pure $ parseFile fileName src penv -unsafeParseTerm :: Var v => String -> Parser.ParsingEnv -> Term v Ann +unsafeParseTerm :: (Var v) => String -> Parser.ParsingEnv -> Term v Ann unsafeParseTerm s = fmap (unsafeGetRightFrom s) . parseTerm $ s unsafeReadAndParseFile :: diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage.hs b/parser-typechecker/src/Unison/PatternMatchCoverage.hs new file mode 100644 index 000000000..037e2937c --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage.hs @@ -0,0 +1,82 @@ +-- | Pattern match coverage checking is implemented following the +-- algorithm described in [Lower Your +-- Guards](https://simon.peytonjones.org/assets/pdfs/lower-your-guards.pdf). The +-- goal of pattern match coverage checking is to identify the +-- following problems that may arise in a pattern match: +-- +-- * It is missing clauses (/i.e./ it is non-exhaustive) +-- * It contains redundant patterns (/i.e./ the case can be deleted without altering the program) +-- * It contains inaccessible patterns (/i.e/ the rhs can never be entered) +-- +-- Furthermore, in the case of a non-exhaustive match, the goal to +-- present the user with concrete values that do not match any of the +-- existing patterns. +-- +-- /N.B./ An inaccessible pattern in unison would be one that performs +-- effects in a guard although the constraints are unsatisfiable. Such +-- a pattern cannot be deleted without altering the program. +-- +-- == High-level algorithm overview +-- +-- 1. [Desugar]("Unison.PatternMatchCoverage.Desugar") a match expression into a 'Unison.PatternMatchCoverage.GrdTree.GrdTree'. +-- 2. Annotate the @GrdTree@ leaves with [refinement types]("Unison.PatternMatchCoverage.NormalizedConstraints") +-- describing values that match this branch. 
Redundant and inaccessible patterns are then identified by @GrdTree@ leaves +-- with uninhabited refinement types. Inaccessible patterns are distinguished by an effect being performed between the +-- @GrdTree@ root and the leaf. +-- 3. Traverse the @GrdTree@ building up a refinement type describing uncovered values. If the resulting refinement type +-- is inhabited then the match is missing clauses. +-- 4. Find inhabitants of the uncovered refinement type to present to the user. +-- +-- Step (1) is implemented by 'desugarMatch'. Steps (2) and (3) are +-- implemented as a single traversal: 'uncoverAnnotate'/'classify'. Step (4) is +-- implemented by 'expandSolution'/'generateInhabitants'. +module Unison.PatternMatchCoverage + ( checkMatch, + ) +where + +import qualified Data.Set as Set +import Debug.Trace +import Unison.Debug +import Unison.Pattern (Pattern) +import Unison.PatternMatchCoverage.Class (Pmc (..)) +import Unison.PatternMatchCoverage.Desugar (desugarMatch) +import Unison.PatternMatchCoverage.GrdTree (prettyGrdTree) +import qualified Unison.PatternMatchCoverage.NormalizedConstraints as NC +import Unison.PatternMatchCoverage.PmGrd (prettyPmGrd) +import Unison.PatternMatchCoverage.Solve (classify, expandSolution, generateInhabitants, uncoverAnnotate) +import qualified Unison.Term as Term +import qualified Unison.Type as Type +import qualified Unison.Util.Pretty as P + +-- | Perform pattern match coverage checking on a match expression +checkMatch :: + forall vt v loc m. + (Pmc vt v loc m) => + -- | the match location + loc -> + -- | scrutinee type + Type.Type vt loc -> + -- | match cases + [Term.MatchCase loc (Term.Term' vt v loc)] -> + -- | (redundant locations, inaccessible locations, inhabitants of uncovered refinement type) + m ([loc], [loc], [Pattern ()]) +checkMatch matchLocation scrutineeType cases = do + v0 <- fresh + grdtree0 <- desugarMatch matchLocation scrutineeType v0 cases + (uncovered, grdtree1) <- uncoverAnnotate (Set.singleton (NC.markDirty v0 $ NC.declVar v0 scrutineeType id NC.emptyNormalizedConstraints)) grdtree0 + uncoveredExpanded <- concat . fmap Set.toList <$> traverse (expandSolution v0) (Set.toList uncovered) + let sols = map (generateInhabitants v0) uncoveredExpanded + let (_accessible, inaccessible, redundant) = classify grdtree1 + let debugOutput = + P.sep + "\n" + [ P.hang "desugared:" (prettyGrdTree prettyPmGrd (\_ -> "") grdtree0), + P.hang "annotated:" (prettyGrdTree NC.prettyDnf (NC.prettyDnf . fst) grdtree1), + P.hang "uncovered:" (NC.prettyDnf uncovered), + P.hang "uncovered expanded:" (NC.prettyDnf (Set.fromList uncoveredExpanded)) + ] + doDebug = case shouldDebug PatternCoverage of + True -> trace (P.toPlainUnbroken debugOutput) + False -> id + doDebug (pure (redundant, inaccessible, sols)) diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/Class.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/Class.hs new file mode 100644 index 000000000..55647a570 --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/Class.hs @@ -0,0 +1,44 @@ +{-# LANGUAGE FunctionalDependencies #-} + +module Unison.PatternMatchCoverage.Class + ( Pmc (..), + EnumeratedConstructors (..), + traverseConstructors, + ) +where + +import Control.Monad.Fix (MonadFix) +import Unison.ConstructorReference (ConstructorReference) +import Unison.PatternMatchCoverage.ListPat (ListPat) +import Unison.Type (Type) +import Unison.Var (Var) + +-- | A typeclass for the queries required to perform pattern match +-- coverage checking. 
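To connect the four-step overview documented in Unison.PatternMatchCoverage above with its entry point: checkMatch returns redundant case locations, inaccessible case locations, and example inhabitants of the uncovered space. The hypothetical caller below is not part of this PR; it is written against the imports at the top of Unison.PatternMatchCoverage and only shows how that result triple is typically read.

    -- Hypothetical consumer of 'checkMatch'; how a real caller reports the results
    -- is an assumption, not something this PR specifies.
    summarizeCoverage ::
      (Pmc vt v loc m) =>
      loc ->
      Type.Type vt loc ->
      [Term.MatchCase loc (Term.Term' vt v loc)] ->
      m ([loc], [loc], [Pattern ()])
    summarizeCoverage matchLoc scrutineeType cases = do
      (redundant, inaccessible, uncovered) <- checkMatch matchLoc scrutineeType cases
      -- 'redundant' and 'inaccessible' are case locations to warn about; a non-empty
      -- 'uncovered' list means the match is non-exhaustive, and each element is a
      -- concrete example pattern (for instance an empty-list pattern when only a
      -- 'head +: rest' case is handled).
      pure (redundant, inaccessible, uncovered)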
+class (Ord loc, Var vt, Var v, MonadFix m) => Pmc vt v loc m | m -> vt v loc where + -- | Get the constructors of a type + getConstructors :: Type vt loc -> m (EnumeratedConstructors vt v loc) + + -- | Get the types of the arguments of a specific constructor + getConstructorVarTypes :: Type vt loc -> ConstructorReference -> m [Type vt loc] + + -- | Get a fresh variable + fresh :: m v + +data EnumeratedConstructors vt v loc + = ConstructorType [(v, ConstructorReference, Type vt loc)] + | SequenceType [(ListPat, [Type vt loc])] + | BooleanType + | OtherType + deriving stock (Show) + +traverseConstructors :: + (Applicative f) => + (v -> ConstructorReference -> Type vt loc -> f (v, ConstructorReference, Type vt loc)) -> + EnumeratedConstructors vt v loc -> + f (EnumeratedConstructors vt v loc) +traverseConstructors f = \case + ConstructorType xs -> ConstructorType <$> traverse (\(a, b, c) -> f a b c) xs + SequenceType x -> pure (SequenceType x) + BooleanType -> pure BooleanType + OtherType -> pure OtherType diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/Constraint.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/Constraint.hs new file mode 100644 index 000000000..70bc4613d --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/Constraint.hs @@ -0,0 +1,72 @@ +module Unison.PatternMatchCoverage.Constraint + ( Constraint (..), + prettyConstraint, + ) +where + +import Unison.ConstructorReference (ConstructorReference) +import Unison.PatternMatchCoverage.IntervalSet (IntervalSet) +import Unison.PatternMatchCoverage.PmLit +import qualified Unison.PrettyPrintEnv as PPE +import qualified Unison.Syntax.TypePrinter as TypePrinter +import Unison.Type (Type) +import Unison.Util.Pretty +import Unison.Var (Var) + +-- | A constraint to add to a [normalized constraint +-- set]("Unison.PatternMatchCoverage.NormalizedConstraints") (fig 6) +-- See 'Unison.PatternMatchCoverage.Solve.addConstraint' +data Constraint vt v loc + = -- | Positive constraint regarding data type. States that the + -- given variable must be the given constructor, and it also binds + -- variables corresponding to constructor arguments. + PosCon v ConstructorReference [(v, Type vt loc)] + | -- | Negative constraint concerning data type. States that the + -- given variable must not be the given constructor. 
+ NegCon v ConstructorReference + | -- | Positive constraint regarding literal + PosLit v PmLit + | -- | Negative constraint regarding literal + NegLit v PmLit + | -- | Positive constraint on list element with position relative to head of list + PosListHead + v + -- ^ list root + Int + -- ^ cons position (0 is head) + v + -- ^ element variable + | -- | Positive constraint on list element with position relative to end of list + PosListTail + v + -- ^ list root + Int + -- ^ snoc position (0 is last) + v + -- ^ element variable + | -- | Negative constraint on length of the list (/i.e./ the list + -- may not be an element of the interval set) + NegListInterval v IntervalSet + | -- | An effect is performed + Effectful v + | -- | Equality constraint + Eq v v + deriving stock (Eq, Ord) + +prettyConstraint :: (Var vt, Var v) => Constraint vt v loc -> Pretty ColorText +prettyConstraint = \case + PosCon var con convars -> + let xs = pc con : fmap (\(trm, typ) -> sep " " [pv trm, ":", TypePrinter.pretty PPE.empty typ]) convars ++ ["<-", pv var] + in sep " " xs + NegCon var con -> sep " " [pv var, "≠", pc con] + PosLit var lit -> sep " " [prettyPmLit lit, "<-", pv var] + NegLit var lit -> sep " " [pv var, "≠", prettyPmLit lit] + PosListHead root n el -> sep " " [pv el, "<-", "head", pc n, pv root] + PosListTail root n el -> sep " " [pv el, "<-", "tail", pc n, pv root] + NegListInterval var x -> sep " " [pv var, "≠", string (show x)] + Effectful var -> "!" <> pv var + Eq v0 v1 -> sep " " [pv v0, "=", pv v1] + where + pv = string . show + pc :: forall a. (Show a) => a -> Pretty ColorText + pc = string . show diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/Desugar.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/Desugar.hs new file mode 100644 index 000000000..5a3741f14 --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/Desugar.hs @@ -0,0 +1,210 @@ +module Unison.PatternMatchCoverage.Desugar + ( desugarMatch, + ) +where + +import Data.List.NonEmpty (NonEmpty (..)) +import qualified U.Core.ABT as ABT +import Unison.Pattern +import qualified Unison.Pattern as Pattern +import Unison.PatternMatchCoverage.Class +import Unison.PatternMatchCoverage.Fix +import Unison.PatternMatchCoverage.GrdTree +import Unison.PatternMatchCoverage.PmGrd +import qualified Unison.PatternMatchCoverage.PmLit as PmLit +import Unison.Term (MatchCase (..), Term', app, var) +import Unison.Type (Type) +import qualified Unison.Type as Type + +-- | Desugar a match into a 'GrdTree' +desugarMatch :: + forall loc vt v m. 
+ (Pmc vt v loc m) => + -- | loc of match + loc -> + -- | scrutinee type + Type vt loc -> + -- | scrutinee variable + v -> + -- | match cases + [MatchCase loc (Term' vt v loc)] -> + m (GrdTree (PmGrd vt v loc) loc) +desugarMatch loc0 scrutineeType v0 cs0 = + traverse desugarClause cs0 >>= \case + [] -> pure $ Leaf loc0 + x : xs -> pure $ Fork (x :| xs) + where + desugarClause :: MatchCase loc (Term' vt v loc) -> m (GrdTree (PmGrd vt v loc) loc) + desugarClause MatchCase {matchPattern, matchGuard} = + desugarPattern scrutineeType v0 matchPattern (finalK (Pattern.loc matchPattern) matchGuard) [] + + finalK :: loc -> Maybe (Term' vt v loc) -> [v] -> m (GrdTree (PmGrd vt v loc) loc) + finalK loc mterm vs = case mterm of + Nothing -> pure (Leaf loc) + Just grdExpr -> do + let ann = ABT.annotation grdExpr + expr = foldr (\a b -> app ann (var ann a) b) grdExpr vs + typ = Type.boolean ann + v <- fresh + pure (Grd (PmLet v expr typ) (Grd (PmLit v (PmLit.Boolean True)) (Leaf loc))) + +desugarPattern :: + forall v vt loc m. + (Pmc vt v loc m) => + Type vt loc -> + v -> + Pattern loc -> + ([v] -> m (GrdTree (PmGrd vt v loc) loc)) -> + [v] -> + m (GrdTree (PmGrd vt v loc) loc) +desugarPattern typ v0 pat k vs = case pat of + Unbound _ -> k vs + Var _ -> k (v0 : vs) + Boolean _ x -> Grd (PmLit v0 $ PmLit.Boolean x) <$> k vs + Int _ x -> Grd (PmLit v0 $ PmLit.Int x) <$> k vs + Nat _ x -> Grd (PmLit v0 $ PmLit.Nat x) <$> k vs + Float _ x -> Grd (PmLit v0 $ PmLit.Float x) <$> k vs + Text _ x -> Grd (PmLit v0 $ PmLit.Text x) <$> k vs + Char _ x -> Grd (PmLit v0 $ PmLit.Char x) <$> k vs + Constructor _loc consRef pats -> do + contyps <- getConstructorVarTypes typ consRef + patvars <- assignFreshPatternVars pats + let c = PmCon v0 consRef convars + convars :: [(v, Type vt loc)] + convars = map (\(v, _, t) -> (v, t)) tpatvars + tpatvars = zipWith (\(v, p) t -> (v, p, t)) patvars contyps + rest <- foldr (\(v, pat, t) b -> desugarPattern t v pat b) k tpatvars vs + pure (Grd c rest) + As _ rest -> desugarPattern typ v0 rest k (v0 : vs) + EffectPure {} -> k vs + EffectBind {} -> k vs + SequenceLiteral {} -> handleSequence typ v0 pat k vs + SequenceOp {} -> handleSequence typ v0 pat k vs + +handleSequence :: + forall v vt loc m. + (Pmc vt v loc m) => + Type vt loc -> + v -> + Pattern loc -> + ([v] -> m (GrdTree (PmGrd vt v loc) loc)) -> + [v] -> + m (GrdTree (PmGrd vt v loc) loc) +handleSequence typ v pat k vs = do + let listArg = case typ of + Type.App' _list arg -> arg + _ -> error "list type is not an application?" + listToGrdTree typ listArg v (normalizeList pat) k vs + +listToGrdTree :: + forall v vt loc m. 
+ (Pmc vt v loc m) => + Type vt loc -> + Type vt loc -> + v -> + NormalizedList loc -> + ([v] -> m (GrdTree (PmGrd vt v loc) loc)) -> + [v] -> + m (GrdTree (PmGrd vt v loc) loc) +listToGrdTree _listTyp elemTyp listVar nl0 k0 vs0 = + let (minLen, maxLen) = countMinListLen nl0 + in Grd (PmListInterval listVar minLen maxLen) <$> go 0 0 nl0 k0 vs0 + where + go consCount snocCount (Fix pat) k vs = case pat of + N'ConsF x xs -> do + element <- fresh + let grd = PmListHead listVar consCount element elemTyp + let !consCount' = consCount + 1 + Grd grd <$> desugarPattern elemTyp element x (go consCount' snocCount xs k) vs + N'SnocF xs x -> do + element <- fresh + let grd = PmListTail listVar snocCount element elemTyp + let !snocCount' = snocCount + 1 + Grd grd <$> go consCount snocCount' xs (desugarPattern elemTyp element x k) vs + N'NilF -> k vs + N'VarF _ -> k (listVar : vs) + N'UnboundF _ -> k vs + + countMinListLen :: NormalizedList loc -> (Int, Int) + countMinListLen = + ($ 0) . cata \case + N'ConsF _ b -> \acc -> b $! acc + 1 + N'SnocF b _ -> \acc -> b $! acc + 1 + N'NilF -> \ !n -> (n, n) + N'VarF _ -> \ !n -> (n, maxBound) + N'UnboundF _ -> \ !n -> (n, maxBound) + +data NormalizedListF loc a + = N'ConsF (Pattern loc) a + | N'SnocF a (Pattern loc) + | N'NilF + | N'VarF loc + | N'UnboundF loc + deriving stock (Functor) + +type NormalizedList loc = Fix (NormalizedListF loc) + +pattern N'Cons :: Pattern loc -> Fix (NormalizedListF loc) -> Fix (NormalizedListF loc) +pattern N'Cons x xs = Fix (N'ConsF x xs) + +pattern N'Snoc :: Fix (NormalizedListF loc) -> Pattern loc -> Fix (NormalizedListF loc) +pattern N'Snoc xs x = Fix (N'SnocF xs x) + +pattern N'Nil :: Fix (NormalizedListF loc) +pattern N'Nil = Fix N'NilF + +pattern N'Var :: loc -> Fix (NormalizedListF loc) +pattern N'Var x = Fix (N'VarF x) + +pattern N'Unbound :: loc -> Fix (NormalizedListF loc) +pattern N'Unbound x = Fix (N'UnboundF x) + +-- | strip out sequence literals and concats +normalizeList :: Pattern loc -> NormalizedList loc +normalizeList pat0 = case goCons pat0 of + Left f -> f N'Nil + Right x -> x + where + goCons :: Pattern loc -> Either (NormalizedList loc -> NormalizedList loc) (NormalizedList loc) + goCons = \case + SequenceLiteral _loc xs -> + Left \nil -> foldr N'Cons nil xs + SequenceOp _loc lhs op rhs -> case op of + Cons -> + case goCons rhs of + Left f -> Left (N'Cons lhs . f) + Right x -> Right (N'Cons lhs x) + Snoc -> + case goCons lhs of + Left f -> Left (f . N'Cons rhs) + Right x -> Right (N'Snoc x rhs) + Concat -> + case goCons lhs of + Left f -> case goCons rhs of + Left g -> Left (f . 
g) + Right x -> Right (f x) + Right x -> Right (goSnoc rhs x) + Var loc -> Right (N'Var loc) + Unbound loc -> Right (N'Unbound loc) + -- as-patterns are not handled properly here, which is fine while we + -- only have boolean guards, but this needs to be fixed if we + -- introduce pattern guards + As _loc pat -> goCons pat + _ -> error "goCons: unexpected pattern" + + goSnoc :: Pattern loc -> NormalizedList loc -> NormalizedList loc + goSnoc pat nlp = case pat of + SequenceLiteral _loc xs -> + foldl N'Snoc nlp xs + SequenceOp _loc lhs op rhs -> case op of + Cons -> + goSnoc rhs (N'Snoc nlp lhs) + Snoc -> + N'Snoc (goSnoc rhs nlp) lhs + Concat -> + goSnoc rhs (goSnoc lhs nlp) + As _loc pat -> goSnoc pat nlp + _ -> error "goSnoc: unexpected pattern" + +assignFreshPatternVars :: (Pmc vt v loc m) => [Pattern loc] -> m [(v, Pattern loc)] +assignFreshPatternVars pats = traverse (\p -> (,p) <$> fresh) pats diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/Fix.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/Fix.hs new file mode 100644 index 000000000..9accc06fb --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/Fix.hs @@ -0,0 +1,20 @@ +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE QuantifiedConstraints #-} +{-# LANGUAGE TypeSynonymInstances #-} +{-# LANGUAGE UndecidableInstances #-} + +module Unison.PatternMatchCoverage.Fix where + +newtype Fix f = Fix {unFix :: f (Fix f)} + +deriving instance (forall a. (Show a) => Show (f a)) => Show (Fix f) + +deriving instance (forall a. (Eq a) => Eq (f a)) => Eq (Fix f) + +deriving instance (Eq (Fix f), forall a. (Ord a) => Ord (f a)) => Ord (Fix f) + +cata :: (Functor f) => (f a -> a) -> Fix f -> a +cata alg = let c = alg . fmap c . unFix in c + +para :: (Functor f) => (f (Fix f, a) -> a) -> Fix f -> a +para alg = let c = alg . fmap (\x -> (x, c x)) . unFix in c diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/GrdTree.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/GrdTree.hs new file mode 100644 index 000000000..7644cacf3 --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/GrdTree.hs @@ -0,0 +1,88 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Unison.PatternMatchCoverage.GrdTree + ( GrdTree, + GrdTreeF (..), + pattern Leaf, + pattern Grd, + pattern Fork, + prettyGrdTree, + ) +where + +import Data.List.NonEmpty (NonEmpty (..)) +import qualified Data.List.NonEmpty as NEL +import Data.ListLike (ListLike) +import Unison.PatternMatchCoverage.Fix +import Unison.Prelude +import Unison.Util.Pretty + +-- | A @GrdTree@ is the simple language to desugar matches into. All +-- pattern matching constructs (/e.g./ structural pattern matching, +-- boolean guards, pattern guards, view patterns, etc) are desugared +-- into this simpler structure. +-- +-- It is parameterized by the values at guard nodes, @n@, and the +-- values at the leaves, @l@. When desugaring, @n@ is +-- 'Unison.PatternMatchCoverage.PmGrd.PmGrd' and @l@ is the source +-- location. After annotating the @GrdTree@, @n@ is a refinement type +-- representing matching values and the @l@ is pairs of the +-- aforementioned refinement type and source location. 
+-- +-- For example: +-- +-- @ +-- example : Optional Nat -> Nat +-- example = cases +-- None -> 0 +-- Some x +-- | isEven x -> 0 +-- | otherwise -> 1 +-- @ +-- +-- is desugared into +-- +-- @ +-- ──┬─ None <- v0 ── srcloc +-- ├─ Some ( v1 :: ##Nat ) <- v0 ── let v2 = isEven v1 ── True <- v2 ── srcloc +-- └─ Some ( v3 :: ##Nat ) <- v0 ── srcloc +-- @ +type GrdTree n l = Fix (GrdTreeF n l) + +data GrdTreeF n l a + = -- | A successful match + LeafF l + | -- | A constraint of some kind (structural pattern match, boolan guard, etc) + GrdF n a + | -- | A list of alternative matches, tried in order + ForkF (NonEmpty a) + deriving stock (Functor, Show) + +prettyGrdTree :: forall n l s. (ListLike s Char, IsString s) => (n -> Pretty s) -> (l -> Pretty s) -> GrdTree n l -> Pretty s +prettyGrdTree prettyNode prettyLeaf = cata phi + where + phi = \case + LeafF l -> prettyLeaf l + GrdF n rest -> sep " " [prettyNode n, "──", rest] + ForkF xs -> "──" <> group (sep "\n" (makeTree $ NEL.toList xs)) + makeTree :: [Pretty s] -> [Pretty s] + makeTree = \case + [] -> [] + x : [] -> [sep " " ["──", x]] + x0 : x1 : xs -> + sep " " ["┬─", x0] + : let go y0 = \case + [] -> [sep " " ["└─", y0]] + y1 : ys -> "├─ " <> y0 : go y1 ys + in [indent " " (sep "\n" (go x1 xs))] + +pattern Leaf :: l -> GrdTree n l +pattern Leaf x = Fix (LeafF x) + +pattern Grd :: n -> GrdTree n l -> GrdTree n l +pattern Grd x rest = Fix (GrdF x rest) + +pattern Fork :: NonEmpty (GrdTree n l) -> GrdTree n l +pattern Fork alts = Fix (ForkF alts) + +{-# COMPLETE Leaf, Grd, Fork #-} diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/IntervalSet.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/IntervalSet.hs new file mode 100644 index 000000000..ab1d2d4b3 --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/IntervalSet.hs @@ -0,0 +1,203 @@ +module Unison.PatternMatchCoverage.IntervalSet + ( IntervalSet, + empty, + singleton, + fromList, + insert, + delete, + difference, + intersection, + complement, + null, + member, + extractSingleton, + intersectIntervals, + map, + foldr, + lookupMin, + lookupMax, + ) +where + +import Data.Coerce (coerce) +import Data.Function (on) +import Data.IntMap (IntMap) +import qualified Data.IntMap.Strict as IntMap +import Data.List (sortOn) +import Data.Maybe (catMaybes, fromMaybe, maybeToList) +import Prelude hiding (foldr, map, null) +import qualified Prelude + +newtype IntervalSet = IntervalSet {unIntervalSet :: IntMap Int} + deriving stock (Show, Eq, Ord) + +empty :: IntervalSet +empty = IntervalSet IntMap.empty + +singleton :: (Int, Int) -> IntervalSet +singleton x = insert x empty + +lookupMin :: IntervalSet -> Maybe Int +lookupMin = fmap fst . IntMap.lookupMin . unIntervalSet + +lookupMax :: IntervalSet -> Maybe Int +lookupMax = fmap snd . IntMap.lookupMax . unIntervalSet + +member :: Int -> IntervalSet -> Bool +member i is = + case splitLookupLE i is of + (_, m, _) -> case m of + Nothing -> False + Just (_, ub) -> i <= ub + +foldr :: (Int -> Int -> b -> b) -> b -> IntervalSet -> b +foldr f z = IntMap.foldrWithKey f z . unIntervalSet + +map :: ((Int, Int) -> (Int, Int)) -> IntervalSet -> IntervalSet +map f = IntervalSet . 
foldr phi IntMap.empty + where + phi k v b = let (k', v') = f (k, v) in IntMap.insert k' v' b + +-- | insert inclusive bounds interval into set +insert :: (Int, Int) -> IntervalSet -> IntervalSet +insert i@(lb, ub) is + | nullInterval i = is + | otherwise = + case splitLookupLE lb is of + (smaller, m1, xs) -> + case splitLookupLE ub xs of + (_, m2, larger) -> + IntervalSet $ + IntMap.unions + [ unIntervalSet smaller, + unIntervalSet $ fromList (maybeToList m1 ++ [i] ++ maybeToList m2), + unIntervalSet larger + ] + +delete :: (Int, Int) -> IntervalSet -> IntervalSet +delete i@(lb, ub) is + | nullInterval i = is + | otherwise = + case splitLookupLE lb is of + (smaller, m1, xs) -> + case splitLookupLE ub xs of + (_, m2, larger) -> + IntervalSet $ + IntMap.unions + [ unIntervalSet smaller, + case m1 of + Nothing -> IntMap.empty + Just j -> IntMap.fromList (catMaybes (Prelude.map (intersectIntervals j =<<) [upTo lb, downTo ub])), + fromMaybe IntMap.empty do + j <- m2 + aboveDelete <- downTo ub + uncurry IntMap.singleton <$> intersectIntervals aboveDelete j, + unIntervalSet larger + ] + +complement :: IntervalSet -> IntervalSet +complement (IntervalSet m) = fromAscList . (\xs -> Prelude.foldr phi z xs Nothing) . IntMap.toAscList $ m + where + phi (lb, ub) b mprevUb = + case mprevUb of + Nothing -> case upTo lb of + Nothing -> b (Just ub) + Just x -> x : b (Just ub) + Just lastUb -> + let !lbPred = safeAdd lb (-1) + !lastUbSucc = safeAdd lastUb 1 + proposedInterval = (lastUbSucc, lbPred) + in case nullInterval proposedInterval of + True -> b (Just ub) + False -> proposedInterval : b (Just ub) + z = \case + Nothing -> [(0, maxBound)] + Just prev -> case downTo prev of + Nothing -> [] + Just x -> [x] + +intersection :: IntervalSet -> IntervalSet -> IntervalSet +intersection a b = difference a (complement b) + +null :: IntervalSet -> Bool +null = IntMap.null . unIntervalSet + +extractSingleton :: IntervalSet -> Maybe Int +extractSingleton (IntervalSet m) = case IntMap.toList m of + [(lb, ub)] + | lb == ub -> Just lb + _ -> Nothing + +-- | add two integers, sticking to a bound if it would overflow +safeAdd :: Int -> Int -> Int +safeAdd a b = + let c = a + b + in case a > 0 && b > 0 of + True -> case c < 0 of + True -> maxBound + False -> c + False -> case a < 0 && b < 0 of + True -> case c >= 0 of + True -> minBound + False -> c + False -> c + +difference :: IntervalSet -> IntervalSet -> IntervalSet +difference x (IntervalSet y) = IntMap.foldlWithKey' (\b k v -> delete (k, v) b) x y + +-- | the interval [0, lb) +upTo :: Int -> Maybe (Int, Int) +upTo lb = case lb <= 0 of + True -> Nothing + False -> Just (0, safeAdd lb (-1)) + +-- | the interval (ub, maxBound] +downTo :: Int -> Maybe (Int, Int) +downTo ub = case ub == maxBound of + True -> Nothing + False -> Just (safeAdd ub 1, maxBound) + +nullInterval :: (Int, Int) -> Bool +nullInterval (lb, ub) = ub < lb + +-- | merge a list sorted on the lower bound ascending +fromAscList :: [(Int, Int)] -> IntervalSet +fromAscList = IntervalSet . IntMap.fromAscList . mergeOverlappingAscList + +fromList :: [(Int, Int)] -> IntervalSet +fromList = fromAscList . sortOn fst . filter (not . 
nullInterval) + +intersectIntervals :: (Int, Int) -> (Int, Int) -> Maybe (Int, Int) +intersectIntervals a b + | doOverlap a b = + let !lb = on max fst a b + !ub = on min snd a b + in Just (lb, ub) + | otherwise = Nothing + +mergeOverlappingAscList :: [(Int, Int)] -> [(Int, Int)] +mergeOverlappingAscList = \case + x0 : x1 : xs -> case doOverlap x0 x1 of + True -> spanIntervals x0 x1 : mergeOverlappingAscList xs + False -> x0 : x1 : mergeOverlappingAscList xs + [x] -> [x] + [] -> [] + +doOverlap :: (Int, Int) -> (Int, Int) -> Bool +doOverlap (lb0, ub0) (lb1, ub1) + | ub0 >= lb1 && lb0 <= ub1 = True + | otherwise = False + +spanIntervals :: (Int, Int) -> (Int, Int) -> (Int, Int) +spanIntervals (lb0, ub0) (lb1, ub1) = + let !lb = min lb0 lb1 + !ub = max ub0 ub1 + in (lb, ub) + +splitLookupLE :: Int -> IntervalSet -> (IntervalSet, Maybe (Int, Int), IntervalSet) +splitLookupLE k (IntervalSet m) = + coerce case IntMap.splitLookup k m of + (smaller, Just v, larger) -> (smaller, Just (k, v), larger) + (smaller, Nothing, larger) -> case IntMap.maxViewWithKey smaller of + Just ((k, v), smaller) -> (smaller, Just (k, v), larger) + Nothing -> (smaller, Nothing, larger) diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/ListPat.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/ListPat.hs new file mode 100644 index 000000000..60178fb11 --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/ListPat.hs @@ -0,0 +1,15 @@ +module Unison.PatternMatchCoverage.ListPat where + +import Unison.Util.Pretty + +data ListPat + = Cons + | Snoc + | Nil + deriving stock (Show, Eq, Ord) + +prettyListPat :: ListPat -> Pretty ColorText +prettyListPat = \case + Cons -> "Cons" + Snoc -> "Snoc" + Nil -> "Nil" diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/Literal.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/Literal.hs new file mode 100644 index 000000000..312ba394b --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/Literal.hs @@ -0,0 +1,81 @@ +module Unison.PatternMatchCoverage.Literal + ( Literal (..), + prettyLiteral, + ) +where + +import Unison.ConstructorReference (ConstructorReference) +import Unison.PatternMatchCoverage.IntervalSet (IntervalSet) +import Unison.PatternMatchCoverage.PmLit (PmLit, prettyPmLit) +import qualified Unison.PrettyPrintEnv as PPE +import qualified Unison.Syntax.TermPrinter as TermPrinter +import qualified Unison.Syntax.TypePrinter as TypePrinter +import Unison.Term (Term') +import Unison.Type (Type) +import Unison.Typechecker.TypeVar (TypeVar, lowerTerm) +import Unison.Util.Pretty +import Unison.Var (Var) + +-- | Refinement type literals (fig 3) +data Literal vt v loc + = -- | True + T + | -- | False + F + | -- | Positive constraint regarding data type. States that the + -- given variable must be the given constructor, and it also binds + -- variables corresponding to constructor arguments. + PosCon v ConstructorReference [(v, Type vt loc)] + | -- | Negative constraint concerning data type. States that the + -- given variable must not be the given constructor. 
+ NegCon v ConstructorReference + | -- | Positive constraint regarding literal + PosLit v PmLit + | -- | Negative constraint regarding literal + NegLit v PmLit + | -- | Positive constraint on list element with position relative to head of list + PosListHead + v + -- ^ list root + Int + -- ^ cons position (0 is head) + v + -- ^ element variable + (Type vt loc) + | -- | Positive constraint on list element with position relative to end of list + PosListTail + v + -- ^ list root + Int + -- ^ snoc position (0 is last) + v + -- ^ element variable + (Type vt loc) + | -- | Negative constraint on length of the list (/i.e./ the list + -- may not be an element of the interval set) + NegListInterval v IntervalSet + | -- | An effect is performed + Effectful v + | -- | Introduce a binding for a term + Let v (Term' vt v loc) (Type vt loc) + deriving stock (Show) + +prettyLiteral :: (Var v) => Literal (TypeVar b v) v loc -> Pretty ColorText +prettyLiteral = \case + T -> "✓" + F -> "⨉" + PosCon var con convars -> + let xs = pc con : fmap (\(trm, typ) -> sep " " [pv trm, ":", TypePrinter.pretty PPE.empty typ]) convars ++ ["<-", pv var] + in sep " " xs + NegCon var con -> sep " " [pv var, "≠", pc con] + PosLit var lit -> sep " " [prettyPmLit lit, "<-", pv var] + NegLit var lit -> sep " " [pv var, "≠", prettyPmLit lit] + PosListHead root n el _ -> sep " " [pv el, "<-", "head", pc n, pv root] + PosListTail root n el _ -> sep " " [pv el, "<-", "tail", pc n, pv root] + NegListInterval var x -> sep " " [pv var, "≠", string (show x)] + Effectful var -> "!" <> pv var + Let var expr typ -> sep " " ["let", pv var, "=", TermPrinter.pretty PPE.empty (lowerTerm expr), ":", TypePrinter.pretty PPE.empty typ] + where + pv = string . show + pc :: forall a. (Show a) => a -> Pretty ColorText + pc = string . show diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/NormalizedConstraints.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/NormalizedConstraints.hs new file mode 100644 index 000000000..97740b417 --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/NormalizedConstraints.hs @@ -0,0 +1,278 @@ +module Unison.PatternMatchCoverage.NormalizedConstraints + ( NormalizedConstraints (..), + VarInfo (..), + VarConstraints (..), + EffectInfo (..), + markDirty, + emptyNormalizedConstraints, + updateF, + ConstraintUpdate (..), + expectCanon, + declVar, + prettyNormalizedConstraints, + prettyDnf, + ) +where + +import Data.Functor.Compose +import Data.List (intersperse) +import Data.Sequence (pattern Empty) +import qualified Data.Set as Set +import Unison.ConstructorReference (ConstructorReference) +import Unison.PatternMatchCoverage.Constraint +import Unison.PatternMatchCoverage.IntervalSet (IntervalSet) +import qualified Unison.PatternMatchCoverage.IntervalSet as IntervalSet +import qualified Unison.PatternMatchCoverage.PmLit as PmLit +import Unison.PatternMatchCoverage.UFMap (UFMap) +import qualified Unison.PatternMatchCoverage.UFMap as UFMap +import Unison.Prelude +import qualified Unison.PrettyPrintEnv as PPE +import qualified Unison.Syntax.TypePrinter as TypePrinter +import Unison.Type (Type, booleanRef, charRef, floatRef, intRef, listRef, natRef, textRef, pattern App', pattern Ref') +import Unison.Util.Pretty +import Unison.Var (Var) + +-- | Normalized refinement types (fig 6) +-- +-- Each variable may be associated with a number of constraints +-- represented by 'VarInfo'. A 'NormalizedConstraints' is conceptually +-- the conjunction of all constraints in the map. 
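-- (For intuition: a single @NormalizedConstraints@ might state that
-- @v0@ is @Some v1@ and that @v1@ is not @0@, which
-- 'prettyNormalizedConstraints' below would render roughly as
-- @⟨ v0 : Optional Nat | Some v1 : Nat <- v0 , v1 : Nat | v1 ≠ 0 ⟩@;
-- the variable names here are illustrative.)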
Disjunction is +-- represented by passing a Set of NormalizedConstraints. So, the +-- constraints are normalized into disjunctive normal form and each +-- @NormalizedConstraints@ is a DNF term. +-- +-- Additionally, the following invariants are enforced (Section 3.4) +-- +-- * Mutual compatibility: No two constraints should conflict with +-- each other. +-- +-- * Inhabitation: There must be at least one value that inhabits each +-- refinement type. (N.B. We don't truly know if a type is inhabited, +-- see 'inhabited' in "Unison.PatternMatchCoverage.Solve" for +-- details. However, the refinement type is inhabited as far as our +-- inhabitation checker is concerned.) +-- +-- These invariants ensure that each term in our DNF has at least one +-- solution, and it is easy to expand and print these solutions. +data NormalizedConstraints vt v loc = NormalizedConstraints + { -- | Constraints keyed by the variable they constrain. Equality + -- constraints are handled by 'UFMap'. + constraintMap :: UFMap v (VarInfo vt v loc), + -- | dirty variables are ones that must be checked for inhabitance + dirtySet :: Set v + } + deriving stock (Eq, Ord, Show) + +-- | Mark a variable as requiring a new test for inhabitation. +markDirty :: + (Ord v) => + v -> + NormalizedConstraints vt v loc -> + NormalizedConstraints vt v loc +markDirty k nc@NormalizedConstraints {dirtySet} = + nc {dirtySet = Set.insert k dirtySet} + +emptyNormalizedConstraints :: (Ord v) => NormalizedConstraints vt v loc +emptyNormalizedConstraints = + NormalizedConstraints + { constraintMap = UFMap.empty, + dirtySet = mempty + } + +-- | Lookup the canonical value of @v@ from the constraint map. Throws +-- an error if the variable is not in the map. +expectCanon :: + forall vt v loc. + (Var v) => + v -> + NormalizedConstraints vt v loc -> + (v, VarInfo vt v loc, NormalizedConstraints vt v loc) +expectCanon k nc = + let ((v, vi), nc') = updateF k (\v vi -> ((v, vi), Ignore)) nc + in (v, vi, nc') + +-- | Alter a constraint, marks var as dirty if updated +alterF :: + forall vt v loc f. + (Var v, Functor f) => + v -> + f (ConstraintUpdate (VarInfo vt v loc)) -> + (v -> VarInfo vt v loc -> f (ConstraintUpdate (VarInfo vt v loc))) -> + NormalizedConstraints vt v loc -> + f (NormalizedConstraints vt v loc) +alterF v nothing just nc = + (\(f, x) -> f nc {constraintMap = x}) + <$> getCompose + ( UFMap.alterF + v + nothing' + just' + (constraintMap nc) + ) + where + just' canonK eqClassSize vi = + fmap (UFMap.Canonical eqClassSize) $ + Compose $ + just canonK vi <&> \case + Ignore -> (id, vi) + Update vi -> (markDirty canonK, vi) + nothing' = + Compose $ + nothing <&> \case + Ignore -> (id, Nothing) + Update x -> (markDirty v, Just x) +{-# INLINE alterF #-} + +-- | Generic function to lookup or alter constraints in the constraint +-- map. Throws an error if the variable is not in the map. +updateF :: + forall vt v loc f. + (Var v, Functor f) => + -- | variable to lookup + v -> + -- | update function + (v -> VarInfo vt v loc -> f (ConstraintUpdate (VarInfo vt v loc))) -> + -- | constraint map + NormalizedConstraints vt v loc -> + f (NormalizedConstraints vt v loc) +updateF v just nc = + alterF v nothing just nc + where + nothing = error ("expected " <> show v <> " to be in UFMap") + +data ConstraintUpdate a + = Update a + | Ignore + deriving stock (Functor) + +-- | Install a new variable into the constraint map. Throws an error +-- if the variable already exists in the map. +declVar :: + forall vt v loc. 
+ (Var v) => + -- | new variable to install + v -> + -- | type of variable + Type vt loc -> + -- | modifier for the default var info of the given type + (VarInfo vt v loc -> VarInfo vt v loc) -> + -- | Normalized constraints to install the variable into + NormalizedConstraints vt v loc -> + NormalizedConstraints vt v loc +declVar v t f nc@NormalizedConstraints {constraintMap} = + nc {constraintMap = UFMap.alter v nothing just constraintMap} + where + nothing = + let !vi = f (mkVarInfo v t) + in Just vi + just _ _ _ = error ("attempted to declare: " <> show v <> " but it already exists") + +mkVarInfo :: forall vt v loc. v -> Type vt loc -> VarInfo vt v loc +mkVarInfo v t = + VarInfo + { vi_id = v, + vi_typ = t, + vi_con = case t of + App' (Ref' r) t + | r == listRef -> Vc'ListRoot t Empty Empty (IntervalSet.singleton (0, maxBound)) + Ref' r + | r == booleanRef -> Vc'Boolean Nothing mempty + | r == intRef -> Vc'Int Nothing mempty + | r == natRef -> Vc'Nat Nothing mempty + | r == floatRef -> Vc'Float Nothing mempty + | r == textRef -> Vc'Text Nothing mempty + | r == charRef -> Vc'Char Nothing mempty + -- this may not be a constructor, but we won't be producing + -- any constraints for it in that case anyway + _ -> Vc'Constructor Nothing mempty, + vi_eff = IsNotEffectful + } + +-- | Normalized constraints on a specific variable +data VarInfo vt v loc = VarInfo + { vi_id :: v, + vi_typ :: Type vt loc, + vi_con :: VarConstraints vt v loc, + vi_eff :: EffectInfo + } + deriving stock (Show, Eq, Ord, Generic) + +-- | The constraints are different for different types, although most +-- of them take the form of an optional positive constraint and a set +-- of negative constraints. +data VarConstraints vt v loc + = Vc'Constructor + (Maybe (ConstructorReference, [(v, Type vt loc)])) + (Set ConstructorReference) + | Vc'Boolean (Maybe Bool) (Set Bool) + | Vc'Int (Maybe Int64) (Set Int64) + | Vc'Nat (Maybe Word64) (Set Word64) + | Vc'Float (Maybe Double) (Set Double) + | Vc'Text (Maybe Text) (Set Text) + | Vc'Char (Maybe Char) (Set Char) + | Vc'ListRoot + (Type vt loc) + -- ^ type of list elems + (Seq v) + -- ^ Positive constraint on cons elements + (Seq v) + -- ^ Positive constraint on snoc elements + IntervalSet + -- ^ positive constraint on input list size + deriving stock (Show, Eq, Ord, Generic) + +data EffectInfo + = IsEffectful + | IsNotEffectful + deriving stock (Show, Eq, Ord) + +prettyNormalizedConstraints :: forall vt v loc. (Var v, Var vt) => NormalizedConstraints vt v loc -> Pretty ColorText +prettyNormalizedConstraints (NormalizedConstraints {constraintMap}) = sep " " ["⟨", pconstraints, "⟩"] + where + cls = UFMap.toClasses constraintMap + + pconstraints = sep " " (intersperse "," $ prettyCon <$> cls) + prettyCon (kcanon, ks, vi) = + let posCon = fromMaybe [] $ case vi_con vi of + Vc'Constructor pos _neg -> + (\(datacon, convars) -> [PosCon kcanon datacon convars]) <$> pos + Vc'Boolean pos _neg -> + (\x -> [PosLit kcanon (PmLit.Boolean x)]) <$> pos + Vc'Int pos _neg -> + (\x -> [PosLit kcanon (PmLit.Int x)]) <$> pos + Vc'Nat pos _neg -> + (\x -> [PosLit kcanon (PmLit.Nat x)]) <$> pos + Vc'Float pos _neg -> + (\x -> [PosLit kcanon (PmLit.Float x)]) <$> pos + Vc'Text pos _neg -> + (\x -> [PosLit kcanon (PmLit.Text x)]) <$> pos + Vc'Char pos _neg -> + (\x -> [PosLit kcanon (PmLit.Char x)]) <$> pos + Vc'ListRoot _typ posCons posSnoc _iset -> + let consConstraints = fmap (\(i, x) -> PosListHead kcanon i x) (zip [0 ..] 
(toList posCons)) + snocConstraints = fmap (\(i, x) -> PosListTail kcanon i x) (zip [0 ..] (toList posSnoc)) + in Just (consConstraints ++ snocConstraints) + negConK :: forall x. Set x -> (v -> x -> Constraint vt v loc) -> [Constraint vt v loc] + negConK s f = foldr (\a b -> f kcanon a : b) [] s + negCon = case vi_con vi of + Vc'Constructor _pos neg -> negConK neg NegCon + Vc'Boolean _pos neg -> negConK neg (\v a -> NegLit v (PmLit.Boolean a)) + Vc'Int _pos neg -> negConK neg (\v a -> NegLit v (PmLit.Int a)) + Vc'Nat _pos neg -> negConK neg (\v a -> NegLit v (PmLit.Nat a)) + Vc'Float _pos neg -> negConK neg (\v a -> NegLit v (PmLit.Float a)) + Vc'Text _pos neg -> negConK neg (\v a -> NegLit v (PmLit.Text a)) + Vc'Char _pos neg -> negConK neg (\v a -> NegLit v (PmLit.Char a)) + Vc'ListRoot _typ _posCons _posSnoc iset -> [NegListInterval kcanon (IntervalSet.complement iset)] + botCon = case vi_eff vi of + IsNotEffectful -> [] + IsEffectful -> [Effectful kcanon] + in sep " " $ + pv kcanon + : fmap pv (Set.toList $ Set.delete kcanon ks) + ++ [":", TypePrinter.pretty PPE.empty (vi_typ vi)] + ++ ["|"] + ++ [sep ", " $ fmap prettyConstraint (posCon ++ negCon ++ botCon)] + pv = string . show + +prettyDnf :: (Var v, Var vt) => Set (NormalizedConstraints vt v loc) -> Pretty ColorText +prettyDnf xs = sep " " ("{" : intersperse "," (prettyNormalizedConstraints <$> Set.toList xs) ++ ["}"]) diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/PmGrd.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/PmGrd.hs new file mode 100644 index 000000000..64d86b5a5 --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/PmGrd.hs @@ -0,0 +1,64 @@ +module Unison.PatternMatchCoverage.PmGrd where + +import Unison.ConstructorReference (ConstructorReference) +import Unison.PatternMatchCoverage.PmLit (PmLit, prettyPmLit) +import qualified Unison.PrettyPrintEnv as PPE +import qualified Unison.Syntax.TypePrinter as TypePrinter +import Unison.Term (Term') +import Unison.Type (Type) +import Unison.Util.Pretty +import Unison.Var (Var) + +data + PmGrd + vt -- Type variable + v -- Term variable + loc -- annotation + = -- | @PmCon x Con xs ys@ corresponds to the constraint @Con ys <- x@ + PmCon + v + -- ^ Variable + ConstructorReference + -- ^ Constructor + [(v, Type vt loc)] + -- ^ Constructor argument values and types + | PmLit v PmLit + | PmListHead + v + -- ^ list root + Int + -- ^ cons position (0 is head) + v + -- ^ element variable + (Type vt loc) + -- ^ element type + | PmListTail + v + -- ^ list root + Int + -- ^ snoc position (0 is last) + v + -- ^ element variable + (Type vt loc) + -- ^ element type + | -- | The size of the list must fall within this inclusive range + PmListInterval v Int Int + | -- | If a guard performs an effect + PmBang v + | -- | @PmLet x expr@ corresponds to a @let x = expr@ guard. This actually + -- /binds/ @x@. 
+ PmLet v (Term' vt v loc) (Type vt loc) + deriving stock (Show) + +prettyPmGrd :: (Var vt, Var v) => PmGrd vt v loc -> Pretty ColorText +prettyPmGrd = \case + PmCon var con convars -> + let xs = string (show con) : (formatConVar <$> convars) ++ ["<-", string (show var)] + formatConVar (v, t) = sep " " ["(", string (show v), ":", TypePrinter.pretty PPE.empty t, ")"] + in sep " " xs + PmListHead var n el _ -> sep " " ["Cons", string (show n), string (show el), "<-", string (show var)] + PmListTail var n el _ -> sep " " ["Snoc", string (show n), string (show el), "<-", string (show var)] + PmListInterval var minLen maxLen -> sep " " ["Interval", string (show (minLen, maxLen)), "<-", string (show var)] + PmLit var lit -> sep " " [prettyPmLit lit, "<-", string (show var)] + PmBang v -> "!" <> string (show v) + PmLet v _expr _ -> sep " " ["let", string (show v), "=", ""] diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/PmLit.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/PmLit.hs new file mode 100644 index 000000000..1a2f5e2a2 --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/PmLit.hs @@ -0,0 +1,23 @@ +module Unison.PatternMatchCoverage.PmLit where + +import Unison.Prelude +import Unison.Util.Pretty (Pretty, string) + +data PmLit + = Int Int64 + | Nat Word64 + | Boolean Bool + | Float Double + | Text Text + | Char Char + deriving stock (Show, Eq, Ord) + +prettyPmLit :: (IsString s) => PmLit -> Pretty s +prettyPmLit = + string . \case + Int x -> show x + Nat x -> show x + Boolean x -> show x + Float x -> show x + Text x -> show x + Char x -> show x diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/Solve.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/Solve.hs new file mode 100644 index 000000000..d1917d1eb --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/Solve.hs @@ -0,0 +1,883 @@ +{-# LANGUAGE DataKinds #-} + +module Unison.PatternMatchCoverage.Solve + ( uncoverAnnotate, + classify, + expandSolution, + generateInhabitants, + ) +where + +import Control.Monad.State +import Control.Monad.Trans.Compose +import Control.Monad.Trans.Maybe +import Data.Foldable +import Data.Function +import Data.Functor +import Data.Functor.Compose +import Data.List.NonEmpty (NonEmpty (..)) +import qualified Data.Sequence as Seq +import qualified Data.Set as Set +import Unison.Builtin.Decls (unitRef) +import Unison.ConstructorReference (ConstructorReference) +import Unison.Debug (DebugFlag (PatternCoverageConstraintSolver), shouldDebug) +import Unison.Pattern (Pattern) +import qualified Unison.Pattern as Pattern +import Unison.PatternMatchCoverage.Class +import Unison.PatternMatchCoverage.Constraint (Constraint) +import qualified Unison.PatternMatchCoverage.Constraint as C +import Unison.PatternMatchCoverage.Fix +import Unison.PatternMatchCoverage.GrdTree +import Unison.PatternMatchCoverage.IntervalSet (IntervalSet) +import qualified Unison.PatternMatchCoverage.IntervalSet as IntervalSet +import Unison.PatternMatchCoverage.Literal +import Unison.PatternMatchCoverage.NormalizedConstraints +import Unison.PatternMatchCoverage.PmGrd +import Unison.PatternMatchCoverage.PmLit (PmLit) +import qualified Unison.PatternMatchCoverage.PmLit as PmLit +import qualified Unison.PatternMatchCoverage.UFMap as UFMap +import Unison.Prelude +import Unison.Type (Type) +import qualified Unison.Type as Type +import qualified Unison.Util.Pretty as P +import Unison.Var (Var) + +-- | top-down traversal of the 'GrdTree' that produces: +-- +-- * a refinement 
type describing values that do not match the 'GrdTree' +-- (the "uncovered" set) +-- * a new 'GrdTree' annotated with refinement types at the nodes describing +-- values that cause an effect to be performed and values that match +-- the case at the leaves. +-- +-- If the former is inhabited then its inhabitants are unmatched +-- values. If the leaves of the latter are inhabited then the case is +-- redundant. +uncoverAnnotate :: + forall vt v loc m l. + (Pmc vt v loc m) => + Set (NormalizedConstraints vt v loc) -> + GrdTree (PmGrd vt v loc) l -> + ( m + ( Set (NormalizedConstraints vt v loc), + GrdTree (Set (NormalizedConstraints vt v loc)) (Set (NormalizedConstraints vt v loc), l) + ) + ) +uncoverAnnotate z grdtree0 = cata phi grdtree0 z + where + phi = \case + -- There is no way to fail matching a leaf, return the empty set + -- to represent false. + LeafF l -> \nc -> do + nc' <- ensureInhabited' nc + pure (Set.empty, Leaf (nc', l)) + ForkF (kinit :| ks) -> \nc0 -> do + -- depth-first fold in match-case order to accumulate the + -- constraints for a match failure at every case. + (nc1, t1) <- kinit nc0 + (ncfinal, ts) <- foldlM (\(nc, ts) a -> a nc >>= \(nc', t) -> pure (nc', t : ts)) (nc1, []) ks + pure (ncfinal, Fork (t1 :| reverse ts)) + GrdF grd k -> \nc0 -> case grd of + PmCon var con convars -> do + handleGrd (PosCon var con convars) (NegCon var con) k nc0 + PmLit var lit -> do + handleGrd (PosLit var lit) (NegLit var lit) k nc0 + PmListHead listVar n el elt -> do + nc <- addLiteral' nc0 (PosListHead listVar n el elt) + k nc + PmListTail listVar n el elt -> do + nc <- addLiteral' nc0 (PosListTail listVar n el elt) + k nc + PmListInterval listVar lb ub -> do + let iset = IntervalSet.singleton (lb, ub) + handleGrd (NegListInterval listVar (IntervalSet.complement iset)) (NegListInterval listVar iset) k nc0 + PmBang var -> do + (ncCont, t) <- k nc0 + ncEff <- addLiteral' nc0 (Effectful var) + let t' = Grd ncEff t + pure (ncCont, t') + PmLet var expr typ -> do + nc <- addLiteral' nc0 (Let var expr typ) + k nc + + -- Constructors and literals are handled uniformly except that + -- they pass different positive and negative literals. + handleGrd pos neg k nc0 = do + ncNoMatch <- addLiteral' nc0 neg + ncMatch <- addLiteral' nc0 pos + (ncMatch, t) <- k ncMatch + -- A match can fail because it fails to match the immediate + -- pattern or it can match the immediate pattern but fail to + -- match some pattern or guard defined later in this same case. + -- + -- This split can lead to an exponential number of terms, so we + -- limit this growth to a constant, conservatively + -- approximating. This is known as "throttling" in the paper and + -- described in section 5.2. 
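-- Concretely: each guard can split every term of the incoming
-- uncovered set in two (a "did not match this guard" term and a
-- "matched this guard but failed on a later guard" term), so a chain
-- of n guards can produce on the order of 2^n terms. The check below
-- keeps the union only while it stays below a fixed bound of 30
-- terms; past that it falls back to the coarser input set nc0, which
-- over-approximates the uncovered values but stays small.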
+ let ncFinalCandidate = Set.union ncMatch ncNoMatch + ncFinal = case Set.size ncFinalCandidate >= 30 of + True -> nc0 + False -> ncFinalCandidate + pure (ncFinal, t) + + ensureInhabited' :: + Set (NormalizedConstraints vt v loc) -> + m (Set (NormalizedConstraints vt v loc)) + ensureInhabited' ncs0 = foldlM phi Set.empty ncs0 + where + phi ncs nc = + ensureInhabited initFuel nc <&> \case + Nothing -> ncs + Just nc -> Set.insert nc ncs + + -- Add a literal to each term in our DNF, dropping terms that + -- become contradictory + addLiteral' :: + Set (NormalizedConstraints vt v loc) -> + Literal vt v loc -> + m (Set (NormalizedConstraints vt v loc)) + addLiteral' ncs0 lit = foldlM phi Set.empty ncs0 + where + phi ncs nc = + addLiteral lit nc <&> \case + Nothing -> ncs + Just nc -> Set.insert nc ncs + +-- | Collect accessible, inaccessible, and redundant GRHSs +classify :: + forall vt v loc l. + GrdTree (Set (NormalizedConstraints vt v loc)) (Set (NormalizedConstraints vt v loc), l) -> + ([l], [l], [l]) +classify = cata classifyAlg + +classifyAlg :: + forall vt v loc l. + GrdTreeF (Set (NormalizedConstraints vt v loc)) (Set (NormalizedConstraints vt v loc), l) ([l], [l], [l]) -> + ([l], [l], [l]) +classifyAlg = \case + LeafF (rt, l) -> + case inh rt of + True -> ([l], [], []) + False -> ([], [], [l]) + GrdF rt rest -> + -- The presence of a 'GrdF' node indicates that an effect was + -- performed (see 'uncoverAnnotate'). + case inh rt of + True -> + -- The rest of the subtree is redundant, but an effect is + -- performed. Classify this as "Inaccessible". + case rest of + ([], [], x : xs) -> ([], [x], xs) + _ -> rest + False -> rest + ForkF xs -> foldr (\(a, b, c) ~(acc, inacc, redun) -> (a ++ acc, b ++ inacc, c ++ redun)) ([], [], []) xs + where + -- inhabitation check + inh = not . Set.null + +-- | Expand a full DNF term (i.e. each term identifies exactly one +-- solution) into an inhabiting pattern. +generateInhabitants :: + forall vt v loc. + (Var v) => + v -> + NormalizedConstraints vt v loc -> + Pattern () +generateInhabitants x nc = + let (_xcanon, xvi, nc') = expectCanon x nc + in case vi_con xvi of + Vc'Constructor pos _neg -> case pos of + Nothing -> Pattern.Unbound () + Just (dc, convars) -> + Pattern.Constructor () dc (map (\(v, _) -> generateInhabitants v nc') convars) + Vc'Boolean pos _neg -> case pos of + Nothing -> Pattern.Unbound () + Just b -> Pattern.Boolean () b + Vc'ListRoot _typ consPos snocPos intset -> + let matchedLength = on (+) length consPos snocPos + mmaxLength = IntervalSet.lookupMax intset + matchIsIncomplete = case mmaxLength of + Nothing -> True + Just maxLength -> matchedLength < maxLength + rootPat = case matchIsIncomplete of + True -> Pattern.Unbound () + False -> Pattern.SequenceLiteral () [] + snoced = foldr (\a b -> Pattern.SequenceOp () b Pattern.Snoc (generateInhabitants a nc')) rootPat snocPos + consed = foldr (\a b -> Pattern.SequenceOp () (generateInhabitants a nc') Pattern.Cons b) snoced consPos + in consed + _ -> Pattern.Unbound () + +-- | Instantiate a variable to a given constructor. +instantiate :: + forall vt v loc x m. + (Pmc vt v loc m) => + Fuel -> + NormalizedConstraints vt v loc -> + v -> + -- | constructor + x -> + -- | type of datacon's args + [Type vt loc] -> + -- | produce positive constraint + (v -> x -> [(v, Type vt loc)] -> [Constraint vt v loc]) -> + m (Maybe (NormalizedConstraints vt v loc, [(v, Type vt loc)])) +instantiate fuel nc x c argTyps posConstraint = do + -- todo: centralize this declVar logic. 
Currently in 'addLiteral' and here. + newVars :: [(var, typ)] <- traverse (\t -> (,t) <$> fresh) argTyps + let nc' = foldr (\(v, t) b -> declVar v t id b) nc newVars + cons = posConstraint x c newVars + mnc <- runMaybeT do + nc <- MaybeT (addConstraints cons nc') + -- mark all new fields as dirty as we need to ensure they are + -- inhabited + let nc' = foldr (\(v, _) b -> markDirty v b) nc newVars + -- branching factor + let newFuel = case length newVars > 1 of + True -> min fuel 3 + False -> fuel + -- we must ensure that all strict fields are inhabited + MaybeT (ensureInhabited newFuel nc') + pure ((\x -> (x, newVars)) <$> mnc) + +-- | Given a variable and a term in DNF, expand it to an identical DNF +-- expression with enough positive info to print pattern suggestions. +expandSolution :: + forall vt v loc m. + (Pmc vt v loc m) => + v -> + NormalizedConstraints vt v loc -> + m (Set (NormalizedConstraints vt v loc)) +expandSolution x nc = + let go fuel x nc + -- If we run out of fuel conservatively assume the term is + -- inhabited. + | fuel == 0 = pure (Set.singleton nc) + | otherwise = + let (_xcanon, xvi, nc') = expectCanon x nc + in withConstructors (pure (Set.singleton nc')) xvi \cs posConstraint _negConstraint -> + -- We have some constructors to attempt + -- instantiation with. Instantiate each one, if + -- doesn't lead to a contradiction then add it to + -- the set of valid solutions. + let phi (cref, cvt) = do + instantiate initFuel nc' x cref cvt posConstraint >>= \case + Nothing -> pure Set.empty -- contradiction + Just (nc'', newVars) -> case newVars of + [] -> pure (Set.singleton nc'') + _ -> + -- If we have the match expression: + -- @ + -- match blerg : Maybe (Maybe ()) with + -- Nothing -> () + -- Just Nothing -> () + -- @ + -- + -- Then we would like to suggest @Just (Just _)@ rather than @Just _@. + -- To accomplish this, we recurse and expand variables for which we have + -- positive or negative information. + + -- branching factor + let newFuel = case length newVars > 1 of + True -> min fuel 3 + False -> fuel + in Set.fromList + <$> foldlM + ( \b (v, _t) -> + Set.toList . Set.unions + <$> traverse + ( \nc -> + case expectCanon v nc of + (_vc, vi, nc') -> case vi_con vi of + Vc'Constructor pos neg + -- always instantiate unit, this ensures we print tuple patterns correctly + | Type.Ref' x <- vi_typ vi, x == unitRef -> go newFuel v nc' + | Just _ <- pos -> go newFuel v nc' + | not (Set.null neg) -> go (newFuel - 1) v nc' + Vc'Boolean _pos neg + | not (Set.null neg) -> go (newFuel - 1) v nc' + Vc'ListRoot _typ _posCons _posSnoc neg + | not (IntervalSet.singleton (0, maxBound) == neg) -> go (newFuel - 1) v nc' + _ -> pure (Set.singleton nc') + ) + b + ) + [nc''] + newVars + in foldr (\a b s -> phi a >>= \a' -> b (Set.union a' s)) pure cs Set.empty + in go initFuel x nc + +withConstructors :: + forall vt v loc r m. + (Pmc vt v loc m) => + m r -> + VarInfo vt v loc -> + ( forall x. 
+ [(x, [Type vt loc])] -> + (v -> x -> [(v, Type vt loc)] -> [Constraint vt v loc]) -> + (v -> x -> Constraint vt v loc) -> + m r + ) -> + m r +withConstructors nil vinfo k = do + getConstructors typ >>= \case + ConstructorType cs -> do + arg <- for cs \(v, cref, _) -> do + cvts <- getConstructorVarTypes typ cref + pure ((v, cref), cvts) + k arg (\v (_, cref) args -> [C.PosCon v cref args]) (\v (_, cref) -> C.NegCon v cref) + SequenceType _cs -> + let Vc'ListRoot elemType consPos snocPos iset = case vi_con vinfo of + Vc'ListRoot {} -> vi_con vinfo + _ -> error "impossible: constraint for sequence type not a list root" + varCount = length consPos + length snocPos + minLen = fromMaybe 0 $ IntervalSet.lookupMin iset + + mkPosCons :: (Int -> [v] -> [Constraint vt v loc]) -> Int -> [v] -> [Constraint vt v loc] + mkPosCons z elvs0 = foldr (\_ b n (elv : elvs) -> C.PosListHead v n elv : b (n + 1) elvs) z consPos elvs0 + + mkPosSnoc :: (Int -> [v] -> [Constraint vt v loc]) -> Int -> [v] -> [Constraint vt v loc] + mkPosSnoc z elvs0 = foldr (\_ b n (elv : elvs) -> C.PosListTail v n elv : b (n + 1) elvs) z snocPos elvs0 + + constraints :: [(([(v, Type vt loc)] -> [Constraint vt v loc], Constraint vt v loc), [Type vt loc])] + constraints = + let mk f elvs = mkPosCons (\_ elvs -> mkPosSnoc (\_ elvs -> f elvs) 0 elvs) 0 (map fst elvs) + in [ ((mk \[] -> [], C.NegListInterval v (IntervalSet.singleton (minLen, maxBound))), replicate varCount elemType) + ] + + mkPos _v (pos, _neg) args = + pos args + mkNeg _v (_pos, neg) = + neg + in k constraints mkPos mkNeg + BooleanType -> do + k [(True, []), (False, [])] (\v b _ -> [C.PosLit v (PmLit.Boolean b)]) (\v b -> C.NegLit v (PmLit.Boolean b)) + OtherType -> nil + where + typ = vi_typ vinfo + v = vi_id vinfo + +-- | Test that the given variable is inhabited. This test is +-- undecidable in general so we adopt a fuel based approach as +-- described in section 3.7. +inhabited :: + forall vt v loc m. + (Pmc vt v loc m) => + Fuel -> + v -> + NormalizedConstraints vt v loc -> + m (Maybe (NormalizedConstraints vt v loc)) +inhabited fuel x nc0 = + let (_xcanon, xvi, nc') = expectCanon x nc0 + in withConstructors (pure (Just nc')) xvi \cs posConstraint negConstraint -> + -- one of the constructors must be inhabited, Return the + -- first non-contradictory instantiation. + let phi (cref, cvt) b nc = do + instantiate fuel nc x cref cvt posConstraint >>= \case + Nothing -> do + -- record failed instantiation attempt so we don't + -- attempt to instantiate this constructor again + addConstraint (negConstraint x cref) nc >>= \case + Nothing -> b nc + Just nc -> b nc + Just _ -> pure (Just nc) + in foldr phi (\_ -> pure Nothing) cs nc' + +newtype Fuel = Fuel Int + deriving newtype (Show, Eq, Ord, Enum, Bounded, Num) + +initFuel :: Fuel +initFuel = 8 + +-- | Check that all variables marked dirty are inhabited. +ensureInhabited :: + forall vt v loc m. + (Pmc vt v loc m) => + Fuel -> + NormalizedConstraints vt v loc -> + m (Maybe (NormalizedConstraints vt v loc)) +ensureInhabited fuel nc0@NormalizedConstraints {dirtySet} + | fuel == 0 = pure (Just clean) -- out of fuel, assume inhabited + | otherwise = do + -- all dirty vars must be inhabited or this NormalizedConstraints + -- is dropped + let phi dirtyVar b nc = do + nc <- MaybeT (inhabited (fuel - 1) dirtyVar nc) + b nc + in runMaybeT (foldr phi pure dirtySet clean) + where + clean = nc0 {dirtySet = mempty} + +-- | Add a formula literal to our normalized constraint set. This +-- corresponds to fig 7. 
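--
-- For instance (the names @v0@, @v1@, @someRef@ and @natType@ are
-- illustrative): adding @PosCon v0 someRef [(v1, natType)]@ first
-- declares @v1 : natType@ via 'declVar' and then hands
-- @C.PosCon v0 someRef [(v1, natType)]@ to 'addConstraint', whereas
-- 'F' immediately yields 'Nothing' (an unsatisfiable refinement
-- type). Every successful addition is followed by an inhabitation
-- check with 'ensureInhabited'.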
+addLiteral :: + forall vt v loc m. + (Pmc vt v loc m) => + Literal vt v loc -> + NormalizedConstraints vt v loc -> + m (Maybe (NormalizedConstraints vt v loc)) +addLiteral lit0 nabla0 = runMaybeT do + nc <- MaybeT $ case lit0 of + F -> pure Nothing + T -> pure (Just nabla0) + PosCon var datacon convars -> + let ctx = foldr (\(trm, typ) b -> declVar trm typ id b) nabla0 convars + c = C.PosCon var datacon convars + in addConstraint c ctx + NegCon var datacon -> addConstraint (C.NegCon var datacon) nabla0 + PosLit var lit -> addConstraint (C.PosLit var lit) nabla0 + NegLit var lit -> addConstraint (C.NegLit var lit) nabla0 + PosListHead listRoot n listElem listElemType -> do + let nabla1 = declVar listElem listElemType id nabla0 + c = C.PosListHead listRoot n listElem + addConstraint c nabla1 + PosListTail listRoot n listElem listElemType -> do + let nabla1 = declVar listElem listElemType id nabla0 + c = C.PosListTail listRoot n listElem + addConstraint c nabla1 + NegListInterval listVar iset -> addConstraint (C.NegListInterval listVar iset) nabla0 + Effectful var -> addConstraint (C.Effectful var) nabla0 + Let var _expr typ -> pure (Just (declVar var typ id nabla0)) + MaybeT (ensureInhabited initFuel nc) + +insertVarInfo :: + forall vt v loc. + (Ord v) => + v -> + VarInfo vt v loc -> + NormalizedConstraints vt v loc -> + NormalizedConstraints vt v loc +insertVarInfo k v nc@NormalizedConstraints {constraintMap} = + nc {constraintMap = UFMap.insert k v constraintMap} + +-- | Add a constraint to our normalized constraint set. This +-- corresponds to fig 7. +addConstraint :: + forall vt v loc m. + (Pmc vt v loc m) => + Constraint vt v loc -> + NormalizedConstraints vt v loc -> + m (Maybe (NormalizedConstraints vt v loc)) +addConstraint con0 nc = + debugConstraint <$> case con0 of + C.PosLit var pmlit -> + let updateLiteral pos neg lit + | Just lit1 <- pos, + lit1 == lit = case lit1 == lit of + -- we already have this positive constraint + True -> (pure (), Ignore) + -- contradicts positive info + False -> (contradiction, Ignore) + -- the constraint contradicts negative info + | Set.member lit neg = (contradiction, Ignore) + | otherwise = (pure (), Update (Just lit, neg)) + in modifyLiteralC var pmlit updateLiteral nc + C.NegLit var pmlit -> + let updateLiteral pos neg lit + -- the constraint contradicts positive info + | Just lit1 <- pos, lit1 == lit = (contradiction, Ignore) + -- we already have this negative constraint + | Set.member lit neg = (pure (), Ignore) + | otherwise = (pure (), Update (pos, Set.insert lit neg)) + in modifyLiteralC var pmlit updateLiteral nc + C.NegListInterval var negMatchInterval -> + let updateList _typ pCons pSnoc posMatchInterval + -- No lengths are accepted + | IntervalSet.null newMatchInterval = (contradiction, Ignore) + -- This length constraint forces equating some cons and snoc matches + | let unconflictedLen = length pCons + length pSnoc, + Just maxLen <- IntervalSet.lookupMax newMatchInterval, + maxLen < unconflictedLen = + let varsToEquate = unconflictedLen - maxLen + (newPSnoc, vars) = + let (_as, bs) = Seq.splitAt (length pCons - varsToEquate) pCons + (cs, ds) = Seq.splitAt (length pSnoc - varsToEquate) pSnoc + in (cs, zip (toList bs) (toList ds)) + in (equate vars, Update (pCons, newPSnoc, newMatchInterval)) + | otherwise = + (populateCons var pCons newMatchInterval, Update (pCons, pSnoc, newMatchInterval)) + where + newMatchInterval = IntervalSet.difference posMatchInterval negMatchInterval + in modifyListC var updateList nc + C.PosListHead r n e -> + 
let updateList _elmType posCons posSnocs iset + -- there is an existing positive constraint on this element + | Just existingElemVar <- Seq.lookup n posCons = (equate [(e, existingElemVar)], Ignore) + -- a list of this length is proscribed + | let minPatLen = length posCons + 1, + Just maxLen <- IntervalSet.lookupMax iset, + maxLen < minPatLen = + (contradiction, Ignore) + -- the length constraint forces us to equate some cons and snoc patterns + | let unconflictedLen = length posCons + length posSnocs + 1, + Just maxLen <- IntervalSet.lookupMax iset, + maxLen < unconflictedLen = + let posCons' = posCons Seq.|> e + e' = Seq.index posSnocs (maxLen - length posCons') + in (equate [(e, e')], Update (posCons', posSnocs, iset)) + | otherwise = + let posCons' = posCons Seq.|> e + iset' = IntervalSet.delete (0, length posCons' - 1) iset + in (pure (), Update (posCons', posSnocs, iset')) + in modifyListC r updateList nc + C.PosListTail r n e -> + let updateList _elmType posCons posSnoc iset + -- there is an existing positive constraint on this element + | Just existingElemVar <- Seq.lookup n posSnoc = (equate [(e, existingElemVar)], Ignore) + -- a list of this length is proscribed + | let minPatLen = length posSnoc + 1, + Just maxLen <- IntervalSet.lookupMax iset, + maxLen < minPatLen = + (contradiction, Ignore) + -- the length constraint forces us to equate some cons and snoc patterns + | let unconflictedLen = length posCons + length posSnoc + 1, + Just maxLen <- IntervalSet.lookupMax iset, + maxLen < unconflictedLen = + let posSnoc' = posSnoc Seq.|> e + e' = Seq.index posCons (maxLen - length posSnoc') + in (equate [(e, e')], Update (posCons, posSnoc', iset)) + | otherwise = + let posSnoc' = posSnoc Seq.|> e + iset' = IntervalSet.delete (0, length posSnoc' - 1) iset + in (populateCons r posCons iset', Update (posCons, posSnoc', iset')) + in modifyListC r updateList nc + C.PosCon var datacon convars -> + let updateConstructor pos neg + | Just (datacon1, convars1) <- pos = case datacon == datacon1 of + True -> do + -- we already have an assertion, so equate convars + let varsToEquate = zipWith (\(y, _) (z, _) -> (y, z)) convars convars1 + (equate varsToEquate, Ignore) + False -> (contradiction, Ignore) + -- contradicts negative info + | True <- Set.member datacon neg = (contradiction, Ignore) + | otherwise = + -- no conflicting info, add constraint + (pure (), Update (Just (datacon, convars), neg)) + in modifyConstructorC var updateConstructor nc -- runC nc (put =<< modifyConstructor var updateConstructor =<< get) + C.NegCon var datacon -> + let updateConstructor pos neg + -- contradicts positive info + | Just (datacon1, _) <- pos, datacon1 == datacon = (contradiction, Ignore) + -- we already have this negative constraint + | Set.member datacon neg = (pure (), Ignore) + | otherwise = (pure (), Update (pos, Set.insert datacon neg)) + in modifyConstructorC var updateConstructor nc + C.Effectful var -> + case expectCanon var nc of + (var, vi, nc) + | otherwise -> pure $ Just $ insertVarInfo var vi {vi_eff = IsEffectful} nc + C.Eq x y -> union x y nc + where + debugConstraint x = + let debugOutput = + P.sep + "\n" + [ P.hang (P.red "input constraints: ") (prettyNormalizedConstraints nc), + P.hang (P.yellow "additional constraint: ") (C.prettyConstraint con0), + P.hang (P.green "resulting constraint: ") (maybe "contradiction" prettyNormalizedConstraints x), + "" + ] + in if shouldDebug PatternCoverageConstraintSolver then trace (P.toAnsiUnbroken debugOutput) x else x + +-- | Like 'addConstraint', but 
for a list of constraints +addConstraints :: + forall vt v loc m. + (Pmc vt v loc m) => + [Constraint vt v loc] -> + NormalizedConstraints vt v loc -> + m (Maybe (NormalizedConstraints vt v loc)) +addConstraints cs nc0 = runMaybeT $ foldlM (\b a -> MaybeT (addConstraint a b)) nc0 cs + +-- | Equate two variables +union :: + forall vt v loc m. + (Pmc vt v loc m) => + v -> + v -> + NormalizedConstraints vt v loc -> + m (Maybe (NormalizedConstraints vt v loc)) +union v0 v1 nc@NormalizedConstraints {constraintMap} = + UFMap.union v0 v1 constraintMap \chosenCanon nonCanonValue m -> + -- In this block we want to collect the constraints from the + -- non-canonical value and add them to the canonical value. + + -- literals are handled uniformly + let handleLit :: forall x. (x -> PmLit) -> Maybe x -> Set x -> ([Constraint vt v loc], [Constraint vt v loc]) + handleLit toPmLit pos neg = + let posC = case pos of + Nothing -> [] + Just lit -> [C.PosLit chosenCanon (toPmLit lit)] + negC = foldr (\a b -> C.NegLit chosenCanon (toPmLit a) : b) [] neg + in (posC, negC) + constraints = posCon ++ negCon ++ effCon + (posCon, negCon) = case vi_con nonCanonValue of + Vc'Constructor pos neg -> + let posC = case pos of + Nothing -> [] + Just (datacon, convars) -> [C.PosCon chosenCanon datacon convars] + negC = foldr (\a b -> C.NegCon chosenCanon a : b) [] neg + in (posC, negC) + Vc'ListRoot _typ posCons posSnoc iset -> + let consConstraints = map (\(i, x) -> C.PosListHead chosenCanon i x) (zip [0 ..] (toList posCons)) + snocConstraints = map (\(i, x) -> C.PosListTail chosenCanon i x) (zip [0 ..] (toList posSnoc)) + neg = [C.NegListInterval chosenCanon (IntervalSet.complement iset)] + in (consConstraints ++ snocConstraints, neg) + Vc'Boolean pos neg -> handleLit PmLit.Boolean pos neg + Vc'Int pos neg -> handleLit PmLit.Int pos neg + Vc'Nat pos neg -> handleLit PmLit.Nat pos neg + Vc'Float pos neg -> handleLit PmLit.Float pos neg + Vc'Text pos neg -> handleLit PmLit.Text pos neg + Vc'Char pos neg -> handleLit PmLit.Char pos neg + effCon = case vi_eff nonCanonValue of + IsNotEffectful -> [] + IsEffectful -> [C.Effectful chosenCanon] + in addConstraints constraints nc {constraintMap = m} + +modifyListC :: + forall vt v loc m. + (Pmc vt v loc m) => + v -> + ( Type vt loc -> + Seq v -> + Seq v -> + IntervalSet -> + (C vt v loc m (), ConstraintUpdate (Seq v, Seq v, IntervalSet)) + ) -> + NormalizedConstraints vt v loc -> + m (Maybe (NormalizedConstraints vt v loc)) +modifyListC v f nc0 = + let (ccomp, nc1) = modifyListF v f nc0 + in fmap snd <$> runC nc1 ccomp + +modifyListF :: + forall vt v loc f. + (Var v, Functor f) => + v -> + ( Type vt loc -> + Seq v -> + Seq v -> + IntervalSet -> + f (ConstraintUpdate (Seq v, Seq v, IntervalSet)) + ) -> + NormalizedConstraints vt v loc -> + f (NormalizedConstraints vt v loc) +modifyListF v f nc = + let g vc = getCompose (posAndNegList (\typ pcons psnoc iset -> Compose (f typ pcons psnoc iset)) vc) + in modifyVarConstraints v g nc + +modifyConstructorC :: + forall vt v loc m. + (Pmc vt v loc m) => + v -> + ( (Maybe (ConstructorReference, [(v, Type vt loc)])) -> + Set ConstructorReference -> + (C vt v loc m (), ConstraintUpdate (Maybe (ConstructorReference, [(v, Type vt loc)]), Set ConstructorReference)) + ) -> + NormalizedConstraints vt v loc -> + m (Maybe (NormalizedConstraints vt v loc)) +modifyConstructorC v f nc0 = + let (ccomp, nc1) = modifyConstructorF v f nc0 + in fmap snd <$> runC nc1 ccomp + +modifyConstructorF :: + forall vt v loc f. 
+ (Var v, Functor f) => + v -> + ( (Maybe (ConstructorReference, [(v, Type vt loc)])) -> + Set ConstructorReference -> + f (ConstraintUpdate (Maybe (ConstructorReference, [(v, Type vt loc)]), Set ConstructorReference)) + ) -> + NormalizedConstraints vt v loc -> + f (NormalizedConstraints vt v loc) +modifyConstructorF v f nc = + let g vc = getCompose (posAndNegConstructor (\pos neg -> Compose (f pos neg)) vc) + in modifyVarConstraints v g nc + +modifyLiteralC :: + forall vt v loc m. + (Pmc vt v loc m) => + v -> + PmLit -> + ( forall a. + (Ord a) => + -- positive info + Maybe a -> + -- negative info + Set a -> + -- the passed in PmLit, unpacked + a -> + (C vt v loc m (), ConstraintUpdate (Maybe a, Set a)) + ) -> + NormalizedConstraints vt v loc -> + m (Maybe (NormalizedConstraints vt v loc)) +modifyLiteralC v lit f nc0 = + let (ccomp, nc1) = modifyLiteralF v lit f nc0 + in fmap snd <$> runC nc1 ccomp + +-- | Update constraints on some literal by only depending on their Ord +-- instance. +modifyLiteralF :: + forall vt v loc f. + (Var v, Functor f) => + v -> + PmLit -> + ( forall a. + (Ord a) => + -- positive info + Maybe a -> + -- negative info + Set a -> + -- the passed in PmLit, unpacked + a -> + f (ConstraintUpdate (Maybe a, Set a)) + ) -> + NormalizedConstraints vt v loc -> + f (NormalizedConstraints vt v loc) +modifyLiteralF v lit f nc = + let g vc = getCompose (posAndNegLiteral (\pos neg candidate -> Compose (f pos neg candidate)) lit vc) + in modifyVarConstraints v g nc + +modifyVarConstraints :: + forall vt v loc f. + (Var v, Functor f) => + v -> + ( VarConstraints vt v loc -> + f (ConstraintUpdate (VarConstraints vt v loc)) + ) -> + NormalizedConstraints vt v loc -> + -- | applied to 'Vc'Constructor' + f (NormalizedConstraints vt v loc) +modifyVarConstraints v updateVarConstraint nc0 = do + updateF v (\_v vi -> fmap (\vc -> vi {vi_con = vc}) <$> updateVarConstraint (vi_con vi)) nc0 +{-# INLINE modifyVarConstraints #-} + +-- | Modify the positive and negative constraints of a constructor. +posAndNegConstructor :: + forall f vt v loc. + (Functor f) => + ( (Maybe (ConstructorReference, [(v, Type vt loc)])) -> + Set ConstructorReference -> + f (Maybe (ConstructorReference, [(v, Type vt loc)]), Set ConstructorReference) + ) -> + VarConstraints vt v loc -> + f (VarConstraints vt v loc) +posAndNegConstructor f = \case + Vc'Constructor pos neg -> uncurry Vc'Constructor <$> f pos neg + _ -> error "impossible: posAndNegConstructor called on a literal" +{-# INLINE posAndNegConstructor #-} + +-- | Modify the positive and negative constraints in a way that +-- doesn't rely upon the particular literal type, but only on it being +-- an instance of Ord. +posAndNegLiteral :: + forall f vt v loc. + (Functor f) => + ( forall a. 
+    (Ord a) =>
+    Maybe a ->
+    Set a ->
+    a ->
+    f (Maybe a, Set a)
+  ) ->
+  PmLit ->
+  VarConstraints vt v loc ->
+  f (VarConstraints vt v loc)
+posAndNegLiteral f lit = \case
+  Vc'Boolean pos neg
+    | PmLit.Boolean b <- lit -> uncurry Vc'Boolean <$> f pos neg b
+  Vc'Int pos neg
+    | PmLit.Int b <- lit -> uncurry Vc'Int <$> f pos neg b
+  Vc'Nat pos neg
+    | PmLit.Nat b <- lit -> uncurry Vc'Nat <$> f pos neg b
+  Vc'Float pos neg
+    | PmLit.Float b <- lit -> uncurry Vc'Float <$> f pos neg b
+  Vc'Text pos neg
+    | PmLit.Text b <- lit -> uncurry Vc'Text <$> f pos neg b
+  Vc'Char pos neg
+    | PmLit.Char b <- lit -> uncurry Vc'Char <$> f pos neg b
+  Vc'Constructor _ _ -> error "impossible: posAndNegLiteral called on constructor"
+  _ -> error "impossible: incompatible PmLit and VarConstraints types"
+{-# INLINE posAndNegLiteral #-}
+
+posAndNegList ::
+  forall f vt v loc.
+  (Functor f) =>
+  ( Type vt loc ->
+    Seq v ->
+    Seq v ->
+    IntervalSet ->
+    f (Seq v, Seq v, IntervalSet)
+  ) ->
+  VarConstraints vt v loc ->
+  f (VarConstraints vt v loc)
+posAndNegList f = \case
+  Vc'ListRoot typ posCons posSnocs iset -> (\(posCons, posSnocs, iset) -> Vc'ListRoot typ posCons posSnocs iset) <$> f typ posCons posSnocs iset
+  _ -> error "impossible: posAndNegList called on something that isn't a list"
+{-# INLINE posAndNegList #-}
+
+newtype C vt v loc m a = C
+  { unC ::
+      NormalizedConstraints vt v loc ->
+      m (Maybe (a, NormalizedConstraints vt v loc))
+  }
+  deriving
+    (Functor, Applicative, Monad, MonadState (NormalizedConstraints vt v loc))
+    via StateT (NormalizedConstraints vt v loc) (MaybeT m)
+  deriving (MonadTrans) via ComposeT (StateT (NormalizedConstraints vt v loc)) MaybeT
+
+contradiction :: (Applicative m) => C vt v loc m a
+contradiction = C \_ -> pure Nothing
+
+equate :: (Pmc vt v loc m) => [(v, v)] -> C vt v loc m ()
+equate vs = addConstraintsC (map (uncurry C.Eq) vs)
+
+lookupListElemTypeC :: (Pmc vt v loc m) => v -> C vt v loc m (Type vt loc)
+lookupListElemTypeC listVar = do
+  nc0 <- get
+  let (_var, vi, nc1) = expectCanon listVar nc0
+  put nc1
+  pure $ getConst (posAndNegList (\elemTyp _ _ _ -> Const elemTyp) (vi_con vi))
+
+addConstraintsC :: (Pmc vt v loc m) => [Constraint vt v loc] -> C vt v loc m ()
+addConstraintsC cs = do
+  nc <- get
+  lift (addConstraints cs nc) >>= \case
+    Nothing -> contradiction
+    Just nc -> put nc
+
+declVarC ::
+  (Pmc vt v loc m) =>
+  v ->
+  Type vt loc ->
+  (VarInfo vt v loc -> VarInfo vt v loc) ->
+  C vt v loc m ()
+declVarC v vt vimod = do
+  nc0 <- get
+  let nc1 = declVar v vt vimod nc0
+  put nc1
+
+freshC ::
+  (Pmc vt v loc m) =>
+  C vt v loc m v
+freshC = lift fresh
+
+populateCons :: (Pmc vt v loc m) => v -> Seq v -> IntervalSet -> C vt v loc m ()
+populateCons listVar pCons iset = do
+  case IntervalSet.lookupMin iset of
+    Just minLen
+      | minLen > 0,
+        let targets = [length pCons .. 
minLen - 1], + not (null targets) -> do + elemTyp <- lookupListElemTypeC listVar + for_ targets \idx -> do + elv <- freshC + declVarC elv elemTyp id + addConstraintsC [C.PosListHead listVar idx elv] + _ -> pure () + +runC :: + (Applicative m) => + NormalizedConstraints vt v loc -> + C vt v loc m a -> + m (Maybe (a, NormalizedConstraints vt v loc)) +runC nc0 ca = unC ca nc0 diff --git a/parser-typechecker/src/Unison/PatternMatchCoverage/UFMap.hs b/parser-typechecker/src/Unison/PatternMatchCoverage/UFMap.hs new file mode 100644 index 000000000..f33e1a5bd --- /dev/null +++ b/parser-typechecker/src/Unison/PatternMatchCoverage/UFMap.hs @@ -0,0 +1,243 @@ +{-# LANGUAGE RecursiveDo #-} + +module Unison.PatternMatchCoverage.UFMap + ( UFMap, + UFValue (..), + empty, + lookupCanon, + insert, + union, + alterF, + alter, + keys, + toClasses, + ) +where + +import Control.Monad.Fix (MonadFix) +import Control.Monad.Trans.Except (ExceptT (..)) +import Data.Foldable (foldl') +import Data.Functor ((<&>)) +import Data.Functor.Compose (Compose (..)) +import Data.Functor.Identity (Identity (Identity, runIdentity)) +import Data.Functor.Sum (Sum (..)) +import Data.Map (Map) +import qualified Data.Map.Lazy as LazyMap +import qualified Data.Map.Strict as Map +import Data.Maybe (fromJust) +import Data.Set (Set) +import qualified Data.Set as Set + +-- | A union-find structure. Used by +-- 'Unison.PatternMatchCoverage.NormalizedConstraints.NormalizedConstraints' +-- to provide efficient unification. +newtype UFMap k v = UFMap (Map k (UFValue k v)) + deriving stock (Eq, Ord, Show) + +data UFValue k v + = -- | This is not the canonical value, lookup k in the map to try again + Indirection !k + | -- | The number of elements in the equivalence class + Canonical !Int !v + deriving stock (Eq, Ord, Show) + +empty :: UFMap k v +empty = UFMap Map.empty + +insert :: (Ord k) => k -> v -> UFMap k v -> UFMap k v +insert k !v m = + alter k (Just v) (\_ s _ -> Canonical s v) m + +alterF' :: + forall f k v. + (Functor f, Ord k) => + -- | The key to lookup + k -> + -- | The canonical key (use laziness to supply if unknown) + k -> + -- | Return Just to short-circuit the indirection lookup loop + (k -> UFMap k v -> Maybe (f (UFMap k v))) -> + -- | Nothing case + f (Maybe v) -> + -- | Just case + -- + -- @canonicalKey -> size -> value -> new value@ + -- + -- /N.B./ deleting a value is not supported + (k -> Int -> v -> f (UFValue k v)) -> + UFMap k v -> + -- | Returns the canonical k, the size, the value, and the path + -- compressed UFMap + f (UFMap k v) +alterF' k0 kcanon loopGuard handleNothing handleJust map0 = + let phi :: k -> Maybe (UFValue k v) -> Sum ((,) k) f (Maybe (UFValue k v)) + phi k = + \case + Nothing -> InR (fmap (Canonical 1) <$> handleNothing) + Just alpha -> case alpha of + Indirection k -> InL (k, Just (Indirection kcanon)) + Canonical sizeOrig v -> InR (Just <$> handleJust k sizeOrig v) + go :: k -> UFMap k v -> f (UFMap k v) + go k ufm@(UFMap m) = case loopGuard k ufm of + Just short -> short + Nothing -> case LazyMap.alterF (phi k) k m of + InL (k, m') -> go k (UFMap m') + InR res -> UFMap <$> res + in go k0 map0 +{-# INLINE alterF' #-} + +alterFWithHalt :: + forall f k v. 
+ (Functor f, Ord k) => + k -> + (k -> UFMap k v -> Maybe (f (UFMap k v))) -> + f (Maybe v) -> + (k -> Int -> v -> f (UFValue k v)) -> + UFMap k v -> + f (UFMap k v) +alterFWithHalt k0 isCanonical handleNothing handleJust map0 = + -- tie the canonicalK knot + let (canonicalK, res) = getCompose (alterF' k0 canonicalK loopGuard handleNothing' handleJust' map0) + handleNothing' = Compose (k0, handleNothing) + handleJust' k s v = Compose (k, handleJust k s v) + -- if the key is canonical then we halt and return it as the + -- left element of the tuple + loopGuard k m = Compose . (k,) <$> isCanonical k m + in res +{-# INLINE alterFWithHalt #-} + +alterF :: + forall f k v. + (Functor f, Ord k) => + k -> + f (Maybe v) -> + (k -> Int -> v -> f (UFValue k v)) -> + UFMap k v -> + f (UFMap k v) +alterF k = alterFWithHalt k (\_ _ -> Nothing) +{-# INLINE alterF #-} + +alter :: + forall k v. + (Ord k) => + k -> + Maybe v -> + (k -> Int -> v -> UFValue k v) -> + UFMap k v -> + UFMap k v +alter k handleNothing handleJust map0 = + runIdentity (alterF k (Identity handleNothing) (\k s v -> Identity (handleJust k s v)) map0) + +-- | Lookup the canonical value +lookupCanon :: + (Ord k) => + k -> + UFMap k v -> + -- | returns: + -- + -- * the canonical member of the equivalence set + -- * the size of the equivalence set + -- * the associated value + -- * the @UFMap@ after path compression + Maybe (k, Int, v, UFMap k v) +lookupCanon k m = + getCompose (alterF k nothing just m) + where + nothing = Compose Nothing + just k s v = Compose (Just (k, s, v, Canonical s v)) + +data UnionHaltReason k v + = KeyNotFound k + | MergeFailed v v + +data UnionValue k v a + = UnionValue k Int v (UFValue k v) a + deriving stock (Functor) + +union :: + forall m k v r. + (MonadFix m, Ord k) => + k -> + k -> + UFMap k v -> + (k -> v -> UFMap k v -> m (Maybe r)) -> + m (Maybe r) +union k0 k1 mapinit mergeValues = toMaybe do + rec let lu :: + k -> + UFMap k v -> + (k -> UFMap k v -> Maybe (Compose (Either (UnionHaltReason k v)) (UnionValue k v) (UFMap k v))) -> + Either (UnionHaltReason k v) (UnionValue k v (UFMap k v)) + lu k m loopGuard = getCompose (alterFWithHalt k loopGuard luNothing luJust m) + where + luNothing = Compose (Left (KeyNotFound k)) + luJust k s v = + -- a final value thunk is inserted before it is resolved, + -- as the final result cannot be known before we have + -- looked up both values and merged them + let newValue = + let newSize = case kcanon0 == kcanon1 of + True -> size0 + False -> size0 + size1 + in case chosenCanon == k of + True -> Canonical newSize canonValue + False -> Indirection chosenCanon + in Compose (Right (UnionValue k s v newValue newValue)) + UnionValue kcanon0 size0 v0 vfinal0 map0 <- ExceptT $ pure $ lu k0 mapinit \_ _ -> Nothing + UnionValue kcanon1 size1 v1 vfinal1 map1 <- ExceptT $ + pure $ lu k1 map0 \k m -> case k == kcanon0 of + False -> Nothing + True -> Just (Compose (Right (UnionValue k size0 v0 vfinal0 m))) + -- Join the smaller equivalence class to the larger to bound + -- worst case number of lookups to log(n). This is the same + -- strategy as the weighted fast-union algorithm. 
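+      -- A worked instance of the size rule below, using only the local
+      -- bindings already in scope (size0, size1, kcanon0, kcanon1, v1).
+      -- Suppose the two classes are distinct, with size0 = 2 and size1 = 5:
+      --
+      --   size0 > size1 = False, so chosenCanon = kcanon1 and canonValue = v1
+      --   the entry at kcanon0 becomes Indirection kcanon1
+      --   the entry at kcanon1 becomes Canonical (size0 + size1) v1, i.e. size 7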
+ let (chosenCanon, canonValue, nonCanonValue) = case size0 > size1 of + True -> (kcanon0, v0, v1) + False -> (kcanon1, v1, v0) + map2 <- + let res = + ExceptT $ + mergeValues chosenCanon nonCanonValue map1 <&> \case + Nothing -> Left (MergeFailed v0 v1) + Just x -> Right x + in -- Now that both lookups have completed we can safely force the + -- final values + vfinal0 `seq` vfinal1 `seq` res + pure map2 + where + toMaybe :: ExceptT (UnionHaltReason k v) m r -> m (Maybe r) + toMaybe (ExceptT action) = + action <&> \case + Right m -> Just m + Left r -> case r of + KeyNotFound _k -> Nothing + MergeFailed _v0 _v1 -> Nothing + +-- | Dump the @UFmap@ to a list grouped by equivalence class +toClasses :: + forall k v. + (Ord k) => + UFMap k v -> + -- | [(canonical key, equivalence class, value)] + [(k, Set k, v)] +toClasses m0 = + let cmFinal :: Map k (k, Set k, v) + (_mfinal, cmFinal) = + -- we fold over the UFMap's keys and build up a Map that + -- groups the keys by equivalence class. + foldl' buildCmFinal (m0, Map.empty) keys + keys = case m0 of + UFMap m -> Map.keys m + buildCmFinal (m, cm) k = + let (kcanon, _, v, m') = fromJust (lookupCanon k m) + cm' = + Map.insertWith + (\(k0, s0, v0) (_k1, s1, _v1) -> (k0, s0 <> s1, v0)) + kcanon + (k, Set.singleton k, v) + cm + in (m', cm') + in Map.elems cmFinal + +keys :: UFMap k v -> [k] +keys (UFMap m) = Map.keys m diff --git a/parser-typechecker/src/Unison/PrettyPrintEnv/MonadPretty.hs b/parser-typechecker/src/Unison/PrettyPrintEnv/MonadPretty.hs index f6660a123..33e07d668 100644 --- a/parser-typechecker/src/Unison/PrettyPrintEnv/MonadPretty.hs +++ b/parser-typechecker/src/Unison/PrettyPrintEnv/MonadPretty.hs @@ -11,37 +11,37 @@ import Unison.Var (Var) type MonadPretty v m = (Var v, MonadReader (PrettyPrintEnv, Set v) m) -getPPE :: MonadPretty v m => m PrettyPrintEnv +getPPE :: (MonadPretty v m) => m PrettyPrintEnv getPPE = view _1 -- | Run a computation with a modified PrettyPrintEnv, restoring the original -withPPE :: MonadPretty v m => PrettyPrintEnv -> m a -> m a +withPPE :: (MonadPretty v m) => PrettyPrintEnv -> m a -> m a withPPE p = local (set _1 p) -applyPPE :: MonadPretty v m => (PrettyPrintEnv -> a) -> m a +applyPPE :: (MonadPretty v m) => (PrettyPrintEnv -> a) -> m a applyPPE = views _1 -applyPPE2 :: MonadPretty v m => (PrettyPrintEnv -> a -> b) -> a -> m b +applyPPE2 :: (MonadPretty v m) => (PrettyPrintEnv -> a -> b) -> a -> m b applyPPE2 f a = views _1 (`f` a) -applyPPE3 :: MonadPretty v m => (PrettyPrintEnv -> a -> b -> c) -> a -> b -> m c +applyPPE3 :: (MonadPretty v m) => (PrettyPrintEnv -> a -> b -> c) -> a -> b -> m c applyPPE3 f a b = views _1 (\ppe -> f ppe a b) -- | Run a computation with a modified PrettyPrintEnv, restoring the original -modifyPPE :: MonadPretty v m => (PrettyPrintEnv -> PrettyPrintEnv) -> m a -> m a +modifyPPE :: (MonadPretty v m) => (PrettyPrintEnv -> PrettyPrintEnv) -> m a -> m a modifyPPE = local . over _1 -modifyTypeVars :: MonadPretty v m => (Set v -> Set v) -> m a -> m a +modifyTypeVars :: (MonadPretty v m) => (Set v -> Set v) -> m a -> m a modifyTypeVars = local . over _2 -- | Add type variables to the set of variables that need to be avoided -addTypeVars :: MonadPretty v m => [v] -> m a -> m a +addTypeVars :: (MonadPretty v m) => [v] -> m a -> m a addTypeVars = modifyTypeVars . Set.union . 
Set.fromList -- | Check if a list of type variables contains any variables that need to be -- avoided -willCapture :: MonadPretty v m => [v] -> m Bool +willCapture :: (MonadPretty v m) => [v] -> m Bool willCapture vs = views _2 (not . Set.null . Set.intersection (Set.fromList vs)) -runPretty :: Var v => PrettyPrintEnv -> Reader (PrettyPrintEnv, Set v) a -> a -runPretty ppe m = runReader m (ppe, mempty) \ No newline at end of file +runPretty :: (Var v) => PrettyPrintEnv -> Reader (PrettyPrintEnv, Set v) a -> a +runPretty ppe m = runReader m (ppe, mempty) diff --git a/parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs b/parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs index e213ecfad..732cbbb13 100644 --- a/parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs +++ b/parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs @@ -58,5 +58,5 @@ fromSuffixNames len names = PrettyPrintEnv terms' types' -- | Reduce the provided names to their minimal unique suffix within the scope of the given -- relation. -shortestUniqueSuffixes :: Ord ref => ref -> Rel.Relation Name ref -> [(a, HQ'.HashQualified Name)] -> [(a, HQ'.HashQualified Name)] +shortestUniqueSuffixes :: (Ord ref) => ref -> Rel.Relation Name ref -> [(a, HQ'.HashQualified Name)] -> [(a, HQ'.HashQualified Name)] shortestUniqueSuffixes ref rel names = names <&> second (fmap (\name -> Name.shortestUniqueSuffix name ref rel)) diff --git a/parser-typechecker/src/Unison/PrintError.hs b/parser-typechecker/src/Unison/PrintError.hs index 6b33c546d..4c52a7ccf 100644 --- a/parser-typechecker/src/Unison/PrintError.hs +++ b/parser-typechecker/src/Unison/PrintError.hs @@ -17,7 +17,7 @@ import qualified Data.Text as Text import Data.Void (Void) import qualified Text.Megaparsec as P import qualified Unison.ABT as ABT -import Unison.Builtin.Decls (pattern TupleType', unitRef) +import Unison.Builtin.Decls (unitRef, pattern TupleType') import qualified Unison.Codebase.Path as Path import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..)) import Unison.HashQualified (HashQualified) @@ -29,6 +29,7 @@ import qualified Unison.Names as Names import qualified Unison.Names.ResolutionResult as Names import qualified Unison.NamesWithHistory as NamesWithHistory import Unison.Parser.Ann (Ann (..)) +import Unison.Pattern (Pattern) import Unison.Prelude import qualified Unison.PrettyPrintEnv as PPE import qualified Unison.PrettyPrintEnv.Names as PPE @@ -37,6 +38,7 @@ import Unison.Referent (Referent, pattern Ref) import Unison.Result (Note (..)) import qualified Unison.Result as Result import qualified Unison.Settings as Settings +import Unison.Symbol (Symbol) import qualified Unison.Syntax.HashQualified as HQ (toString) import qualified Unison.Syntax.Lexer as L import qualified Unison.Syntax.Name as Name (toText) @@ -89,11 +91,11 @@ defaultWidth :: Pr.Width defaultWidth = 60 -- Various links used in error messages, collected here for a quick overview -structuralVsUniqueDocsLink :: IsString a => Pretty a +structuralVsUniqueDocsLink :: (IsString a) => Pretty a structuralVsUniqueDocsLink = "https://www.unison-lang.org/learn/language-reference/unique-types/" fromOverHere' :: - Ord a => + (Ord a) => String -> [Maybe (Range, a)] -> [Maybe (Range, a)] -> @@ -102,7 +104,7 @@ fromOverHere' s spots0 removing = fromOverHere s (catMaybes spots0) (catMaybes removing) fromOverHere :: - Ord a => String -> [(Range, a)] -> [(Range, a)] -> Pretty (AnnotatedText a) + (Ord a) => String -> [(Range, a)] -> [(Range, a)] -> Pretty (AnnotatedText a) fromOverHere 
src spots0 removing = let spots = toList $ Set.fromList spots0 Set.\\ Set.fromList removing in case length spots of @@ -122,7 +124,7 @@ showTypeWithProvenance env src color typ = <> ".\n" <> fromOverHere' src [styleAnnotated color typ] [] -styleAnnotated :: Annotated a => sty -> a -> Maybe (Range, sty) +styleAnnotated :: (Annotated a) => sty -> a -> Maybe (Range, sty) styleAnnotated sty a = (,sty) <$> rangeForAnnotated a style :: s -> String -> Pretty (AnnotatedText s) @@ -147,7 +149,8 @@ renderTypeInfo :: renderTypeInfo i env = case i of TopLevelComponent {..} -> case definitions of [def] -> - Pr.wrap "🌟 I found and typechecked a definition:" <> Pr.newline + Pr.wrap "🌟 I found and typechecked a definition:" + <> Pr.newline <> mconcat (renderOne def) [] -> mempty @@ -156,7 +159,7 @@ renderTypeInfo i env = case i of <> Pr.newline <> intercalateMap Pr.newline (foldMap ("\t" <>) . renderOne) definitions where - renderOne :: IsString s => (v, Type v loc, RedundantTypeAnnotation) -> [s] + renderOne :: (IsString s) => (v, Type v loc, RedundantTypeAnnotation) -> [s] renderOne (v, typ, _) = [fromString . Text.unpack $ Var.name v, " : ", renderType' env typ] @@ -286,7 +289,9 @@ renderTypeError e env src curPath = case e of in mconcat [ Pr.lines [ Pr.wrap $ - "The " <> ordinal argNum <> " argument to " + "The " + <> ordinal argNum + <> " argument to " <> Pr.backticked (style ErrorSite (renderTerm env f)), "", " has type: " <> style Type2 (renderType' env foundType), @@ -400,17 +405,20 @@ renderTypeError e env src curPath = case e of ], debugSummary note ] - where - unitHintMsg = - "\nHint: Actions within a block must have type " <> - style Type2 (renderType' env expectedLeaf) <> ".\n" <> - " Use " <> style Type1 "_ = " <> " to ignore a result." - unitHint = if giveUnitHint then unitHintMsg else "" - giveUnitHint = case expectedType of - Type.Ref' u | u == unitRef -> case mismatchSite of - Term.Let1Named' v _ _ -> Var.isAction v - _ -> False + where + unitHintMsg = + "\nHint: Actions within a block must have type " + <> style Type2 (renderType' env expectedLeaf) + <> ".\n" + <> " Use " + <> style Type1 "_ = " + <> " to ignore a result." + unitHint = if giveUnitHint then unitHintMsg else "" + giveUnitHint = case expectedType of + Type.Ref' u | u == unitRef -> case mismatchSite of + Term.Let1Named' v _ _ -> Var.isAction v _ -> False + _ -> False AbilityCheckFailure {..} | [tv@(Type.Var' ev)] <- ambient, ev `Set.member` foldMap Type.freeVars requested -> @@ -580,10 +588,28 @@ renderTypeError e env src curPath = case e of if ann typeSite == External then "I don't know about the type " <> style ErrorSite (renderVar unknownTypeV) <> ". " else - "I don't know about the type " <> style ErrorSite (renderVar unknownTypeV) <> ":\n" + "I don't know about the type " + <> style ErrorSite (renderVar unknownTypeV) + <> ":\n" <> annotatedAsErrorSite src typeSite, "Make sure it's imported and spelled correctly." 
] + UncoveredPatterns loc tms -> + mconcat + [ Pr.hang + "Pattern match doesn't cover all possible cases:" + (annotatedAsErrorSite src loc), + "\n\n" + ] + <> Pr.hang + "Patterns not matched:\n" + ( Pr.bulleted + (map (\x -> Pr.lit (renderPattern env x)) (Nel.toList tms)) + ) + RedundantPattern loc -> + Pr.hang + "This case would be ignored because it's already covered by the preceding case(s):" + (annotatedAsErrorSite src loc) UnknownTerm {..} -> let (correct, wrongTypes, wrongNames) = foldr sep id suggestions ([], [], []) @@ -657,7 +683,8 @@ renderTypeError e env src curPath = case e of "", annotatedAsErrorSite src loc, Pr.wrap $ - "has type " <> stylePretty ErrorSite (Pr.group (renderType' env typ)) + "has type " + <> stylePretty ErrorSite (Pr.group (renderType' env typ)) <> "but I'm expecting a function of the form" <> Pr.group (Pr.blue (renderType' env exHandler) <> ".") ] @@ -801,6 +828,26 @@ renderTypeError e env src curPath = case e of -- C.InMatchBody -> "InMatchBody" simpleCause :: C.Cause v loc -> Pretty ColorText simpleCause = \case + C.UncoveredPatterns loc tms -> + mconcat + [ "Incomplete pattern matches:\n", + annotatedAsErrorSite src loc, + "\n\n", + "Uncovered cases:\n" + ] + <> Pr.sep "\n" (map (\x -> Pr.lit (renderPattern env x)) (Nel.toList tms)) + C.RedundantPattern loc -> + mconcat + [ "Redundant pattern match: ", + "\n", + annotatedAsErrorSite src loc + ] + C.InaccessiblePattern loc -> + mconcat + [ "Inaccessible pattern match: ", + "\n", + annotatedAsErrorSite src loc + ] C.TypeMismatch c -> mconcat ["TypeMismatch\n", " context:\n", renderContext env c] C.HandlerOfUnexpectedType loc typ -> @@ -927,7 +974,7 @@ renderCompilerBug env _src bug = mconcat $ case bug of C.Data -> " data type" C.Effect -> " ability", "\n", - " reerence = ", + " reference = ", showTypeRef env rf ] C.UnknownConstructor sort (ConstructorReference rf i) _decl -> @@ -1011,13 +1058,16 @@ renderContext env ctx@(C.Context es) = shortName v <> " : " <> renderType' env (C.apply ctx t) showElem _ (C.Marker v) = "|" <> shortName v <> "|" -renderTerm :: (IsString s, Var v) => Env -> C.Term v loc -> s +renderTerm :: (IsString s, Var v) => Env -> Term.Term' (TypeVar.TypeVar loc0 v) v loc1 -> s renderTerm env e = let s = Color.toPlain $ TermPrinter.pretty' (Just 80) env (TypeVar.lowerTerm e) in if length s > Settings.renderTermMaxLength then fromString (take Settings.renderTermMaxLength s <> "...") else fromString s +renderPattern :: Env -> Pattern ann -> ColorText +renderPattern env e = Pr.renderUnbroken . Pr.syntaxToColor . fst $ TermPrinter.prettyPattern env TermPrinter.emptyAc 0 ([] :: [Symbol]) e + -- | renders a type with no special styling renderType' :: (IsString s, Var v) => Env -> Type v loc -> s renderType' env typ = @@ -1026,7 +1076,7 @@ renderType' env typ = -- | `f` may do some styling based on `loc`. -- | You can pass `(const id)` if no styling is needed, or call `renderType'`. 
renderType :: - Var v => + (Var v) => Env -> (loc -> Pretty (AnnotatedText a) -> Pretty (AnnotatedText a)) -> Type v loc -> @@ -1065,7 +1115,8 @@ renderType env f t = renderType0 env f (0 :: Int) (Type.removePureEffects t) renderSuggestion :: (IsString s, Semigroup s, Var v) => Env -> C.Suggestion v loc -> s renderSuggestion env sug = - fromString (Text.unpack $ C.suggestionName sug) <> " : " + fromString (Text.unpack $ C.suggestionName sug) + <> " : " <> renderType' env (C.suggestionType sug) @@ -1087,21 +1138,21 @@ renderVar' env ctx v = case C.lookupSolved ctx v of Nothing -> "unsolved" Just t -> renderType' env $ Type.getPolytype t -prettyVar :: Var v => v -> Pretty ColorText +prettyVar :: (Var v) => v -> Pretty ColorText prettyVar = Pr.text . Var.name renderKind :: Kind -> Pretty (AnnotatedText a) renderKind Kind.Star = "*" renderKind (Kind.Arrow k1 k2) = renderKind k1 <> " -> " <> renderKind k2 -showTermRef :: IsString s => Env -> Referent -> s +showTermRef :: (IsString s) => Env -> Referent -> s showTermRef env r = fromString . HQ.toString $ PPE.termName env r -showTypeRef :: IsString s => Env -> R.Reference -> s +showTypeRef :: (IsString s) => Env -> R.Reference -> s showTypeRef env r = fromString . HQ.toString $ PPE.typeName env r -- todo: do something different/better if cid not found -showConstructor :: IsString s => Env -> ConstructorReference -> s +showConstructor :: (IsString s) => Env -> ConstructorReference -> s showConstructor env r = fromString . HQ.toString $ PPE.patternName env r @@ -1117,14 +1168,14 @@ styleInOverallType e overallType leafType c = renderType e f overallType where f loc s = if loc == ABT.annotation leafType then Color.style c <$> s else s -_posToEnglish :: IsString s => L.Pos -> s +_posToEnglish :: (IsString s) => L.Pos -> s _posToEnglish (L.Pos l c) = fromString $ "Line " ++ show l ++ ", Column " ++ show c rangeForToken :: L.Token a -> Range rangeForToken t = Range (L.start t) (L.end t) -rangeToEnglish :: IsString s => Range -> s +rangeToEnglish :: (IsString s) => Range -> s rangeToEnglish (Range (L.Pos l c) (L.Pos l' c')) = fromString $ let showColumn = True @@ -1156,7 +1207,7 @@ annotatedToEnglish a = case ann a of External -> "an external" Ann start end -> rangeToEnglish $ Range start end -rangeForAnnotated :: Annotated a => a -> Maybe Range +rangeForAnnotated :: (Annotated a) => a -> Maybe Range rangeForAnnotated a = case ann a of Intrinsic -> Nothing External -> Nothing @@ -1175,7 +1226,7 @@ renderNoteAsANSI :: String renderNoteAsANSI w e s curPath n = Pr.toANSI w $ printNoteWithSource e s curPath n -renderParseErrorAsANSI :: Var v => Pr.Width -> String -> Parser.Err v -> String +renderParseErrorAsANSI :: (Var v) => Pr.Width -> String -> Parser.Err v -> String renderParseErrorAsANSI w src = Pr.toANSI w . prettyParseError src printNoteWithSource :: @@ -1214,13 +1265,13 @@ _printArrowsAtPos s line column = pattern LexerError :: [L.Token L.Lexeme] -> L.Err -> Maybe (P.ErrorItem (L.Token L.Lexeme)) pattern LexerError ts e <- Just (P.Tokens (firstLexerError -> Just (ts, e))) -firstLexerError :: Foldable t => t (L.Token L.Lexeme) -> Maybe ([L.Token L.Lexeme], L.Err) +firstLexerError :: (Foldable t) => t (L.Token L.Lexeme) -> Maybe ([L.Token L.Lexeme], L.Err) firstLexerError ts = find (const True) [(toList ts, e) | (L.payload -> L.Err e) <- toList ts] prettyParseError :: forall v. - Var v => + (Var v) => String -> Parser.Err v -> Pretty ColorText @@ -1235,7 +1286,7 @@ prettyParseError s e = renderParseErrors :: forall v. 
- Var v => + (Var v) => String -> Parser.Err v -> [(Pretty ColorText, [Range])] @@ -1246,11 +1297,13 @@ renderParseErrors s = \case excerpt = showSource s ((\r -> (r, ErrorSite)) <$> ranges) go = \case L.UnexpectedDelimiter s -> - "I found a " <> style ErrorSite (fromString s) + "I found a " + <> style ErrorSite (fromString s) <> " here, but I didn't see a list or tuple that it might be a separator for.\n\n" <> excerpt L.CloseWithoutMatchingOpen open close -> - "I found a closing " <> style ErrorSite (fromString close) + "I found a closing " + <> style ErrorSite (fromString close) <> " here without a matching " <> style ErrorSite (fromString open) <> ".\n\n" @@ -1476,7 +1529,9 @@ renderParseErrors s = \case "You can write" <> Pr.group ( Pr.blue $ - "use " <> Pr.text (Name.toText (Name.makeRelative parent)) <> " " + "use " + <> Pr.text (Name.toText (Name.makeRelative parent)) + <> " " <> Pr.text (Name.toText (Name.unqualified (L.payload tok))) ) <> "to introduce " @@ -1516,7 +1571,9 @@ renderParseErrors s = \case where ranges = ts >>= snd >>= toList . rangeForAnnotated showDup (v, locs) = - "I found multiple types with the name " <> errorVar v <> ":\n\n" + "I found multiple types with the name " + <> errorVar v + <> ":\n\n" <> annotatedsStartingLineAsStyle ErrorSite s locs go (Parser.DuplicateTermNames ts) = (Pr.fatalCallout $ intercalateMap "\n\n" showDup ts, ranges) @@ -1575,7 +1632,9 @@ renderParseErrors s = \case "\n - A binding, like " <> t <> style Code " = 42" <> " OR", "\n " <> t <> style Code " : Nat", "\n " <> t <> style Code " = 42", - "\n - A watch expression, like " <> style Code "> " <> t + "\n - A watch expression, like " + <> style Code "> " + <> t <> style Code " + 1", @@ -1637,7 +1696,7 @@ renderParseErrors s = \case <> style ErrorSite "match" <> "/" <> style ErrorSite "with" - <> " but I didn't find any." + <> " or cases but I didn't find any." ), "", tokenAsErrorSite s tok @@ -1735,7 +1794,7 @@ renderParseErrors s = \case ] annotatedAsErrorSite :: - Annotated a => String -> a -> Pretty ColorText + (Annotated a) => String -> a -> Pretty ColorText annotatedAsErrorSite = annotatedAsStyle ErrorSite annotatedAsStyle :: @@ -1769,17 +1828,17 @@ tokensAsErrorSite src ts = showSource src [(rangeForToken t, ErrorSite) | t <- ts] showSourceMaybes :: - Ord a => String -> [Maybe (Range, a)] -> Pretty (AnnotatedText a) + (Ord a) => String -> [Maybe (Range, a)] -> Pretty (AnnotatedText a) showSourceMaybes src annotations = showSource src $ catMaybes annotations -showSource :: Ord a => String -> [(Range, a)] -> Pretty (AnnotatedText a) +showSource :: (Ord a) => String -> [(Range, a)] -> Pretty (AnnotatedText a) showSource src annotations = Pr.lit . AT.condensedExcerptToText 6 $ AT.markup (fromString src) (Map.fromList annotations) -showSource1 :: Ord a => String -> (Range, a) -> Pretty (AnnotatedText a) +showSource1 :: (Ord a) => String -> (Range, a) -> Pretty (AnnotatedText a) showSource1 src annotation = showSource src [annotation] findTerm :: Seq (C.PathElement v loc) -> Maybe loc diff --git a/parser-typechecker/src/Unison/Result.hs b/parser-typechecker/src/Unison/Result.hs index 9004d42bd..11fe80dcf 100644 --- a/parser-typechecker/src/Unison/Result.hs +++ b/parser-typechecker/src/Unison/Result.hs @@ -43,41 +43,41 @@ pattern Result notes may = MaybeT (WriterT (Identity (may, notes))) {-# COMPLETE Result #-} -isSuccess :: Functor f => ResultT note f a -> f Bool +isSuccess :: (Functor f) => ResultT note f a -> f Bool isSuccess = (isJust . fst <$>) . 
runResultT -isFailure :: Functor f => ResultT note f a -> f Bool +isFailure :: (Functor f) => ResultT note f a -> f Bool isFailure = (isNothing . fst <$>) . runResultT -toMaybe :: Functor f => ResultT note f a -> f (Maybe a) +toMaybe :: (Functor f) => ResultT note f a -> f (Maybe a) toMaybe = (fst <$>) . runResultT runResultT :: ResultT notes f a -> f (Maybe a, notes) runResultT = runWriterT . runMaybeT -- Returns the `Result` in the `f` functor. -getResult :: Functor f => ResultT notes f a -> f (Result notes a) +getResult :: (Functor f) => ResultT notes f a -> f (Result notes a) getResult r = uncurry (flip Result) <$> runResultT r -toEither :: Functor f => ResultT notes f a -> ExceptT notes f a +toEither :: (Functor f) => ResultT notes f a -> ExceptT notes f a toEither r = ExceptT (go <$> runResultT r) where go (may, notes) = note notes may -tell1 :: Monad f => note -> ResultT (Seq note) f () +tell1 :: (Monad f) => note -> ResultT (Seq note) f () tell1 = tell . pure fromParsing :: - Monad f => Either (Parser.Err v) a -> ResultT (Seq (Note v loc)) f a + (Monad f) => Either (Parser.Err v) a -> ResultT (Seq (Note v loc)) f a fromParsing (Left e) = do tell1 $ Parsing e Fail.fail "" fromParsing (Right a) = pure a -tellAndFail :: Monad f => note -> ResultT (Seq note) f a +tellAndFail :: (Monad f) => note -> ResultT (Seq note) f a tellAndFail note = tell1 note *> Fail.fail "Elegantly and responsibly" -compilerBug :: Monad f => CompilerBug v loc -> ResultT (Seq (Note v loc)) f a +compilerBug :: (Monad f) => CompilerBug v loc -> ResultT (Seq (Note v loc)) f a compilerBug = tellAndFail . CompilerBug hoist :: diff --git a/parser-typechecker/src/Unison/Runtime/ANF.hs b/parser-typechecker/src/Unison/Runtime/ANF.hs index ee59c2631..bd0928397 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF.hs @@ -58,6 +58,8 @@ module Unison.Runtime.ANF ANFM, Branched (.., MatchDataCover), Func (..), + SGEqv (..), + equivocate, superNormalize, anfTerm, valueTermLinks, @@ -66,6 +68,8 @@ module Unison.Runtime.ANF groupLinks, normalLinks, prettyGroup, + prettySuperNormal, + prettyANF, ) where @@ -603,7 +607,7 @@ data ANormalF v e | AApp (Func v) [v] | AFrc v | AVar v - deriving (Show) + deriving (Show, Eq) -- Types representing components that will go into the runtime tag of -- a data type value. 
RTags correspond to references, while CTags @@ -704,6 +708,122 @@ instance Bifoldable ANormalF where bifoldMap f _ (AFrc v) = f v bifoldMap f _ (AApp func args) = foldMap f func <> foldMap f args +instance ABTN.Align ANormalF where + align f _ (AVar u) (AVar v) = Just $ AVar <$> f u v + align _ _ (ALit l) (ALit r) + | l == r = Just $ pure (ALit l) + align _ g (ALet dl ccl bl el) (ALet dr ccr br er) + | dl == dr, + ccl == ccr = + Just $ ALet dl ccl <$> g bl br <*> g el er + align f g (AName hl asl el) (AName hr asr er) + | length asl == length asr, + Just hs <- alignEither f hl hr = + Just $ + AName + <$> hs + <*> traverse (uncurry f) (zip asl asr) + <*> g el er + align f g (AMatch vl bsl) (AMatch vr bsr) + | Just bss <- alignBranch g bsl bsr = + Just $ AMatch <$> f vl vr <*> bss + align f g (AHnd rl hl bl) (AHnd rr hr br) + | rl == rr = Just $ AHnd rl <$> f hl hr <*> g bl br + align _ g (AShift rl bl) (AShift rr br) + | rl == rr = Just $ AShift rl <$> g bl br + align f _ (AFrc u) (AFrc v) = Just $ AFrc <$> f u v + align f _ (AApp hl asl) (AApp hr asr) + | Just hs <- alignFunc f hl hr, + length asl == length asr = + Just $ AApp <$> hs <*> traverse (uncurry f) (zip asl asr) + align _ _ _ _ = Nothing + +alignEither :: + (Applicative f) => + (l -> r -> f s) -> + Either Reference l -> + Either Reference r -> + Maybe (f (Either Reference s)) +alignEither _ (Left rl) (Left rr) | rl == rr = Just . pure $ Left rl +alignEither f (Right u) (Right v) = Just $ Right <$> f u v +alignEither _ _ _ = Nothing + +alignMaybe :: + (Applicative f) => + (l -> r -> f s) -> + Maybe l -> + Maybe r -> + Maybe (f (Maybe s)) +alignMaybe f (Just l) (Just r) = Just $ Just <$> f l r +alignMaybe _ Nothing Nothing = Just (pure Nothing) +alignMaybe _ _ _ = Nothing + +alignFunc :: + (Applicative f) => + (vl -> vr -> f vs) -> + Func vl -> + Func vr -> + Maybe (f (Func vs)) +alignFunc f (FVar u) (FVar v) = Just $ FVar <$> f u v +alignFunc _ (FComb rl) (FComb rr) | rl == rr = Just . pure $ FComb rl +alignFunc f (FCont u) (FCont v) = Just $ FCont <$> f u v +alignFunc _ (FCon rl tl) (FCon rr tr) + | rl == rr, tl == tr = Just . pure $ FCon rl tl +alignFunc _ (FReq rl tl) (FReq rr tr) + | rl == rr, tl == tr = Just . pure $ FReq rl tl +alignFunc _ (FPrim ol) (FPrim or) + | ol == or = Just . pure $ FPrim ol +alignFunc _ _ _ = Nothing + +alignBranch :: + (Applicative f) => + (el -> er -> f es) -> + Branched el -> + Branched er -> + Maybe (f (Branched es)) +alignBranch _ MatchEmpty MatchEmpty = Just $ pure MatchEmpty +alignBranch f (MatchIntegral bl dl) (MatchIntegral br dr) + | keysSet bl == keysSet br, + Just ds <- alignMaybe f dl dr = + Just $ + MatchIntegral + <$> interverse f bl br + <*> ds +alignBranch f (MatchText bl dl) (MatchText br dr) + | Map.keysSet bl == Map.keysSet br, + Just ds <- alignMaybe f dl dr = + Just $ + MatchText + <$> traverse id (Map.intersectionWith f bl br) + <*> ds +alignBranch f (MatchRequest bl pl) (MatchRequest br pr) + | Map.keysSet bl == Map.keysSet br, + all p (Map.keysSet bl) = + Just $ + MatchRequest + <$> traverse id (Map.intersectionWith (interverse (alignCCs f)) bl br) + <*> f pl pr + where + p r = keysSet hsl == keysSet hsr && all q (keys hsl) + where + hsl = bl Map.! r + hsr = br Map.! r + q t = fst (hsl ! t) == fst (hsr ! t) +alignBranch f (MatchData rfl bl dl) (MatchData rfr br dr) + | rfl == rfr, + keysSet bl == keysSet br, + all (\t -> fst (bl ! t) == fst (br ! 
t)) (keys bl), + Just ds <- alignMaybe f dl dr = + Just $ MatchData rfl <$> interverse (alignCCs f) bl br <*> ds +alignBranch f (MatchSum bl) (MatchSum br) + | keysSet bl == keysSet br, + all (\w -> fst (bl ! w) == fst (br ! w)) (keys bl) = + Just $ MatchSum <$> interverse (alignCCs f) bl br +alignBranch _ _ _ = Nothing + +alignCCs :: (Functor f) => (l -> r -> f s) -> (a, l) -> (a, r) -> f (a, s) +alignCCs f (ccs, l) (_, r) = (,) ccs <$> f l r + matchLit :: Term v a -> Maybe Lit matchLit (Int' i) = Just $ I i matchLit (Nat' n) = Just $ N n @@ -930,7 +1050,7 @@ data Branched e | MatchEmpty | MatchData Reference (EnumMap CTag ([Mem], e)) (Maybe e) | MatchSum (EnumMap Word64 ([Mem], e)) - deriving (Show, Functor, Foldable, Traversable) + deriving (Show, Eq, Functor, Foldable, Traversable) -- Data cases expected to cover all constructors pattern MatchDataCover :: Reference -> EnumMap CTag ([Mem], e) -> Branched e @@ -1041,7 +1161,7 @@ data Func v FReq !Reference !CTag | -- prim op FPrim (Either POp FOp) - deriving (Show, Functor, Foldable, Traversable) + deriving (Show, Eq, Functor, Foldable, Traversable) data Lit = I Int64 @@ -1051,7 +1171,7 @@ data Lit | C Char | LM Referent | LY Reference - deriving (Show) + deriving (Show, Eq) litRef :: Lit -> Reference litRef (I _) = Ty.intRef @@ -1196,6 +1316,7 @@ data POp | PRNT | INFO | TRCE + | DBTX | -- STM ATOM | TFRC -- try force @@ -1230,7 +1351,7 @@ type DNormal v = Directed () (ANormal v) -- Should be a completely closed term data SuperNormal v = Lambda {conventions :: [Mem], bound :: ANormal v} - deriving (Show) + deriving (Show, Eq) data SuperGroup v = Rec { group :: [(v, SuperNormal v)], @@ -1238,6 +1359,42 @@ data SuperGroup v = Rec } deriving (Show) +instance (Var v) => Eq (SuperGroup v) where + g0 == g1 | Left _ <- equivocate g0 g1 = False | otherwise = True + +-- Failure modes for SuperGroup alpha equivalence test +data SGEqv v + = -- mismatch number of definitions in group + NumDefns (SuperGroup v) (SuperGroup v) + | -- mismatched SuperNormal calling conventions + DefnConventions (SuperNormal v) (SuperNormal v) + | -- mismatched subterms in corresponding definition + Subterms (ANormal v) (ANormal v) + +-- Checks if two SuperGroups are equivalent up to renaming. The rest +-- of the structure must match on the nose. If the two groups are not +-- equivalent, an example of conflicting structure is returned. 
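+--
+-- A hypothetical usage sketch (describeMismatch is illustrative only and
+-- uses just the SGEqv constructors defined above):
+--
+-- > describeMismatch :: (Var v) => SuperGroup v -> SuperGroup v -> String
+-- > describeMismatch g0 g1 = case equivocate g0 g1 of
+-- >   Right () -> "alpha-equivalent"
+-- >   Left (NumDefns _ _) -> "different numbers of definitions"
+-- >   Left (DefnConventions _ _) -> "mismatched calling conventions"
+-- >   Left (Subterms _ _) -> "mismatched subterms in a corresponding definition"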
+equivocate :: + (Var v) => + SuperGroup v -> + SuperGroup v -> + Either (SGEqv v) () +equivocate g0@(Rec bs0 e0) g1@(Rec bs1 e1) + | length bs0 == length bs1 = + traverse_ eqvSN (zip ns0 ns1) *> eqvSN (e0, e1) + | otherwise = Left $ NumDefns g0 g1 + where + (vs0, ns0) = unzip bs0 + (vs1, ns1) = unzip bs1 + vm = Map.fromList (zip vs1 vs0) + + promote (Left (l, r)) = Left $ Subterms l r + promote (Right v) = Right v + + eqvSN (Lambda ccs0 e0, Lambda ccs1 e1) + | ccs0 == ccs1 = promote $ ABTN.alpha vm e0 e1 + eqvSN (n0, n1) = Left $ DefnConventions n0 n1 + type ANFM v = ReaderT (Set v) diff --git a/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs b/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs index 201335dcb..0bfcdc70c 100644 --- a/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs +++ b/parser-typechecker/src/Unison/Runtime/ANF/Serialize.hs @@ -216,7 +216,7 @@ instance Tag CoTag where 2 -> pure PushT t -> unknownTag "CoTag" t -index :: Eq v => [v] -> v -> Maybe Word64 +index :: (Eq v) => [v] -> v -> Maybe Word64 index ctx u = go 0 ctx where go !_ [] = Nothing @@ -224,7 +224,7 @@ index ctx u = go 0 ctx | v == u = Just n | otherwise = go (n + 1) vs -deindex :: HasCallStack => [v] -> Word64 -> v +deindex :: (HasCallStack) => [v] -> Word64 -> v deindex [] _ = exn "deindex: bad index" deindex (v : vs) n | n == 0 = v @@ -233,34 +233,34 @@ deindex (v : vs) n pushCtx :: [v] -> [v] -> [v] pushCtx us vs = reverse us ++ vs -putIndex :: MonadPut m => Word64 -> m () +putIndex :: (MonadPut m) => Word64 -> m () putIndex = serialize . VarInt -getIndex :: MonadGet m => m Word64 +getIndex :: (MonadGet m) => m Word64 getIndex = unVarInt <$> deserialize -putVar :: MonadPut m => Eq v => [v] -> v -> m () +putVar :: (MonadPut m) => (Eq v) => [v] -> v -> m () putVar ctx v | Just i <- index ctx v = putIndex i | otherwise = exn "putVar: variable not in context" -getVar :: MonadGet m => [v] -> m v +getVar :: (MonadGet m) => [v] -> m v getVar ctx = deindex ctx <$> getIndex -putArgs :: MonadPut m => Eq v => [v] -> [v] -> m () +putArgs :: (MonadPut m) => (Eq v) => [v] -> [v] -> m () putArgs ctx is = putFoldable (putVar ctx) is -getArgs :: MonadGet m => [v] -> m [v] +getArgs :: (MonadGet m) => [v] -> m [v] getArgs ctx = getList (getVar ctx) -putCCs :: MonadPut m => [Mem] -> m () +putCCs :: (MonadPut m) => [Mem] -> m () putCCs ccs = putLength n *> traverse_ putCC ccs where n = length ccs putCC UN = putWord8 0 putCC BX = putWord8 1 -getCCs :: MonadGet m => m [Mem] +getCCs :: (MonadGet m) => m [Mem] getCCs = getList $ getWord8 <&> \case @@ -269,8 +269,8 @@ getCCs = _ -> exn "getCCs: bad calling convention" putGroup :: - MonadPut m => - Var v => + (MonadPut m) => + (Var v) => EC.EnumMap FOp Text -> SuperGroup v -> m () @@ -281,7 +281,7 @@ putGroup fops (Rec bs e) = (us, cs) = unzip bs ctx = pushCtx us [] -getGroup :: MonadGet m => Var v => m (SuperGroup v) +getGroup :: (MonadGet m) => (Var v) => m (SuperGroup v) getGroup = do l <- getLength let n = fromIntegral l @@ -291,8 +291,8 @@ getGroup = do Rec (zip vs cs) <$> getComb ctx n putComb :: - MonadPut m => - Var v => + (MonadPut m) => + (Var v) => EC.EnumMap FOp Text -> [v] -> SuperNormal v -> @@ -300,10 +300,10 @@ putComb :: putComb fops ctx (Lambda ccs (TAbss us e)) = putCCs ccs *> putNormal fops (pushCtx us ctx) e -getFresh :: Var v => Word64 -> v +getFresh :: (Var v) => Word64 -> v getFresh n = freshenId n $ typed ANFBlank -getComb :: MonadGet m => Var v => [v] -> Word64 -> m (SuperNormal v) +getComb :: (MonadGet m) => (Var v) => [v] -> Word64 -> m 
(SuperNormal v) getComb ctx frsh0 = do ccs <- getCCs let us = zipWith (\_ -> getFresh) ccs [frsh0 ..] @@ -311,8 +311,8 @@ getComb ctx frsh0 = do Lambda ccs . TAbss us <$> getNormal (pushCtx us ctx) frsh putNormal :: - MonadPut m => - Var v => + (MonadPut m) => + (Var v) => EC.EnumMap FOp Text -> [v] -> ANormal v -> @@ -328,20 +328,29 @@ putNormal fops ctx tm = case tm of TMatch v bs -> putTag MatchT *> putVar ctx v *> putBranches fops ctx bs TLit l -> putTag LitT *> putLit l TName v (Left r) as e -> - putTag NameRefT *> putReference r *> putArgs ctx as + putTag NameRefT + *> putReference r + *> putArgs ctx as *> putNormal fops (v : ctx) e TName v (Right u) as e -> - putTag NameVarT *> putVar ctx u *> putArgs ctx as + putTag NameVarT + *> putVar ctx u + *> putArgs ctx as *> putNormal fops (v : ctx) e TLets Direct us ccs l e -> - putTag LetDirT *> putCCs ccs *> putNormal fops ctx l + putTag LetDirT + *> putCCs ccs + *> putNormal fops ctx l *> putNormal fops (pushCtx us ctx) e TLets (Indirect w) us ccs l e -> - putTag LetIndT *> putWord16be w *> putCCs ccs *> putNormal fops ctx l + putTag LetIndT + *> putWord16be w + *> putCCs ccs + *> putNormal fops ctx l *> putNormal fops (pushCtx us ctx) e _ -> exn "putNormal: malformed term" -getNormal :: MonadGet m => Var v => [v] -> Word64 -> m (ANormal v) +getNormal :: (MonadGet m) => (Var v) => [v] -> Word64 -> m (ANormal v) getNormal ctx frsh0 = getTag >>= \case VarT -> TVar <$> getVar ctx @@ -387,8 +396,8 @@ getNormal ctx frsh0 = <*> getNormal (pushCtx us ctx) frsh putFunc :: - MonadPut m => - Var v => + (MonadPut m) => + (Var v) => EC.EnumMap FOp Text -> [v] -> Func v -> @@ -406,7 +415,7 @@ putFunc fops ctx f = case f of | otherwise -> exn $ "putFunc: could not serialize foreign operation: " ++ show f -getFunc :: MonadGet m => Var v => [v] -> m (Func v) +getFunc :: (MonadGet m) => (Var v) => [v] -> m (Func v) getFunc ctx = getTag >>= \case FVarT -> FVar <$> getVar ctx @@ -417,12 +426,12 @@ getFunc ctx = FPrimT -> FPrim . Left <$> getPOp FForeignT -> exn "getFunc: can't deserialize a foreign func" -putPOp :: MonadPut m => POp -> m () +putPOp :: (MonadPut m) => POp -> m () putPOp op | Just w <- Map.lookup op pop2word = putWord16be w | otherwise = exn $ "putPOp: unknown POp: " ++ show op -getPOp :: MonadGet m => m POp +getPOp :: (MonadGet m) => m POp getPOp = getWord16be >>= \w -> case Map.lookup w word2pop of Just op -> pure op @@ -549,6 +558,7 @@ pOpCode op = case op of TRCE -> 116 ATOM -> 117 TFRC -> 118 + DBTX -> 119 pOpAssoc :: [(POp, Word16)] pOpAssoc = map (\op -> (op, pOpCode op)) [minBound .. 
maxBound] @@ -561,7 +571,7 @@ word2pop = fromList $ swap <$> pOpAssoc where swap (x, y) = (y, x) -putLit :: MonadPut m => Lit -> m () +putLit :: (MonadPut m) => Lit -> m () putLit (I i) = putTag IT *> putInt i putLit (N n) = putTag NT *> putNat n putLit (F f) = putTag FT *> putFloat f @@ -570,7 +580,7 @@ putLit (C c) = putTag CT *> putChar c putLit (LM r) = putTag LMT *> putReferent r putLit (LY r) = putTag LYT *> putReference r -getLit :: MonadGet m => m Lit +getLit :: (MonadGet m) => m Lit getLit = getTag >>= \case IT -> I <$> getInt @@ -581,7 +591,7 @@ getLit = LMT -> LM <$> getReferent LYT -> LY <$> getReference -putBLit :: MonadPut m => BLit -> m () +putBLit :: (MonadPut m) => BLit -> m () putBLit (Text t) = putTag TextT *> putText (Util.Text.toText t) putBLit (List s) = putTag ListT *> putFoldable putValue s putBLit (TmLink r) = putTag TmLinkT *> putReferent r @@ -591,7 +601,7 @@ putBLit (Quote v) = putTag QuoteT *> putValue v putBLit (Code g) = putTag CodeT *> putGroup mempty g putBLit (BArr a) = putTag BArrT *> putByteArray a -getBLit :: MonadGet m => Version -> m BLit +getBLit :: (MonadGet m) => Version -> m BLit getBLit v = getTag >>= \case TextT -> Text . Util.Text.fromText <$> getText @@ -603,15 +613,15 @@ getBLit v = CodeT -> Code <$> getGroup BArrT -> BArr <$> getByteArray -putRefs :: MonadPut m => [Reference] -> m () +putRefs :: (MonadPut m) => [Reference] -> m () putRefs rs = putFoldable putReference rs -getRefs :: MonadGet m => m [Reference] +getRefs :: (MonadGet m) => m [Reference] getRefs = getList getReference putBranches :: - MonadPut m => - Var v => + (MonadPut m) => + (Var v) => EC.EnumMap FOp Text -> [v] -> Branched (ANormal v) -> @@ -643,7 +653,7 @@ putBranches fops ctx bs = case bs of _ -> exn "putBranches: malformed intermediate term" getBranches :: - MonadGet m => Var v => [v] -> Word64 -> m (Branched (ANormal v)) + (MonadGet m) => (Var v) => [v] -> Word64 -> m (Branched (ANormal v)) getBranches ctx frsh0 = getTag >>= \case MEmptyT -> pure MatchEmpty @@ -669,8 +679,8 @@ getBranches ctx frsh0 = MSumT -> MatchSum <$> getEnumMap getWord64be (getCase ctx frsh0) putCase :: - MonadPut m => - Var v => + (MonadPut m) => + (Var v) => EC.EnumMap FOp Text -> [v] -> ([Mem], ANormal v) -> @@ -678,7 +688,7 @@ putCase :: putCase fops ctx (ccs, (TAbss us e)) = putCCs ccs *> putNormal fops (pushCtx us ctx) e -getCase :: MonadGet m => Var v => [v] -> Word64 -> m ([Mem], ANormal v) +getCase :: (MonadGet m) => (Var v) => [v] -> Word64 -> m ([Mem], ANormal v) getCase ctx frsh0 = do ccs <- getCCs let l = length ccs @@ -686,20 +696,20 @@ getCase ctx frsh0 = do us = getFresh <$> take l [frsh0 ..] (,) ccs . TAbss us <$> getNormal (pushCtx us ctx) frsh -putCTag :: MonadPut m => CTag -> m () +putCTag :: (MonadPut m) => CTag -> m () putCTag c = serialize (VarInt $ fromEnum c) -getCTag :: MonadGet m => m CTag +getCTag :: (MonadGet m) => m CTag getCTag = toEnum . 
unVarInt <$> deserialize -putGroupRef :: MonadPut m => GroupRef -> m () +putGroupRef :: (MonadPut m) => GroupRef -> m () putGroupRef (GR r i) = putReference r *> putWord64be i -getGroupRef :: MonadGet m => m GroupRef +getGroupRef :: (MonadGet m) => m GroupRef getGroupRef = GR <$> getReference <*> getWord64be -putValue :: MonadPut m => Value -> m () +putValue :: (MonadPut m) => Value -> m () putValue (Partial gr ws vs) = putTag PartialT *> putGroupRef gr @@ -719,20 +729,21 @@ putValue (Cont us bs k) = putValue (BLit l) = putTag BLitT *> putBLit l -getValue :: MonadGet m => Version -> m Value +getValue :: (MonadGet m) => Version -> m Value getValue v = getTag >>= \case PartialT -> Partial <$> getGroupRef <*> getList getWord64be <*> getList (getValue v) DataT -> - Data <$> getReference + Data + <$> getReference <*> getWord64be <*> getList getWord64be <*> getList (getValue v) ContT -> Cont <$> getList getWord64be <*> getList (getValue v) <*> getCont v BLitT -> BLit <$> getBLit v -putCont :: MonadPut m => Cont -> m () +putCont :: (MonadPut m) => Cont -> m () putCont KE = putTag KET putCont (Mark ua ba rs ds k) = putTag MarkT @@ -750,7 +761,7 @@ putCont (Push i j m n gr k) = *> putGroupRef gr *> putCont k -getCont :: MonadGet m => Version -> m Cont +getCont :: (MonadGet m) => Version -> m Cont getCont v = getTag >>= \case KET -> pure KE @@ -762,13 +773,15 @@ getCont v = <*> getMap getReference (getValue v) <*> getCont v PushT -> - Push <$> getWord64be <*> getWord64be + Push + <$> getWord64be + <*> getWord64be <*> getWord64be <*> getWord64be <*> getGroupRef <*> getCont v -deserializeGroup :: Var v => ByteString -> Either String (SuperGroup v) +deserializeGroup :: (Var v) => ByteString -> Either String (SuperGroup v) deserializeGroup bs = runGetS (getVersion *> getGroup) bs where getVersion = @@ -777,7 +790,7 @@ deserializeGroup bs = runGetS (getVersion *> getGroup) bs n -> fail $ "deserializeGroup: unknown version: " ++ show n serializeGroup :: - Var v => EC.EnumMap FOp Text -> SuperGroup v -> ByteString + (Var v) => EC.EnumMap FOp Text -> SuperGroup v -> ByteString serializeGroup fops sg = runPutS (putVersion *> putGroup fops sg) where putVersion = putWord32be codeVersion diff --git a/parser-typechecker/src/Unison/Runtime/Array.hs b/parser-typechecker/src/Unison/Runtime/Array.hs new file mode 100644 index 000000000..748a37123 --- /dev/null +++ b/parser-typechecker/src/Unison/Runtime/Array.hs @@ -0,0 +1,378 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE ConstraintKinds #-} +{-# LANGUAGE StandaloneKindSignatures #-} + +-- This module wraps the operations in the primitive package so that +-- bounds checks can be toggled on during the build for debugging +-- purposes. It exports the entire API for the three array types +-- needed, and adds wrappers for the operations that are unchecked in +-- the base library. +-- +-- Checking is toggled using the `arraychecks` flag. 
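+--
+-- A hypothetical sketch of what the checked wrappers below catch when the
+-- flag is enabled (newArray comes from Data.Primitive.Array and is
+-- re-exported unchanged; writeArray is the wrapper defined below):
+--
+-- > demo :: IO ()
+-- > demo = do
+-- >   arr <- newArray 4 (0 :: Int) -- valid indices are 0..3
+-- >   writeArray arr 4 1 -- with ARRAY_CHECK defined this calls error with the
+-- >                      -- offending index; otherwise it is the unchecked
+-- >                      -- primitive write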
+module Unison.Runtime.Array + ( module EPA, + readArray, + writeArray, + copyArray, + copyMutableArray, + cloneMutableArray, + readByteArray, + writeByteArray, + indexByteArray, + copyByteArray, + copyMutableByteArray, + moveByteArray, + readPrimArray, + writePrimArray, + indexPrimArray, + ) +where + +import Control.Monad.Primitive +import Data.Kind (Constraint) +import Data.Primitive.Array as EPA hiding + ( cloneMutableArray, + copyArray, + copyMutableArray, + readArray, + writeArray, + ) +import qualified Data.Primitive.Array as PA +import Data.Primitive.ByteArray as EPA hiding + ( copyByteArray, + copyMutableByteArray, + indexByteArray, + moveByteArray, + readByteArray, + writeByteArray, + ) +import qualified Data.Primitive.ByteArray as PA +import Data.Primitive.PrimArray as EPA hiding + ( indexPrimArray, + readPrimArray, + writePrimArray, + ) +import qualified Data.Primitive.PrimArray as PA +import Data.Primitive.Types + +#ifdef ARRAY_CHECK +import GHC.Stack + +type CheckCtx :: Constraint +type CheckCtx = HasCallStack + +type MA = MutableArray +type MBA = MutableByteArray +type A = Array +type BA = ByteArray + +-- check index mutable array +checkIMArray + :: CheckCtx + => String + -> (MA s a -> Int -> r) + -> MA s a -> Int -> r +checkIMArray name f arr i + | i < 0 || sizeofMutableArray arr <= i + = error $ name ++ " unsafe check out of bounds: " ++ show i + | otherwise = f arr i +{-# inline checkIMArray #-} + +-- check copy array +checkCArray + :: CheckCtx + => String + -> (MA s a -> Int -> A a -> Int -> Int -> r) + -> MA s a -> Int -> A a -> Int -> Int -> r +checkCArray name f dst d src s l + | d < 0 + || s < 0 + || sizeofMutableArray dst < d + l + || sizeofArray src < s + l + = error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l) + | otherwise = f dst d src s l +{-# inline checkCArray #-} + +-- check copy mutable array +checkCMArray + :: CheckCtx + => String + -> (MA s a -> Int -> MA s a -> Int -> Int -> r) + -> MA s a -> Int -> MA s a -> Int -> Int -> r +checkCMArray name f dst d src s l + | d < 0 + || s < 0 + || sizeofMutableArray dst < d + l + || sizeofMutableArray src < s + l + = error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l) + | otherwise = f dst d src s l +{-# inline checkCMArray #-} + +-- check range mutable array +checkRMArray + :: CheckCtx + => String + -> (MA s a -> Int -> Int -> r) + -> MA s a -> Int -> Int -> r +checkRMArray name f arr o l + | o < 0 || sizeofMutableArray arr < o+l + = error $ name ++ "unsafe check out of bounds: " ++ show (o, l) + | otherwise = f arr o l +{-# inline checkRMArray #-} + +-- check index byte array +checkIBArray + :: CheckCtx + => Prim a + => String + -> a + -> (ByteArray -> Int -> r) + -> ByteArray -> Int -> r +checkIBArray name a f arr i + | i < 0 || sizeofByteArray arr `quot` sizeOf a <= i + = error $ name ++ " unsafe check out of bounds: " ++ show i + | otherwise = f arr i +{-# inline checkIBArray #-} + +-- check index mutable byte array +checkIMBArray + :: CheckCtx + => Prim a + => String + -> a + -> (MutableByteArray s -> Int -> r) + -> MutableByteArray s -> Int -> r +checkIMBArray name a f arr i + | i < 0 || sizeofMutableByteArray arr `quot` sizeOf a <= i + = error $ name ++ " unsafe check out of bounds: " ++ show i + | otherwise = f arr i +{-# inline checkIMBArray #-} + +-- check copy byte array +checkCBArray + :: CheckCtx + => String + -> (MBA s -> Int -> BA -> Int -> Int -> r) + -> MBA s -> Int -> BA -> Int -> Int -> r +checkCBArray name f dst d src s l + | d < 0 + || s < 0 + || 
sizeofMutableByteArray dst < d + l + || sizeofByteArray src < s + l + = error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l) + | otherwise = f dst d src s l +{-# inline checkCBArray #-} + +-- check copy mutable byte array +checkCMBArray + :: CheckCtx + => String + -> (MBA s -> Int -> MBA s -> Int -> Int -> r) + -> MBA s -> Int -> MBA s -> Int -> Int -> r +checkCMBArray name f dst d src s l + | d < 0 + || s < 0 + || sizeofMutableByteArray dst < d + l + || sizeofMutableByteArray src < s + l + = error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l) + | otherwise = f dst d src s l +{-# inline checkCMBArray #-} + +-- check index prim array +checkIPArray + :: CheckCtx + => Prim a + => String + -> (PrimArray a -> Int -> r) + -> PrimArray a -> Int -> r +checkIPArray name f arr i + | i < 0 || sizeofPrimArray arr <= i + = error $ name ++ " unsafe check out of bounds: " ++ show i + | otherwise = f arr i +{-# inline checkIPArray #-} + +-- check index mutable prim array +checkIMPArray + :: CheckCtx + => Prim a + => String + -> (MutablePrimArray s a -> Int -> r) + -> MutablePrimArray s a -> Int -> r +checkIMPArray name f arr i + | i < 0 || sizeofMutablePrimArray arr <= i + = error $ name ++ " unsafe check out of bounds: " ++ show i + | otherwise = f arr i +{-# inline checkIMPArray #-} + +#else +type CheckCtx :: Constraint +type CheckCtx = () + +checkIMArray, checkIMPArray, checkIPArray :: String -> r -> r +checkCArray, checkCMArray, checkRMArray :: String -> r -> r +checkIMArray _ = id +checkIMPArray _ = id +checkCArray _ = id +checkCMArray _ = id +checkRMArray _ = id +checkIPArray _ = id + +checkIBArray, checkIMBArray :: String -> a -> r -> r +checkCBArray, checkCMBArray :: String -> r -> r +checkIBArray _ _ = id +checkIMBArray _ _ = id +checkCBArray _ = id +checkCMBArray _ = id +#endif + +readArray :: + (CheckCtx) => + (PrimMonad m) => + MutableArray (PrimState m) a -> + Int -> + m a +readArray = checkIMArray "readArray" PA.readArray +{-# INLINE readArray #-} + +writeArray :: + (CheckCtx) => + (PrimMonad m) => + MutableArray (PrimState m) a -> + Int -> + a -> + m () +writeArray = checkIMArray "writeArray" PA.writeArray +{-# INLINE writeArray #-} + +copyArray :: + (CheckCtx) => + (PrimMonad m) => + MutableArray (PrimState m) a -> + Int -> + Array a -> + Int -> + Int -> + m () +copyArray = checkCArray "copyArray" PA.copyArray +{-# INLINE copyArray #-} + +cloneMutableArray :: + (CheckCtx) => + (PrimMonad m) => + MutableArray (PrimState m) a -> + Int -> + Int -> + m (MutableArray (PrimState m) a) +cloneMutableArray = checkRMArray "cloneMutableArray" PA.cloneMutableArray +{-# INLINE cloneMutableArray #-} + +copyMutableArray :: + (CheckCtx) => + (PrimMonad m) => + MutableArray (PrimState m) a -> + Int -> + MutableArray (PrimState m) a -> + Int -> + Int -> + m () +copyMutableArray = checkCMArray "copyMutableArray" PA.copyMutableArray +{-# INLINE copyMutableArray #-} + +readByteArray :: + forall a m. + (CheckCtx) => + (PrimMonad m) => + (Prim a) => + MutableByteArray (PrimState m) -> + Int -> + m a +readByteArray = checkIMBArray @a "readByteArray" undefined PA.readByteArray +{-# INLINE readByteArray #-} + +writeByteArray :: + forall a m. + (CheckCtx) => + (PrimMonad m) => + (Prim a) => + MutableByteArray (PrimState m) -> + Int -> + a -> + m () +writeByteArray = checkIMBArray @a "writeByteArray" undefined PA.writeByteArray +{-# INLINE writeByteArray #-} + +indexByteArray :: + forall a. 
+ (CheckCtx) => + (Prim a) => + ByteArray -> + Int -> + a +indexByteArray = checkIBArray @a "indexByteArray" undefined PA.indexByteArray +{-# INLINE indexByteArray #-} + +copyByteArray :: + (CheckCtx) => + (PrimMonad m) => + MutableByteArray (PrimState m) -> + Int -> + ByteArray -> + Int -> + Int -> + m () +copyByteArray = checkCBArray "copyByteArray" PA.copyByteArray +{-# INLINE copyByteArray #-} + +copyMutableByteArray :: + (CheckCtx) => + (PrimMonad m) => + MutableByteArray (PrimState m) -> + Int -> + MutableByteArray (PrimState m) -> + Int -> + Int -> + m () +copyMutableByteArray = checkCMBArray "copyMutableByteArray" PA.copyMutableByteArray +{-# INLINE copyMutableByteArray #-} + +moveByteArray :: + (CheckCtx) => + (PrimMonad m) => + MutableByteArray (PrimState m) -> + Int -> + MutableByteArray (PrimState m) -> + Int -> + Int -> + m () +moveByteArray = checkCMBArray "moveByteArray" PA.moveByteArray +{-# INLINE moveByteArray #-} + +readPrimArray :: + (CheckCtx) => + (PrimMonad m) => + (Prim a) => + MutablePrimArray (PrimState m) a -> + Int -> + m a +readPrimArray = checkIMPArray "readPrimArray" PA.readPrimArray +{-# INLINE readPrimArray #-} + +writePrimArray :: + (CheckCtx) => + (PrimMonad m) => + (Prim a) => + MutablePrimArray (PrimState m) a -> + Int -> + a -> + m () +writePrimArray = checkIMPArray "writePrimArray" PA.writePrimArray +{-# INLINE writePrimArray #-} + +indexPrimArray :: + (CheckCtx) => + (Prim a) => + PrimArray a -> + Int -> + a +indexPrimArray = checkIPArray "indexPrimArray" PA.indexPrimArray +{-# INLINE indexPrimArray #-} diff --git a/parser-typechecker/src/Unison/Runtime/Builtin.hs b/parser-typechecker/src/Unison/Runtime/Builtin.hs index 5fa23695c..61fea0fb2 100644 --- a/parser-typechecker/src/Unison/Runtime/Builtin.hs +++ b/parser-typechecker/src/Unison/Runtime/Builtin.hs @@ -43,6 +43,7 @@ import qualified Data.ByteArray as BA import Data.ByteString (hGet, hGetSome, hPut) import qualified Data.ByteString.Lazy as L import Data.Default (def) +import Data.Digest.Murmur64 (asWord64, hash64) import Data.IORef as SYS ( IORef, newIORef, @@ -51,7 +52,6 @@ import Data.IORef as SYS ) import qualified Data.Map as Map import Data.PEM (PEM, pemContent, pemParseLBS) -import qualified Data.Primitive as PA import Data.Set (insert) import qualified Data.Set as Set import qualified Data.Text @@ -101,6 +101,8 @@ import System.Environment as SYS ( getArgs, getEnv, ) +import System.Exit as SYS (ExitCode (..)) +import System.FilePath (isPathSeparator) import System.IO (Handle) import System.IO as SYS ( IOMode (..), @@ -122,6 +124,14 @@ import System.IO as SYS stdout, ) import System.IO.Temp (createTempDirectory) +import System.Process as SYS + ( getProcessExitCode, + proc, + runInteractiveProcess, + terminateProcess, + waitForProcess, + withCreateProcess, + ) import qualified System.X509 as X import Unison.ABT.Normalized hiding (TTm) import qualified Unison.Builtin as Ty (builtinTypes) @@ -131,6 +141,7 @@ import Unison.Reference import Unison.Referent (pattern Ref) import Unison.Runtime.ANF as ANF import Unison.Runtime.ANF.Serialize as ANF +import qualified Unison.Runtime.Array as PA import Unison.Runtime.Exception (die) import Unison.Runtime.Foreign ( Foreign (Wrap), @@ -142,9 +153,21 @@ import Unison.Runtime.Foreign.Function import Unison.Runtime.Stack (Closure) import qualified Unison.Runtime.Stack as Closure import Unison.Symbol +import Unison.Type (charRef) import qualified Unison.Type as Ty import qualified Unison.Util.Bytes as Bytes import Unison.Util.EnumContainers as EC +import 
Unison.Util.RefPromise + ( Promise, + Ticket, + casIORef, + newPromise, + peekTicket, + readForCAS, + readPromise, + tryReadPromise, + writePromise, + ) import Unison.Util.Text (Text) import qualified Unison.Util.Text as Util.Text import qualified Unison.Util.Text.Pattern as TPat @@ -152,10 +175,10 @@ import Unison.Var type Failure = F.Failure Closure -freshes :: Var v => Int -> [v] +freshes :: (Var v) => Int -> [v] freshes = freshes' mempty -freshes' :: Var v => Set v -> Int -> [v] +freshes' :: (Var v) => Set v -> Int -> [v] freshes' avoid0 = go avoid0 [] where go _ vs 0 = vs @@ -165,108 +188,108 @@ freshes' avoid0 = go avoid0 [] class Fresh t where fresh :: t -fresh1 :: Var v => v +fresh1 :: (Var v) => v fresh1 = head $ freshes 1 -instance Var v => Fresh (v, v) where +instance (Var v) => Fresh (v, v) where fresh = (v1, v2) where [v1, v2] = freshes 2 -instance Var v => Fresh (v, v, v) where +instance (Var v) => Fresh (v, v, v) where fresh = (v1, v2, v3) where [v1, v2, v3] = freshes 3 -instance Var v => Fresh (v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v) where fresh = (v1, v2, v3, v4) where [v1, v2, v3, v4] = freshes 4 -instance Var v => Fresh (v, v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v, v) where fresh = (v1, v2, v3, v4, v5) where [v1, v2, v3, v4, v5] = freshes 5 -instance Var v => Fresh (v, v, v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v, v, v) where fresh = (v1, v2, v3, v4, v5, v6) where [v1, v2, v3, v4, v5, v6] = freshes 6 -instance Var v => Fresh (v, v, v, v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v, v, v, v) where fresh = (v1, v2, v3, v4, v5, v6, v7) where [v1, v2, v3, v4, v5, v6, v7] = freshes 7 -instance Var v => Fresh (v, v, v, v, v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v, v, v, v, v) where fresh = (v1, v2, v3, v4, v5, v6, v7, v8) where [v1, v2, v3, v4, v5, v6, v7, v8] = freshes 8 -instance Var v => Fresh (v, v, v, v, v, v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v, v, v, v, v, v) where fresh = (v1, v2, v3, v4, v5, v6, v7, v8, v9) where [v1, v2, v3, v4, v5, v6, v7, v8, v9] = freshes 9 -instance Var v => Fresh (v, v, v, v, v, v, v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v, v, v, v, v, v, v) where fresh = (v1, v2, v3, v4, v5, v6, v7, v8, v9, v10) where [v1, v2, v3, v4, v5, v6, v7, v8, v9, v10] = freshes 10 -instance Var v => Fresh (v, v, v, v, v, v, v, v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v, v, v, v, v, v, v, v) where fresh = (v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11) where [v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11] = freshes 11 -instance Var v => Fresh (v, v, v, v, v, v, v, v, v, v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v, v, v, v, v, v, v, v, v, v) where fresh = (v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13) where [v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13] = freshes 13 -instance Var v => Fresh (v, v, v, v, v, v, v, v, v, v, v, v, v, v) where +instance (Var v) => Fresh (v, v, v, v, v, v, v, v, v, v, v, v, v, v) where fresh = (v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14) where [v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14] = freshes 14 -fls, tru :: Var v => ANormal v +fls, tru :: (Var v) => ANormal v fls = TCon Ty.booleanRef 0 [] tru = TCon Ty.booleanRef 1 [] -none :: Var v => ANormal v +none :: (Var v) => ANormal v none = TCon Ty.optionalRef (fromIntegral Ty.noneId) [] -some, left, right :: Var v => v -> ANormal v +some, left, right :: (Var v) => v -> ANormal v some a = TCon Ty.optionalRef 
(fromIntegral Ty.someId) [a] left x = TCon Ty.eitherRef (fromIntegral Ty.eitherLeftId) [x] right x = TCon Ty.eitherRef (fromIntegral Ty.eitherRightId) [x] -seqViewEmpty :: Var v => ANormal v +seqViewEmpty :: (Var v) => ANormal v seqViewEmpty = TCon Ty.seqViewRef (fromIntegral Ty.seqViewEmpty) [] -seqViewElem :: Var v => v -> v -> ANormal v +seqViewElem :: (Var v) => v -> v -> ANormal v seqViewElem l r = TCon Ty.seqViewRef (fromIntegral Ty.seqViewElem) [l, r] -boolift :: Var v => v -> ANormal v +boolift :: (Var v) => v -> ANormal v boolift v = TMatch v $ MatchIntegral (mapFromList [(0, fls), (1, tru)]) Nothing -notlift :: Var v => v -> ANormal v +notlift :: (Var v) => v -> ANormal v notlift v = TMatch v $ MatchIntegral (mapFromList [(1, fls), (0, tru)]) Nothing -unbox :: Var v => v -> Reference -> v -> ANormal v -> ANormal v +unbox :: (Var v) => v -> Reference -> v -> ANormal v -> ANormal v unbox v0 r v b = TMatch v0 $ MatchData r (mapSingleton 0 $ ([UN], TAbs v b)) Nothing -unenum :: Var v => Int -> v -> Reference -> v -> ANormal v -> ANormal v +unenum :: (Var v) => Int -> v -> Reference -> v -> ANormal v -> ANormal v unenum n v0 r v nx = TMatch v0 $ MatchData r cases Nothing where mkCase i = (toEnum i, ([], TLetD v UN (TLit . I $ fromIntegral i) nx)) cases = mapFromList . fmap mkCase $ [0 .. n - 1] -unop0 :: Var v => Int -> ([v] -> ANormal v) -> SuperNormal v +unop0 :: (Var v) => Int -> ([v] -> ANormal v) -> SuperNormal v unop0 n f = Lambda [BX] . TAbss [x0] @@ -274,7 +297,7 @@ unop0 n f = where xs@(x0 : _) = freshes (1 + n) -binop0 :: Var v => Int -> ([v] -> ANormal v) -> SuperNormal v +binop0 :: (Var v) => Int -> ([v] -> ANormal v) -> SuperNormal v binop0 n f = Lambda [BX, BX] . TAbss [x0, y0] @@ -282,21 +305,21 @@ binop0 n f = where xs@(x0 : y0 : _) = freshes (2 + n) -unop :: Var v => POp -> Reference -> SuperNormal v +unop :: (Var v) => POp -> Reference -> SuperNormal v unop pop rf = unop' pop rf rf -unop' :: Var v => POp -> Reference -> Reference -> SuperNormal v +unop' :: (Var v) => POp -> Reference -> Reference -> SuperNormal v unop' pop rfi rfo = unop0 2 $ \[x0, x, r] -> unbox x0 rfi x . TLetD r UN (TPrm pop [x]) $ TCon rfo 0 [r] -binop :: Var v => POp -> Reference -> SuperNormal v +binop :: (Var v) => POp -> Reference -> SuperNormal v binop pop rf = binop' pop rf rf rf binop' :: - Var v => + (Var v) => POp -> Reference -> Reference -> @@ -309,7 +332,7 @@ binop' pop rfx rfy rfr = . TLetD r UN (TPrm pop [x, y]) $ TCon rfr 0 [r] -cmpop :: Var v => POp -> Reference -> SuperNormal v +cmpop :: (Var v) => POp -> Reference -> SuperNormal v cmpop pop rf = binop0 3 $ \[x0, y0, x, y, b] -> unbox x0 rf x @@ -317,7 +340,7 @@ cmpop pop rf = . TLetD b UN (TPrm pop [x, y]) $ boolift b -cmpopb :: Var v => POp -> Reference -> SuperNormal v +cmpopb :: (Var v) => POp -> Reference -> SuperNormal v cmpopb pop rf = binop0 3 $ \[x0, y0, x, y, b] -> unbox x0 rf x @@ -325,7 +348,7 @@ cmpopb pop rf = . TLetD b UN (TPrm pop [y, x]) $ boolift b -cmpopn :: Var v => POp -> Reference -> SuperNormal v +cmpopn :: (Var v) => POp -> Reference -> SuperNormal v cmpopn pop rf = binop0 3 $ \[x0, y0, x, y, b] -> unbox x0 rf x @@ -333,7 +356,7 @@ cmpopn pop rf = . TLetD b UN (TPrm pop [x, y]) $ notlift b -cmpopbn :: Var v => POp -> Reference -> SuperNormal v +cmpopbn :: (Var v) => POp -> Reference -> SuperNormal v cmpopbn pop rf = binop0 3 $ \[x0, y0, x, y, b] -> unbox x0 rf x @@ -341,7 +364,7 @@ cmpopbn pop rf = . 
TLetD b UN (TPrm pop [y, x]) $ notlift b -addi, subi, muli, divi, modi, shli, shri, powi :: Var v => SuperNormal v +addi, subi, muli, divi, modi, shli, shri, powi :: (Var v) => SuperNormal v addi = binop ADDI Ty.intRef subi = binop SUBI Ty.intRef muli = binop MULI Ty.intRef @@ -351,7 +374,7 @@ shli = binop' SHLI Ty.intRef Ty.natRef Ty.intRef shri = binop' SHRI Ty.intRef Ty.natRef Ty.intRef powi = binop' POWI Ty.intRef Ty.natRef Ty.intRef -addn, subn, muln, divn, modn, shln, shrn, pown :: Var v => SuperNormal v +addn, subn, muln, divn, modn, shln, shrn, pown :: (Var v) => SuperNormal v addn = binop ADDN Ty.natRef subn = binop' SUBN Ty.natRef Ty.natRef Ty.intRef muln = binop MULN Ty.natRef @@ -361,7 +384,7 @@ shln = binop SHLN Ty.natRef shrn = binop SHRN Ty.natRef pown = binop POWN Ty.natRef -eqi, eqn, lti, ltn, lei, len :: Var v => SuperNormal v +eqi, eqn, lti, ltn, lei, len :: (Var v) => SuperNormal v eqi = cmpop EQLI Ty.intRef lti = cmpopbn LEQI Ty.intRef lei = cmpop LEQI Ty.intRef @@ -369,21 +392,21 @@ eqn = cmpop EQLN Ty.natRef ltn = cmpopbn LEQN Ty.natRef len = cmpop LEQN Ty.natRef -gti, gtn, gei, gen :: Var v => SuperNormal v +gti, gtn, gei, gen :: (Var v) => SuperNormal v gti = cmpopn LEQI Ty.intRef gei = cmpopb LEQI Ty.intRef gtn = cmpopn LEQN Ty.intRef gen = cmpopb LEQN Ty.intRef -inci, incn :: Var v => SuperNormal v +inci, incn :: (Var v) => SuperNormal v inci = unop INCI Ty.intRef incn = unop INCN Ty.natRef -sgni, negi :: Var v => SuperNormal v +sgni, negi :: (Var v) => SuperNormal v sgni = unop SGNI Ty.intRef negi = unop NEGI Ty.intRef -lzeron, tzeron, lzeroi, tzeroi, popn, popi :: Var v => SuperNormal v +lzeron, tzeron, lzeroi, tzeroi, popn, popi :: (Var v) => SuperNormal v lzeron = unop LZRO Ty.natRef tzeron = unop TZRO Ty.natRef popn = unop POPC Ty.natRef @@ -391,7 +414,7 @@ popi = unop' POPC Ty.intRef Ty.natRef lzeroi = unop' LZRO Ty.intRef Ty.natRef tzeroi = unop' TZRO Ty.intRef Ty.natRef -andn, orn, xorn, compln, andi, ori, xori, compli :: Var v => SuperNormal v +andn, orn, xorn, compln, andi, ori, xori, compli :: (Var v) => SuperNormal v andn = binop ANDN Ty.natRef orn = binop IORN Ty.natRef xorn = binop XORN Ty.natRef @@ -409,7 +432,7 @@ addf, sqrtf, logf, logbf :: - Var v => SuperNormal v + (Var v) => SuperNormal v addf = binop ADDF Ty.floatRef subf = binop SUBF Ty.floatRef mulf = binop MULF Ty.floatRef @@ -419,11 +442,11 @@ sqrtf = unop SQRT Ty.floatRef logf = unop LOGF Ty.floatRef logbf = binop LOGB Ty.floatRef -expf, absf :: Var v => SuperNormal v +expf, absf :: (Var v) => SuperNormal v expf = unop EXPF Ty.floatRef absf = unop ABSF Ty.floatRef -cosf, sinf, tanf, acosf, asinf, atanf :: Var v => SuperNormal v +cosf, sinf, tanf, acosf, asinf, atanf :: (Var v) => SuperNormal v cosf = unop COSF Ty.floatRef sinf = unop SINF Ty.floatRef tanf = unop TANF Ty.floatRef @@ -438,7 +461,7 @@ coshf, asinhf, atanhf, atan2f :: - Var v => SuperNormal v + (Var v) => SuperNormal v coshf = unop COSH Ty.floatRef sinhf = unop SINH Ty.floatRef tanhf = unop TANH Ty.floatRef @@ -447,7 +470,7 @@ asinhf = unop ASNH Ty.floatRef atanhf = unop ATNH Ty.floatRef atan2f = binop ATN2 Ty.floatRef -ltf, gtf, lef, gef, eqf, neqf :: Var v => SuperNormal v +ltf, gtf, lef, gef, eqf, neqf :: (Var v) => SuperNormal v ltf = cmpopbn LEQF Ty.floatRef gtf = cmpopn LEQF Ty.floatRef lef = cmpop LEQF Ty.floatRef @@ -455,11 +478,11 @@ gef = cmpopb LEQF Ty.floatRef eqf = cmpop EQLF Ty.floatRef neqf = cmpopn EQLF Ty.floatRef -minf, maxf :: Var v => SuperNormal v +minf, maxf :: (Var v) => SuperNormal v minf = binop MINF 
Ty.floatRef maxf = binop MAXF Ty.floatRef -ceilf, floorf, truncf, roundf, i2f, n2f :: Var v => SuperNormal v +ceilf, floorf, truncf, roundf, i2f, n2f :: (Var v) => SuperNormal v ceilf = unop' CEIL Ty.floatRef Ty.intRef floorf = unop' FLOR Ty.floatRef Ty.intRef truncf = unop' TRNF Ty.floatRef Ty.intRef @@ -467,7 +490,7 @@ roundf = unop' RNDF Ty.floatRef Ty.intRef i2f = unop' ITOF Ty.intRef Ty.floatRef n2f = unop' NTOF Ty.natRef Ty.floatRef -trni :: Var v => SuperNormal v +trni :: (Var v) => SuperNormal v trni = unop0 3 $ \[x0, x, z, b] -> unbox x0 Ty.intRef x . TLetD z UN (TLit $ I 0) @@ -477,7 +500,7 @@ trni = unop0 3 $ \[x0, x, z, b] -> (mapSingleton 1 $ TCon Ty.natRef 0 [z]) (Just $ TCon Ty.natRef 0 [x]) -modular :: Var v => POp -> (Bool -> ANormal v) -> SuperNormal v +modular :: (Var v) => POp -> (Bool -> ANormal v) -> SuperNormal v modular pop ret = unop0 3 $ \[x0, x, m, t] -> unbox x0 Ty.intRef x @@ -488,13 +511,13 @@ modular pop ret = (mapSingleton 1 $ ret True) (Just $ ret False) -evni, evnn, oddi, oddn :: Var v => SuperNormal v +evni, evnn, oddi, oddn :: (Var v) => SuperNormal v evni = modular MODI (\b -> if b then fls else tru) oddi = modular MODI (\b -> if b then tru else fls) evnn = modular MODN (\b -> if b then fls else tru) oddn = modular MODN (\b -> if b then tru else fls) -dropn :: Var v => SuperNormal v +dropn :: (Var v) => SuperNormal v dropn = binop0 4 $ \[x0, y0, x, y, b, r] -> unbox x0 Ty.natRef x . unbox y0 Ty.natRef y @@ -510,7 +533,7 @@ dropn = binop0 4 $ \[x0, y0, x, y, b, r] -> ) $ TCon Ty.natRef 0 [r] -appendt, taket, dropt, sizet, unconst, unsnoct :: Var v => SuperNormal v +appendt, taket, dropt, sizet, unconst, unsnoct :: (Var v) => SuperNormal v appendt = binop0 0 $ \[x, y] -> TPrm CATT [x, y] taket = binop0 1 $ \[x0, y, x] -> unbox x0 Ty.natRef x $ @@ -556,17 +579,17 @@ unsnoct = unop0 7 $ \[x, t, c0, c, y, p, u, cp] -> ) ] -appends, conss, snocs :: Var v => SuperNormal v +appends, conss, snocs :: (Var v) => SuperNormal v appends = binop0 0 $ \[x, y] -> TPrm CATS [x, y] conss = binop0 0 $ \[x, y] -> TPrm CONS [x, y] snocs = binop0 0 $ \[x, y] -> TPrm SNOC [x, y] -coerceType :: Var v => Reference -> Reference -> SuperNormal v +coerceType :: (Var v) => Reference -> Reference -> SuperNormal v coerceType fromType toType = unop0 1 $ \[x, r] -> unbox x fromType r $ TCon toType 0 [r] -takes, drops, sizes, ats, emptys :: Var v => SuperNormal v +takes, drops, sizes, ats, emptys :: (Var v) => SuperNormal v takes = binop0 1 $ \[x0, y, x] -> unbox x0 Ty.natRef x $ TPrm TAKS [x, y] @@ -587,7 +610,7 @@ ats = binop0 3 $ \[x0, y, x, t, r] -> ] emptys = Lambda [] $ TPrm BLDS [] -viewls, viewrs :: Var v => SuperNormal v +viewls, viewrs :: (Var v) => SuperNormal v viewls = unop0 3 $ \[s, u, h, t] -> TLetD u UN (TPrm VWLS [s]) . TMatch u @@ -868,6 +891,18 @@ gen'trace = TLets Direct [] [] (TPrm TRCE [t, v]) $ TCon Ty.unitRef 0 [] +debug'text :: SuperNormal Symbol +debug'text = + unop0 3 $ \[c, r, t, e] -> + TLetD r UN (TPrm DBTX [c]) + . TMatch r + . MatchSum + $ mapFromList + [ (0, ([], none)), + (1, ([BX], TAbs t . TLetD e BX (left t) $ some e)), + (2, ([BX], TAbs t . 
TLetD e BX (right t) $ some e)) + ] + code'missing :: SuperNormal Symbol code'missing = unop0 1 $ \[link, b] -> @@ -979,7 +1014,7 @@ seek'handle instr = where (arg1, arg2, arg3, seek, nat, stack1, stack2, stack3, unit, fail, result) = fresh -no'buf, line'buf, block'buf, sblock'buf :: Enum e => e +no'buf, line'buf, block'buf, sblock'buf :: (Enum e) => e no'buf = toEnum $ fromIntegral Ty.bufferModeNoBufferingId line'buf = toEnum $ fromIntegral Ty.bufferModeLineBufferingId block'buf = toEnum $ fromIntegral Ty.bufferModeBlockBufferingId @@ -990,6 +1025,19 @@ infixr 0 --> (-->) :: a -> b -> (a, b) x --> y = (x, y) +start'process :: ForeignOp +start'process instr = + ([BX, BX],) + . TAbss [exe, args] + . TLets Direct [hin, hout, herr, hproc] [BX, BX, BX, BX] (TFOp instr [exe, args]) + . TLetD un BX (TCon Ty.unitRef 0 []) + . TLetD p3 BX (TCon Ty.pairRef 0 [hproc, un]) + . TLetD p2 BX (TCon Ty.pairRef 0 [herr, p3]) + . TLetD p1 BX (TCon Ty.pairRef 0 [hout, p2]) + $ TCon Ty.pairRef 0 [hin, p1] + where + (exe, args, hin, hout, herr, hproc, un, p3, p2, p1) = fresh + set'buffering :: ForeignOp set'buffering instr = ([BX, BX],) @@ -1000,12 +1048,16 @@ set'buffering instr = [ no'buf --> [] --> k1 no'buf, line'buf --> [] --> k1 line'buf, block'buf --> [] --> k1 block'buf, - sblock'buf --> [BX] - --> TAbs n . TMatch n . MatchDataCover Ty.bufferModeRef + sblock'buf + --> [BX] + --> TAbs n + . TMatch n + . MatchDataCover Ty.bufferModeRef $ mapFromList - [ 0 --> [UN] + [ 0 + --> [UN] --> TAbs w - . TLetD tag UN (TLit (N sblock'buf)) + . TLetD tag UN (TLit (N sblock'buf)) $ k2 [tag, w] ] ] @@ -1018,7 +1070,7 @@ set'buffering instr = outIoFailUnit s1 s2 s3 u f r (handle, bmode, tag, n, w, s1, s2, s3, u, f, r) = fresh -get'buffering'output :: forall v. Var v => v -> v -> v -> v -> v -> v -> v -> v -> ANormal v +get'buffering'output :: forall v. (Var v) => v -> v -> v -> v -> v -> v -> v -> v -> ANormal v get'buffering'output eitherResult stack1 stack2 stack3 resultTag anyVar failVar successVar = TMatch eitherResult . MatchSum $ mapFromList @@ -1029,19 +1081,23 @@ get'buffering'output eitherResult stack1 stack2 stack3 resultTag anyVar failVar . TMatch resultTag . MatchSum $ mapFromList - [ no'buf --> [] + [ no'buf + --> [] --> TLetD successVar BX (TCon Ty.bufferModeRef no'buf []) $ right successVar, - line'buf --> [] + line'buf + --> [] --> TLetD successVar BX (TCon Ty.bufferModeRef line'buf []) $ right successVar, - block'buf --> [] + block'buf + --> [] --> TLetD successVar BX (TCon Ty.bufferModeRef block'buf []) $ right successVar, - sblock'buf --> [UN] + sblock'buf + --> [UN] --> TAbs stack1 - . TLetD stack2 BX (TCon Ty.natRef 0 [stack1]) - . TLetD successVar BX (TCon Ty.bufferModeRef sblock'buf [stack2]) + . TLetD stack2 BX (TCon Ty.natRef 0 [stack1]) + . TLetD successVar BX (TCon Ty.bufferModeRef sblock'buf [stack2]) $ right successVar ] ) @@ -1063,6 +1119,16 @@ crypto'hash instr = where (alg, x, vl) = fresh +murmur'hash :: ForeignOp +murmur'hash instr = + ([BX],) + . TAbss [x] + . TLetD vl BX (TPrm VALU [x]) + . TLetD result UN (TFOp instr [vl]) + $ TCon Ty.natRef 0 [result] + where + (x, vl, result) = fresh + crypto'hmac :: ForeignOp crypto'hmac instr = ([BX, BX, BX],) @@ -1088,19 +1154,19 @@ crypto'hmac instr = -- -- () -> ... -inUnit :: forall v. Var v => v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) +inUnit :: forall v. (Var v) => v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) inUnit unit result cont instr = ([BX], TAbs unit $ TLetD result UN (TFOp instr []) cont) -- a -> ... -inBx :: forall v. 
Var v => v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) +inBx :: forall v. (Var v) => v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) inBx arg result cont instr = ([BX],) . TAbs arg $ TLetD result UN (TFOp instr [arg]) cont -- Nat -> ... -inNat :: forall v. Var v => v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) +inNat :: forall v. (Var v) => v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) inNat arg nat result cont instr = ([BX],) . TAbs arg @@ -1108,7 +1174,7 @@ inNat arg nat result cont instr = $ TLetD result UN (TFOp instr [nat]) cont -- Maybe a -> b -> ... -inMaybeBx :: forall v. Var v => v -> v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) +inMaybeBx :: forall v. (Var v) => v -> v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) inMaybeBx arg1 arg2 arg3 mb result cont instr = ([BX, BX],) . TAbss [arg1, arg2] @@ -1125,12 +1191,19 @@ inMaybeBx arg1 arg2 arg3 mb result cont instr = ] -- a -> b -> ... -inBxBx :: forall v. Var v => v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) +inBxBx :: forall v. (Var v) => v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) inBxBx arg1 arg2 result cont instr = ([BX, BX],) . TAbss [arg1, arg2] $ TLetD result UN (TFOp instr [arg1, arg2]) cont +-- a -> b -> c -> ... +inBxBxBx :: forall v. (Var v) => v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) +inBxBxBx arg1 arg2 arg3 result cont instr = + ([BX, BX, BX],) + . TAbss [arg1, arg2, arg3] + $ TLetD result UN (TFOp instr [arg1, arg2, arg3]) cont + set'echo :: ForeignOp set'echo instr = ([BX, BX],) @@ -1142,7 +1215,7 @@ set'echo instr = (arg1, arg2, bol, stack1, stack2, stack3, unit, fail, result) = fresh -- a -> Nat -> ... -inBxNat :: forall v. Var v => v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) +inBxNat :: forall v. (Var v) => v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) inBxNat arg1 arg2 nat result cont instr = ([BX, BX],) . TAbss [arg1, arg2] @@ -1150,7 +1223,7 @@ inBxNat arg1 arg2 nat result cont instr = $ TLetD result UN (TFOp instr [arg1, nat]) cont inBxNatNat :: - Var v => v -> v -> v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) + (Var v) => v -> v -> v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) inBxNatNat arg1 arg2 arg3 nat1 nat2 result cont instr = ([BX, BX, BX],) . TAbss [arg1, arg2, arg3] @@ -1158,7 +1231,7 @@ inBxNatNat arg1 arg2 arg3 nat1 nat2 result cont instr = . unbox arg3 Ty.natRef nat2 $ TLetD result UN (TFOp instr [arg1, nat1, nat2]) cont -inBxNatBx :: Var v => v -> v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) +inBxNatBx :: (Var v) => v -> v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) inBxNatBx arg1 arg2 arg3 nat result cont instr = ([BX, BX, BX],) . TAbss [arg1, arg2, arg3] @@ -1166,7 +1239,7 @@ inBxNatBx arg1 arg2 arg3 nat result cont instr = $ TLetD result UN (TFOp instr [arg1, nat, arg3]) cont -- a -> IOMode -> ... -inBxIomr :: forall v. Var v => v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) +inBxIomr :: forall v. (Var v) => v -> v -> v -> v -> ANormal v -> FOp -> ([Mem], ANormal v) inBxIomr arg1 arg2 fm result cont instr = ([BX, BX],) . TAbss [arg1, arg2] @@ -1184,7 +1257,8 @@ inBxIomr arg1 arg2 fm result cont instr = -- All of these functions will take a Var named result containing the -- result of the foreign call -- -outMaybe :: forall v. Var v => v -> v -> ANormal v + +outMaybe :: forall v. (Var v) => v -> v -> ANormal v outMaybe maybe result = TMatch result . 
MatchSum $ mapFromList @@ -1192,7 +1266,21 @@ outMaybe maybe result = (1, ([BX], TAbs maybe $ some maybe)) ] -outMaybeNTup :: forall v. Var v => v -> v -> v -> v -> v -> v -> v -> ANormal v +outMaybeNat :: (Var v) => v -> v -> v -> ANormal v +outMaybeNat tag result n = + TMatch tag . MatchSum $ + mapFromList + [ (0, ([], none)), + ( 1, + ( [UN], + TAbs result + . TLetD n BX (TCon Ty.natRef 0 [n]) + $ some n + ) + ) + ] + +outMaybeNTup :: forall v. (Var v) => v -> v -> v -> v -> v -> v -> v -> ANormal v outMaybeNTup a b n u bp p result = TMatch result . MatchSum $ mapFromList @@ -1209,7 +1297,7 @@ outMaybeNTup a b n u bp p result = ) ] -outMaybeTup :: Var v => v -> v -> v -> v -> v -> v -> ANormal v +outMaybeTup :: (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outMaybeTup a b u bp ap result = TMatch result . MatchSum $ mapFromList @@ -1225,7 +1313,7 @@ outMaybeTup a b u bp ap result = ) ] -outIoFail :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v +outIoFail :: forall v. (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outIoFail stack1 stack2 stack3 any fail result = TMatch result . MatchSum $ mapFromList @@ -1233,7 +1321,7 @@ outIoFail stack1 stack2 stack3 any fail result = (1, ([BX], TAbs stack1 $ right stack1)) ] -outIoFailNat :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v +outIoFailNat :: forall v. (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outIoFailNat stack1 stack2 stack3 fail extra result = TMatch result . MatchSum $ mapFromList @@ -1246,7 +1334,7 @@ outIoFailNat stack1 stack2 stack3 fail extra result = ) ] -outIoFailChar :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v +outIoFailChar :: forall v. (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outIoFailChar stack1 stack2 stack3 fail extra result = TMatch result . MatchSum $ mapFromList @@ -1260,25 +1348,27 @@ outIoFailChar stack1 stack2 stack3 fail extra result = ] failureCase :: - Var v => v -> v -> v -> v -> v -> (Word64, ([Mem], ANormal v)) + (Var v) => v -> v -> v -> v -> v -> (Word64, ([Mem], ANormal v)) failureCase stack1 stack2 stack3 any fail = - (0,) . ([BX, BX, BX],) + (0,) + . ([BX, BX, BX],) . TAbss [stack1, stack2, stack3] . TLetD any BX (TCon Ty.anyRef 0 [stack3]) . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2, any]) $ left fail exnCase :: - Var v => v -> v -> v -> v -> v -> (Word64, ([Mem], ANormal v)) + (Var v) => v -> v -> v -> v -> v -> (Word64, ([Mem], ANormal v)) exnCase stack1 stack2 stack3 any fail = - (0,) . ([BX, BX, BX],) + (0,) + . ([BX, BX, BX],) . TAbss [stack1, stack2, stack3] . TLetD any BX (TCon Ty.anyRef 0 [stack3]) . TLetD fail BX (TCon Ty.failureRef 0 [stack1, stack2, any]) $ TReq Ty.exceptionRef 0 [fail] outIoExnNat :: - forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v + forall v. (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outIoExnNat stack1 stack2 stack3 any fail result = TMatch result . MatchSum $ mapFromList @@ -1291,7 +1381,7 @@ outIoExnNat stack1 stack2 stack3 any fail result = ] outIoExnUnit :: - forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v + forall v. (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outIoExnUnit stack1 stack2 stack3 any fail result = TMatch result . MatchSum $ mapFromList @@ -1300,7 +1390,7 @@ outIoExnUnit stack1 stack2 stack3 any fail result = ] outIoExnBox :: - Var v => v -> v -> v -> v -> v -> v -> ANormal v + (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outIoExnBox stack1 stack2 stack3 any fail result = TMatch result . 
MatchSum $ mapFromList @@ -1308,7 +1398,7 @@ outIoExnBox stack1 stack2 stack3 any fail result = (1, ([BX], TAbs stack1 $ TVar stack1)) ] -outIoFailBox :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v +outIoFailBox :: forall v. (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outIoFailBox stack1 stack2 stack3 any fail result = TMatch result . MatchSum $ mapFromList @@ -1320,7 +1410,7 @@ outIoFailBox stack1 stack2 stack3 any fail result = ) ] -outIoFailUnit :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v +outIoFailUnit :: forall v. (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outIoFailUnit stack1 stack2 stack3 extra fail result = TMatch result . MatchSum $ mapFromList @@ -1333,7 +1423,7 @@ outIoFailUnit stack1 stack2 stack3 extra fail result = ) ] -outIoFailBool :: forall v. Var v => v -> v -> v -> v -> v -> v -> ANormal v +outIoFailBool :: forall v. (Var v) => v -> v -> v -> v -> v -> v -> ANormal v outIoFailBool stack1 stack2 stack3 extra fail result = TMatch result . MatchSum $ mapFromList @@ -1347,7 +1437,7 @@ outIoFailBool stack1 stack2 stack3 extra fail result = ] outIoFailG :: - Var v => + (Var v) => v -> v -> v -> @@ -1451,6 +1541,16 @@ boxBoxTo0 instr = where (arg1, arg2) = fresh +-- a -> b ->{E} Nat +boxBoxToNat :: ForeignOp +boxBoxToNat instr = + ([BX, BX],) + . TAbss [arg1, arg2] + . TLetD result UN (TFOp instr [arg1, arg2]) + $ TCon Ty.natRef 0 [result] + where + (arg1, arg2, result) = fresh + -- a -> b -> Option c -- a -> Bool @@ -1468,6 +1568,13 @@ boxBoxToBool = where (arg1, arg2, result) = fresh +-- a -> b -> c -> Bool +boxBoxBoxToBool :: ForeignOp +boxBoxBoxToBool = + inBxBxBx arg1 arg2 arg3 result $ boolift result + where + (arg1, arg2, arg3, result) = fresh + -- Nat -> c -- Works for an type that's packed into a word, just -- pass `wordDirect Ty.natRef`, `wordDirect Ty.floatRef` @@ -1481,6 +1588,16 @@ wordDirect wordType instr = where (b1, ub1) = fresh +-- Nat -> Bool +boxWordToBool :: Reference -> ForeignOp +boxWordToBool wordType instr = + ([BX, BX],) + . TAbss [b1, w1] + . unbox w1 wordType uw1 + $ TLetD result UN (TFOp instr [b1, uw1]) (boolift result) + where + (b1, w1, uw1, result) = fresh + -- Nat -> Nat -> c wordWordDirect :: Reference -> Reference -> ForeignOp wordWordDirect word1 word2 instr = @@ -1565,6 +1682,12 @@ boxToMaybeBox = where (arg, maybe, result) = fresh +-- a -> Maybe Nat +boxToMaybeNat :: ForeignOp +boxToMaybeNat = inBx arg tag $ outMaybeNat tag result n + where + (arg, tag, result, n) = fresh + -- a -> Maybe (Nat, b) boxToMaybeNTup :: ForeignOp boxToMaybeNTup = @@ -1894,6 +2017,7 @@ builtinLookup = ("todo", (Untracked, bug "builtin.todo")), ("Debug.watch", (Tracked, watch)), ("Debug.trace", (Tracked, gen'trace)), + ("Debug.toText", (Tracked, debug'text)), ("unsafe.coerceAbilities", (Untracked, poly'coerce)), ("Char.toNat", (Untracked, cast Ty.charRef Ty.natRef)), ("Char.fromNat", (Untracked, cast Ty.natRef Ty.charRef)), @@ -1970,7 +2094,7 @@ declareForeign sand name op func0 = do | sanitize, Tracked <- sand, FF r w _ <- func0 = - FF r w (bomb name) + FF r w (bomb name) | otherwise = func0 code = (name, (sand, uncurry Lambda (op w))) in (w + 1, code : codes, mapInsert w (name, func) funcs) @@ -2077,22 +2201,28 @@ declareForeigns = do declareForeign Tracked "IO.putBytes.impl.v3" boxBoxToEF0 . 
mkForeignIOF $ \(h, bs) -> hPut h (Bytes.toArray bs) declareForeign Tracked "IO.systemTime.impl.v3" unitToEFNat $ - mkForeignIOF $ \() -> getPOSIXTime + mkForeignIOF $ + \() -> getPOSIXTime declareForeign Tracked "IO.systemTimeMicroseconds.v1" unitToInt $ - mkForeign $ \() -> fmap (1e6 *) getPOSIXTime + mkForeign $ + \() -> fmap (1e6 *) getPOSIXTime declareForeign Tracked "Clock.internals.monotonic.v1" unitToEFBox $ - mkForeignIOF $ \() -> getTime Monotonic + mkForeignIOF $ + \() -> getTime Monotonic declareForeign Tracked "Clock.internals.realtime.v1" unitToEFBox $ - mkForeignIOF $ \() -> getTime Realtime + mkForeignIOF $ + \() -> getTime Realtime declareForeign Tracked "Clock.internals.processCPUTime.v1" unitToEFBox $ - mkForeignIOF $ \() -> getTime ProcessCPUTime + mkForeignIOF $ + \() -> getTime ProcessCPUTime declareForeign Tracked "Clock.internals.threadCPUTime.v1" unitToEFBox $ - mkForeignIOF $ \() -> getTime ThreadCPUTime + mkForeignIOF $ + \() -> getTime ThreadCPUTime declareForeign Tracked "Clock.internals.sec.v1" boxToInt $ mkForeign (\n -> pure (fromIntegral $ sec n :: Word64)) @@ -2102,13 +2232,16 @@ declareForeigns = do declareForeign Tracked "Clock.internals.nsec.v1" boxToNat $ mkForeign (\n -> pure (fromIntegral $ nsec n :: Word64)) + let chop = reverse . dropWhile isPathSeparator . reverse + declareForeign Tracked "IO.getTempDirectory.impl.v3" unitToEFBox $ - mkForeignIOF $ \() -> getTemporaryDirectory + mkForeignIOF $ + \() -> chop <$> getTemporaryDirectory declareForeign Tracked "IO.createTempDirectory.impl.v3" boxToEFBox $ mkForeignIOF $ \prefix -> do temp <- getTemporaryDirectory - createTempDirectory temp prefix + chop <$> createTempDirectory temp prefix declareForeign Tracked "IO.getCurrentDirectory.impl.v3" unitToEFBox . mkForeignIOF @@ -2124,28 +2257,33 @@ declareForeigns = do mkForeignIOF getEnv declareForeign Tracked "IO.getArgs.impl.v1" unitToEFBox $ - mkForeignIOF $ \() -> fmap Util.Text.pack <$> SYS.getArgs + mkForeignIOF $ + \() -> fmap Util.Text.pack <$> SYS.getArgs declareForeign Tracked "IO.isDirectory.impl.v3" boxToEFBool $ mkForeignIOF doesDirectoryExist declareForeign Tracked "IO.createDirectory.impl.v3" boxToEF0 $ - mkForeignIOF $ createDirectoryIfMissing True + mkForeignIOF $ + createDirectoryIfMissing True declareForeign Tracked "IO.removeDirectory.impl.v3" boxToEF0 $ mkForeignIOF removeDirectoryRecursive declareForeign Tracked "IO.renameDirectory.impl.v3" boxBoxToEF0 $ - mkForeignIOF $ uncurry renameDirectory + mkForeignIOF $ + uncurry renameDirectory declareForeign Tracked "IO.directoryContents.impl.v3" boxToEFBox $ - mkForeignIOF $ (fmap Util.Text.pack <$>) . getDirectoryContents + mkForeignIOF $ + (fmap Util.Text.pack <$>) . getDirectoryContents declareForeign Tracked "IO.removeFile.impl.v3" boxToEF0 $ mkForeignIOF removeFile declareForeign Tracked "IO.renameFile.impl.v3" boxBoxToEF0 $ - mkForeignIOF $ uncurry renameFile + mkForeignIOF $ + uncurry renameFile declareForeign Tracked "IO.getFileTimestamp.impl.v3" boxToEFNat . 
mkForeignIOF @@ -2205,7 +2343,7 @@ declareForeigns = do $ \(hs, n) -> maybe mempty Bytes.fromArray <$> SYS.recv hs n - declareForeign Tracked "IO.kill.impl.v3" boxTo0 $ mkForeignIOF killThread + declareForeign Tracked "IO.kill.impl.v3" boxToEF0 $ mkForeignIOF killThread declareForeign Tracked "IO.delay.impl.v3" natToEFUnit $ mkForeignIOF threadDelay @@ -2218,6 +2356,27 @@ declareForeigns = do 2 -> pure (Just SYS.stderr) _ -> pure Nothing + let exitDecode ExitSuccess = 0 + exitDecode (ExitFailure n) = n + + declareForeign Tracked "IO.process.call" boxBoxToNat . mkForeign $ + \(exe, map Util.Text.unpack -> args) -> + withCreateProcess (proc exe args) $ \_ _ _ p -> + exitDecode <$> waitForProcess p + + declareForeign Tracked "IO.process.start" start'process . mkForeign $ + \(exe, map Util.Text.unpack -> args) -> + runInteractiveProcess exe args Nothing Nothing + + declareForeign Tracked "IO.process.kill" boxTo0 . mkForeign $ + terminateProcess + + declareForeign Tracked "IO.process.wait" boxToNat . mkForeign $ + \ph -> exitDecode <$> waitForProcess ph + + declareForeign Tracked "IO.process.exitCode" boxToMaybeNat . mkForeign $ + fmap (fmap exitDecode) . getProcessExitCode + declareForeign Tracked "MVar.new" boxDirect . mkForeign $ \(c :: Closure) -> newMVar c @@ -2338,13 +2497,58 @@ declareForeigns = do declareForeign Tracked "IO.ref" boxDirect . mkForeign - $ \(c :: Closure) -> newIORef c + $ \(c :: Closure) -> evaluate c >>= newIORef + -- The docs for IORef state that IORef operations can be observed + -- out of order ([1]) but actually GHC does emit the appropriate + -- load and store barriers nowadays ([2], [3]). + -- + -- [1] https://hackage.haskell.org/package/base-4.17.0.0/docs/Data-IORef.html#g:2 + -- [2] https://github.com/ghc/ghc/blob/master/compiler/GHC/StgToCmm/Prim.hs#L286 + -- [3] https://github.com/ghc/ghc/blob/master/compiler/GHC/StgToCmm/Prim.hs#L298 declareForeign Untracked "Ref.read" boxDirect . mkForeign $ \(r :: IORef Closure) -> readIORef r declareForeign Untracked "Ref.write" boxBoxTo0 . mkForeign $ - \(r :: IORef Closure, c :: Closure) -> writeIORef r c + \(r :: IORef Closure, c :: Closure) -> evaluate c >>= writeIORef r + + declareForeign Tracked "Ref.readForCas" boxDirect . mkForeign $ + \(r :: IORef Closure) -> readForCAS r + + declareForeign Tracked "Ref.Ticket.read" boxDirect . mkForeign $ + \(t :: Ticket Closure) -> pure $ peekTicket t + + -- In GHC, CAS returns both a Boolean and the current value of the + -- IORef, which can be used to retry a failed CAS. + -- This strategy is more efficient than returning a Boolean only + -- because it uses a single call to cmpxchg in assembly (see [1]) to + -- avoid an extra read per CAS iteration, however it's not supported + -- in Scheme. + -- Therefore, we adopt the more common signature that only returns a + -- Boolean, which doesn't even suffer from spurious failures because + -- GHC issues loads of mutable variables with memory_order_acquire + -- (see [2]) + -- + -- [1]: https://github.com/ghc/ghc/blob/master/rts/PrimOps.cmm#L697 + -- [2]: https://github.com/ghc/ghc/blob/master/compiler/GHC/StgToCmm/Prim.hs#L285 + declareForeign Tracked "Ref.cas" boxBoxBoxToBool . mkForeign $ + \(r :: IORef Closure, t :: Ticket Closure, v :: Closure) -> fmap fst $ + do + t <- evaluate t + casIORef r t v + + declareForeign Tracked "Promise.new" unitDirect . mkForeign $ + \() -> newPromise @Closure + + -- the only exceptions from Promise.read are async and shouldn't be caught + declareForeign Tracked "Promise.read" boxDirect . 
mkForeign $ + \(p :: Promise Closure) -> readPromise p + + declareForeign Tracked "Promise.tryRead" boxToMaybeBox . mkForeign $ + \(p :: Promise Closure) -> tryReadPromise p + + declareForeign Tracked "Promise.write" boxBoxToBool . mkForeign $ + \(p :: Promise Closure, a :: Closure) -> writePromise p a declareForeign Tracked "Tls.newClient.impl.v3" boxBoxToEFBox . mkForeignTls $ \( config :: TLS.ClientParams, @@ -2359,7 +2563,7 @@ declareForeigns = do declareForeign Tracked "Tls.handshake.impl.v3" boxToEF0 . mkForeignTls $ \(tls :: TLS.Context) -> TLS.handshake tls - declareForeign Tracked "Tls.send.impl.v3" boxBoxToEFBox . mkForeignTls $ + declareForeign Tracked "Tls.send.impl.v3" boxBoxToEF0 . mkForeignTls $ \( tls :: TLS.Context, bytes :: Bytes.Bytes ) -> TLS.sendData tls (Bytes.toLazyByteString bytes) @@ -2389,7 +2593,7 @@ declareForeigns = do bs <- TLS.recvData tls pure $ Bytes.fromArray bs - declareForeign Tracked "Tls.terminate.impl.v3" boxToEFBox . mkForeignTls $ + declareForeign Tracked "Tls.terminate.impl.v3" boxToEF0 . mkForeignTls $ \(tls :: TLS.Context) -> TLS.bye tls declareForeign Untracked "Code.dependencies" boxDirect @@ -2415,7 +2619,7 @@ declareForeigns = do . mkForeign $ pure . deserializeValue . Bytes.toArray -- Hashing functions - let declareHashAlgorithm :: forall alg. Hash.HashAlgorithm alg => Data.Text.Text -> alg -> FDecl Symbol () + let declareHashAlgorithm :: forall alg. (Hash.HashAlgorithm alg) => Data.Text.Text -> alg -> FDecl Symbol () declareHashAlgorithm txt alg = do let algoRef = Builtin ("crypto.HashAlgorithm." <> txt) declareForeign Untracked ("crypto.HashAlgorithm." <> txt) direct . mkForeign $ \() -> @@ -2429,6 +2633,7 @@ declareForeigns = do declareHashAlgorithm "Blake2b_512" Hash.Blake2b_512 declareHashAlgorithm "Blake2b_256" Hash.Blake2b_256 declareHashAlgorithm "Blake2s_256" Hash.Blake2s_256 + declareHashAlgorithm "Md5" Hash.MD5 declareForeign Untracked "crypto.hashBytes" boxBoxDirect . mkForeign $ \(HashAlgorithm _ alg, b :: Bytes.Bytes) -> @@ -2446,7 +2651,7 @@ declareForeigns = do declareForeign Untracked "crypto.hash" crypto'hash . mkForeign $ \(HashAlgorithm _ alg, x) -> let hashlazy :: - Hash.HashAlgorithm a => + (Hash.HashAlgorithm a) => a -> L.ByteString -> Hash.Digest a @@ -2456,7 +2661,7 @@ declareForeigns = do declareForeign Untracked "crypto.hmac" crypto'hmac . mkForeign $ \(HashAlgorithm _ alg, key, x) -> let hmac :: - Hash.HashAlgorithm a => a -> L.ByteString -> HMAC.HMAC a + (Hash.HashAlgorithm a) => a -> L.ByteString -> HMAC.HMAC a hmac _ s = HMAC.finalize . HMAC.updates @@ -2471,6 +2676,9 @@ declareForeigns = do Left se -> Left (Util.Text.pack (show se)) Right a -> Right a + declareForeign Untracked "Universal.murmurHash" murmur'hash . mkForeign $ + pure . asWord64 . hash64 . serializeValueLazy + declareForeign Untracked "Bytes.zlib.compress" boxDirect . mkForeign $ pure . Bytes.zlibCompress declareForeign Untracked "Bytes.gzip.compress" boxDirect . mkForeign $ pure . Bytes.gzipCompress declareForeign Untracked "Bytes.zlib.decompress" boxToEBoxBox . mkForeign $ \bs -> @@ -2704,32 +2912,32 @@ declareForeigns = do declareForeign Untracked "Text.patterns.literal" boxDirect . mkForeign $ \txt -> evaluate . TPat.cpattern $ TPat.Literal txt declareForeign Untracked "Text.patterns.digit" direct . mkForeign $ - let v = TPat.cpattern TPat.Digit in \() -> pure v + let v = TPat.cpattern (TPat.Char (TPat.CharRange '0' '9')) in \() -> pure v declareForeign Untracked "Text.patterns.letter" direct . 
mkForeign $ - let v = TPat.cpattern TPat.Letter in \() -> pure v + let v = TPat.cpattern (TPat.Char (TPat.CharClass TPat.Letter)) in \() -> pure v declareForeign Untracked "Text.patterns.space" direct . mkForeign $ - let v = TPat.cpattern TPat.Space in \() -> pure v + let v = TPat.cpattern (TPat.Char (TPat.CharClass TPat.Whitespace)) in \() -> pure v declareForeign Untracked "Text.patterns.punctuation" direct . mkForeign $ - let v = TPat.cpattern TPat.Punctuation in \() -> pure v + let v = TPat.cpattern (TPat.Char (TPat.CharClass TPat.Punctuation)) in \() -> pure v declareForeign Untracked "Text.patterns.anyChar" direct . mkForeign $ - let v = TPat.cpattern TPat.AnyChar in \() -> pure v + let v = TPat.cpattern (TPat.Char TPat.Any) in \() -> pure v declareForeign Untracked "Text.patterns.eof" direct . mkForeign $ let v = TPat.cpattern TPat.Eof in \() -> pure v let ccd = wordWordDirect Ty.charRef Ty.charRef declareForeign Untracked "Text.patterns.charRange" ccd . mkForeign $ - \(beg, end) -> evaluate . TPat.cpattern $ TPat.CharRange beg end + \(beg, end) -> evaluate . TPat.cpattern . TPat.Char $ TPat.CharRange beg end declareForeign Untracked "Text.patterns.notCharRange" ccd . mkForeign $ - \(beg, end) -> evaluate . TPat.cpattern $ TPat.NotCharRange beg end + \(beg, end) -> evaluate . TPat.cpattern . TPat.Char . TPat.Not $ TPat.CharRange beg end declareForeign Untracked "Text.patterns.charIn" boxDirect . mkForeign $ \ccs -> do cs <- for ccs $ \case Closure.DataU1 _ _ i -> pure (toEnum i) _ -> die "Text.patterns.charIn: non-character closure" - evaluate . TPat.cpattern $ TPat.CharIn cs + evaluate . TPat.cpattern . TPat.Char $ TPat.CharSet cs declareForeign Untracked "Text.patterns.notCharIn" boxDirect . mkForeign $ \ccs -> do cs <- for ccs $ \case Closure.DataU1 _ _ i -> pure (toEnum i) _ -> die "Text.patterns.notCharIn: non-character closure" - evaluate . TPat.cpattern $ TPat.NotCharIn cs + evaluate . TPat.cpattern . TPat.Char . TPat.Not $ TPat.CharSet cs declareForeign Untracked "Pattern.many" boxDirect . mkForeign $ \(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Many p declareForeign Untracked "Pattern.capture" boxDirect . mkForeign $ @@ -2749,6 +2957,32 @@ declareForeigns = do declareForeign Untracked "Pattern.isMatch" boxBoxToBool . mkForeign $ \(TPat.CP _ matcher, input :: Text) -> pure . isJust $ matcher input + declareForeign Untracked "Char.Class.any" direct . mkForeign $ \() -> pure TPat.Any + declareForeign Untracked "Char.Class.not" boxDirect . mkForeign $ pure . TPat.Not + declareForeign Untracked "Char.Class.and" boxBoxDirect . mkForeign $ \(a, b) -> pure $ TPat.Intersect a b + declareForeign Untracked "Char.Class.or" boxBoxDirect . mkForeign $ \(a, b) -> pure $ TPat.Union a b + declareForeign Untracked "Char.Class.range" (wordWordDirect charRef charRef) . mkForeign $ \(a, b) -> pure $ TPat.CharRange a b + declareForeign Untracked "Char.Class.anyOf" boxDirect . mkForeign $ \ccs -> do + cs <- for ccs $ \case + Closure.DataU1 _ _ i -> pure (toEnum i) + _ -> die "Text.patterns.charIn: non-character closure" + evaluate $ TPat.CharSet cs + declareForeign Untracked "Char.Class.alphanumeric" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.AlphaNum) + declareForeign Untracked "Char.Class.upper" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.Upper) + declareForeign Untracked "Char.Class.lower" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.Lower) + declareForeign Untracked "Char.Class.whitespace" direct . 
mkForeign $ \() -> pure (TPat.CharClass TPat.Whitespace) + declareForeign Untracked "Char.Class.control" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.Control) + declareForeign Untracked "Char.Class.printable" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.Printable) + declareForeign Untracked "Char.Class.mark" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.MarkChar) + declareForeign Untracked "Char.Class.number" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.Number) + declareForeign Untracked "Char.Class.punctuation" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.Punctuation) + declareForeign Untracked "Char.Class.symbol" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.Symbol) + declareForeign Untracked "Char.Class.separator" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.Separator) + declareForeign Untracked "Char.Class.letter" direct . mkForeign $ \() -> pure (TPat.CharClass TPat.Letter) + declareForeign Untracked "Char.Class.is" (boxWordToBool charRef) . mkForeign $ \(cl, c) -> evaluate $ TPat.charPatternPred cl c + declareForeign Untracked "Text.patterns.char" boxDirect . mkForeign $ \c -> + let v = TPat.cpattern (TPat.Char c) in pure v + type RW = PA.PrimState IO checkedRead :: diff --git a/parser-typechecker/src/Unison/Runtime/Debug.hs b/parser-typechecker/src/Unison/Runtime/Debug.hs index f40358650..143543a73 100644 --- a/parser-typechecker/src/Unison/Runtime/Debug.hs +++ b/parser-typechecker/src/Unison/Runtime/Debug.hs @@ -33,7 +33,7 @@ traceCombs _ False c = c traceCombs w True c = trace (prettyCombs w c "") c tracePretty :: - Var v => + (Var v) => PrettyPrintEnv -> Bool -> Term v -> @@ -42,7 +42,7 @@ tracePretty _ False tm = tm tracePretty ppe True tm = trace (toANSI 50 $ pretty ppe tm) tm tracePrettyGroup :: - Var v => + (Var v) => Word64 -> Bool -> SuperGroup v -> diff --git a/parser-typechecker/src/Unison/Runtime/Decompile.hs b/parser-typechecker/src/Unison/Runtime/Decompile.hs index c1907ab7a..42f88920c 100644 --- a/parser-typechecker/src/Unison/Runtime/Decompile.hs +++ b/parser-typechecker/src/Unison/Runtime/Decompile.hs @@ -61,14 +61,14 @@ import qualified Unison.Util.Text as Text import Unison.Var (Var) import Unsafe.Coerce -- for Int -> Double -con :: Var v => Reference -> Word64 -> Term v () +con :: (Var v) => Reference -> Word64 -> Term v () con rf ct = constructor () (ConstructorReference rf $ fromIntegral ct) err :: String -> Either Error a err = Left . lit . fromString decompile :: - Var v => + (Var v) => (Word64 -> Word64 -> Maybe (Term v ())) -> Closure -> Either Error (Term v ()) @@ -105,7 +105,7 @@ tag2bool 0 = Right False tag2bool 1 = Right True tag2bool _ = err "bad boolean tag" -substitute :: Var v => Term v () -> [Term v ()] -> Term v () +substitute :: (Var v) => Term v () -> [Term v ()] -> Term v () substitute = align [] where align vts (LamNamed' v bd) (t : ts) = align ((v, t) : vts) bd ts @@ -114,7 +114,7 @@ substitute = align [] align vts tm ts = apps' (substs vts tm) ts decompileUnboxed :: - Var v => Reference -> Word64 -> Int -> Either Error (Term v ()) + (Var v) => Reference -> Word64 -> Int -> Either Error (Term v ()) decompileUnboxed r _ i | r == natRef = pure . nat () $ fromIntegral i | r == intRef = pure . 
int () $ fromIntegral i @@ -124,7 +124,7 @@ decompileUnboxed r _ _ = err $ "cannot decompile unboxed data type with reference: " ++ show r decompileForeign :: - Var v => + (Var v) => (Word64 -> Word64 -> Maybe (Term v ())) -> Foreign -> Either Error (Term v ()) @@ -141,14 +141,14 @@ decompileForeign topTerms f decompileForeign _ f = err $ "cannot decompile Foreign: " ++ show f -decompileBytes :: Var v => By.Bytes -> Term v () +decompileBytes :: (Var v) => By.Bytes -> Term v () decompileBytes = app () (builtin () $ fromString "Bytes.fromList") . list () . fmap (nat () . fromIntegral) . By.toWord8s -decompileHashAlgorithm :: Var v => HashAlgorithm -> Term v () +decompileHashAlgorithm :: (Var v) => HashAlgorithm -> Term v () decompileHashAlgorithm (HashAlgorithm r _) = ref () r unwrapSeq :: Foreign -> Maybe (Seq Closure) diff --git a/parser-typechecker/src/Unison/Runtime/Exception.hs b/parser-typechecker/src/Unison/Runtime/Exception.hs index 4f6fe1903..11f5fdbc1 100644 --- a/parser-typechecker/src/Unison/Runtime/Exception.hs +++ b/parser-typechecker/src/Unison/Runtime/Exception.hs @@ -4,19 +4,19 @@ import Control.Exception import Data.String (fromString) import Data.Text import GHC.Stack +import Unison.Reference (Reference) import Unison.Runtime.Stack import Unison.Util.Pretty as P -import Unison.Reference (Reference) data RuntimeExn = PE CallStack (P.Pretty P.ColorText) - | BU [(Reference,Int)] Text Closure + | BU [(Reference, Int)] Text Closure deriving (Show) instance Exception RuntimeExn -die :: HasCallStack => String -> IO a +die :: (HasCallStack) => String -> IO a die = throwIO . PE callStack . P.lit . fromString -exn :: HasCallStack => String -> a +exn :: (HasCallStack) => String -> a exn = throw . PE callStack . P.lit . fromString diff --git a/parser-typechecker/src/Unison/Runtime/Foreign.hs b/parser-typechecker/src/Unison/Runtime/Foreign.hs index 6d10eba6c..dc4eb94ba 100644 --- a/parser-typechecker/src/Unison/Runtime/Foreign.hs +++ b/parser-typechecker/src/Unison/Runtime/Foreign.hs @@ -1,5 +1,6 @@ {-# LANGUAGE BangPatterns #-} {-# LANGUAGE GADTs #-} +{-# LANGUAGE InstanceSigs #-} {-# LANGUAGE PatternGuards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeApplications #-} @@ -28,6 +29,7 @@ import Network.Socket (Socket) import qualified Network.TLS as TLS (ClientParams, Context, ServerParams) import System.Clock (TimeSpec) import System.IO (Handle) +import System.Process (ProcessHandle) import Unison.Reference (Reference) import Unison.Referent (Referent) import Unison.Runtime.ANF (SuperGroup, Value) @@ -35,7 +37,7 @@ import Unison.Symbol (Symbol) import qualified Unison.Type as Ty import Unison.Util.Bytes (Bytes) import Unison.Util.Text (Text) -import Unison.Util.Text.Pattern (CPattern) +import Unison.Util.Text.Pattern (CPattern, CharPattern) import Unsafe.Coerce data Foreign where @@ -44,6 +46,80 @@ data Foreign where promote :: (a -> a -> r) -> b -> c -> r promote (~~) x y = unsafeCoerce x ~~ unsafeCoerce y +-- These functions are explicit aliases of the overloaded function. +-- When the overloaded function is used in their place, it seems to +-- cause issues with regard to `promote` above. Somehow, the +-- unsafeCoerce can cause memory faults, even when the values are +-- being coerced to appropriate types. Having an explicit, noinline +-- alias seems to prevent the faults. 
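-- A self-contained, illustrative sketch (hypothetical names) of how
-- these monomorphic, NOINLINE aliases are consumed via `promote`
-- above: `promote` erases the argument types with unsafeCoerce and
-- then applies the alias, so the comparison that actually runs on the
-- coerced values is the alias rather than the overloaded (==).
module PromoteSketch where

import Data.Text (Text)
import Unsafe.Coerce (unsafeCoerce)

-- Mirrors `promote` from this file.
promoteSketch :: (a -> a -> r) -> b -> c -> r
promoteSketch (~~) x y = unsafeCoerce x ~~ unsafeCoerce y

-- Mirrors `txtEq` below: an explicit, NOINLINE monomorphic alias.
textEqSketch :: Text -> Text -> Bool
textEqSketch l r = l == r
{-# NOINLINE textEqSketch #-}

-- Type-erased equality; only meaningful when both arguments really
-- hold Text, as in the Ty.textRef case of ref2eq.
eqTextErased :: b -> c -> Bool
eqTextErased = promoteSketch textEqSketch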
+txtEq :: Text -> Text -> Bool +txtEq l r = l == r +{-# NOINLINE txtEq #-} + +txtCmp :: Text -> Text -> Ordering +txtCmp l r = compare l r +{-# NOINLINE txtCmp #-} + +bytesEq :: Bytes -> Bytes -> Bool +bytesEq l r = l == r +{-# NOINLINE bytesEq #-} + +bytesCmp :: Bytes -> Bytes -> Ordering +bytesCmp l r = compare l r +{-# NOINLINE bytesCmp #-} + +mvarEq :: MVar () -> MVar () -> Bool +mvarEq l r = l == r +{-# NOINLINE mvarEq #-} + +socketEq :: Socket -> Socket -> Bool +socketEq l r = l == r +{-# NOINLINE socketEq #-} + +refEq :: IORef () -> IORef () -> Bool +refEq l r = l == r +{-# NOINLINE refEq #-} + +tidEq :: ThreadId -> ThreadId -> Bool +tidEq l r = l == r +{-# NOINLINE tidEq #-} + +tidCmp :: ThreadId -> ThreadId -> Ordering +tidCmp l r = compare l r +{-# NOINLINE tidCmp #-} + +marrEq :: MutableArray () () -> MutableArray () () -> Bool +marrEq l r = l == r +{-# NOINLINE marrEq #-} + +mbarrEq :: MutableByteArray () -> MutableByteArray () -> Bool +mbarrEq l r = l == r +{-# NOINLINE mbarrEq #-} + +barrEq :: ByteArray -> ByteArray -> Bool +barrEq l r = l == r +{-# NOINLINE barrEq #-} + +barrCmp :: ByteArray -> ByteArray -> Ordering +barrCmp l r = compare l r +{-# NOINLINE barrCmp #-} + +cpatEq :: CPattern -> CPattern -> Bool +cpatEq l r = l == r +{-# NOINLINE cpatEq #-} + +cpatCmp :: CPattern -> CPattern -> Ordering +cpatCmp l r = compare l r +{-# NOINLINE cpatCmp #-} + +charClassEq :: CharPattern -> CharPattern -> Bool +charClassEq l r = l == r +{-# NOINLINE charClassEq #-} + +charClassCmp :: CharPattern -> CharPattern -> Ordering +charClassCmp = compare +{-# NOINLINE charClassCmp #-} + tylEq :: Reference -> Reference -> Bool tylEq r l = r == l {-# NOINLINE tylEq #-} @@ -62,37 +138,43 @@ tmlCmp r l = compare r l ref2eq :: Reference -> Maybe (a -> b -> Bool) ref2eq r - | r == Ty.textRef = Just $ promote ((==) @Text) + | r == Ty.textRef = Just $ promote txtEq | r == Ty.termLinkRef = Just $ promote tmlEq | r == Ty.typeLinkRef = Just $ promote tylEq - | r == Ty.bytesRef = Just $ promote ((==) @Bytes) + | r == Ty.bytesRef = Just $ promote bytesEq -- Note: MVar equality is just reference equality, so it shouldn't -- matter what type the MVar holds. 
- | r == Ty.mvarRef = Just $ promote ((==) @(MVar ())) + | r == Ty.mvarRef = Just $ promote mvarEq -- Ditto - | r == Ty.refRef = Just $ promote ((==) @(IORef ())) - | r == Ty.threadIdRef = Just $ promote ((==) @ThreadId) - | r == Ty.marrayRef = Just $ promote ((==) @(MutableArray () ())) - | r == Ty.mbytearrayRef = Just $ promote ((==) @(MutableByteArray ())) - | r == Ty.ibytearrayRef = Just $ promote ((==) @ByteArray) - | r == Ty.patternRef = Just $ promote ((==) @CPattern) + | r == Ty.socketRef = Just $ promote socketEq + | r == Ty.refRef = Just $ promote refEq + | r == Ty.threadIdRef = Just $ promote tidEq + | r == Ty.marrayRef = Just $ promote marrEq + | r == Ty.mbytearrayRef = Just $ promote mbarrEq + | r == Ty.ibytearrayRef = Just $ promote barrEq + | r == Ty.patternRef = Just $ promote cpatEq + | r == Ty.charClassRef = Just $ promote charClassEq | otherwise = Nothing ref2cmp :: Reference -> Maybe (a -> b -> Ordering) ref2cmp r - | r == Ty.textRef = Just $ promote (compare @Text) + | r == Ty.textRef = Just $ promote txtCmp | r == Ty.termLinkRef = Just $ promote tmlCmp | r == Ty.typeLinkRef = Just $ promote tylCmp - | r == Ty.bytesRef = Just $ promote (compare @Bytes) - | r == Ty.threadIdRef = Just $ promote (compare @ThreadId) - | r == Ty.ibytearrayRef = Just $ promote (compare @ByteArray) - | r == Ty.patternRef = Just $ promote (compare @CPattern) + | r == Ty.bytesRef = Just $ promote bytesCmp + | r == Ty.threadIdRef = Just $ promote tidCmp + | r == Ty.ibytearrayRef = Just $ promote barrCmp + | r == Ty.patternRef = Just $ promote cpatCmp + | r == Ty.charClassRef = Just $ promote charClassCmp | otherwise = Nothing instance Eq Foreign where Wrap rl t == Wrap rr u | rl == rr, Just (~~) <- ref2eq rl = t ~~ u - _ == _ = error "Eq Foreign" + Wrap rl1 _ == Wrap rl2 _ = + error $ + "Attempting to check equality of two values of different types: " + <> show (rl1, rl2) instance Ord Foreign where Wrap rl t `compare` Wrap rr u @@ -123,12 +205,16 @@ maybeUnwrapForeign rt (Wrap r e) class BuiltinForeign f where foreignRef :: Tagged f Reference -instance BuiltinForeign Text where foreignRef = Tagged Ty.textRef +instance BuiltinForeign Text where + foreignRef :: Tagged Text Reference + foreignRef = Tagged Ty.textRef instance BuiltinForeign Bytes where foreignRef = Tagged Ty.bytesRef instance BuiltinForeign Handle where foreignRef = Tagged Ty.fileHandleRef +instance BuiltinForeign ProcessHandle where foreignRef = Tagged Ty.processHandleRef + instance BuiltinForeign Socket where foreignRef = Tagged Ty.socketRef instance BuiltinForeign ThreadId where foreignRef = Tagged Ty.threadIdRef @@ -154,7 +240,7 @@ instance BuiltinForeign TimeSpec where foreignRef = Tagged Ty.timeSpecRef data HashAlgorithm where -- Reference is a reference to the hash algorithm - HashAlgorithm :: Hash.HashAlgorithm a => Reference -> a -> HashAlgorithm + HashAlgorithm :: (Hash.HashAlgorithm a) => Reference -> a -> HashAlgorithm newtype Tls = Tls TLS.Context @@ -165,15 +251,18 @@ instance BuiltinForeign HashAlgorithm where foreignRef = Tagged Ty.hashAlgorithm instance BuiltinForeign CPattern where foreignRef = Tagged Ty.patternRef -wrapBuiltin :: forall f. BuiltinForeign f => f -> Foreign +instance BuiltinForeign CharPattern where + foreignRef = Tagged Ty.charClassRef + +wrapBuiltin :: forall f. 
(BuiltinForeign f) => f -> Foreign wrapBuiltin x = Wrap r x where Tagged r = foreignRef :: Tagged f Reference -unwrapBuiltin :: BuiltinForeign f => Foreign -> f +unwrapBuiltin :: (BuiltinForeign f) => Foreign -> f unwrapBuiltin (Wrap _ x) = unsafeCoerce x -maybeUnwrapBuiltin :: forall f. BuiltinForeign f => Foreign -> Maybe f +maybeUnwrapBuiltin :: forall f. (BuiltinForeign f) => Foreign -> Maybe f maybeUnwrapBuiltin (Wrap r x) | r == r0 = Just (unsafeCoerce x) | otherwise = Nothing diff --git a/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs b/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs index 39cc29e6d..db8636417 100644 --- a/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs +++ b/parser-typechecker/src/Unison/Runtime/Foreign/Function.hs @@ -15,6 +15,7 @@ import Control.Concurrent (ThreadId) import Control.Concurrent.MVar (MVar) import Control.Concurrent.STM (TVar) import Control.Exception (evaluate) +import Data.Atomics (Ticket) import qualified Data.Char as Char import Data.Foldable (toList) import Data.IORef (IORef) @@ -40,11 +41,14 @@ import Unison.Type marrayRef, mbytearrayRef, mvarRef, + promiseRef, refRef, + ticketRef, tvarRef, typeLinkRef, ) import Unison.Util.Bytes (Bytes) +import Unison.Util.RefPromise (Promise) import Unison.Util.Text (Text, pack, unpack) -- Foreign functions operating on stacks @@ -147,7 +151,7 @@ instance ForeignConvention POSIXTime where readForeign = readForeignAs (fromIntegral :: Int -> POSIXTime) writeForeign = writeForeignAs (round :: POSIXTime -> Int) -instance ForeignConvention a => ForeignConvention (Maybe a) where +instance (ForeignConvention a) => ForeignConvention (Maybe a) where readForeign (i : us) bs ustk bstk = peekOff ustk i >>= \case 0 -> pure (us, bs, Nothing) @@ -213,7 +217,7 @@ instance ForeignConvention IOException where writeForeign = writeForeignAs (ioeEncode . ioe_type) readForeignAs :: - ForeignConvention a => + (ForeignConvention a) => (a -> b) -> [Int] -> [Int] -> @@ -223,7 +227,7 @@ readForeignAs :: readForeignAs f us bs ustk bstk = fmap f <$> readForeign us bs ustk bstk writeForeignAs :: - ForeignConvention b => + (ForeignConvention b) => (a -> b) -> Stack 'UN -> Stack 'BX -> @@ -232,7 +236,7 @@ writeForeignAs :: writeForeignAs f ustk bstk x = writeForeign ustk bstk (f x) readForeignEnum :: - Enum a => + (Enum a) => [Int] -> [Int] -> Stack 'UN -> @@ -241,7 +245,7 @@ readForeignEnum :: readForeignEnum = readForeignAs toEnum writeForeignEnum :: - Enum a => + (Enum a) => Stack 'UN -> Stack 'BX -> a -> @@ -249,7 +253,7 @@ writeForeignEnum :: writeForeignEnum = writeForeignAs fromEnum readForeignBuiltin :: - BuiltinForeign b => + (BuiltinForeign b) => [Int] -> [Int] -> Stack 'UN -> @@ -258,7 +262,7 @@ readForeignBuiltin :: readForeignBuiltin = readForeignAs (unwrapBuiltin . 
marshalToForeign) writeForeignBuiltin :: - BuiltinForeign b => + (BuiltinForeign b) => Stack 'UN -> Stack 'BX -> b -> @@ -320,7 +324,7 @@ instance (ustk, bstk) <- writeForeign ustk bstk y writeForeign ustk bstk x -instance ForeignConvention a => ForeignConvention (Failure a) where +instance (ForeignConvention a) => ForeignConvention (Failure a) where readForeign us bs ustk bstk = do (us, bs, typeref) <- readTypelink us bs ustk bstk (us, bs, message) <- readForeign us bs ustk bstk @@ -350,6 +354,27 @@ instance (ustk, bstk) <- writeForeign ustk bstk b writeForeign ustk bstk a +instance + ( ForeignConvention a, + ForeignConvention b, + ForeignConvention c, + ForeignConvention d + ) => + ForeignConvention (a, b, c, d) + where + readForeign us bs ustk bstk = do + (us, bs, a) <- readForeign us bs ustk bstk + (us, bs, b) <- readForeign us bs ustk bstk + (us, bs, c) <- readForeign us bs ustk bstk + (us, bs, d) <- readForeign us bs ustk bstk + pure (us, bs, (a, b, c, d)) + + writeForeign ustk bstk (a, b, c, d) = do + (ustk, bstk) <- writeForeign ustk bstk d + (ustk, bstk) <- writeForeign ustk bstk c + (ustk, bstk) <- writeForeign ustk bstk b + writeForeign ustk bstk a + instance ( ForeignConvention a, ForeignConvention b, @@ -430,6 +455,14 @@ instance ForeignConvention (IORef Closure) where readForeign = readForeignAs (unwrapForeign . marshalToForeign) writeForeign = writeForeignAs (Foreign . Wrap refRef) +instance ForeignConvention (Ticket Closure) where + readForeign = readForeignAs (unwrapForeign . marshalToForeign) + writeForeign = writeForeignAs (Foreign . Wrap ticketRef) + +instance ForeignConvention (Promise Closure) where + readForeign = readForeignAs (unwrapForeign . marshalToForeign) + writeForeign = writeForeignAs (Foreign . Wrap promiseRef) + instance ForeignConvention (SuperGroup Symbol) where readForeign = readForeignBuiltin writeForeign = writeForeignBuiltin @@ -458,13 +491,14 @@ instance ForeignConvention PA.ByteArray where readForeign = readForeignAs (unwrapForeign . marshalToForeign) writeForeign = writeForeignAs (Foreign . Wrap ibytearrayRef) -instance {-# OVERLAPPABLE #-} BuiltinForeign b => ForeignConvention b where +instance {-# OVERLAPPABLE #-} (BuiltinForeign b) => ForeignConvention b where readForeign = readForeignBuiltin writeForeign = writeForeignBuiltin -instance {-# OVERLAPPABLE #-} BuiltinForeign b => ForeignConvention [b] where +instance {-# OVERLAPPABLE #-} (BuiltinForeign b) => ForeignConvention [b] where readForeign us (i : bs) _ bstk = - (us,bs,) . fmap (unwrapForeign . marshalToForeign) + (us,bs,) + . fmap (unwrapForeign . marshalToForeign) . 
toList <$> peekOffS bstk i readForeign _ _ _ _ = foreignCCError "[b]" diff --git a/parser-typechecker/src/Unison/Runtime/IOSource.hs b/parser-typechecker/src/Unison/Runtime/IOSource.hs index f0983132a..32dbe2a91 100644 --- a/parser-typechecker/src/Unison/Runtime/IOSource.hs +++ b/parser-typechecker/src/Unison/Runtime/IOSource.hs @@ -7,18 +7,23 @@ import Control.Lens (view, _2) import Control.Monad.Morph (hoist) import Data.List (elemIndex, genericIndex) import qualified Data.Map as Map -import Debug.RecoverRTTI (anythingToString) +import qualified Data.Text as Text import Text.RawString.QQ (r) import qualified Unison.Builtin as Builtin import Unison.Codebase.CodeLookup (CodeLookup (..)) import qualified Unison.Codebase.CodeLookup.Util as CL +import qualified Unison.Codebase.Path as Path import Unison.ConstructorReference (GConstructorReference (..)) import qualified Unison.DataDeclaration as DD import qualified Unison.DataDeclaration.ConstructorId as DD import Unison.FileParsers (parseAndSynthesizeFile) import qualified Unison.NamesWithHistory as Names +import qualified Unison.NamesWithHistory as NamesWithHistory import Unison.Parser.Ann (Ann (..)) import Unison.Prelude +import qualified Unison.PrettyPrintEnv as PPE +import qualified Unison.PrettyPrintEnv.Names as PPE +import qualified Unison.PrintError as PrintError import qualified Unison.Reference as R import qualified Unison.Result as Result import Unison.Symbol (Symbol) @@ -26,6 +31,8 @@ import qualified Unison.Syntax.Parser as Parser import qualified Unison.Term as Term import qualified Unison.Typechecker.TypeLookup as TL import qualified Unison.UnisonFile as UF +import qualified Unison.UnisonFile.Names as UF +import Unison.Util.Monoid (intercalateMap) import qualified Unison.Var as Var debug :: Bool @@ -42,10 +49,9 @@ typecheckedFile' = tl = const $ pure (External <$ Builtin.typeLookup) env = Parser.ParsingEnv mempty (Names.NamesWithHistory Builtin.names0 mempty) r = parseAndSynthesizeFile [] tl env "" source - in case runIdentity $ Result.runResultT r of - (Nothing, notes) -> error $ "parsing failed: " <> anythingToString (toList notes) - (Just Left {}, notes) -> error $ "typechecking failed" <> anythingToString (toList notes) - (Just (Right file), _) -> file + in case decodeResult (Text.unpack source) r of + Left str -> error str + Right file -> file typecheckedFileTerms :: Map.Map Symbol R.Reference typecheckedFileTerms = view _2 <$> UF.hashTerms typecheckedFile @@ -58,7 +64,7 @@ termNamed s = codeLookup :: CodeLookup Symbol Identity Ann codeLookup = CL.fromTypecheckedUnisonFile typecheckedFile -codeLookupM :: Applicative m => CodeLookup Symbol m Ann +codeLookupM :: (Applicative m) => CodeLookup Symbol m Ann codeLookupM = hoist (pure . 
runIdentity) codeLookup typeNamedId :: String -> R.Id @@ -194,6 +200,16 @@ doc2FrontMatterRef = typeNamed "Doc2.FrontMatter" pattern Doc2FrontMatterRef <- ((== doc2FrontMatterRef) -> True) +doc2LaTeXInlineRef :: R.Reference +doc2LaTeXInlineRef = typeNamed "Doc2.LaTeXInline" + +pattern Doc2LaTeXInlineRef <- ((== doc2LaTeXInlineRef) -> True) + +doc2SvgRef :: R.Reference +doc2SvgRef = typeNamed "Doc2.Svg" + +pattern Doc2SvgRef <- ((== doc2SvgRef) -> True) + pattern Doc2Word txt <- Term.App' (Term.Constructor' (ConstructorReference Doc2Ref ((==) doc2WordId -> True))) (Term.Text' txt) pattern Doc2Code d <- Term.App' (Term.Constructor' (ConstructorReference Doc2Ref ((==) doc2CodeId -> True))) d @@ -302,6 +318,10 @@ pattern Doc2SpecialFormEmbedVideo sources config <- Doc2SpecialFormEmbed (Term.A pattern Doc2SpecialFormEmbedFrontMatter frontMatter <- Doc2SpecialFormEmbed (Term.App' _ (Term.App' (Term.Constructor' (ConstructorReference Doc2FrontMatterRef _)) (Term.List' (toList -> frontMatter)))) +pattern Doc2SpecialFormEmbedLaTeXInline latex <- Doc2SpecialFormEmbedInline (Term.App' _ (Term.App' (Term.Constructor' (ConstructorReference Doc2LaTeXInlineRef _)) (Term.Text' latex))) + +pattern Doc2SpecialFormEmbedSvg svg <- Doc2SpecialFormEmbed (Term.App' _ (Term.App' (Term.Constructor' (ConstructorReference Doc2SvgRef _)) (Term.Text' svg))) + -- pulls out `vs body` in `Doc2.Term (Any '(vs -> body))`, where -- vs can be any number of parameters pattern Doc2Example vs body <- Term.App' _term (Term.App' _any (Term.LamNamed' _ (Term.LamsNamedOpt' vs body))) @@ -554,6 +574,15 @@ unique[b2ada5dfd4112ca3a7ba0a6483ce3d82811400c56eff8e6eca1b3fbf] type Doc2.Video unique[ea60b6205a6b25449a8784de87c113833bacbcdfe32829c7a76985d5] type Doc2.FrontMatter = FrontMatter [(Text, Text)] +-- Similar to using triple backticks with a latex pragma (```latex), but for +-- when you'd want to render LaTeX inline +unique[d1dc0515a2379df8a4c91571fe2f9bf9322adaf97677c87b806e49572447c688] type Doc2.LaTeXInline + = LaTeXInline Text + +-- Used for embedding SVGs +unique[ae4e05d8bede04825145db1a6a2222fdf2d890b3044d86fd4368f53b265de7f9] type Doc2.Svg + = Svg Text + -- ex: Doc2.term 'List.map Doc2.term : 'a -> Doc2.Term Doc2.term a = Doc2.Term.Term (Any a) @@ -685,6 +714,7 @@ Pretty.map f p = Lit _ t -> Lit () (f t) Wrap _ p -> Wrap () (go p) OrElse _ p1 p2 -> OrElse () (go p1) (go p2) + Table _ xs -> Table () (List.map (List.map go) xs) Indent _ i0 iN p -> Indent () (go i0) (go iN) (go p) Annotated.Append _ ps -> Annotated.Append () (List.map go ps) Pretty (go (Pretty.get p)) @@ -936,3 +966,38 @@ syntax.docFormatConsole d = Special sf -> Pretty.lit (Left sf) go d |] + +type Note = Result.Note Symbol Ann + +type TFile = UF.TypecheckedUnisonFile Symbol Ann + +type SynthResult = + Result.Result + (Seq Note) + (Either (UF.UnisonFile Symbol Ann) TFile) + +type EitherResult = Either String TFile + +showNotes :: (Foldable f) => String -> PrintError.Env -> f Note -> String +showNotes source env = + intercalateMap "\n\n" $ PrintError.renderNoteAsANSI 60 env source Path.absoluteEmpty + +decodeResult :: + String -> SynthResult -> EitherResult +decodeResult source (Result.Result notes Nothing) = + Left $ showNotes source ppEnv notes +decodeResult source (Result.Result notes (Just (Left uf))) = + let errNames = UF.toNames uf + in Left $ + showNotes + source + ( PPE.fromNames + 10 + (NamesWithHistory.shadowing errNames Builtin.names) + ) + notes +decodeResult _source (Result.Result _notes (Just (Right uf))) = + Right uf + +ppEnv :: 
PPE.PrettyPrintEnv +ppEnv = PPE.fromNames 10 Builtin.names diff --git a/parser-typechecker/src/Unison/Runtime/Interface.hs b/parser-typechecker/src/Unison/Runtime/Interface.hs index 57c7808dc..c1374f169 100644 --- a/parser-typechecker/src/Unison/Runtime/Interface.hs +++ b/parser-typechecker/src/Unison/Runtime/Interface.hs @@ -78,6 +78,7 @@ import Unison.Runtime.MCode.Serialize import Unison.Runtime.Machine ( ActiveThreads, CCache (..), + Tracer (..), apply0, baseCCache, cacheAdd, @@ -99,7 +100,6 @@ import Unison.Syntax.TermPrinter import qualified Unison.Term as Tm import Unison.Util.EnumContainers as EC import Unison.Util.Pretty as P -import qualified Unison.Util.Text as UT import qualified UnliftIO import qualified UnliftIO.Concurrent as UnliftIO @@ -151,6 +151,7 @@ recursiveDeclDeps :: Set RF.LabeledDependency -> CodeLookup Symbol IO () -> Decl Symbol () -> + -- (type deps, term deps) IO (Set Reference, Set Reference) recursiveDeclDeps seen0 cl d = do rec <- for (toList newDeps) $ \case @@ -176,6 +177,7 @@ recursiveTermDeps :: Set RF.LabeledDependency -> CodeLookup Symbol IO () -> Term Symbol -> + -- (type deps, term deps) IO (Set Reference, Set Reference) recursiveTermDeps seen0 cl tm = do rec <- for (toList (deps \\ seen0)) $ \case @@ -273,7 +275,7 @@ backrefLifted ref tm dcmp = Map.fromList . (fmap . fmap) (Map.singleton 0) $ (ref, tm) : dcmp intermediateTerms :: - HasCallStack => + (HasCallStack) => PrettyPrintEnv -> EvalCtx -> [(Reference, Term Symbol)] -> @@ -284,7 +286,7 @@ intermediateTerms ppe ctx rtms = foldMap (\(ref, tm) -> intermediateTerm ppe ref ctx tm) rtms intermediateTerm :: - HasCallStack => + (HasCallStack) => PrettyPrintEnv -> Reference -> EvalCtx -> @@ -311,7 +313,7 @@ intermediateTerm ppe ref ctx tm = tmName = HQ.toString . termName ppe $ RF.Ref ref prepareEvaluation :: - HasCallStack => + (HasCallStack) => PrettyPrintEnv -> Term Symbol -> EvalCtx -> @@ -319,7 +321,8 @@ prepareEvaluation :: prepareEvaluation ppe tm ctx = do missing <- cacheAdd rgrp (ccache ctx) when (not . null $ missing) . fail $ - reportBug "E029347" $ "Error in prepareEvaluation, cache is missing: " <> show missing + reportBug "E029347" $ + "Error in prepareEvaluation, cache is missing: " <> show missing (,) (backrefAdd rbkr ctx) <$> refNumTm (ccache ctx) rmn where (rmn, rtms) @@ -366,20 +369,19 @@ evalInContext ppe ctx activeThreads w = do prettyError (PE _ p) = p prettyError (BU tr nm c) = either id (bugMsg ppe tr nm) $ decom c - tr tx c = case decom c of - Right dv -> do - putStrLn $ "trace: " ++ UT.unpack tx - putStrLn . toANSI 50 $ pretty ppe dv - Left _ -> do - putStrLn $ "trace: " ++ UT.unpack tx - putStrLn "Couldn't decompile value." - print c + debugText fancy c = case decom c of + Right dv -> SimpleTrace . fmt $ pretty ppe dv + Left _ -> MsgTrace ("Couldn't decompile value") (show c) + where + fmt + | fancy = toANSI 50 + | otherwise = toPlain 50 result <- traverse (const $ readIORef r) . first prettyError <=< try - $ apply0 (Just hook) ((ccache ctx) {tracer = tr}) activeThreads w + $ apply0 (Just hook) ((ccache ctx) {tracer = debugText}) activeThreads w pure $ decom =<< result executeMainComb :: @@ -412,7 +414,8 @@ bugMsg ppe tr name tm | name == "blank expression" = P.callout icon . P.lines $ [ P.wrap - ( "I encountered a" <> P.red (P.text name) + ( "I encountered a" + <> P.red (P.text name) <> "with the following name/message:" ), "", @@ -423,7 +426,8 @@ bugMsg ppe tr name tm | "pattern match failure" `isPrefixOf` name = P.callout icon . 
P.lines $ [ P.wrap - ( "I've encountered a" <> P.red (P.text name) + ( "I've encountered a" + <> P.red (P.text name) <> "while scrutinizing:" ), "", @@ -447,7 +451,8 @@ bugMsg ppe tr name tm "pattern match failure" `isPrefixOf` msg = P.callout icon . P.lines $ [ P.wrap - ( "I've encountered a" <> P.red (P.text msg) + ( "I've encountered a" + <> P.red (P.text msg) <> "while scrutinizing:" ), "", @@ -461,7 +466,8 @@ bugMsg ppe tr name tm bugMsg ppe tr name tm = P.callout icon . P.lines $ [ P.wrap - ( "I've encountered a call to" <> P.red (P.text name) + ( "I've encountered a call to" + <> P.red (P.text name) <> "with the following value:" ), "", @@ -553,7 +559,7 @@ startRuntime sandboxed runtimeHost version = do ioTestType = builtinTest External } -withRuntime :: MonadUnliftIO m => Bool -> RuntimeHost -> Text -> (Runtime Symbol -> m a) -> m a +withRuntime :: (MonadUnliftIO m) => Bool -> RuntimeHost -> Text -> (Runtime Symbol -> m a) -> m a withRuntime sandboxed runtimeHost version action = UnliftIO.bracket (liftIO $ startRuntime sandboxed runtimeHost version) (liftIO . terminate) action @@ -580,7 +586,7 @@ data StoredCache (Map Reference (Set Reference)) deriving (Show) -putStoredCache :: MonadPut m => StoredCache -> m () +putStoredCache :: (MonadPut m) => StoredCache -> m () putStoredCache (SCache cs crs trs ftm fty int rtm rty sbs) = do putEnumMap putNat (putEnumMap putNat putComb) cs putEnumMap putNat putReference crs @@ -592,7 +598,7 @@ putStoredCache (SCache cs crs trs ftm fty int rtm rty sbs) = do putMap putReference putNat rty putMap putReference (putFoldable putReference) sbs -getStoredCache :: MonadGet m => m StoredCache +getStoredCache :: (MonadGet m) => m StoredCache getStoredCache = SCache <$> getEnumMap getNat (getEnumMap getNat getComb) @@ -618,9 +624,7 @@ restoreCache (SCache cs crs trs ftm fty int rtm rty sbs) = <*> newTVarIO (rty <> builtinTypeNumbering) <*> newTVarIO (sbs <> baseSandboxInfo) where - uglyTrace tx c = do - putStrLn $ "trace: " ++ UT.unpack tx - print c + uglyTrace _ c = SimpleTrace $ show c rns = emptyRNs {dnum = refLookup "ty" builtinTypeNumbering} rf k = builtinTermBackref ! k combs = diff --git a/parser-typechecker/src/Unison/Runtime/MCode.hs b/parser-typechecker/src/Unison/Runtime/MCode.hs index 3c5c05514..62c1ad360 100644 --- a/parser-typechecker/src/Unison/Runtime/MCode.hs +++ b/parser-typechecker/src/Unison/Runtime/MCode.hs @@ -387,6 +387,8 @@ data BPrim1 | CVLD -- validate | VALU | TLTT -- value, Term.Link.toText + -- debug + | DBTX -- debug text deriving (Show, Eq, Ord) data BPrim2 @@ -415,8 +417,8 @@ data BPrim2 | IDXB | CATB -- take,drop,index,append -- general - | THRO - | TRCE -- throw + | THRO -- throw + | TRCE -- trace -- code | SDBX -- sandbox deriving (Show, Eq, Ord) @@ -629,7 +631,7 @@ ctx vs cs = pushCtx (zip vs cs) ECtx -- Look up a variable in the context, getting its position on the -- relevant stack and its calling convention if it is there. 
-ctxResolve :: Var v => Ctx v -> v -> Maybe (Int, Mem) +ctxResolve :: (Var v) => Ctx v -> v -> Maybe (Int, Mem) ctxResolve ctx v = walk 0 0 ctx where walk _ _ ECtx = Nothing @@ -653,7 +655,7 @@ catCtx (Block l) r = Block $ catCtx l r catCtx (Var v m l) r = Var v m $ catCtx l r -- Split the context after a particular variable -breakAfter :: Eq v => (v -> Bool) -> Ctx v -> (Ctx v, Ctx v) +breakAfter :: (Eq v) => (v -> Bool) -> Ctx v -> (Ctx v, Ctx v) breakAfter _ ECtx = (ECtx, ECtx) breakAfter p (Tag vs) = first Tag $ breakAfter p vs breakAfter p (Block vs) = first Block $ breakAfter p vs @@ -665,13 +667,13 @@ breakAfter p (Var v m vs) = (Var v m lvs, rvs) -- Modify the context to contain the variables introduced by an -- unboxed sum -sumCtx :: Var v => Ctx v -> v -> [(v, Mem)] -> Ctx v +sumCtx :: (Var v) => Ctx v -> v -> [(v, Mem)] -> Ctx v sumCtx ctx v vcs | (lctx, rctx) <- breakAfter (== v) ctx = catCtx lctx $ pushCtx vcs rctx -- Look up a variable in the top let rec context -rctxResolve :: Var v => RCtx v -> v -> Maybe Word64 +rctxResolve :: (Var v) => RCtx v -> v -> Maybe Word64 rctxResolve ctx u = M.lookup u ctx -- Compile a top-level definition group to a collection of combinators. @@ -679,7 +681,7 @@ rctxResolve ctx u = M.lookup u ctx -- and intra-group calls are numbered locally, with 0 specifying -- the global entry point. emitCombs :: - Var v => + (Var v) => RefNums -> Reference -> Word64 -> @@ -751,7 +753,7 @@ countCtx0 ui bi (Block ctx) = countCtx0 ui bi ctx countCtx0 ui bi ECtx = (ui, bi) emitComb :: - Var v => + (Var v) => RefNums -> Reference -> Word64 -> @@ -768,7 +770,7 @@ addCount i j = onCount $ \(C u b x) -> C (u + i) (b + j) x -- Emit a machine code section from an ANF term emitSection :: - Var v => + (Var v) => RefNums -> Reference -> Word64 -> @@ -804,14 +806,16 @@ emitSection _ _ grpn rec ctx (TVar v) emitSection _ _ grpn _ ctx (TPrm p args) = -- 3 is a conservative estimate of how many extra stack slots -- a prim op will need for its results. - addCount 3 3 . countCtx ctx + addCount 3 3 + . countCtx ctx . Ins (emitPOp p $ emitArgs grpn ctx args) . Yield $ DArgV i j where (i, j) = countBlock ctx emitSection _ _ grpn _ ctx (TFOp p args) = - addCount 3 3 . countCtx ctx + addCount 3 3 + . countCtx ctx . Ins (emitFOp p $ emitArgs grpn ctx args) . Yield $ DArgV i j @@ -897,7 +901,7 @@ emitSection _ _ _ _ _ tm = -- Emit the code for a function call emitFunction :: - Var v => + (Var v) => RefNums -> Word64 -> -- self combinator number RCtx v -> -- recursive binding group @@ -1014,7 +1018,7 @@ litArg _ = UArg1 0 -- require a machine code Let, which uses more complicated stack -- manipulation. 
emitLet :: - Var v => + (Var v) => RefNums -> Reference -> Word64 -> @@ -1045,7 +1049,8 @@ emitLet rns grpr grpn rec d vcs ctx bnd internalBug $ "unsupported compound direct let: " ++ show bnd | Indirect w <- d = \esect -> - f <$> emitSection rns grpr grpn rec (Block ctx) bnd + f + <$> emitSection rns grpr grpn rec (Block ctx) bnd <*> record (pushCtx vcs ctx) w esect where f s w = Let s (CIx grpr grpn w) @@ -1176,6 +1181,7 @@ emitPOp ANF.SDBX = emitBP2 SDBX -- error call emitPOp ANF.EROR = emitBP2 THRO emitPOp ANF.TRCE = emitBP2 TRCE +emitPOp ANF.DBTX = emitBP1 DBTX -- non-prim translations emitPOp ANF.BLDS = Seq emitPOp ANF.FORK = \case @@ -1237,7 +1243,7 @@ emitBP2 p a = ++ show (p, a) emitDataMatching :: - Var v => + (Var v) => Reference -> RefNums -> Reference -> @@ -1263,7 +1269,7 @@ emitDataMatching r rns grpr grpn rec ctx cs df = -- already there, but it was unknown how many there were until -- branching on the tag. emitSumMatching :: - Var v => + (Var v) => RefNums -> Reference -> Word64 -> @@ -1279,7 +1285,7 @@ emitSumMatching rns grpr grpn rec ctx v i cs = edf = Die "uncovered unboxed sum case" emitRequestMatching :: - Var v => + (Var v) => RefNums -> Reference -> Word64 -> @@ -1298,8 +1304,8 @@ emitRequestMatching rns grpr grpn rec ctx hs df = MatchW 0 edf <$> tops edf = Die "unhandled ability" emitLitMatching :: - Var v => - Traversable f => + (Var v) => + (Traversable f) => (Int -> Section -> f Section -> Section) -> String -> RefNums -> @@ -1319,7 +1325,7 @@ emitLitMatching con err rns grpr grpn rec ctx i cs df = | otherwise = countCtx ctx $ Die err emitCase :: - Var v => + (Var v) => RefNums -> Reference -> Word64 -> @@ -1331,7 +1337,7 @@ emitCase rns grpr grpn rec ctx (ccs, TAbss vs bo) = emitSection rns grpr grpn rec (Tag $ pushCtx (zip vs ccs) ctx) bo emitSumCase :: - Var v => + (Var v) => RefNums -> Reference -> Word64 -> @@ -1361,7 +1367,7 @@ emitLit l = Lit $ case l of -- these allocations and passes the appropriate context into the -- provided continuation. emitClosures :: - Var v => + (Var v) => Word64 -> RCtx v -> Ctx v -> @@ -1379,12 +1385,14 @@ emitClosures grpn rec ctx args k = | otherwise = internalBug $ "emitClosures: unknown reference: " ++ show a -emitArgs :: Var v => Word64 -> Ctx v -> [v] -> Args +emitArgs :: (Var v) => Word64 -> Ctx v -> [v] -> Args emitArgs grpn ctx args | Just l <- traverse (ctxResolve ctx) args = demuxArgs l | otherwise = internalBug $ - "emitArgs[" ++ show grpn ++ "]: " + "emitArgs[" + ++ show grpn + ++ "]: " ++ "could not resolve argument variables: " ++ show args @@ -1428,6 +1436,9 @@ sectionTypes _ = [] instrTypes :: Instr -> [Word64] instrTypes (Pack _ w _) = [w `shiftR` 16] +instrTypes (Reset ws) = setToList ws +instrTypes (Capture w) = [w] +instrTypes (SetDyn w _) = [w] instrTypes _ = [] branchDeps :: Branch -> [Word64] @@ -1463,7 +1474,10 @@ prettyCombs w es = prettyComb :: Word64 -> Word64 -> Comb -> ShowS prettyComb w i (Lam ua ba _ _ s) = - shows w . showString ":" . shows i . shows [ua, ba] + shows w + . showString ":" + . shows i + . shows [ua, ba] . showString ":\n" . prettySection 2 s @@ -1480,13 +1494,16 @@ prettySection ind sec = Jump i as -> showString "Jump " . shows i . showString " " . prettyArgs as Match i bs -> - showString "Match " . shows i . showString "\n" + showString "Match " + . shows i + . showString "\n" . prettyBranches (ind + 1) bs Yield as -> showString "Yield " . prettyArgs as Ins i nx -> prettyIns i . showString "\n" . prettySection ind nx Let s n -> - showString "Let\n" . 
prettySection (ind + 2) s + showString "Let\n" + . prettySection (ind + 2) s . showString "\n" . indent ind . prettyIx n @@ -1495,7 +1512,8 @@ prettySection ind sec = prettyIx :: CombIx -> ShowS prettyIx (CIx _ c s) = - showString "Resume[" . shows c + showString "Resume[" + . shows c . showString "," . shows s . showString "]" @@ -1512,10 +1530,16 @@ prettyBranches ind bs = where pdf e = indent ind . showString "DFLT ->\n" . prettySection (ind + 1) e ptcase t e = - showString "\n" . indent ind . shows t . showString " ->\n" + showString "\n" + . indent ind + . shows t + . showString " ->\n" . prettySection (ind + 1) e picase i e = - showString "\n" . indent ind . shows i . showString " ->\n" + showString "\n" + . indent ind + . shows i + . showString " ->\n" . prettySection (ind + 1) e un :: ShowS @@ -1526,7 +1550,8 @@ bx = ('B' :) prettyIns :: Instr -> ShowS prettyIns (Pack r i as) = - showString "Pack " . showsPrec 10 r + showString "Pack " + . showsPrec 10 r . (' ' :) . shows i . (' ' :) @@ -1543,13 +1568,17 @@ prettyArgs (DArg2 i j) = un . shows [i] . (' ' :) . bx . shows [j] prettyArgs (UArgR i l) = un . shows (Prelude.take l [i ..]) prettyArgs (BArgR i l) = bx . shows (Prelude.take l [i ..]) prettyArgs (DArgR i l j k) = - un . shows (Prelude.take l [i ..]) . (' ' :) + un + . shows (Prelude.take l [i ..]) + . (' ' :) . bx . shows (Prelude.take k [j ..]) prettyArgs (UArgN v) = un . shows (primArrayToList v) prettyArgs (BArgN v) = bx . shows (primArrayToList v) prettyArgs (DArgN u b) = - un . shows (primArrayToList u) . (' ' :) + un + . shows (primArrayToList u) + . (' ' :) . bx . shows (primArrayToList b) prettyArgs (DArgV i j) = ('V' :) . shows [i, j] diff --git a/parser-typechecker/src/Unison/Runtime/MCode/Serialize.hs b/parser-typechecker/src/Unison/Runtime/MCode/Serialize.hs index 76c22a51e..dbeaa226a 100644 --- a/parser-typechecker/src/Unison/Runtime/MCode/Serialize.hs +++ b/parser-typechecker/src/Unison/Runtime/MCode/Serialize.hs @@ -19,11 +19,11 @@ import Unison.Runtime.MCode hiding (MatchT) import Unison.Runtime.Serialize import qualified Unison.Util.Text as Util.Text -putComb :: MonadPut m => Comb -> m () +putComb :: (MonadPut m) => Comb -> m () putComb (Lam ua ba uf bf body) = pInt ua *> pInt ba *> pInt uf *> pInt bf *> putSection body -getComb :: MonadGet m => m Comb +getComb :: (MonadGet m) => m Comb getComb = Lam <$> gInt <*> gInt <*> gInt <*> gInt <*> getSection data SectionT @@ -59,7 +59,7 @@ instance Tag SectionT where word2tag 8 = pure ExitT word2tag i = unknownTag "SectionT" i -putSection :: MonadPut m => Section -> m () +putSection :: (MonadPut m) => Section -> m () putSection (App b r a) = putTag AppT *> serialize b *> putRef r *> putArgs a putSection (Call b w a) = @@ -79,7 +79,7 @@ putSection (Die s) = putSection Exit = putTag ExitT -getSection :: MonadGet m => m Section +getSection :: (MonadGet m) => m Section getSection = getTag >>= \case AppT -> App <$> deserialize <*> getRef <*> getArgs @@ -152,7 +152,7 @@ instance Tag InstrT where word2tag 17 = pure TryForceT word2tag n = unknownTag "InstrT" n -putInstr :: MonadPut m => Instr -> m () +putInstr :: (MonadPut m) => Instr -> m () putInstr (UPrim1 up i) = putTag UPrim1T *> putTag up *> pInt i putInstr (UPrim2 up i j) = @@ -190,7 +190,7 @@ putInstr (Seq a) = putInstr (TryForce i) = putTag TryForceT *> pInt i -getInstr :: MonadGet m => m Instr +getInstr :: (MonadGet m) => m Instr getInstr = getTag >>= \case UPrim1T -> UPrim1 <$> getTag <*> gInt @@ -257,7 +257,7 @@ instance Tag ArgsT where word2tag 12 = pure DArgVT 
word2tag n = unknownTag "ArgsT" n -putArgs :: MonadPut m => Args -> m () +putArgs :: (MonadPut m) => Args -> m () putArgs ZArgs = putTag ZArgsT putArgs (UArg1 i) = putTag UArg1T *> pInt i putArgs (UArg2 i j) = putTag UArg1T *> pInt i *> pInt j @@ -274,7 +274,7 @@ putArgs (DArgN ua ba) = putTag DArgNT *> putIntArr ua *> putIntArr ba putArgs (DArgV i j) = putTag DArgVT *> pInt i *> pInt j -getArgs :: MonadGet m => m Args +getArgs :: (MonadGet m) => m Args getArgs = getTag >>= \case ZArgsT -> pure ZArgs @@ -303,22 +303,22 @@ instance Tag RefT where word2tag 2 = pure DynT word2tag n = unknownTag "RefT" n -putRef :: MonadPut m => Ref -> m () +putRef :: (MonadPut m) => Ref -> m () putRef (Stk i) = putTag StkT *> pInt i putRef (Env i j) = putTag EnvT *> pWord i *> pWord j putRef (Dyn i) = putTag DynT *> pWord i -getRef :: MonadGet m => m Ref +getRef :: (MonadGet m) => m Ref getRef = getTag >>= \case StkT -> Stk <$> gInt EnvT -> Env <$> gWord <*> gWord DynT -> Dyn <$> gWord -putCombIx :: MonadPut m => CombIx -> m () +putCombIx :: (MonadPut m) => CombIx -> m () putCombIx (CIx r n i) = putReference r *> pWord n *> pWord i -getCombIx :: MonadGet m => m CombIx +getCombIx :: (MonadGet m) => m CombIx getCombIx = CIx <$> getReference <*> gWord <*> gWord data MLitT = MIT | MDT | MTT | MMT | MYT @@ -337,14 +337,14 @@ instance Tag MLitT where word2tag 4 = pure MYT word2tag n = unknownTag "MLitT" n -putLit :: MonadPut m => MLit -> m () +putLit :: (MonadPut m) => MLit -> m () putLit (MI i) = putTag MIT *> pInt i putLit (MD d) = putTag MDT *> putFloat d putLit (MT t) = putTag MTT *> putText (Util.Text.toText t) putLit (MM r) = putTag MMT *> putReferent r putLit (MY r) = putTag MYT *> putReference r -getLit :: MonadGet m => m MLit +getLit :: (MonadGet m) => m MLit getLit = getTag >>= \case MIT -> MI <$> gInt @@ -367,7 +367,7 @@ instance Tag BranchT where word2tag 3 = pure TestTT word2tag n = unknownTag "BranchT" n -putBranch :: MonadPut m => Branch -> m () +putBranch :: (MonadPut m) => Branch -> m () putBranch (Test1 w s d) = putTag Test1T *> pWord w *> putSection s *> putSection d putBranch (Test2 a sa b sb d) = @@ -382,32 +382,34 @@ putBranch (TestW d m) = putBranch (TestT d m) = putTag TestTT *> putSection d *> putMap (putText . 
Util.Text.toText) putSection m -getBranch :: MonadGet m => m Branch +getBranch :: (MonadGet m) => m Branch getBranch = getTag >>= \case Test1T -> Test1 <$> gWord <*> getSection <*> getSection Test2T -> - Test2 <$> gWord <*> getSection + Test2 + <$> gWord + <*> getSection <*> gWord <*> getSection <*> getSection TestWT -> TestW <$> getSection <*> getEnumMap gWord getSection TestTT -> TestT <$> getSection <*> getMap (Util.Text.fromText <$> getText) getSection -gInt :: MonadGet m => m Int +gInt :: (MonadGet m) => m Int gInt = unVarInt <$> deserialize -pInt :: MonadPut m => Int -> m () +pInt :: (MonadPut m) => Int -> m () pInt i = serialize (VarInt i) -gWord :: MonadGet m => m Word64 +gWord :: (MonadGet m) => m Word64 gWord = unVarInt <$> deserialize -pWord :: MonadPut m => Word64 -> m () +pWord :: (MonadPut m) => Word64 -> m () pWord w = serialize (VarInt w) -putIntArr :: MonadPut m => PrimArray Int -> m () +putIntArr :: (MonadPut m) => PrimArray Int -> m () putIntArr pa = putFoldable pInt $ toList pa -getIntArr :: MonadGet m => m (PrimArray Int) +getIntArr :: (MonadGet m) => m (PrimArray Int) getIntArr = fromList <$> getList gInt diff --git a/parser-typechecker/src/Unison/Runtime/Machine.hs b/parser-typechecker/src/Unison/Runtime/Machine.hs index 35eda4feb..e54e5aa03 100644 --- a/parser-typechecker/src/Unison/Runtime/Machine.hs +++ b/parser-typechecker/src/Unison/Runtime/Machine.hs @@ -15,8 +15,6 @@ import Control.Exception import Data.Bits import qualified Data.Map.Strict as M import Data.Ord (comparing) -import qualified Data.Primitive.Array as PA -import qualified Data.Primitive.PrimArray as PA import qualified Data.Sequence as Sq import qualified Data.Set as S import qualified Data.Set as Set @@ -40,6 +38,7 @@ import Unison.Runtime.ANF as ANF valueLinks, ) import qualified Unison.Runtime.ANF as ANF +import Unison.Runtime.Array as PA import Unison.Runtime.Builtin import Unison.Runtime.Exception import Unison.Runtime.Foreign @@ -52,7 +51,6 @@ import qualified Unison.Type as Rf import qualified Unison.Util.Bytes as By import Unison.Util.EnumContainers as EC import Unison.Util.Pretty (toPlainUnbroken) -import Unison.Util.Text (Text) import qualified Unison.Util.Text as Util.Text import UnliftIO (IORef) import qualified UnliftIO @@ -70,11 +68,16 @@ type Tag = Word64 -- dynamic environment type DEnv = EnumMap Word64 Closure +data Tracer + = NoTrace + | MsgTrace String String + | SimpleTrace String + -- code caching environment data CCache = CCache { foreignFuncs :: EnumMap Word64 ForeignFunc, sandboxed :: Bool, - tracer :: Unison.Util.Text.Text -> Closure -> IO (), + tracer :: Bool -> Closure -> Tracer, combs :: TVar (EnumMap Word64 Combs), combRefs :: TVar (EnumMap Word64 Reference), tagRefs :: TVar (EnumMap Word64 Reference), @@ -121,7 +124,7 @@ baseCCache sandboxed = do <*> newTVarIO baseSandboxInfo where ffuncs | sandboxed = sandboxedForeigns | otherwise = builtinForeigns - noTrace _ _ = pure () + noTrace _ _ = NoTrace ftm = 1 + maximum builtinTermNumbering fty = 1 + maximum builtinTypeNumbering @@ -132,7 +135,7 @@ baseCCache sandboxed = do (\k v -> let r = builtinTermBackref ! k in emitComb @Symbol rns r k mempty (0, v)) numberedTermLookup -info :: Show a => String -> a -> IO () +info :: (Show a) => String -> a -> IO () info ctx x = infos ctx (show x) infos :: String -> String -> IO () @@ -341,7 +344,8 @@ exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 LOAD i) Left miss -> do poke ustk 0 pokeS bstk $ - Sq.fromList $ Foreign . Wrap Rf.termLinkRef . 
Ref <$> miss + Sq.fromList $ + Foreign . Wrap Rf.termLinkRef . Ref <$> miss Right x -> do poke ustk 1 poke bstk x @@ -352,6 +356,23 @@ exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 VALU i) = do bstk <- bump bstk pokeBi bstk =<< reflectValue m c pure (denv, ustk, bstk, k) +exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 DBTX i) + | sandboxed env = + die "attempted to use sandboxed operation: Debug.toText" + | otherwise = do + clo <- peekOff bstk i + ustk <- bump ustk + bstk <- case tracer env False clo of + NoTrace -> bstk <$ poke ustk 0 + MsgTrace _ tx -> do + poke ustk 1 + bstk <- bump bstk + bstk <$ pokeBi bstk (Util.Text.pack tx) + SimpleTrace tx -> do + poke ustk 2 + bstk <- bump bstk + bstk <$ pokeBi bstk (Util.Text.pack tx) + pure (denv, ustk, bstk, k) exec !_ !denv !_activeThreads !ustk !bstk !k _ (BPrim1 op i) = do (ustk, bstk) <- bprim1 ustk bstk op i pure (denv, ustk, bstk, k) @@ -384,7 +405,15 @@ exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim2 TRCE i j) | otherwise = do tx <- peekOffBi bstk i clo <- peekOff bstk j - tracer env tx clo + case tracer env True clo of + NoTrace -> pure () + SimpleTrace str -> do + putStrLn $ "trace: " ++ Util.Text.unpack tx + putStrLn str + MsgTrace msg str -> do + putStrLn $ "trace: " ++ Util.Text.unpack tx + putStrLn msg + putStrLn str pure (denv, ustk, bstk, k) exec !_ !denv !_trackThreads !ustk !bstk !k _ (BPrim2 op i j) = do (ustk, bstk) <- bprim2 ustk bstk op i j @@ -492,6 +521,8 @@ encodeExn ustk bstk (Left exn) = do (Rf.stmFailureRef, disp be, unitValue) | Just (be :: BlockedIndefinitelyOnMVar) <- fromException exn = (Rf.ioFailureRef, disp be, unitValue) + | Just (ie :: AsyncException) <- fromException exn = + (Rf.threadKilledFailureRef, disp ie, unitValue) | otherwise = (Rf.miscFailureRef, disp exn, unitValue) eval :: @@ -904,7 +935,8 @@ dumpData !_ !ustk !bstk (DataG _ t us bs) = do pure (ustk, bstk) dumpData !mr !_ !_ clo = die $ - "dumpData: bad closure: " ++ show clo + "dumpData: bad closure: " + ++ show clo ++ maybe "" (\r -> "\nexpected type: " ++ show r) mr {-# INLINE dumpData #-} @@ -921,7 +953,8 @@ closeArgs :: Args -> IO (Seg 'UN, Seg 'BX) closeArgs mode !ustk !bstk !useg !bseg args = - (,) <$> augSeg mode ustk useg uargs + (,) + <$> augSeg mode ustk useg uargs <*> augSeg mode bstk bseg bargs where (uargs, bargs) = case args of @@ -1338,29 +1371,33 @@ bprim1 !ustk !bstk UCNS i = pure (ustk, bstk) bprim1 !ustk !bstk TTOI i = peekOffBi bstk i >>= \t -> case readm $ Util.Text.unpack t of - Nothing -> do + Just n + | fromIntegral (minBound :: Int) <= n, + n <= fromIntegral (maxBound :: Int) -> do + ustk <- bumpn ustk 2 + poke ustk 1 + pokeOff ustk 1 (fromInteger n) + pure (ustk, bstk) + _ -> do ustk <- bump ustk poke ustk 0 pure (ustk, bstk) - Just n -> do - ustk <- bumpn ustk 2 - poke ustk 1 - pokeOff ustk 1 n - pure (ustk, bstk) where readm ('+' : s) = readMaybe s readm s = readMaybe s bprim1 !ustk !bstk TTON i = peekOffBi bstk i >>= \t -> case readMaybe $ Util.Text.unpack t of - Nothing -> do + Just n + | 0 <= n, + n <= fromIntegral (maxBound :: Word) -> do + ustk <- bumpn ustk 2 + poke ustk 1 + pokeOffN ustk 1 (fromInteger n) + pure (ustk, bstk) + _ -> do ustk <- bump ustk poke ustk 0 pure (ustk, bstk) - Just n -> do - ustk <- bumpn ustk 2 - poke ustk 1 - pokeOffN ustk 1 n - pure (ustk, bstk) bprim1 !ustk !bstk TTOF i = peekOffBi bstk i >>= \t -> case readMaybe $ Util.Text.unpack t of Nothing -> do @@ -1409,7 +1446,8 @@ bprim1 !ustk !bstk PAKT i = do bprim1 !ustk !bstk UPKT i = do t <- peekOffBi bstk i 
bstk <- bump bstk - pokeS bstk . Sq.fromList + pokeS bstk + . Sq.fromList . fmap (DataU1 Rf.charRef charTag . fromEnum) . Util.Text.unpack $ t @@ -1446,6 +1484,7 @@ bprim1 !ustk !bstk CVLD _ = pure (ustk, bstk) bprim1 !ustk !bstk TLTT _ = pure (ustk, bstk) bprim1 !ustk !bstk LOAD _ = pure (ustk, bstk) bprim1 !ustk !bstk VALU _ = pure (ustk, bstk) +bprim1 !ustk !bstk DBTX _ = pure (ustk, bstk) {-# INLINE bprim1 #-} bprim2 :: @@ -1465,7 +1504,11 @@ bprim2 !ustk !bstk DRPT i j = do n <- peekOff ustk i t <- peekOffBi bstk j bstk <- bump bstk - pokeBi bstk $ Util.Text.drop n t + -- Note; if n < 0, the Nat argument was greater than the maximum + -- signed integer. As an approximation, just return the empty + -- string, as a string larger than this would require an absurd + -- amount of memory. + pokeBi bstk $ if n < 0 then Util.Text.empty else Util.Text.drop n t pure (ustk, bstk) bprim2 !ustk !bstk CATT i j = do x <- peekOffBi bstk i @@ -1477,7 +1520,10 @@ bprim2 !ustk !bstk TAKT i j = do n <- peekOff ustk i t <- peekOffBi bstk j bstk <- bump bstk - pokeBi bstk $ Util.Text.take n t + -- Note: if n < 0, the Nat argument was greater than the maximum + -- signed integer. As an approximation, we just return the original + -- string, because it's unlikely such a large string exists. + pokeBi bstk $ if n < 0 then t else Util.Text.take n t pure (ustk, bstk) bprim2 !ustk !bstk EQLT i j = do x <- peekOffBi @Util.Text.Text bstk i @@ -1501,13 +1547,21 @@ bprim2 !ustk !bstk DRPS i j = do n <- peekOff ustk i s <- peekOffS bstk j bstk <- bump bstk - pokeS bstk $ Sq.drop n s + -- Note: if n < 0, then the Nat argument was larger than the largest + -- signed integer. Seq actually doesn't handle this well, despite it + -- being possible to build (lazy) sequences this large. So, + -- approximate by yielding the empty sequence. + pokeS bstk $ if n < 0 then Sq.empty else Sq.drop n s pure (ustk, bstk) bprim2 !ustk !bstk TAKS i j = do n <- peekOff ustk i s <- peekOffS bstk j bstk <- bump bstk - pokeS bstk $ Sq.take n s + -- Note: if n < 0, then the Nat argument was greater than the + -- largest signed integer. It is possible to build such large + -- sequences, but the internal size will actually be wrong then. So, + -- we just return the original sequence as an approximation. + pokeS bstk $ if n < 0 then s else Sq.take n s pure (ustk, bstk) bprim2 !ustk !bstk CONS i j = do x <- peekOff bstk i @@ -1577,13 +1631,17 @@ bprim2 !ustk !bstk TAKB i j = do n <- peekOff ustk i b <- peekOffBi bstk j bstk <- bump bstk - pokeBi bstk $ By.take n b + -- If n < 0, the Nat argument was larger than the maximum signed + -- integer. Building a value this large would reuire an absurd + -- amount of memory, so just assume n is larger. 
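{- A standalone sketch of why these guards test for n < 0, assuming a
   64-bit Int; the names here are illustrative only, not code from this
   module. A Nat larger than maxBound :: Int comes back as a negative
   Int once it is read off the unboxed stack, which is the situation the
   comments above and below describe.

   import Data.Word (Word64)

   main :: IO ()
   main = do
     let hugeNat = maxBound :: Word64              -- 18446744073709551615
     print (fromIntegral hugeNat :: Int)           -- -1
     print (fromIntegral (2 ^ 63 :: Word64) :: Int) -- minBound :: Int
-}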
+ pokeBi bstk $ if n < 0 then b else By.take n b pure (ustk, bstk) bprim2 !ustk !bstk DRPB i j = do n <- peekOff ustk i b <- peekOffBi bstk j bstk <- bump bstk - pokeBi bstk $ By.drop n b + -- See above for n < 0 + pokeBi bstk $ if n < 0 then By.empty else By.drop n b pure (ustk, bstk) bprim2 !ustk !bstk IDXB i j = do n <- peekOff ustk i @@ -1716,18 +1774,24 @@ resolve env _ _ (Env n i) = Just r -> pure $ PAp (CIx r n i) unull bnull Nothing -> die $ "resolve: missing reference for comb: " ++ show n resolve _ _ bstk (Stk i) = peekOff bstk i -resolve _ denv _ (Dyn i) = case EC.lookup i denv of +resolve env denv _ (Dyn i) = case EC.lookup i denv of Just clo -> pure clo - _ -> die $ "resolve: unhandled ability request: " ++ show i + Nothing -> readTVarIO (tagRefs env) >>= err + where + unhandled rs = case EC.lookup i rs of + Just r -> show r + Nothing -> show i + err rs = die $ "resolve: unhandled ability request: " ++ unhandled rs -combSection :: HasCallStack => CCache -> CombIx -> IO Comb +combSection :: (HasCallStack) => CCache -> CombIx -> IO Comb combSection env (CIx _ n i) = readTVarIO (combs env) >>= \cs -> case EC.lookup n cs of Just cmbs -> case EC.lookup i cmbs of Just cmb -> pure cmb Nothing -> die $ - "unknown section `" ++ show i + "unknown section `" + ++ show i ++ "` of combinator `" ++ show n ++ "`." @@ -1739,7 +1803,7 @@ dummyRef = Builtin (DTx.pack "dummy") reserveIds :: Word64 -> TVar Word64 -> IO Word64 reserveIds n free = atomically . stateTVar free $ \i -> (i, i + n) -updateMap :: Semigroup s => s -> TVar s -> STM s +updateMap :: (Semigroup s) => s -> TVar s -> STM s updateMap new r = stateTVar r $ \old -> let total = new <> old in (total, total) @@ -1991,7 +2055,8 @@ reifyValue0 (rty, rtm) = goV goK ANF.KE = pure KE goK (ANF.Mark ua ba ps de k) = - mrk <$> traverse refTy ps + mrk + <$> traverse refTy ps <*> traverse (\(k, v) -> (,) <$> refTy k <*> goV v) (M.toList de) <*> goK k where @@ -2003,7 +2068,8 @@ reifyValue0 (rty, rtm) = goV (fromIntegral bf) (fromIntegral ua) (fromIntegral ba) - <$> (goIx gr) <*> goK k + <$> (goIx gr) + <*> goK k goL (ANF.Text t) = pure . Foreign $ Wrap Rf.textRef t goL (ANF.List l) = Foreign . Wrap Rf.listRef <$> traverse goV l diff --git a/parser-typechecker/src/Unison/Runtime/Pattern.hs b/parser-typechecker/src/Unison/Runtime/Pattern.hs index 96ec92ebe..8bb29326a 100644 --- a/parser-typechecker/src/Unison/Runtime/Pattern.hs +++ b/parser-typechecker/src/Unison/Runtime/Pattern.hs @@ -103,7 +103,7 @@ builtinDataSpec = Map.fromList decls data PatternMatrix v = PM {_rows :: [PatternRow v]} deriving (Show) -usedVars :: Ord v => PatternMatrix v -> Set v +usedVars :: (Ord v) => PatternMatrix v -> Set v usedVars (PM rs) = foldMap usedR rs where usedR (PR ps g b) = @@ -139,12 +139,12 @@ firstRow _ _ = Nothing heuristics :: [Heuristic v] heuristics = [firstRow $ fmap loc . listToMaybe] -extractVar :: Var v => P.Pattern v -> Maybe v +extractVar :: (Var v) => P.Pattern v -> Maybe v extractVar p | P.Unbound {} <- p = Nothing | otherwise = Just (loc p) -extractVars :: Var v => [P.Pattern v] -> [v] +extractVars :: (Var v) => [P.Pattern v] -> [v] extractVars = catMaybes . fmap extractVar -- Splits a data type pattern, yielding its subpatterns. The provided @@ -158,7 +158,7 @@ extractVars = catMaybes . fmap extractVar -- but elsewhere these results are added to a list, so it is more -- convenient to yield a list here. 
decomposePattern :: - Var v => + (Var v) => Maybe Reference -> Int -> Int -> @@ -260,7 +260,7 @@ data SeqCover v -- Determines how a pattern corresponds to a sequence matching -- compilation target. -decomposeSeqP :: Var v => Set v -> SeqMatch -> P.Pattern v -> SeqCover v +decomposeSeqP :: (Var v) => Set v -> SeqMatch -> P.Pattern v -> SeqCover v decomposeSeqP _ E (P.SequenceLiteral _ []) = Cover [] decomposeSeqP _ E _ = Disjoint decomposeSeqP _ C (P.SequenceOp _ l Cons r) = Cover [l, r] @@ -316,7 +316,7 @@ decomposeSeqP _ _ _ = Overlap -- is used as the result value to indicate success or failure to match, -- because these results are accumulated into a larger list elsewhere. splitRow :: - Var v => + (Var v) => v -> Maybe Reference -> Int -> @@ -335,7 +335,7 @@ splitRow _ _ _ _ row = [([], row)] -- matched against the variable that may be collected to determine the -- cases the built-in value is matched against. splitRowBuiltin :: - Var v => + (Var v) => v -> PatternRow v -> [(P.Pattern (), [([P.Pattern v], PatternRow v)])] @@ -350,7 +350,7 @@ splitRowBuiltin _ r = [(P.Unbound (), [([], r)])] -- compilation. The outer list result is used to indicate success or -- failure. splitRowSeq :: - Var v => + (Var v) => Set v -> v -> SeqMatch -> @@ -368,7 +368,7 @@ splitRowSeq _ _ _ r = [([], r)] -- Renames the variables annotating the patterns in a row, for once a -- canonical choice has been made. -renameRow :: Var v => Map v v -> PatternRow v -> PatternRow v +renameRow :: (Var v) => Map v v -> PatternRow v -> PatternRow v renameRow m (PR p0 g0 b0) = PR p g b where access k @@ -383,7 +383,7 @@ renameRow m (PR p0 g0 b0) = PR p g b -- the variables in the first row, because it may have been generated -- by decomposing a variable or unbound pattern, which will make up -- variables for subpatterns. -chooseVars :: Var v => [[P.Pattern v]] -> [v] +chooseVars :: (Var v) => [[P.Pattern v]] -> [v] chooseVars [] = [] chooseVars ([] : rs) = chooseVars rs chooseVars ((P.Unbound {} : _) : rs) = chooseVars rs @@ -394,7 +394,7 @@ chooseVars (r : _) = extractVars r -- yields an indication of the type of the variables that the -- subpatterns match against, if possible. buildMatrix :: - Var v => + (Var v) => [([P.Pattern v], PatternRow v)] -> ([(v, PType)], PatternMatrix v) buildMatrix [] = ([], PM []) @@ -411,7 +411,7 @@ buildMatrix vrs = (zip cvs rs, PM $ fixRow <$> vrs) -- variables (although currently builtin patterns do not introduce -- variables). splitMatrixBuiltin :: - Var v => + (Var v) => v -> PatternMatrix v -> [(P.Pattern (), [(v, PType)], PatternMatrix v)] @@ -424,7 +424,7 @@ splitMatrixBuiltin v (PM rs) = $ splitRowBuiltin v =<< rs expandIrrefutable :: - Var v => + (Var v) => [(P.Pattern (), [([P.Pattern v], PatternRow v)])] -> [(P.Pattern (), [([P.Pattern v], PatternRow v)])] expandIrrefutable rss = concatMap expand rss @@ -452,7 +452,7 @@ matchPattern vrs = \case -- variables introduced for each case with their types, and new -- matricies for subsequent compilation. splitMatrixSeq :: - Var v => + (Var v) => Set v -> v -> PatternMatrix v -> @@ -474,7 +474,7 @@ splitMatrixSeq avoid v (PM rs) = -- ability match. Yields a new matrix for each constructor, with -- variables introduced and their types for each case. splitMatrix :: - Var v => + (Var v) => v -> Maybe Reference -> NCons -> @@ -490,7 +490,7 @@ splitMatrix v rf cons (PM rs) = -- prepared, and a variable renaming mapping. 
type PPM v a = State (Word64, [v], Map v v) a -freshVar :: Var v => PPM v v +freshVar :: (Var v) => PPM v v freshVar = state $ \(fw, vs, rn) -> let v = freshenId fw $ typed Pattern in (v, (fw + 1, vs, rn)) @@ -500,7 +500,7 @@ useVar = state $ \case (avoid, v : vs, rn) -> (v, (avoid, vs, rn)) _ -> error "useVar: Expected multiple vars" -renameTo :: Var v => v -> v -> PPM v () +renameTo :: (Var v) => v -> v -> PPM v () renameTo to from = modify $ \(avoid, vs, rn) -> ( avoid, @@ -538,7 +538,7 @@ normalizeSeqP p = p -- function, however, is used when a candidate variable for a pattern -- has already been chosen, as with an As pattern. This allows turning -- redundant names (like with the pattern u@v) into renamings. -prepareAs :: Var v => P.Pattern a -> v -> PPM v (P.Pattern v) +prepareAs :: (Var v) => P.Pattern a -> v -> PPM v (P.Pattern v) prepareAs (P.Unbound _) u = pure $ P.Var u prepareAs (P.As _ p) u = (useVar >>= renameTo u) *> prepareAs p u prepareAs (P.Var _) u = P.Var u <$ (renameTo u =<< useVar) @@ -563,7 +563,7 @@ prepareAs p u = pure $ u <$ p -- pattern is matching against. As patterns are eliminated and the -- variables they bind are used as candidates for what that level of -- the pattern matches against. -preparePattern :: Var v => P.Pattern a -> PPM v (P.Pattern v) +preparePattern :: (Var v) => P.Pattern a -> PPM v (P.Pattern v) preparePattern p = prepareAs p =<< freshVar buildPattern :: Bool -> ConstructorReference -> [v] -> Int -> P.Pattern () @@ -595,7 +595,7 @@ lookupAbil rf (Map.lookup rf -> Just econs) = Left cs -> Right $ fmap (1 +) cs lookupAbil rf _ = Left $ "unknown ability reference: " ++ show rf -compile :: Var v => DataSpec -> Ctx v -> PatternMatrix v -> Term v +compile :: (Var v) => DataSpec -> Ctx v -> PatternMatrix v -> Term v compile _ _ (PM []) = apps' bu [text () "pattern match failure"] where bu = ref () (Builtin "bug") @@ -638,7 +638,7 @@ compile spec ctx m@(PM (r : rs)) ty = Map.findWithDefault Unknown v ctx buildCaseBuiltin :: - Var v => + (Var v) => DataSpec -> Ctx v -> (P.Pattern (), [(v, PType)], PatternMatrix v) -> @@ -650,7 +650,7 @@ buildCaseBuiltin spec ctx0 (p, vrs, m) = ctx = Map.fromList vrs <> ctx0 buildCasePure :: - Var v => + (Var v) => DataSpec -> Ctx v -> (Int, [(v, PType)], PatternMatrix v) -> @@ -666,7 +666,7 @@ buildCasePure spec ctx0 (_, vts, m) = ctx = Map.fromList vts <> ctx0 buildCase :: - Var v => + (Var v) => DataSpec -> Reference -> Bool -> @@ -682,7 +682,7 @@ buildCase spec r eff cons ctx0 (t, vts, m) = ctx = Map.fromList vts <> ctx0 mkRow :: - Var v => + (Var v) => v -> MatchCase a (Term v) -> State Word64 (PatternRow v) @@ -706,7 +706,7 @@ mkRow sv (MatchCase (normalizeSeqP -> p0) g0 (AbsN' vs b)) = Nothing -> Nothing initialize :: - Var v => + (Var v) => PType -> Term v -> [MatchCase () (Term v)] -> @@ -722,7 +722,7 @@ initialize r sc cs = | pv <- freshenId 0 $ typed Pattern = (Just pv, pv) -splitPatterns :: Var v => DataSpec -> Term v -> Term v +splitPatterns :: (Var v) => DataSpec -> Term v -> Term v splitPatterns spec0 = visitPure $ \case Match' sc0 cs0 | ty <- determineType $ p <$> cs0, @@ -749,7 +749,7 @@ builtinCase = Rf.charRef ] -determineType :: Show a => [P.Pattern a] -> PType +determineType :: (Show a) => [P.Pattern a] -> PType determineType = foldMap f where f (P.As _ p) = f p diff --git a/parser-typechecker/src/Unison/Runtime/Serialize.hs b/parser-typechecker/src/Unison/Runtime/Serialize.hs index 6382eae29..6fae504ee 100644 --- a/parser-typechecker/src/Unison/Runtime/Serialize.hs +++ 
b/parser-typechecker/src/Unison/Runtime/Serialize.hs @@ -21,10 +21,10 @@ import Data.Text.Encoding (decodeUtf8, encodeUtf8) import qualified Data.Vector.Primitive as BA import Data.Word (Word64, Word8) import GHC.Exts as IL (IsList (..)) -import qualified U.Util.Hash as Hash import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..)) import qualified Unison.ConstructorType as CT import Unison.Hash (Hash) +import qualified Unison.Hash as Hash import Unison.Reference (Id (..), Reference (..), pattern Derived) import Unison.Referent (Referent, pattern Con, pattern Ref) import Unison.Runtime.Exception @@ -37,48 +37,51 @@ import Unison.Runtime.MCode import qualified Unison.Util.Bytes as Bytes import Unison.Util.EnumContainers as EC -unknownTag :: MonadGet m => String -> Word8 -> m a +unknownTag :: (MonadGet m) => String -> Word8 -> m a unknownTag t w = remaining >>= \r -> exn $ - "unknown " ++ t ++ " word: " ++ show w + "unknown " + ++ t + ++ " word: " + ++ show w ++ " (" ++ show (fromIntegral @_ @Int r) ++ " bytes remaining)" class Tag t where tag2word :: t -> Word8 - word2tag :: MonadGet m => Word8 -> m t + word2tag :: (MonadGet m) => Word8 -> m t -putTag :: MonadPut m => Tag t => t -> m () +putTag :: (MonadPut m) => (Tag t) => t -> m () putTag = putWord8 . tag2word -getTag :: MonadGet m => Tag t => m t +getTag :: (MonadGet m) => (Tag t) => m t getTag = word2tag =<< getWord8 -- Some basics, moved over from V1 serialization -putChar :: MonadPut m => Char -> m () +putChar :: (MonadPut m) => Char -> m () putChar = serialize . VarInt . fromEnum -getChar :: MonadGet m => m Char +getChar :: (MonadGet m) => m Char getChar = toEnum . unVarInt <$> deserialize -putFloat :: MonadPut m => Double -> m () +putFloat :: (MonadPut m) => Double -> m () putFloat = serializeBE -getFloat :: MonadGet m => m Double +getFloat :: (MonadGet m) => m Double getFloat = deserializeBE -putNat :: MonadPut m => Word64 -> m () +putNat :: (MonadPut m) => Word64 -> m () putNat = putWord64be -getNat :: MonadGet m => m Word64 +getNat :: (MonadGet m) => m Word64 getNat = getWord64be -putInt :: MonadPut m => Int64 -> m () +putInt :: (MonadPut m) => Int64 -> m () putInt = serializeBE -getInt :: MonadGet m => m Int64 +getInt :: (MonadGet m) => m Int64 getInt = deserializeBE putLength :: @@ -108,81 +111,81 @@ putFoldable putA as = do putLength (length as) traverse_ putA as -putMap :: MonadPut m => (a -> m ()) -> (b -> m ()) -> Map a b -> m () +putMap :: (MonadPut m) => (a -> m ()) -> (b -> m ()) -> Map a b -> m () putMap putA putB m = putFoldable (putPair putA putB) (Map.toList m) -getList :: MonadGet m => m a -> m [a] +getList :: (MonadGet m) => m a -> m [a] getList a = getLength >>= (`replicateM` a) getMap :: (MonadGet m, Ord a) => m a -> m b -> m (Map a b) getMap getA getB = Map.fromList <$> getList (getPair getA getB) putEnumMap :: - MonadPut m => - EnumKey k => + (MonadPut m) => + (EnumKey k) => (k -> m ()) -> (v -> m ()) -> EnumMap k v -> m () putEnumMap pk pv m = putFoldable (putPair pk pv) (mapToList m) -getEnumMap :: MonadGet m => EnumKey k => m k -> m v -> m (EnumMap k v) +getEnumMap :: (MonadGet m) => (EnumKey k) => m k -> m v -> m (EnumMap k v) getEnumMap gk gv = mapFromList <$> getList (getPair gk gv) -putEnumSet :: MonadPut m => EnumKey k => (k -> m ()) -> EnumSet k -> m () +putEnumSet :: (MonadPut m) => (EnumKey k) => (k -> m ()) -> EnumSet k -> m () putEnumSet pk s = putLength (setSize s) *> traverseSet_ pk s -getEnumSet :: MonadGet m => EnumKey k => m k -> m (EnumSet k) +getEnumSet :: 
(MonadGet m) => (EnumKey k) => m k -> m (EnumSet k) getEnumSet gk = setFromList <$> getList gk -putMaybe :: MonadPut m => Maybe a -> (a -> m ()) -> m () +putMaybe :: (MonadPut m) => Maybe a -> (a -> m ()) -> m () putMaybe Nothing _ = putWord8 0 putMaybe (Just a) putA = putWord8 1 *> putA a -getMaybe :: MonadGet m => m a -> m (Maybe a) +getMaybe :: (MonadGet m) => m a -> m (Maybe a) getMaybe getA = getWord8 >>= \tag -> case tag of 0 -> pure Nothing 1 -> Just <$> getA _ -> unknownTag "Maybe" tag -putPair :: MonadPut m => (a -> m ()) -> (b -> m ()) -> (a, b) -> m () +putPair :: (MonadPut m) => (a -> m ()) -> (b -> m ()) -> (a, b) -> m () putPair putA putB (a, b) = putA a *> putB b -getPair :: MonadGet m => m a -> m b -> m (a, b) +getPair :: (MonadGet m) => m a -> m b -> m (a, b) getPair = liftA2 (,) -getBytes :: MonadGet m => m Bytes.Bytes +getBytes :: (MonadGet m) => m Bytes.Bytes getBytes = Bytes.fromChunks <$> getList getBlock -putBytes :: MonadPut m => Bytes.Bytes -> m () +putBytes :: (MonadPut m) => Bytes.Bytes -> m () putBytes = putFoldable putBlock . Bytes.chunks -getByteArray :: MonadGet m => m PA.ByteArray +getByteArray :: (MonadGet m) => m PA.ByteArray getByteArray = PA.byteArrayFromList <$> getList getWord8 -putByteArray :: MonadPut m => PA.ByteArray -> m () +putByteArray :: (MonadPut m) => PA.ByteArray -> m () putByteArray a = putFoldable putWord8 (IL.toList a) -getBlock :: MonadGet m => m Bytes.Chunk +getBlock :: (MonadGet m) => m Bytes.Chunk getBlock = getLength >>= fmap Bytes.byteStringToChunk . getByteString -putBlock :: MonadPut m => Bytes.Chunk -> m () +putBlock :: (MonadPut m) => Bytes.Chunk -> m () putBlock b = putLength (BA.length b) *> putByteString (Bytes.chunkToByteString b) -putHash :: MonadPut m => Hash -> m () +putHash :: (MonadPut m) => Hash -> m () putHash h = do let bs = Hash.toByteString h putLength (B.length bs) putByteString bs -getHash :: MonadGet m => m Hash +getHash :: (MonadGet m) => m Hash getHash = do len <- getLength bs <- B.copy <$> Ser.getBytes len pure $ Hash.fromByteString bs -putReferent :: MonadPut m => Referent -> m () +putReferent :: (MonadPut m) => Referent -> m () putReferent = \case Ref r -> do putWord8 0 @@ -192,7 +195,7 @@ putReferent = \case putConstructorReference r putConstructorType ct -getReferent :: MonadGet m => m Referent +getReferent :: (MonadGet m) => m Referent getReferent = do tag <- getWord8 case tag of @@ -200,31 +203,31 @@ getReferent = do 1 -> Con <$> getConstructorReference <*> getConstructorType _ -> unknownTag "getReferent" tag -getConstructorType :: MonadGet m => m CT.ConstructorType +getConstructorType :: (MonadGet m) => m CT.ConstructorType getConstructorType = getWord8 >>= \case 0 -> pure CT.Data 1 -> pure CT.Effect t -> unknownTag "getConstructorType" t -putConstructorType :: MonadPut m => CT.ConstructorType -> m () +putConstructorType :: (MonadPut m) => CT.ConstructorType -> m () putConstructorType = \case CT.Data -> putWord8 0 CT.Effect -> putWord8 1 -putText :: MonadPut m => Text -> m () +putText :: (MonadPut m) => Text -> m () putText text = do let bs = encodeUtf8 text putLength $ B.length bs putByteString bs -getText :: MonadGet m => m Text +getText :: (MonadGet m) => m Text getText = do len <- getLength bs <- B.copy <$> Ser.getBytes len pure $ decodeUtf8 bs -putReference :: MonadPut m => Reference -> m () +putReference :: (MonadPut m) => Reference -> m () putReference r = case r of Builtin name -> do putWord8 0 @@ -234,7 +237,7 @@ putReference r = case r of putHash hash putLength i -getReference :: MonadGet m 
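-- Illustrative sketch, standalone and not from this patch: the put*/get* pairs above
-- share one layout, a length prefix followed by raw bytes (putText/getText,
-- putHash/getHash). This analogue shows the same shape for Text using the binary
-- package; the 64-bit big-endian length is an assumption made only for this sketch,
-- the real code goes through putLength/getLength.
import Data.Binary.Get (Get, getByteString, getWord64be, runGet)
import Data.Binary.Put (Put, putByteString, putWord64be, runPut)
import qualified Data.ByteString as B
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Text.Encoding (decodeUtf8, encodeUtf8)

putText' :: Text -> Put
putText' t = do
  let bs = encodeUtf8 t
  putWord64be (fromIntegral (B.length bs)) -- length prefix
  putByteString bs -- payload

getText' :: Get Text
getText' = do
  len <- getWord64be
  decodeUtf8 <$> getByteString (fromIntegral len)

main :: IO ()
main = print (runGet getText' (runPut (putText' (Text.pack "ability Store"))))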
=> m Reference +getReference :: (MonadGet m) => m Reference getReference = do tag <- getWord8 case tag of @@ -242,12 +245,12 @@ getReference = do 1 -> DerivedId <$> (Id <$> getHash <*> getLength) _ -> unknownTag "Reference" tag -putConstructorReference :: MonadPut m => ConstructorReference -> m () +putConstructorReference :: (MonadPut m) => ConstructorReference -> m () putConstructorReference (ConstructorReference r i) = do putReference r putLength i -getConstructorReference :: MonadGet m => m ConstructorReference +getConstructorReference :: (MonadGet m) => m ConstructorReference getConstructorReference = ConstructorReference <$> getReference <*> getLength @@ -401,6 +404,7 @@ instance Tag BPrim1 where tag2word CVLD = 22 tag2word VALU = 23 tag2word TLTT = 24 + tag2word DBTX = 25 word2tag 0 = pure SIZT word2tag 1 = pure USNC @@ -427,6 +431,7 @@ instance Tag BPrim1 where word2tag 22 = pure CVLD word2tag 23 = pure VALU word2tag 24 = pure TLTT + word2tag 25 = pure DBTX word2tag n = unknownTag "BPrim1" n instance Tag BPrim2 where diff --git a/parser-typechecker/src/Unison/Runtime/Stack.hs b/parser-typechecker/src/Unison/Runtime/Stack.hs index a22f185d5..7a08a6a3d 100644 --- a/parser-typechecker/src/Unison/Runtime/Stack.hs +++ b/parser-typechecker/src/Unison/Runtime/Stack.hs @@ -48,15 +48,13 @@ import Control.Monad (when) import Control.Monad.Primitive import Data.Foldable as F (for_) import qualified Data.Kind as Kind -import Data.Primitive.Array -import Data.Primitive.ByteArray -import Data.Primitive.PrimArray import Data.Sequence (Seq) import Data.Word import GHC.Exts as L (IsList (..)) import GHC.Stack (HasCallStack) import Unison.Reference (Reference) import Unison.Runtime.ANF as ANF (Mem (..)) +import Unison.Runtime.Array import Unison.Runtime.Foreign import Unison.Runtime.MCode import qualified Unison.Type as Ty @@ -111,12 +109,13 @@ data Closure deriving (Show, Eq, Ord) traceK :: Reference -> K -> [(Reference, Int)] -traceK begin = dedup (begin, 1) where - dedup p (Mark _ _ _ _ k) = dedup p k - dedup p@(cur,n) (Push _ _ _ _ (CIx r _ _) k) - | cur == r = dedup (cur,1+n) k - | otherwise = p : dedup (r,1) k - dedup p _ = [p] +traceK begin = dedup (begin, 1) + where + dedup p (Mark _ _ _ _ k) = dedup p k + dedup p@(cur, n) (Push _ _ _ _ (CIx r _ _) k) + | cur == r = dedup (cur, 1 + n) k + | otherwise = p : dedup (r, 1) k + dedup p _ = [p] splitData :: Closure -> Maybe (Reference, Word64, [Int], [Closure]) splitData (Enum r t) = Just (r, t, [], []) @@ -194,7 +193,7 @@ pattern CapV k ua ba us bs <- {-# COMPLETE DataC, PApV, CapV, Foreign, BlackHole #-} -marshalToForeign :: HasCallStack => Closure -> Foreign +marshalToForeign :: (HasCallStack) => Closure -> Foreign marshalToForeign (Foreign x) = x marshalToForeign c = error $ "marshalToForeign: unhandled closure: " ++ show c @@ -503,19 +502,19 @@ pokeOffD :: Stack 'UN -> Int -> Double -> IO () pokeOffD (US _ _ sp stk) i d = writeByteArray stk (sp - i) d {-# INLINE pokeOffD #-} -pokeBi :: BuiltinForeign b => Stack 'BX -> b -> IO () +pokeBi :: (BuiltinForeign b) => Stack 'BX -> b -> IO () pokeBi bstk x = poke bstk (Foreign $ wrapBuiltin x) {-# INLINE pokeBi #-} -pokeOffBi :: BuiltinForeign b => Stack 'BX -> Int -> b -> IO () +pokeOffBi :: (BuiltinForeign b) => Stack 'BX -> Int -> b -> IO () pokeOffBi bstk i x = pokeOff bstk i (Foreign $ wrapBuiltin x) {-# INLINE pokeOffBi #-} -peekBi :: BuiltinForeign b => Stack 'BX -> IO b +peekBi :: (BuiltinForeign b) => Stack 'BX -> IO b peekBi bstk = unwrapForeign . 
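-- Illustrative sketch, standalone and not from this patch: traceK above run-length
-- encodes a continuation, collapsing consecutive frames that point at the same
-- Reference into (reference, count) pairs. The same grouping over a plain list:
import Data.List (group)

dedupFrames :: (Eq r) => [r] -> [(r, Int)]
dedupFrames = map (\g -> (head g, length g)) . group -- group never yields empty lists

main :: IO ()
main = print (dedupFrames ["loop", "loop", "loop", "helper", "loop"])
-- [("loop",3),("helper",1),("loop",1)]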
marshalToForeign <$> peek bstk {-# INLINE peekBi #-} -peekOffBi :: BuiltinForeign b => Stack 'BX -> Int -> IO b +peekOffBi :: (BuiltinForeign b) => Stack 'BX -> Int -> IO b peekOffBi bstk i = unwrapForeign . marshalToForeign <$> peekOff bstk i {-# INLINE peekOffBi #-} @@ -679,7 +678,7 @@ instance MEM 'BX where asize (BS ap fp _ _) = fp - ap -frameView :: MEM b => Show (Elem b) => Stack b -> IO () +frameView :: (MEM b) => (Show (Elem b)) => Stack b -> IO () frameView stk = putStr "|" >> gof False 0 where fsz = fsize stk @@ -703,7 +702,7 @@ uscount seg = words $ sizeofByteArray seg bscount :: Seg 'BX -> Int bscount seg = sizeofArray seg -closureTermRefs :: Monoid m => (Reference -> m) -> (Closure -> m) +closureTermRefs :: (Monoid m) => (Reference -> m) -> (Closure -> m) closureTermRefs f (PAp (CIx r _ _) _ cs) = f r <> foldMap (closureTermRefs f) cs closureTermRefs f (DataB1 _ _ c) = closureTermRefs f c @@ -718,7 +717,7 @@ closureTermRefs f (Foreign fo) foldMap (closureTermRefs f) cs closureTermRefs _ _ = mempty -contTermRefs :: Monoid m => (Reference -> m) -> K -> m +contTermRefs :: (Monoid m) => (Reference -> m) -> K -> m contTermRefs f (Mark _ _ _ m k) = foldMap (closureTermRefs f) m <> contTermRefs f k contTermRefs f (Push _ _ _ _ (CIx r _ _) k) = diff --git a/parser-typechecker/src/Unison/Runtime/Vector.hs b/parser-typechecker/src/Unison/Runtime/Vector.hs index fcdfc472d..ed9422231 100644 --- a/parser-typechecker/src/Unison/Runtime/Vector.hs +++ b/parser-typechecker/src/Unison/Runtime/Vector.hs @@ -9,7 +9,7 @@ import Unison.Prelude -- A `Vec a` denotes a `Nat -> Maybe a` data Vec a where Scalar :: a -> Vec a - Vec :: UV.Unbox a => UV.Vector a -> Vec a + Vec :: (UV.Unbox a) => UV.Vector a -> Vec a Pair :: Vec a -> Vec b -> Vec (a, b) Choose :: Vec Bool -> Vec a -> Vec a -> Vec a Mux :: Vec Nat -> Vec (Vec a) -> Vec a diff --git a/parser-typechecker/src/Unison/Syntax/DeclPrinter.hs b/parser-typechecker/src/Unison/Syntax/DeclPrinter.hs index 80c4c166f..ee5f57a63 100644 --- a/parser-typechecker/src/Unison/Syntax/DeclPrinter.hs +++ b/parser-typechecker/src/Unison/Syntax/DeclPrinter.hs @@ -38,7 +38,7 @@ import qualified Unison.Var as Var type SyntaxText = S.SyntaxText' Reference prettyDecl :: - Var v => + (Var v) => PrettyPrintEnvDecl -> Reference -> HQ.HashQualified Name -> @@ -49,7 +49,7 @@ prettyDecl ppe r hq d = case d of Right dd -> prettyDataDecl ppe r hq dd prettyEffectDecl :: - Var v => + (Var v) => PrettyPrintEnv -> Reference -> HQ.HashQualified Name -> @@ -58,7 +58,7 @@ prettyEffectDecl :: prettyEffectDecl ppe r name = prettyGADT ppe CT.Effect r name . toDataDecl prettyGADT :: - Var v => + (Var v) => PrettyPrintEnv -> CT.ConstructorType -> Reference -> @@ -75,7 +75,7 @@ prettyGADT env ctorType r name dd = constructor (n, (_, _, t)) = prettyPattern env ctorType name (ConstructorReference r n) <> fmt S.TypeAscriptionColon " :" - `P.hang` TypePrinter.prettySyntax env t + `P.hang` TypePrinter.prettySyntax env t header = prettyEffectHeader name (DD.EffectDeclaration dd) <> fmt S.ControlKeyword " where" prettyPattern :: @@ -97,7 +97,7 @@ prettyPattern env ctorType namespace ref = conRef = Referent.Con ref ctorType prettyDataDecl :: - Var v => + (Var v) => PrettyPrintEnvDecl -> Reference -> HQ.HashQualified Name -> @@ -146,7 +146,7 @@ prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd = -- the expected record naming convention. fieldNames :: forall v a. 
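-- Illustrative sketch, standalone and not from this patch: closureTermRefs above uses
-- the (Monoid m) => (Reference -> m) shape to collect every reference reachable from a
-- closure. Closure' and its constructor names are hypothetical simplifications of the
-- real Closure type.
import Data.Set (Set)
import qualified Data.Set as Set

data Closure' r
  = PApC r [Closure' r] -- partial application: a code reference plus captured closures
  | DataC [Closure' r] -- data constructor holding boxed fields
  | ForeignC -- foreign value, nothing to collect

closureRefs :: (Monoid m) => (r -> m) -> Closure' r -> m
closureRefs f (PApC r cs) = f r <> foldMap (closureRefs f) cs
closureRefs f (DataC cs) = foldMap (closureRefs f) cs
closureRefs _ ForeignC = mempty

main :: IO ()
main = print (closureRefs Set.singleton (PApC "f" [DataC [PApC "g" []], ForeignC]) :: Set String)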
- Var v => + (Var v) => PrettyPrintEnv -> Reference -> HQ.HashQualified Name -> @@ -205,7 +205,7 @@ prettyModifier (DD.Unique _uid) = fmt S.DataTypeModifier "unique" -- <> ("[" <> P.text uid <> "] ") prettyDataHeader :: - Var v => HQ.HashQualified Name -> DD.DataDeclaration v a -> Pretty SyntaxText + (Var v) => HQ.HashQualified Name -> DD.DataDeclaration v a -> Pretty SyntaxText prettyDataHeader name dd = P.sepNonEmpty " " @@ -216,7 +216,7 @@ prettyDataHeader name dd = ] prettyEffectHeader :: - Var v => + (Var v) => HQ.HashQualified Name -> DD.EffectDeclaration v a -> Pretty SyntaxText @@ -232,7 +232,7 @@ prettyEffectHeader name ed = ] prettyDeclHeader :: - Var v => + (Var v) => HQ.HashQualified Name -> Either (DD.EffectDeclaration v a) (DD.DataDeclaration v a) -> Pretty SyntaxText @@ -240,7 +240,7 @@ prettyDeclHeader name (Left e) = prettyEffectHeader name e prettyDeclHeader name (Right d) = prettyDataHeader name d prettyDeclOrBuiltinHeader :: - Var v => + (Var v) => HQ.HashQualified Name -> DD.DeclOrBuiltin v a -> Pretty SyntaxText diff --git a/parser-typechecker/src/Unison/Syntax/FileParser.hs b/parser-typechecker/src/Unison/Syntax/FileParser.hs index be4adfef1..1c2fa05bd 100644 --- a/parser-typechecker/src/Unison/Syntax/FileParser.hs +++ b/parser-typechecker/src/Unison/Syntax/FileParser.hs @@ -34,10 +34,10 @@ import qualified Unison.Var as Var import qualified Unison.WatchKind as UF import Prelude hiding (readFile) -resolutionFailures :: Ord v => [Names.ResolutionFailure v Ann] -> P v x +resolutionFailures :: (Ord v) => [Names.ResolutionFailure v Ann] -> P v x resolutionFailures es = P.customFailure (ResolutionFailures es) -file :: forall v. Var v => P v (UnisonFile v Ann) +file :: forall v. (Var v) => P v (UnisonFile v Ann) file = do _ <- openBlock -- The file may optionally contain top-level imports, @@ -132,7 +132,7 @@ file = do pure uf -- | Final validations and sanity checks to perform before finishing parsing. -validateUnisonFile :: forall v. Var v => UnisonFile v Ann -> P v () +validateUnisonFile :: forall v. 
(Var v) => UnisonFile v Ann -> P v () validateUnisonFile uf = checkForDuplicateTermsAndConstructors uf @@ -190,14 +190,14 @@ data Stanza v term | Bindings [((Ann, v), term)] deriving (Foldable, Traversable, Functor) -getVars :: Var v => Stanza v term -> [v] +getVars :: (Var v) => Stanza v term -> [v] getVars = \case WatchBinding _ _ ((_, v), _) -> [v] WatchExpression _ guid _ _ -> [Var.unnamedTest guid] Binding ((_, v), _) -> [v] Bindings bs -> [v | ((_, v), _) <- bs] -stanza :: Var v => P v (Stanza v (Term v Ann)) +stanza :: (Var v) => P v (Stanza v (Term v Ann)) stanza = watchExpression <|> unexpectedAction <|> binding where unexpectedAction = failureIf (TermParser.blockTerm $> getErr) binding @@ -230,7 +230,7 @@ stanza = watchExpression <|> unexpectedAction <|> binding Nothing -> Binding binding Just doc -> Bindings [((ann doc, Var.joinDot v (Var.named "doc")), doc), binding] -watched :: Var v => P v (UF.WatchKind, Text, Ann) +watched :: (Var v) => P v (UF.WatchKind, Text, Ann) watched = P.try $ do kind <- optional wordyIdString guid <- uniqueName 10 @@ -249,7 +249,7 @@ watched = P.try $ do type Accessors v = [(L.Token v, [(L.Token v, Type v Ann)])] declarations :: - Var v => + (Var v) => P v ( Map v (DataDeclaration v Ann), @@ -260,7 +260,7 @@ declarations = do declarations <- many $ declaration <* optional semi let (dataDecls0, effectDecls) = partitionEithers declarations dataDecls = [(a, b) | (a, b, _) <- dataDecls0] - multimap :: Ord k => [(k, v)] -> Map k [v] + multimap :: (Ord k) => [(k, v)] -> Map k [v] multimap = foldl' mi Map.empty mi m (k, v) = Map.insertWith (++) k [v] m mds = multimap dataDecls @@ -280,7 +280,7 @@ declarations = do <> [(v, DD.annotation . DD.toDataDecl <$> es) | (v, es) <- Map.toList mesBad] -- unique[someguid] type Blah = ... -modifier :: Var v => P v (Maybe (L.Token DD.Modifier)) +modifier :: (Var v) => P v (Maybe (L.Token DD.Modifier)) modifier = do optional (unique <|> structural) where @@ -297,7 +297,7 @@ modifier = do pure (DD.Structural <$ tok) declaration :: - Var v => + (Var v) => P v ( Either @@ -310,13 +310,14 @@ declaration = do dataDeclaration :: forall v. 
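-- Illustrative sketch, standalone and not from this patch: the local multimap helper
-- in `declarations` above groups parsed data/ability declarations by name so that
-- duplicates can be reported. The same Map.insertWith grouping on its own (note that
-- later duplicates end up at the front of each list):
import Data.List (foldl')
import Data.Map (Map)
import qualified Data.Map as Map

multimap' :: (Ord k) => [(k, v)] -> Map k [v]
multimap' = foldl' (\m (k, v) -> Map.insertWith (++) k [v] m) Map.empty

main :: IO ()
main = print (multimap' [("Optional", 1 :: Int), ("Optional", 2), ("Remote", 3)])
-- fromList [("Optional",[2,1]),("Remote",[3])]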
- Var v => + (Var v) => Maybe (L.Token DD.Modifier) -> P v (v, DataDeclaration v Ann, Accessors v) dataDeclaration mod = do keywordTok <- fmap void (reserved "type") <|> openBlockWith "type" (name, typeArgs) <- - (,) <$> TermParser.verifyRelativeVarName prefixDefinitionName + (,) + <$> TermParser.verifyRelativeVarName prefixDefinitionName <*> many (TermParser.verifyRelativeVarName prefixDefinitionName) let typeArgVs = L.payload <$> typeArgs eq <- reserved "=" @@ -371,7 +372,7 @@ dataDeclaration mod = do ) effectDeclaration :: - Var v => Maybe (L.Token DD.Modifier) -> P v (v, EffectDeclaration v Ann) + (Var v) => Maybe (L.Token DD.Modifier) -> P v (v, EffectDeclaration v Ann) effectDeclaration mod = do keywordTok <- fmap void (reserved "ability") <|> openBlockWith "ability" name <- TermParser.verifyRelativeVarName prefixDefinitionName @@ -397,7 +398,7 @@ effectDeclaration mod = do ) where constructor :: - Var v => [L.Token v] -> L.Token v -> P v (Ann, v, Type v Ann) + (Var v) => [L.Token v] -> L.Token v -> P v (Ann, v, Type v Ann) constructor typeArgs name = explodeToken <$> TermParser.verifyRelativeVarName prefixDefinitionName diff --git a/parser-typechecker/src/Unison/Syntax/NamePrinter.hs b/parser-typechecker/src/Unison/Syntax/NamePrinter.hs index 4b6910476..49a79ab83 100644 --- a/parser-typechecker/src/Unison/Syntax/NamePrinter.hs +++ b/parser-typechecker/src/Unison/Syntax/NamePrinter.hs @@ -18,7 +18,7 @@ import qualified Unison.Util.SyntaxText as S type SyntaxText = S.SyntaxText' Reference -prettyName :: IsString s => Name -> Pretty s +prettyName :: (IsString s) => Name -> Pretty s prettyName = PP.text . Name.toText prettyHashQualified :: HQ.HashQualified Name -> Pretty SyntaxText @@ -27,7 +27,7 @@ prettyHashQualified hq = styleHashQualified' id (fmt $ S.HashQualifier hq) hq prettyHashQualified' :: HQ'.HashQualified Name -> Pretty SyntaxText prettyHashQualified' = prettyHashQualified . HQ'.toHQ -prettyHashQualified0 :: IsString s => HQ.HashQualified Name -> Pretty s +prettyHashQualified0 :: (IsString s) => HQ.HashQualified Name -> Pretty s prettyHashQualified0 = PP.text . HQ.toText -- | Pretty-print a reference as a name and the given number of characters of @@ -55,15 +55,15 @@ prettyReferent len = prettyLabeledDependency :: Int -> LabeledDependency -> Pretty SyntaxText prettyLabeledDependency len = LD.fold (prettyReference len) (prettyReferent len) -prettyShortHash :: IsString s => ShortHash -> Pretty s +prettyShortHash :: (IsString s) => ShortHash -> Pretty s prettyShortHash = fromString . 
SH.toString styleHashQualified :: - IsString s => (Pretty s -> Pretty s) -> HQ.HashQualified Name -> Pretty s + (IsString s) => (Pretty s -> Pretty s) -> HQ.HashQualified Name -> Pretty s styleHashQualified style hq = styleHashQualified' style id hq styleHashQualified' :: - IsString s => + (IsString s) => (Pretty s -> Pretty s) -> (Pretty s -> Pretty s) -> HQ.HashQualified Name -> diff --git a/parser-typechecker/src/Unison/Syntax/TermParser.hs b/parser-typechecker/src/Unison/Syntax/TermParser.hs index 7f69cb125..4d624f7d9 100644 --- a/parser-typechecker/src/Unison/Syntax/TermParser.hs +++ b/parser-typechecker/src/Unison/Syntax/TermParser.hs @@ -1,5 +1,4 @@ {-# LANGUAGE PartialTypeSignatures #-} -{-# OPTIONS_GHC -Wno-partial-type-signatures #-} module Unison.Syntax.TermParser where @@ -8,7 +7,7 @@ import qualified Data.Char as Char import Data.Foldable (foldrM) import qualified Data.List as List import qualified Data.List.Extra as List.Extra -import Data.List.NonEmpty (NonEmpty) +import Data.List.NonEmpty (NonEmpty ((:|))) import qualified Data.List.NonEmpty as NonEmpty import qualified Data.Maybe as Maybe import qualified Data.Sequence as Sequence @@ -48,7 +47,7 @@ import Unison.Var (Var) import qualified Unison.Var as Var import Prelude hiding (and, or, seq) -watch :: Show a => String -> a -> a +watch :: (Show a) => String -> a -> a watch msg a = let !_ = trace (msg ++ ": " ++ show a) () in a {- @@ -63,13 +62,13 @@ Sections / partial application of infix operators is not implemented. type TermP v = P v (Term v Ann) -term :: Var v => TermP v +term :: (Var v) => TermP v term = term2 -term2 :: Var v => TermP v +term2 :: (Var v) => TermP v term2 = lam term2 <|> term3 -term3 :: Var v => TermP v +term3 :: (Var v) => TermP v term3 = do t <- infixAppOrBooleanOp ot <- optional (reserved ":" *> TypeParser.computationType) @@ -77,10 +76,10 @@ term3 = do Nothing -> t Just y -> Term.ann (mkAnn t y) t y -keywordBlock :: Var v => TermP v +keywordBlock :: (Var v) => TermP v keywordBlock = letBlock <|> handle <|> ifthen <|> match <|> lamCase -typeLink' :: Var v => P v (L.Token Reference) +typeLink' :: (Var v) => P v (L.Token Reference) typeLink' = do id <- hqPrefixId ns <- asks names @@ -89,7 +88,7 @@ typeLink' = do | Set.size s == 1 -> pure $ const (Set.findMin s) <$> id | otherwise -> customFailure $ UnknownType id s -termLink' :: Var v => P v (L.Token Referent) +termLink' :: (Var v) => P v (L.Token Referent) termLink' = do id <- hqPrefixId ns <- asks names @@ -98,7 +97,7 @@ termLink' = do | Set.size s == 1 -> pure $ const (Set.findMin s) <$> id | otherwise -> customFailure $ UnknownTerm id s -link' :: Var v => P v (Either (L.Token Reference) (L.Token Referent)) +link' :: (Var v) => P v (Either (L.Token Reference) (L.Token Referent)) link' = do id <- hqPrefixId ns <- asks names @@ -107,7 +106,7 @@ link' = do (s, s2) | Set.size s2 == 1 && Set.null s -> pure . 
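-- Illustrative sketch, standalone and not from this patch: typeLink', termLink' and
-- link' above only succeed when a hash-qualified name resolves to exactly one
-- candidate, and otherwise fail with the ambiguity set. The resolution rule on its
-- own, with Strings standing in for references:
import Data.Set (Set)
import qualified Data.Set as Set

resolve :: (Ord a) => String -> Set a -> Either (String, Set a) a
resolve name s
  | Set.size s == 1 = Right (Set.findMin s)
  | otherwise = Left (name, s) -- empty or ambiguous: report the candidates

main :: IO ()
main = do
  print (resolve "Nat" (Set.fromList ["#abc" :: String]))
  print (resolve "List.map" (Set.fromList ["#def", "#ghi" :: String]))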
Left $ const (Set.findMin s2) <$> id (s, s2) -> customFailure $ UnknownId id s s2 -link :: Var v => TermP v +link :: (Var v) => TermP v link = termLink <|> typeLink where typeLink = do @@ -121,10 +120,10 @@ link = termLink <|> typeLink -- We disallow type annotations and lambdas, -- just function application and operators -blockTerm :: Var v => TermP v +blockTerm :: (Var v) => TermP v blockTerm = lam term <|> infixAppOrBooleanOp -match :: Var v => TermP v +match :: (Var v) => TermP v match = do start <- openBlockWith "match" scrutinee <- term @@ -141,10 +140,10 @@ match = do scrutinee (toList cases) -matchCases1 :: Var v => L.Token () -> P v (NonEmpty (Int, Term.MatchCase Ann (Term v Ann))) +matchCases1 :: (Var v) => L.Token () -> P v (NonEmpty (Int, Term.MatchCase Ann (Term v Ann))) matchCases1 start = do cases <- - sepBy1 semi matchCase + (sepBy semi matchCase) <&> \cases -> [(n, c) | (n, cs) <- cases, c <- cs] case cases of [] -> P.customFailure (EmptyMatch start) @@ -160,29 +159,35 @@ matchCases1 start = do -- -- 42, x -> ... -- (42, x) -> ... -matchCase :: Var v => P v (Int, [Term.MatchCase Ann (Term v Ann)]) +matchCase :: (Var v) => P v (Int, [Term.MatchCase Ann (Term v Ann)]) matchCase = do - pats <- sepBy1 (reserved ",") parsePattern + pats <- sepBy1 (label "\",\"" $ reserved ",") parsePattern let boundVars' = [v | (_, vs) <- pats, (_ann, v) <- vs] pat = case fst <$> pats of [p] -> p pats -> foldr pair (unit (ann . last $ pats)) pats unit ann = Pattern.Constructor ann (ConstructorReference DD.unitRef 0) [] pair p1 p2 = Pattern.Constructor (ann p1 <> ann p2) (ConstructorReference DD.pairRef 0) [p1, p2] - guardsAndBlocks <- many $ do - guard <- - asum - [ Nothing <$ P.try (reserved "|" *> quasikeyword "otherwise"), - optional $ reserved "|" *> infixAppOrBooleanOp - ] - t <- block "->" - pure (guard, t) + let guardedBlocks = label "pattern guard" . some $ do + reserved "|" + guard <- + asum + [ Nothing <$ P.try (quasikeyword "otherwise"), + Just <$> infixAppOrBooleanOp + ] + t <- block "->" + pure (guard, t) + let unguardedBlock = label "case match" $ do + t <- block "->" + pure (Nothing, t) + -- a pattern's RHS is either one or more guards, or a single unguarded block. + guardsAndBlocks <- guardedBlocks <|> (pure @[] <$> unguardedBlock) let absChain vs t = foldr (\v t -> ABT.abs' (ann t) v t) t vs let mk (guard, t) = Term.MatchCase pat (fmap (absChain boundVars') guard) (absChain boundVars' t) pure $ (length pats, mk <$> guardsAndBlocks) -parsePattern :: forall v. Var v => P v (Pattern Ann, [(Ann, v)]) -parsePattern = root +parsePattern :: forall v. (Var v) => P v (Pattern Ann, [(Ann, v)]) +parsePattern = label "pattern" root where root = chainl1 patternCandidates patternInfixApp patternCandidates = constructor <|> leaf @@ -203,7 +208,11 @@ parsePattern = root -- This order treats ambiguous patterns as nullary constructors if there's -- a constructor with a matching name. 
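-- Illustrative sketch, standalone and not from this patch: the reworked matchCase
-- above commits a case body to either one-or-more guarded arms ('| g -> body', via
-- `some`) or a single unguarded '-> body' wrapped into a one-element list. A ReadP
-- analogue of that "several guarded, otherwise one unguarded" shape; the concrete
-- token syntax here is invented for the sketch:
import Data.Char (isAlphaNum)
import Text.ParserCombinators.ReadP

word :: ReadP String
word = munch1 isAlphaNum <* skipSpaces

guarded :: ReadP (Maybe String, String)
guarded = (,) <$> (Just <$> (string "| " *> word)) <*> (string "-> " *> word)

unguarded :: ReadP (Maybe String, String)
unguarded = (\body -> (Nothing, body)) <$> (string "-> " *> word)

rhs :: ReadP [(Maybe String, String)]
rhs = many1 guarded <++ fmap (: []) unguarded

main :: IO ()
main = do
  print (readP_to_S (rhs <* eof) "| isEven -> zero | otherwise -> one")
  print (readP_to_S (rhs <* eof) "-> done")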
leaf = - literal <|> nullaryCtor <|> varOrAs <|> unbound <|> seqLiteral + literal + <|> nullaryCtor + <|> varOrAs + <|> unbound + <|> seqLiteral <|> parenthesizedOrTuplePattern <|> effect literal = (,[]) <$> asum [true, false, number, text, char] @@ -234,7 +243,7 @@ parsePattern = root else pure (Pattern.Var (ann v), [tokenToPair v]) unbound :: P v (Pattern Ann, [(Ann, v)]) unbound = (\tok -> (Pattern.Unbound (ann tok), [])) <$> blank - ctor :: CT.ConstructorType -> _ -> P v (L.Token ConstructorReference) + ctor :: CT.ConstructorType -> (L.Token (HQ.HashQualified Name) -> Set ConstructorReference -> Error v) -> P v (L.Token ConstructorReference) ctor ct err = do -- this might be a var, so we avoid consuming it at first tok <- P.try (P.lookAhead hqPrefixId) @@ -300,12 +309,12 @@ parsePattern = root where f loc = unzipPatterns ((,) . Pattern.SequenceLiteral loc) -lam :: Var v => TermP v -> TermP v +lam :: (Var v) => TermP v -> TermP v lam p = label "lambda" $ mkLam <$> P.try (some prefixDefinitionName <* reserved "->") <*> p where mkLam vs b = Term.lam' (ann (head vs) <> ann b) (map L.payload vs) b -letBlock, handle, ifthen :: Var v => TermP v +letBlock, handle, ifthen :: (Var v) => TermP v letBlock = label "let" $ block "let" handle = label "handle" $ do b <- block "handle" @@ -318,7 +327,7 @@ checkCasesArities cases@((i, _) NonEmpty.:| rest) = Nothing -> pure (i, snd <$> cases) Just (j, a) -> P.customFailure $ PatternArityMismatch i j (ann a) -lamCase :: Var v => TermP v +lamCase :: (Var v) => TermP v lamCase = do start <- openBlockWith "cases" cases <- matchCases1 start @@ -344,27 +353,27 @@ ifthen = label "if" $ do f <- block "else" pure $ Term.iff (ann start <> ann f) c t f -text :: Var v => TermP v +text :: (Var v) => TermP v text = tok Term.text <$> string -char :: Var v => TermP v +char :: (Var v) => TermP v char = tok Term.char <$> character -boolean :: Var v => TermP v +boolean :: (Var v) => TermP v boolean = ((\t -> Term.boolean (ann t) True) <$> reserved "true") <|> ((\t -> Term.boolean (ann t) False) <$> reserved "false") -list :: Var v => TermP v -> TermP v +list :: (Var v) => TermP v -> TermP v list = Parser.seq Term.list -hashQualifiedPrefixTerm :: Var v => TermP v +hashQualifiedPrefixTerm :: (Var v) => TermP v hashQualifiedPrefixTerm = resolveHashQualified =<< hqPrefixId -hashQualifiedInfixTerm :: Var v => TermP v +hashQualifiedInfixTerm :: (Var v) => TermP v hashQualifiedInfixTerm = resolveHashQualified =<< hqInfixId -quasikeyword :: Ord v => String -> P v (L.Token ()) +quasikeyword :: (Ord v) => String -> P v (L.Token ()) quasikeyword kw = queryToken $ \case L.WordyId s Nothing | s == kw -> Just () _ -> Nothing @@ -372,7 +381,7 @@ quasikeyword kw = queryToken $ \case -- If the hash qualified is name only, it is treated as a var, if it -- has a short hash, we resolve that short hash immediately and fail -- committed if that short hash can't be found in the current environment -resolveHashQualified :: Var v => L.Token (HQ.HashQualified Name) -> TermP v +resolveHashQualified :: (Var v) => L.Token (HQ.HashQualified Name) -> TermP v resolveHashQualified tok = do names <- asks names case L.payload tok of @@ -383,7 +392,7 @@ resolveHashQualified tok = do | Set.size s > 1 -> failCommitted $ UnknownTerm tok s | otherwise -> pure $ Term.fromReferent (ann tok) (Set.findMin s) -termLeaf :: forall v. Var v => TermP v +termLeaf :: forall v. 
(Var v) => TermP v termLeaf = asum [ hashQualifiedPrefixTerm, @@ -436,7 +445,7 @@ termLeaf = -- variables that will be looked up in the environment like anything else. This -- means that the documentation syntax can have its meaning changed by -- overriding what functions the names `syntax.doc*` correspond to. -doc2Block :: forall v. Var v => TermP v +doc2Block :: forall v. (Var v) => TermP v doc2Block = P.lookAhead (openBlockWith "syntax.docUntitledSection") *> elem where @@ -534,7 +543,7 @@ doc2Block = pure $ Term.apps' f [addDelay tm] _ -> regular -docBlock :: Var v => TermP v +docBlock :: (Var v) => TermP v docBlock = do openTok <- openBlockWith "[:" segs <- many segment @@ -657,7 +666,8 @@ docNormalize tm = case tm of _ -> error $ "unexpected doc structure: " ++ show tm where normalize = - Sequence.fromList . (map TupleE.fst3) + Sequence.fromList + . (map TupleE.fst3) . (tracing "after unbreakParas") . unbreakParas . (tracing "after full preprocess") @@ -675,7 +685,7 @@ docNormalize tm = case tm of seqs = map fst xs miniPreProcess seqs = zip (toList seqs) (lineStarteds seqs) unIndent :: - Ord v => + (Ord v) => [(Term v a, UnbreakCase)] -> [(Term v a, UnbreakCase)] unIndent tms = map go tms @@ -751,7 +761,9 @@ docNormalize tm = case tm of tr = const id $ trace $ - "\nprocessElement on blob " ++ (show txt) ++ ", result' = " + "\nprocessElement on blob " + ++ (show txt) + ++ ", result' = " ++ (show result') ++ ", lines: " ++ (show ls) @@ -802,7 +814,7 @@ docNormalize tm = case tm of -- See test2 in transcript doc-formatting.md for an example of how -- this looks when there is whitespace immediately following @[source] -- or @[evaluate]. - lastLines :: Show v => Sequence.Seq (Term v a) -> [Maybe UnbreakCase] + lastLines :: (Show v) => Sequence.Seq (Term v a) -> [Maybe UnbreakCase] lastLines tms = (flip fmap) (toList tms) $ \case DD.DocBlob txt -> unbreakCase txt DD.DocLink _ -> Nothing @@ -833,13 +845,15 @@ docNormalize tm = case tm of -- fighting to break free - overwriting elements that are 'shadowed' by -- a preceding element for which the predicate is true, with a copy of -- that element. - lineStarteds :: Show v => Sequence.Seq (Term v a) -> [UnbreakCase] + lineStarteds :: (Show v) => Sequence.Seq (Term v a) -> [UnbreakCase] lineStarteds tms = tr $ quenchRuns LineEnds StartsUnindented $ xs'' where tr = const id $ trace $ - "lineStarteds: xs = " ++ (show xs) ++ ", xss = " + "lineStarteds: xs = " + ++ (show xs) + ++ ", xss = " ++ (show xss) ++ ", xs' = " ++ (show xs') @@ -880,46 +894,46 @@ docNormalize tm = case tm of [] -> [] x : rest -> (fFirst x) : (map fRest rest) mapExceptLast fRest fLast = reverse . (mapExceptFirst fRest fLast) . 
reverse - tracing :: Show a => [Char] -> a -> a + tracing :: (Show a) => [Char] -> a -> a tracing when x = (const id $ trace ("at " ++ when ++ ": " ++ (show x) ++ "\n")) x blob aa ac at txt = Term.app aa (Term.constructor ac (ConstructorReference DD.docRef DD.docBlobId)) (Term.text at txt) join aa ac as segs = Term.app aa (Term.constructor ac (ConstructorReference DD.docRef DD.docJoinId)) (Term.list' as segs) - mapBlob :: Ord v => (Text -> Text) -> Term v a -> Term v a + mapBlob :: (Ord v) => (Text -> Text) -> Term v a -> Term v a -- this pattern is just `DD.DocBlob txt` but exploded to capture the annotations as well mapBlob f (aa@(Term.App' ac@(Term.Constructor' (ConstructorReference DD.DocRef DD.DocBlobId)) at@(Term.Text' txt))) = blob (ABT.annotation aa) (ABT.annotation ac) (ABT.annotation at) (f txt) mapBlob _ t = t -delayQuote :: Var v => TermP v +delayQuote :: (Var v) => TermP v delayQuote = P.label "quote" $ do start <- reserved "'" e <- termLeaf pure $ DD.delayTerm (ann start <> ann e) e -delayBlock :: Var v => TermP v +delayBlock :: (Var v) => TermP v delayBlock = P.label "do" $ do b <- block "do" pure $ DD.delayTerm (ann b) b -bang :: Var v => TermP v +bang :: (Var v) => TermP v bang = P.label "bang" $ do start <- reserved "!" e <- termLeaf pure $ DD.forceTerm (ann start <> ann e) (ann start) e -var :: Var v => L.Token v -> Term v Ann +var :: (Var v) => L.Token v -> Term v Ann var t = Term.var (ann t) (L.payload t) -seqOp :: Ord v => P v Pattern.SeqOp +seqOp :: (Ord v) => P v Pattern.SeqOp seqOp = (Pattern.Snoc <$ matchToken (L.SymbolyId ":+" Nothing)) <|> (Pattern.Cons <$ matchToken (L.SymbolyId "+:" Nothing)) <|> (Pattern.Concat <$ matchToken (L.SymbolyId "++" Nothing)) -term4 :: Var v => TermP v +term4 :: (Var v) => TermP v term4 = f <$> some termLeaf where f (func : args) = Term.apps func ((\a -> (ann func <> ann a, a)) <$> args) @@ -927,7 +941,7 @@ term4 = f <$> some termLeaf -- e.g. term4 + term4 - term4 -- or term4 || term4 && term4 -infixAppOrBooleanOp :: Var v => TermP v +infixAppOrBooleanOp :: (Var v) => TermP v infixAppOrBooleanOp = chainl1 term4 (or <|> and <|> infixApp) where or = orf <$> label "or" (reserved "||") @@ -937,25 +951,26 @@ infixAppOrBooleanOp = chainl1 term4 (or <|> and <|> infixApp) infixApp = infixAppf <$> label "infixApp" (hashQualifiedInfixTerm <* optional semi) infixAppf op lhs rhs = Term.apps' op [lhs, rhs] -typedecl :: Var v => P v (L.Token v, Type v Ann) +typedecl :: (Var v) => P v (L.Token v, Type v Ann) typedecl = - (,) <$> P.try (prefixDefinitionName <* reserved ":") + (,) + <$> P.try (prefixDefinitionName <* reserved ":") <*> TypeParser.valueType <* semi -verifyRelativeVarName :: Var v => P v (L.Token v) -> P v (L.Token v) +verifyRelativeVarName :: (Var v) => P v (L.Token v) -> P v (L.Token v) verifyRelativeVarName p = do v <- p verifyRelativeName' (Name.unsafeFromVar <$> v) pure v -verifyRelativeName :: Ord v => P v (L.Token Name) -> P v (L.Token Name) +verifyRelativeName :: (Ord v) => P v (L.Token Name) -> P v (L.Token Name) verifyRelativeName name = do name <- name verifyRelativeName' name pure name -verifyRelativeName' :: Ord v => L.Token Name -> P v () +verifyRelativeName' :: (Ord v) => L.Token Name -> P v () verifyRelativeName' name = do let txt = Name.toText . L.payload $ name when (Text.isPrefixOf "." txt && txt /= ".") $ @@ -972,20 +987,20 @@ verifyRelativeName' name = do -- (x,y) -> match [1,2,3] with -- hd +: tl | hd < 10 -> stuff -- -destructuringBind :: forall v. 
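-- Illustrative sketch, standalone and not from this patch: infixAppOrBooleanOp above
-- folds operands and infix operators left-associatively with chainl1. The same
-- left-nesting with ReadP over a toy expression type:
import Data.Char (isDigit)
import Text.ParserCombinators.ReadP

data Expr = Lit Int | App String Expr Expr deriving (Show)

expr :: ReadP Expr
expr = chainl1 lit op
  where
    lit = Lit . read <$> munch1 isDigit <* skipSpaces
    op = App <$> (munch1 (`elem` "+-*") <* skipSpaces)

main :: IO ()
main = print (readP_to_S (expr <* eof) "1 + 2 - 3")
-- parses to App "-" (App "+" (Lit 1) (Lit 2)) (Lit 3)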
Var v => P v (Ann, Term v Ann -> Term v Ann) +destructuringBind :: forall v. (Var v) => P v (Ann, Term v Ann -> Term v Ann) destructuringBind = do -- We have to look ahead as far as the `=` to know if this is a bind or -- just an action, for instance: -- Some 42 -- vs -- Some 42 = List.head elems - (p, boundVars, guard) <- P.try $ do + (p, boundVars) <- P.try $ do (p, boundVars) <- parsePattern let boundVars' = snd <$> boundVars - guard <- optional $ reserved "|" *> infixAppOrBooleanOp P.lookAhead (openBlockWith "=") - pure (p, boundVars', guard) + pure (p, boundVars') scrute <- block "=" -- Dwight K. Scrute ("The People's Scrutinee") + let guard = Nothing let absChain vs t = foldr (\v t -> ABT.abs' (ann t) v t) t vs thecase t = Term.MatchCase p (fmap (absChain boundVars) guard) $ absChain boundVars t pure $ @@ -995,8 +1010,15 @@ destructuringBind = do in Term.match a scrute [thecase t] ) -binding :: forall v. Var v => P v ((Ann, v), Term v Ann) -binding = label "binding" $ do +-- | Rules for the annotation of the resulting binding is as follows: +-- * If the binding has a type signature, the top level scope of the annotation for the type +-- Ann node will contain the _entire_ binding, including the type signature. +-- * The body expression of the binding contains the entire lhs (including the name of the +-- binding) and the entire body. +-- * If the binding is a lambda, the lambda node includes the entire LHS of the binding, +-- including the name as well. +binding :: forall v. (Var v) => P v ((Ann, v), Term v Ann) +binding = label "binding" do typ <- optional typedecl -- a ++ b = ... let infixLhs = do @@ -1014,29 +1036,30 @@ binding = label "binding" $ do case typ of Nothing -> do -- we haven't seen a type annotation, so lookahead to '=' before commit - (loc, name, args) <- P.try (lhs <* P.lookAhead (openBlockWith "=")) + (lhsLoc, name, args) <- P.try (lhs <* P.lookAhead (openBlockWith "=")) body <- block "=" verifyRelativeName' (fmap Name.unsafeFromVar name) - pure $ mkBinding loc (L.payload name) args body + pure $ mkBinding (lhsLoc <> ann body) (L.payload name) args body Just (nameT, typ) -> do - (_, name, args) <- lhs + (lhsLoc, name, args) <- lhs verifyRelativeName' (fmap Name.unsafeFromVar name) when (L.payload name /= L.payload nameT) $ - customFailure $ SignatureNeedsAccompanyingBody nameT + customFailure $ + SignatureNeedsAccompanyingBody nameT body <- block "=" pure $ fmap (\e -> Term.ann (ann nameT <> ann e) e typ) - (mkBinding (ann nameT) (L.payload name) args body) + (mkBinding (ann lhsLoc <> ann body) (L.payload name) args body) where mkBinding loc f [] body = ((loc, f), body) mkBinding loc f args body = ((loc, f), Term.lam' (loc <> ann body) (L.payload <$> args) body) -customFailure :: P.MonadParsec e s m => e -> m a +customFailure :: (P.MonadParsec e s m) => e -> m a customFailure = P.customFailure -block :: forall v. Var v => String -> TermP v +block :: forall v. 
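-- Illustrative sketch, standalone and not from this patch: after this change a
-- destructuring bind is always desugared to a single, unguarded match case (the guard
-- is fixed to Nothing). A hypothetical mini-AST showing that shape:
data Pat = PTuple [String] deriving (Show)

data Term
  = Var String
  | Match Term [(Pat, Maybe Term, Term)] -- scrutinee and (pattern, guard, body) cases
  deriving (Show)

-- 'pat = scrutinee' followed by 'body' becomes 'match scrutinee with pat -> body'.
desugarBind :: Pat -> Term -> Term -> Term
desugarBind pat scrutinee body = Match scrutinee [(pat, Nothing, body)]

main :: IO ()
main = print (desugarBind (PTuple ["x", "y"]) (Var "blah") (Var "x"))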
(Var v) => String -> TermP v block s = block' False s (openBlockWith s) closeBlock -- example: use Foo.bar.Baz + ++ x @@ -1049,7 +1072,7 @@ block s = block' False s (openBlockWith s) closeBlock -- names in the environment prefixed by `foo` -- -- todo: doesn't support use Foo.bar ++#abc, which lets you use `++` unqualified to refer to `Foo.bar.++#abc` -importp :: Ord v => P v [(Name, Name)] +importp :: (Ord v) => P v [(Name, Name)] importp = do kw <- reserved "use" -- we allow symbolyId here and parse the suffix optionaly, so we can generate @@ -1075,7 +1098,7 @@ data BlockElement v | DestructuringBind (Ann, Term v Ann -> Term v Ann) | Action (Term v Ann) -instance Show v => Show (BlockElement v) where +instance (Show v) => Show (BlockElement v) where show (Binding ((pos, name), _)) = show ("binding: " :: Text, pos, name) show (DestructuringBind (pos, _)) = show ("destructuring bind: " :: Text, pos) show (Action tm) = show ("action: " :: Text, ann tm) @@ -1083,7 +1106,7 @@ instance Show v => Show (BlockElement v) where -- subst -- use Foo.Bar + blah -- use Bar.Baz zonk zazzle -imports :: Var v => P v (NamesWithHistory, [(v, v)]) +imports :: (Var v) => P v (NamesWithHistory, [(v, v)]) imports = do let sem = P.try (semi <* P.lookAhead (reserved "use")) imported <- mconcat . reverse <$> sepBy sem importp @@ -1093,7 +1116,7 @@ imports = do -- A key feature of imports is we want to be able to say: -- `use foo.bar Baz qux` without having to specify whether `Baz` or `qux` are -- terms or types. -substImports :: Var v => NamesWithHistory -> [(v, v)] -> Term v Ann -> Term v Ann +substImports :: (Var v) => NamesWithHistory -> [(v, v)] -> Term v Ann -> Term v Ann substImports ns imports = ABT.substsInheritAnnotation [ (suffix, Term.var () full) @@ -1106,12 +1129,12 @@ substImports ns imports = NamesWithHistory.hasTypeNamed (Name.unsafeFromVar full) ns ] -block' :: Var v => IsTop -> String -> P v (L.Token ()) -> P v b -> TermP v +block' :: (Var v) => IsTop -> String -> P v (L.Token ()) -> P v (L.Token ()) -> TermP v block' isTop = block'' isTop False block'' :: forall v b. 
- Var v => + (Var v) => IsTop -> Bool -> -- `True` means insert `()` at end of block if it ends with a statement String -> @@ -1129,49 +1152,51 @@ block'' isTop implicitUnitAtEnd s openBlock closeBlock = do statement = asum [Binding <$> binding, DestructuringBind <$> destructuringBind, Action <$> blockTerm] go :: L.Token () -> [BlockElement v] -> P v (Term v Ann) go open bs = - let finish tm = case Components.minimize' tm of + let finish :: Term.Term v Ann -> TermP v + finish tm = case Components.minimize' tm of Left dups -> customFailure $ DuplicateTermNames (toList dups) Right tm -> pure tm - toTm bs = do - (bs, body) <- body bs - finish =<< foldrM step body bs + toTm :: [BlockElement v] -> TermP v + toTm [] = customFailure $ EmptyBlock (const s <$> open) + toTm (be : bes) = do + let (bs, blockResult) = determineBlockResult (be :| bes) + finish =<< foldrM step blockResult bs where - step elem body = case elem of + step :: BlockElement v -> Term v Ann -> TermP v + step elem result = case elem of Binding ((a, v), tm) -> pure $ Term.consLetRec isTop - (ann a <> ann body) + (ann a <> ann result) (a, v, tm) - body + result Action tm -> pure $ Term.consLetRec isTop - (ann tm <> ann body) + (ann tm <> ann result) (ann tm, positionalVar (ann tm) (Var.named "_"), tm) - body + result DestructuringBind (_, f) -> - f <$> finish body - body bs = case reverse bs of - Binding ((a, _v), _) : _ -> - pure $ - if implicitUnitAtEnd - then (bs, DD.unitTerm a) - else (bs, Term.var a (positionalVar a Var.missingResult)) - Action e : bs -> pure (reverse bs, e) - DestructuringBind (a, _) : _ -> - pure $ - if implicitUnitAtEnd - then (bs, DD.unitTerm a) - else (bs, Term.var a (positionalVar a Var.missingResult)) - [] -> customFailure $ EmptyBlock (const s <$> open) + f <$> finish result + determineBlockResult :: NonEmpty (BlockElement v) -> ([BlockElement v], Term v Ann) + determineBlockResult bs = case NonEmpty.reverse bs of + Binding ((a, _v), _) :| _ -> + if implicitUnitAtEnd + then (toList bs, DD.unitTerm a) + else (toList bs, Term.var a (positionalVar a Var.missingResult)) + Action e :| bs -> (reverse (toList bs), e) + DestructuringBind (a, _) :| _ -> + if implicitUnitAtEnd + then (toList bs, DD.unitTerm a) + else (toList bs, Term.var a (positionalVar a Var.missingResult)) in toTm bs -number :: Var v => TermP v +number :: (Var v) => TermP v number = number' (tok Term.int) (tok Term.nat) (tok Term.float) -bytes :: Var v => TermP v +bytes :: (Var v) => TermP v bytes = do b <- bytesToken let a = ann b @@ -1182,7 +1207,7 @@ bytes = do (Term.list a $ Term.nat a . 
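-- Illustrative sketch, standalone and not from this patch: determineBlockResult above
-- looks at the last element of a now provably non-empty block to decide what the
-- block evaluates to: a trailing action supplies the result, while a trailing binding
-- leaves a unit/"missing result" placeholder. The same NonEmpty.reverse-and-case
-- shape on its own:
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NonEmpty

data Element = Binding String | Action String deriving (Show)

blockResult :: NonEmpty Element -> ([Element], String)
blockResult es = case NonEmpty.reverse es of
  Action e :| rest -> (reverse rest, e) -- last action is the block's result
  Binding _ :| _ -> (NonEmpty.toList es, "()") -- trailing binding: keep all, add unit

main :: IO ()
main = print (blockResult (Binding "x = 1" :| [Action "x + 1"]))
-- ([Binding "x = 1"],"x + 1")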
fromIntegral <$> Bytes.toWord8s (L.payload b)) number' :: - Ord v => + (Ord v) => (L.Token Int64 -> a) -> (L.Token Word64 -> a) -> (L.Token Double -> a) -> @@ -1196,7 +1221,7 @@ number' i u f = fmap go numeric | take 1 p == "-" = i (read <$> num) | otherwise = u (read <$> num) -tupleOrParenthesizedTerm :: Var v => TermP v +tupleOrParenthesizedTerm :: (Var v) => TermP v tupleOrParenthesizedTerm = label "tuple" $ tupleOrParenthesized term DD.unitTerm pair where pair t1 t2 = diff --git a/parser-typechecker/src/Unison/Syntax/TermPrinter.hs b/parser-typechecker/src/Unison/Syntax/TermPrinter.hs index b93ddbd4e..417c12ee1 100644 --- a/parser-typechecker/src/Unison/Syntax/TermPrinter.hs +++ b/parser-typechecker/src/Unison/Syntax/TermPrinter.hs @@ -1,8 +1,22 @@ -module Unison.Syntax.TermPrinter (emptyAc, pretty, prettyBlock, prettyBlock', pretty', prettyBinding, prettyBinding', pretty0, runPretty) where +module Unison.Syntax.TermPrinter + ( emptyAc, + pretty, + prettyBlock, + prettyBlock', + pretty', + prettyBinding, + prettyBinding', + prettyBindingWithoutTypeSignature, + pretty0, + runPretty, + prettyPattern, + ) +where import Control.Lens (unsnoc, (^.)) import Control.Monad.State (evalState) import qualified Control.Monad.State as State +import Data.Char (isPrint) import Data.List import qualified Data.Map as Map import qualified Data.Set as Set @@ -51,18 +65,18 @@ import qualified Unison.Var as Var type SyntaxText = S.SyntaxText' Reference -pretty :: Var v => PrettyPrintEnv -> Term v a -> Pretty ColorText +pretty :: (Var v) => PrettyPrintEnv -> Term v a -> Pretty ColorText pretty ppe tm = PP.syntaxToColor . runPretty ppe $ pretty0 emptyAc $ printAnnotate ppe tm -prettyBlock :: Var v => Bool -> PrettyPrintEnv -> Term v a -> Pretty ColorText +prettyBlock :: (Var v) => Bool -> PrettyPrintEnv -> Term v a -> Pretty ColorText prettyBlock elideUnit ppe = PP.syntaxToColor . prettyBlock' elideUnit ppe -prettyBlock' :: Var v => Bool -> PrettyPrintEnv -> Term v a -> Pretty SyntaxText +prettyBlock' :: (Var v) => Bool -> PrettyPrintEnv -> Term v a -> Pretty SyntaxText prettyBlock' elideUnit ppe tm = runPretty ppe . pretty0 (emptyBlockAc {elideUnit = elideUnit}) $ printAnnotate ppe tm -pretty' :: Var v => Maybe Width -> PrettyPrintEnv -> Term v a -> ColorText +pretty' :: (Var v) => Maybe Width -> PrettyPrintEnv -> Term v a -> ColorText pretty' (Just width) n t = PP.render width . PP.syntaxToColor . runPretty n $ pretty0 emptyAc (printAnnotate n t) pretty' Nothing n t = @@ -74,12 +88,12 @@ data AmbientContext = AmbientContext { -- The operator precedence of the enclosing context (a number from 0 to 11, -- or -1 to render without outer parentheses unconditionally). -- Function application has precedence 10. - precedence :: Int, - blockContext :: BlockContext, - infixContext :: InfixContext, - imports :: Imports, - docContext :: DocLiteralContext, - elideUnit :: Bool -- `True` if a `()` at the end of a block should be elided + precedence :: !Int, + blockContext :: !BlockContext, + infixContext :: !InfixContext, + imports :: !Imports, + docContext :: !DocLiteralContext, + elideUnit :: !Bool -- `True` if a `()` at the end of a block should be elided } -- Description of the position of this ABT node, when viewed in the @@ -160,7 +174,7 @@ data DocLiteralContext pretty0 :: forall v m. 
- MonadPretty v m => + (MonadPretty v m) => AmbientContext -> Term3 v PrintAnnotation -> m (Pretty SyntaxText) @@ -214,6 +228,24 @@ pretty0 -- metaprograms), then it needs to be able to print them (and then the -- parser ought to be able to parse them, to maintain symmetry.) Boolean' b -> pure . fmt S.BooleanLiteral $ if b then l "true" else l "false" + Text' s + | Just quotes <- useRaw s -> + pure . fmt S.TextLiteral $ PP.text quotes <> "\n" <> PP.text s <> PP.text quotes + where + -- we only use this syntax if we're not wrapped in something else, + -- to avoid possible round trip issues if the text ends at an odd column + useRaw _ | p > 0 = Nothing + useRaw s | Text.find (== '\n') s == Just '\n' && Text.all ok s = n 3 + useRaw _ = Nothing + ok ch = isPrint ch || ch == '\n' || ch == '\r' + -- Picks smallest number of surrounding """ to be unique + n 10 = Nothing -- bail at 10, avoiding quadratic behavior in weird cases + n cur = + if null (Text.breakOnAll quotes s) + then Just quotes + else n (cur + 1) + where + quotes = Text.pack (replicate cur '"') Text' s -> pure . fmt S.TextLiteral $ l $ U.ushow s Char' c -> pure . fmt S.CharLiteral @@ -261,6 +293,10 @@ pretty0 px <- pretty0 (ac 0 Block im doc) x pure . paren (p >= 3) $ fmt S.ControlKeyword "do" `PP.hang` px + | Match' _ _ <- x -> do + px <- pretty0 (ac 0 Block im doc) x + pure . paren (p >= 3) $ + fmt S.ControlKeyword "do" `PP.hang` px | otherwise -> do px <- pretty0 (ac 10 Normal im doc) x pure . paren (p >= 11 || isBlock x && p >= 3) $ @@ -273,7 +309,8 @@ pretty0 PP.group <$> do xs' <- traverse (pretty0 (ac 0 Normal im doc)) xs pure $ - fmt S.DelimiterChar (l "[") <> optSpace + fmt S.DelimiterChar (l "[") + <> optSpace <> intercalateMap (fmt S.DelimiterChar (l ",") <> PP.softbreak <> optSpace <> optSpace) (PP.indentNAfterNewline 2) @@ -321,7 +358,7 @@ pretty0 -- blah -- See `isDestructuringBind` definition. Match' scrutinee cs@[MatchCase pat guard (AbsN' vs body)] - | p < 1 && isDestructuringBind scrutinee cs -> do + | p <= 2 && isDestructuringBind scrutinee cs -> do n <- getPPE let letIntro = case bc of Block -> id @@ -457,7 +494,7 @@ pretty0 paren (p >= 10) <$> do lastArg' <- pretty0 (ac 10 Normal im doc) lastArg booleanOps (fmt S.ControlKeyword "||") xs lastArg' - _ -> case (term, nonForcePred) of + _other -> case (term, nonForcePred) of OverappliedBinaryAppPred' f a b r | binaryOpsPred f -> -- Special case for overapplied binary op @@ -471,16 +508,21 @@ pretty0 f' <- pretty0 (ac 10 Normal im doc) f args' <- PP.spacedTraverse (pretty0 (ac 10 Normal im doc)) args pure $ f' `PP.hang` args' - _ -> case (term, \v -> nonUnitArgPred v && not (isDelay term)) of + _other -> case (term, \v -> nonUnitArgPred v && not (isDelay term)) of (LamsNamedMatch' [] branches, _) -> do pbs <- printCase im doc branches pure . paren (p >= 3) $ PP.group (fmt S.ControlKeyword "cases") `PP.hang` pbs LamsNamedPred' vs body -> do - prettyBody <- pretty0 (ac 2 Block im doc) body + prettyBody <- pretty0 (ac 2 Normal im doc) body + let hang = case body of + Delay' (Lets' _ _) -> PP.softHang + Lets' _ _ -> PP.softHang + Match' _ _ -> PP.softHang + _ -> PP.hang pure . 
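-- Illustrative sketch, standalone and not from this patch: the new Text' raw-literal
-- case above fences multi-line text with the smallest run of '"' characters, starting
-- at three and bailing out at ten, that does not occur in the text itself. The quote
-- search on its own:
import Data.Text (Text)
import qualified Data.Text as Text

rawQuotes :: Text -> Maybe Text
rawQuotes s = go (3 :: Int)
  where
    go 10 = Nothing -- give up, fall back to an ordinary escaped literal
    go cur
      | null (Text.breakOnAll quotes s) = Just quotes
      | otherwise = go (cur + 1)
      where
        quotes = Text.pack (replicate cur '"')

main :: IO ()
main = print (rawQuotes (Text.pack "line1\nsays \"\"\"hi\"\"\"\n"))
-- Just "\"\"\"\""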
paren (p >= 3) $ - PP.group (varList vs <> fmt S.ControlKeyword " ->") `PP.hang` prettyBody - _ -> go term + PP.group (varList vs <> fmt S.ControlKeyword " ->") `hang` prettyBody + _other -> go term isDelay (Delay' _) = True isDelay _ = False @@ -508,7 +550,7 @@ pretty0 printBinding (v, binding) = if Var.isAction v then pretty0 (ac (-1) Normal im doc) binding - else prettyBinding0 (ac (-1) Normal im doc) (HQ.unsafeFromVar v) binding + else renderPrettyBinding <$> prettyBinding0 (ac (-1) Normal im doc) (HQ.unsafeFromVar v) binding letIntro = case sc of Block -> id Normal -> \x -> fmt S.ControlKeyword "let" `PP.hang` x @@ -519,7 +561,7 @@ pretty0 Constructor' (ConstructorReference DD.DocRef _) -> False _ -> True - nonUnitArgPred :: Var v => v -> Bool + nonUnitArgPred :: (Var v) => v -> Bool nonUnitArgPred v = Var.name v /= "()" -- Render a binary infix operator sequence, like [(a2, f2), (a1, f1)], @@ -576,7 +618,7 @@ pretty0 prettyPattern :: forall v loc. - Var v => + (Var v) => PrettyPrintEnv -> AmbientContext -> Int -> @@ -661,7 +703,7 @@ prettyPattern n c@AmbientContext {imports = im} p vs patt = case patt of Pattern.Snoc -> f 0 ":+" Pattern.Concat -> f 0 "++" where - l :: IsString s => String -> s + l :: (IsString s) => String -> s l = fromString patterns p vs (pat : pats) = let (printed, tail_vs) = @@ -687,7 +729,7 @@ arity1Branches bs = [([pat], guard, body) | MatchCase pat guard body <- bs] -- -- Foo x y, [x,y], [(blah1 x, body1), (blah2 y, body2)] groupCases :: - Ord v => + (Ord v) => [MatchCase' () (Term3 v ann)] -> [([Pattern ()], [v], [(Maybe (Term3 v ann), Term3 v ann)])] groupCases ms = go0 ms @@ -700,7 +742,7 @@ groupCases ms = go0 ms | otherwise = (p0, vs0, reverse acc) : go0 ms printCase :: - MonadPretty v m => + (MonadPretty v m) => Imports -> DocLiteralContext -> [MatchCase' () (Term3 v PrintAnnotation)] -> @@ -743,7 +785,8 @@ printCase im doc ms0 = patLhs env vs pats = case pats of [pat] -> PP.group (fst (prettyPattern env (ac 0 Block im doc) (-1) vs pat)) - pats -> PP.group . PP.sep (PP.indentAfterNewline " " $ "," <> PP.softbreak) + pats -> PP.group + . PP.sep (PP.indentAfterNewline " " $ "," <> PP.softbreak) . (`evalState` vs) . for pats $ \pat -> do @@ -775,30 +818,66 @@ printCase im doc ms0 = <$> pretty0 (ac 2 Normal im doc) g printBody b = let (im', uses) = calcImports im b in goBody im' uses b -{- Render a binding, producing output of the form +-- A pretty term binding, split into the type signature (possibly empty) and the term. +data PrettyBinding = PrettyBinding + { typeSignature :: Maybe (Pretty SyntaxText), + term :: Pretty SyntaxText + } -foo : t -> u -foo a = ... +-- Render a pretty binding. +renderPrettyBinding :: PrettyBinding -> Pretty SyntaxText +renderPrettyBinding PrettyBinding {typeSignature, term} = + case typeSignature of + Nothing -> term + Just ty -> PP.lines [ty, term] -The first line is only output if the term has a type annotation as the -outermost constructor. +-- Render a pretty binding without a type signature. +renderPrettyBindingWithoutTypeSignature :: PrettyBinding -> Pretty SyntaxText +renderPrettyBindingWithoutTypeSignature PrettyBinding {term} = + term -Binary functions with symbolic names are output infix, as follows: - -(+) : t -> t -> t -a + b = ... - --} +-- | Render a binding, producing output of the form +-- +-- foo : t -> u +-- foo a = ... +-- +-- The first line is only output if the term has a type annotation as the +-- outermost constructor. 
+-- +-- Binary functions with symbolic names are output infix, as follows: +-- +-- (+) : t -> t -> t +-- a + b = ... prettyBinding :: - Var v => + (Var v) => PrettyPrintEnv -> HQ.HashQualified Name -> Term2 v at ap v a -> Pretty SyntaxText -prettyBinding ppe n = runPretty ppe . prettyBinding0 (ac (-1) Block Map.empty MaybeDoc) n +prettyBinding = + prettyBinding_ renderPrettyBinding + +-- | Like 'prettyBinding', but elides the type signature (if any). +prettyBindingWithoutTypeSignature :: + (Var v) => + PrettyPrintEnv -> + HQ.HashQualified Name -> + Term2 v at ap v a -> + Pretty SyntaxText +prettyBindingWithoutTypeSignature = + prettyBinding_ renderPrettyBindingWithoutTypeSignature + +prettyBinding_ :: + (Var v) => + (PrettyBinding -> Pretty SyntaxText) -> + PrettyPrintEnv -> + HQ.HashQualified Name -> + Term2 v at ap v a -> + Pretty SyntaxText +prettyBinding_ go ppe n = runPretty ppe . fmap go . prettyBinding0 (ac (-1) Block Map.empty MaybeDoc) n prettyBinding' :: - Var v => + (Var v) => PrettyPrintEnv -> Width -> HQ.HashQualified Name -> @@ -808,11 +887,11 @@ prettyBinding' ppe width v t = PP.render width . PP.syntaxToColor $ prettyBinding ppe v t prettyBinding0 :: - MonadPretty v m => + (MonadPretty v m) => AmbientContext -> HQ.HashQualified Name -> Term2 v at ap v a -> - m (Pretty SyntaxText) + m PrettyBinding prettyBinding0 a@AmbientContext {imports = im, docContext = doc} v term = go (symbolic && isBinary term) term where @@ -831,26 +910,28 @@ prettyBinding0 a@AmbientContext {imports = im, docContext = doc} v term = _ -> id tp' <- TypePrinter.pretty0 im (-1) tp tm' <- avoidCapture (prettyBinding0 a v tm) - pure $ - PP.lines - [ PP.group - ( renderName v - <> PP.hang - (fmt S.TypeAscriptionColon " :") - tp' - ), - PP.group tm' - ] + pure + PrettyBinding + { typeSignature = Just (PP.group (renderName v <> PP.hang (fmt S.TypeAscriptionColon " :") tp')), + term = PP.group (renderPrettyBinding tm') + } (printAnnotate env -> LamsNamedMatch' vs branches) -> do branches' <- printCase im doc branches - pure . PP.group $ - PP.group - ( defnLhs v vs <> fmt S.BindingEquals " =" <> " " - <> fmt - S.ControlKeyword - "cases" - ) - `PP.hang` branches' + pure + PrettyBinding + { typeSignature = Nothing, + term = + PP.group $ + PP.group + ( defnLhs v vs + <> fmt S.BindingEquals " =" + <> " " + <> fmt + S.ControlKeyword + "cases" + ) + `PP.hang` branches' + } LamsNamedOrDelay' vs body -> do -- In the case where we're being called from inside `pretty0`, this -- call to printAnnotate is unfortunately repeating work we've already @@ -861,11 +942,17 @@ prettyBinding0 a@AmbientContext {imports = im, docContext = doc} v term = -- Special case for 'let being on the same line let hang = case body' of Delay' (Lets' _ _) -> PP.softHang + Delay' (Match' _ _) -> PP.softHang _ -> PP.hang - pure . PP.group $ - PP.group (defnLhs v vs <> fmt S.BindingEquals " =") - `hang` uses [prettyBody] - t -> pure $ l "error: " <> l (show t) + pure + PrettyBinding + { typeSignature = Nothing, + term = + PP.group $ + PP.group (defnLhs v vs <> fmt S.BindingEquals " =") + `hang` uses [prettyBody] + } + t -> error ("prettyBinding0: unexpected term: " ++ show t) where defnLhs v vs | infix' = case vs of @@ -904,7 +991,7 @@ isDocLiteral term = case term of _ -> False -- Similar to DisplayValues.displayDoc, but does not follow and expand references. 
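-- Illustrative sketch, standalone and not from this patch: the new PrettyBinding
-- record above separates the rendered type signature from the rendered term so that
-- callers can include or omit the signature. The same split over plain Strings:
data PrettyBinding' = PrettyBinding'
  { typeSignature' :: Maybe String,
    term' :: String
  }

render :: PrettyBinding' -> String
render (PrettyBinding' Nothing tm) = tm
render (PrettyBinding' (Just ty) tm) = unlines [ty, tm]

renderWithoutTypeSignature :: PrettyBinding' -> String
renderWithoutTypeSignature = term'

main :: IO ()
main = putStr (render (PrettyBinding' (Just "inc : Nat -> Nat") "inc n = n + 1"))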
-prettyDoc :: Var v => PrettyPrintEnv -> Imports -> Term3 v a -> Pretty SyntaxText +prettyDoc :: (Var v) => PrettyPrintEnv -> Imports -> Term3 v a -> Pretty SyntaxText prettyDoc n im term = mconcat [ fmt S.DocDelimiter $ l "[: ", @@ -950,7 +1037,7 @@ parenIfInfix :: parenIfInfix name ic = if isSymbolic name && ic == NonInfix then paren True else id -l :: IsString s => String -> Pretty s +l :: (IsString s) => String -> Pretty s l = fromString isSymbolic :: HQ.HashQualified Name -> Bool @@ -1117,7 +1204,7 @@ instance Semigroup PrintAnnotation where instance Monoid PrintAnnotation where mempty = PrintAnnotation {usages = Map.empty} -suffixCounterTerm :: Var v => PrettyPrintEnv -> Term2 v at ap v a -> PrintAnnotation +suffixCounterTerm :: (Var v) => PrettyPrintEnv -> Term2 v at ap v a -> PrintAnnotation suffixCounterTerm n = \case Var' v -> countHQ $ HQ.unsafeFromVar v Ref' r -> countHQ $ PrettyPrintEnv.termName n (Referent.Ref r) @@ -1130,7 +1217,7 @@ suffixCounterTerm n = \case in foldMap (countPatternUsages n . pat) bs _ -> mempty -suffixCounterType :: Var v => PrettyPrintEnv -> Type v a -> PrintAnnotation +suffixCounterType :: (Var v) => PrettyPrintEnv -> Type v a -> PrintAnnotation suffixCounterType n = \case Type.Var' v -> countHQ $ HQ.unsafeFromVar v Type.Ref' r | noImportRefs r || r == Type.listRef -> mempty @@ -1140,7 +1227,7 @@ suffixCounterType n = \case printAnnotate :: (Var v, Ord v) => PrettyPrintEnv -> Term2 v at ap v a -> Term3 v PrintAnnotation printAnnotate n tm = fmap snd (go (reannotateUp (suffixCounterTerm n) tm)) where - go :: Ord v => Term2 v at ap v b -> Term2 v () () v b + go :: (Ord v) => Term2 v at ap v b -> Term2 v () () v b go = extraMap' id (const ()) (const ()) countTypeUsages :: (Var v, Ord v) => PrettyPrintEnv -> Type v a -> PrintAnnotation @@ -1241,7 +1328,8 @@ calcImports im tm = (im', render $ getUses result) -- Keep only names P.S where there is no other Q with Q.S also used in this scope. uniqueness :: Map Suffix (Map Prefix Int) -> Map Suffix (Prefix, Int) uniqueness m = - m |> Map.filter (\ps -> Map.size ps == 1) + m + |> Map.filter (\ps -> Map.size ps == 1) |> Map.map (head . Map.toList) -- Keep only names where the number of usages in this scope -- - is > 1, or @@ -1311,7 +1399,8 @@ calcImports im tm = (im', render $ getUses result) getImportMapAdditions = Map.map (\(_, s, _) -> s) getUses :: Map Name (Prefix, Suffix, Int) -> Map Prefix (Set Suffix) getUses m = - Map.elems m |> map (\(p, s, _) -> (p, Set.singleton s)) + Map.elems m + |> map (\(p, s, _) -> (p, Set.singleton s)) |> Map.fromListWith Set.union render :: Map Prefix (Set Suffix) -> [Pretty SyntaxText] -> Pretty SyntaxText render m rest = @@ -1417,7 +1506,7 @@ immediateChildBlockTerms = \case -- Has shadowing, is rendered as a regular `match`. 
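-- Illustrative sketch, standalone and not from this patch: the `uniqueness` step of
-- calcImports above keeps a suffix only when a single prefix uses it in the current
-- scope. The same filter on its own; head is safe here because the filter guarantees
-- every remaining inner map is a singleton:
import Data.Map (Map)
import qualified Data.Map as Map

uniqueness' :: Map String (Map String Int) -> Map String (String, Int)
uniqueness' m =
  Map.map (head . Map.toList) (Map.filter (\ps -> Map.size ps == 1) m)

main :: IO ()
main =
  print . uniqueness' . Map.fromList $
    [ ("map", Map.fromList [("List", 3)]),
      ("size", Map.fromList [("List", 1), ("Map", 2)])
    ]
-- fromList [("map",("List",3))]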
-- match blah with 42 -> body -- Pattern has (is) a literal, rendered as a regular match (rather than `42 = blah; body`) -isDestructuringBind :: Ord v => ABT.Term f v a -> [MatchCase loc (ABT.Term f v a)] -> Bool +isDestructuringBind :: (Ord v) => ABT.Term f v a -> [MatchCase loc (ABT.Term f v a)] -> Bool isDestructuringBind scrutinee [MatchCase pat _ (ABT.AbsN' vs _)] = all (`Set.notMember` ABT.freeVars scrutinee) vs && not (hasLiteral pat) where @@ -1438,7 +1527,7 @@ isDestructuringBind scrutinee [MatchCase pat _ (ABT.AbsN' vs _)] = Pattern.Unbound _ -> False isDestructuringBind _ _ = False -isBlock :: Ord v => Term2 vt at ap v a -> Bool +isBlock :: (Ord v) => Term2 vt at ap v a -> Bool isBlock tm = case tm of If' {} -> True @@ -1448,7 +1537,7 @@ isBlock tm = _ -> False pattern LetBlock :: - Ord v => + (Ord v) => [(v, Term2 vt at ap v a)] -> Term2 vt at ap v a -> Term2 vt at ap v a @@ -1459,7 +1548,7 @@ pattern LetBlock bindings body <- (unLetBlock -> Just (bindings, body)) -- We preserve nesting when the inner block shadows definitions in the -- outer block. unLetBlock :: - Ord v => + (Ord v) => Term2 vt at ap v a -> Maybe ([(v, Term2 vt at ap v a)], Term2 vt at ap v a) unLetBlock t = rec t @@ -1486,7 +1575,7 @@ unLetBlock t = rec t _ -> Just (bindings, body) pattern LamsNamedMatch' :: - Var v => + (Var v) => [v] -> [([Pattern ap], Maybe (Term2 vt at ap v a), Term2 vt at ap v a)] -> Term2 vt at ap v a @@ -1524,7 +1613,7 @@ pattern LamsNamedMatch' vs branches <- (unLamsMatch' -> Just (vs, branches)) -- (For instance, `x -> match (x, 42) with ...` can't be written using -- lambda case) unLamsMatch' :: - Var v => + (Var v) => Term2 vt at ap v a -> Maybe ([v], [([Pattern ap], Maybe (Term2 vt at ap v a), Term2 vt at ap v a)]) unLamsMatch' t = case unLamsUntilDelay' t of @@ -1581,14 +1670,17 @@ toBytes _ = Nothing prettyDoc2 :: forall v m. 
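-- Illustrative sketch, standalone and not from this patch: isDestructuringBind above
-- refuses to render a single-case match as a bind when the pattern contains any
-- literal; those still print as a regular `match`. The body of hasLiteral is largely
-- elided by the hunk boundaries above, so this recursive version is only an
-- assumption about its shape, over a hypothetical simplified pattern type:
data SimplePattern
  = PVar String
  | PNat Int
  | PCtor String [SimplePattern]

hasLiteral' :: SimplePattern -> Bool
hasLiteral' p = case p of
  PVar _ -> False
  PNat _ -> True
  PCtor _ ps -> any hasLiteral' ps

main :: IO ()
main = print (hasLiteral' (PCtor "Pair" [PVar "x", PNat 42]))
-- True, so a pattern like (x, 42) would still render as a match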
- MonadPretty v m => + (MonadPretty v m) => AmbientContext -> Term3 v PrintAnnotation -> m (Maybe (Pretty SyntaxText)) prettyDoc2 ac tm = do ppe <- getPPE let brace p = - fmt S.DocDelimiter "{{" <> PP.softbreak <> p <> PP.softbreak + fmt S.DocDelimiter "{{" + <> PP.softbreak + <> p + <> PP.softbreak <> fmt S.DocDelimiter "}}" @@ -1770,7 +1862,7 @@ toDocVerbatim ppe (App' (Ref' r) (toDocWord ppe -> Just txt)) | nameEndsWith ppe ".docVerbatim" r = Just txt toDocVerbatim _ _ = Nothing -toDocEval :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) +toDocEval :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) toDocEval ppe (App' (Ref' r) (Delay' tm)) | nameEndsWith ppe ".docEval" r = Just tm | r == _oldDocEval = Just tm @@ -1785,17 +1877,17 @@ _oldDocEval, _oldDocEvalInline :: Reference _oldDocEval = Reference.unsafeFromText "#m2bmkdos2669tt46sh2gf6cmb4td5le8lcqnmsl9nfaqiv7s816q8bdtjdbt98tkk11ejlesepe7p7u8p0asu9758gdseffh0t78m2o" _oldDocEvalInline = Reference.unsafeFromText "#7pjlvdu42gmfvfntja265dmi08afk08l54kpsuu55l9hq4l32fco2jlrm8mf2jbn61esfsi972b6e66d9on4i5bkmfchjdare1v5npg" -toDocEvalInline :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) +toDocEvalInline :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) toDocEvalInline ppe (App' (Ref' r) (Delay' tm)) | nameEndsWith ppe ".docEvalInline" r = Just tm | r == _oldDocEvalInline = Just tm toDocEvalInline _ _ = Nothing -toDocExample, toDocExampleBlock :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) +toDocExample, toDocExampleBlock :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) toDocExample = toDocExample' ".docExample" toDocExampleBlock = toDocExample' ".docExampleBlock" -toDocExample' :: Ord v => Text -> PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) +toDocExample' :: (Ord v) => Text -> PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Term3 v PrintAnnotation) toDocExample' suffix ppe (Apps' (Ref' r) [Nat' n, l@(LamsNamed' vs tm)]) | nameEndsWith ppe suffix r, ABT.freeVars l == mempty, @@ -1811,7 +1903,7 @@ toDocTransclude ppe (App' (Ref' r) tm) | nameEndsWith ppe ".docTransclude" r = Just tm toDocTransclude _ _ = Nothing -toDocLink :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Either Reference Referent) +toDocLink :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Either Reference Referent) toDocLink ppe (App' (Ref' r) tm) | nameEndsWith ppe ".docLink" r = case tm of (toDocEmbedTermLink ppe -> Just tm) -> Just (Right tm) @@ -1839,7 +1931,7 @@ toDocParagraph ppe (App' (Ref' r) (List' tms)) | nameEndsWith ppe ".docParagraph" r = Just (toList tms) toDocParagraph _ _ = Nothing -toDocEmbedTermLink :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe Referent +toDocEmbedTermLink :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe Referent toDocEmbedTermLink ppe (App' (Ref' r) (Delay' (Referent' tm))) | nameEndsWith ppe ".docEmbedTermLink" r = Just tm toDocEmbedTermLink _ _ = Nothing @@ -1849,10 +1941,10 @@ toDocEmbedTypeLink ppe (App' (Ref' r) (TypeLink' typeref)) | nameEndsWith ppe ".docEmbedTypeLink" r = Just typeref toDocEmbedTypeLink _ _ = Nothing -toDocSourceAnnotations :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe [Referent] +toDocSourceAnnotations :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation 
-> Maybe [Referent] toDocSourceAnnotations _ppe _tm = Just [] -- todo fetch annotations -toDocSourceElement :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Either Reference Referent, [Referent]) +toDocSourceElement :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe (Either Reference Referent, [Referent]) toDocSourceElement ppe (Apps' (Ref' r) [tm, toDocSourceAnnotations ppe -> Just annotations]) | nameEndsWith ppe ".docSourceElement" r = (,annotations) <$> ok tm @@ -1863,7 +1955,7 @@ toDocSourceElement ppe (Apps' (Ref' r) [tm, toDocSourceAnnotations ppe -> Just a toDocSourceElement _ _ = Nothing toDocSource' :: - Ord v => + (Ord v) => Text -> PrettyPrintEnv -> Term3 v PrintAnnotation -> @@ -1877,19 +1969,19 @@ toDocSource' _ _ _ = Nothing toDocSource, toDocFoldedSource :: - Ord v => + (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe [(Either Reference Referent, [Referent])] toDocSource = toDocSource' ".docSource" toDocFoldedSource = toDocSource' ".docFoldedSource" -toDocSignatureInline :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe Referent +toDocSignatureInline :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe Referent toDocSignatureInline ppe (App' (Ref' r) (toDocEmbedSignatureLink ppe -> Just tm)) | nameEndsWith ppe ".docSignatureInline" r = Just tm toDocSignatureInline _ _ = Nothing -toDocEmbedSignatureLink :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe Referent +toDocEmbedSignatureLink :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe Referent toDocEmbedSignatureLink ppe (App' (Ref' r) (Delay' (Referent' tm))) | nameEndsWith ppe ".docEmbedSignatureLink" r = Just tm toDocEmbedSignatureLink _ _ = Nothing @@ -1907,7 +1999,7 @@ toDocEmbedSignatureLink _ _ = Nothing -- _ -> Nothing -- toDocEmbedAnnotations _ _ = Nothing -toDocSignature :: Ord v => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe [Referent] +toDocSignature :: (Ord v) => PrettyPrintEnv -> Term3 v PrintAnnotation -> Maybe [Referent] toDocSignature ppe (App' (Ref' r) (List' tms)) | nameEndsWith ppe ".docSignature" r = case [tm | Just tm <- toDocEmbedSignatureLink ppe <$> toList tms] of diff --git a/parser-typechecker/src/Unison/Syntax/TypeParser.hs b/parser-typechecker/src/Unison/Syntax/TypeParser.hs index 2f7e33522..8af9a2a3a 100644 --- a/parser-typechecker/src/Unison/Syntax/TypeParser.hs +++ b/parser-typechecker/src/Unison/Syntax/TypeParser.hs @@ -6,10 +6,10 @@ import qualified Text.Megaparsec as P import qualified Unison.Builtin.Decls as DD import qualified Unison.HashQualified as HQ import qualified Unison.NamesWithHistory as Names -import qualified Unison.Syntax.Name as Name (toVar) import Unison.Parser.Ann (Ann (..)) import Unison.Prelude import qualified Unison.Syntax.Lexer as L +import qualified Unison.Syntax.Name as Name (toVar) import Unison.Syntax.Parser import Unison.Type (Type) import qualified Unison.Type as Type @@ -22,20 +22,20 @@ type TypeP v = P v (Type v Ann) -- Value types cannot have effects, unless those effects appear to -- the right of a function arrow: -- valueType ::= Int | Text | App valueType valueType | Arrow valueType computationType -valueType :: Var v => TypeP v +valueType :: (Var v) => TypeP v valueType = forall type1 <|> type1 -- Computation -- computationType ::= [{effect*}] valueType -computationType :: Var v => TypeP v +computationType :: (Var v) => TypeP v computationType = effect <|> valueType -valueTypeLeaf :: Var v => TypeP v +valueTypeLeaf :: (Var v) => TypeP v valueTypeLeaf = 
tupleOrParenthesizedType valueType <|> typeAtom <|> sequenceTyp -- Examples: Optional, Optional#abc, woot, #abc -typeAtom :: Var v => TypeP v +typeAtom :: (Var v) => TypeP v typeAtom = hqPrefixId >>= \tok -> case L.payload tok of HQ.NameOnly n -> pure $ Type.var (ann tok) (Name.toVar n) @@ -46,13 +46,13 @@ typeAtom = then P.customFailure (UnknownType tok matches) else pure $ Type.ref (ann tok) (Set.findMin matches) -type1 :: Var v => TypeP v +type1 :: (Var v) => TypeP v type1 = arrow type2a -type2a :: Var v => TypeP v +type2a :: (Var v) => TypeP v type2a = delayed <|> type2 -delayed :: Var v => TypeP v +delayed :: (Var v) => TypeP v delayed = do q <- reserved "'" t <- effect <|> type2a @@ -62,27 +62,27 @@ delayed = do (DD.unitType (ann q)) t -type2 :: Var v => TypeP v +type2 :: (Var v) => TypeP v type2 = do hd <- valueTypeLeaf tl <- many (effectList <|> valueTypeLeaf) pure $ foldl' (\a b -> Type.app (ann a <> ann b) a b) hd tl -- ex : {State Text, IO} (List Int) -effect :: Var v => TypeP v +effect :: (Var v) => TypeP v effect = do es <- effectList t <- type2 pure (Type.effect1 (ann es <> ann t) es t) -effectList :: Var v => TypeP v +effectList :: (Var v) => TypeP v effectList = do open <- openBlockWith "{" es <- sepBy (reserved ",") valueType close <- closeBlock pure $ Type.effects (ann open <> ann close) es -sequenceTyp :: Var v => TypeP v +sequenceTyp :: (Var v) => TypeP v sequenceTyp = do open <- openBlockWith "[" t <- valueType @@ -90,7 +90,7 @@ sequenceTyp = do let a = ann open <> ann close pure $ Type.app a (Type.list a) t -tupleOrParenthesizedType :: Var v => TypeP v -> TypeP v +tupleOrParenthesizedType :: (Var v) => TypeP v -> TypeP v tupleOrParenthesizedType rec = tupleOrParenthesized rec DD.unitType pair where pair t1 t2 = @@ -98,7 +98,7 @@ tupleOrParenthesizedType rec = tupleOrParenthesized rec DD.unitType pair in Type.app a (Type.app (ann t1) (DD.pairType a) t1) t2 -- valueType ::= ... | Arrow valueType computationType -arrow :: Var v => TypeP v -> TypeP v +arrow :: (Var v) => TypeP v -> TypeP v arrow rec = let eff = mkArr <$> optional effectList mkArr Nothing a b = Type.arrow (ann a <> ann b) a b @@ -106,7 +106,7 @@ arrow rec = in chainr1 (effect <|> rec) (reserved "->" *> eff) -- "forall a b . List a -> List b -> Maybe Text" -forall :: Var v => TypeP v -> TypeP v +forall :: (Var v) => TypeP v -> TypeP v forall rec = do kw <- reserved "forall" <|> reserved "∀" vars <- fmap (fmap L.payload) . some $ prefixDefinitionName diff --git a/parser-typechecker/src/Unison/Syntax/TypePrinter.hs b/parser-typechecker/src/Unison/Syntax/TypePrinter.hs index 02ca5156f..5e7c3dcab 100644 --- a/parser-typechecker/src/Unison/Syntax/TypePrinter.hs +++ b/parser-typechecker/src/Unison/Syntax/TypePrinter.hs @@ -39,13 +39,13 @@ import qualified Unison.Var as Var type SyntaxText = S.SyntaxText' Reference -pretty :: Var v => PrettyPrintEnv -> Type v a -> Pretty ColorText +pretty :: (Var v) => PrettyPrintEnv -> Type v a -> Pretty ColorText pretty ppe t = PP.syntaxToColor $ prettySyntax ppe t -prettySyntax :: Var v => PrettyPrintEnv -> Type v a -> Pretty SyntaxText +prettySyntax :: (Var v) => PrettyPrintEnv -> Type v a -> Pretty SyntaxText prettySyntax ppe = runPretty ppe . pretty0 Map.empty (-1) -prettyStr :: Var v => Maybe Width -> PrettyPrintEnv -> Type v a -> String +prettyStr :: (Var v) => Maybe Width -> PrettyPrintEnv -> Type v a -> String prettyStr (Just width) ppe t = toPlain . PP.render width . PP.syntaxToColor . 
runPretty ppe $ pretty0 Map.empty (-1) t prettyStr Nothing ppe t = @@ -75,7 +75,7 @@ prettyStr Nothing ppe t = pretty0 :: forall v a m. - MonadPretty v m => + (MonadPretty v m) => Imports -> Int -> Type v a -> @@ -84,7 +84,7 @@ pretty0 im p tp = prettyRaw im p (cleanup (removePureEffects tp)) prettyRaw :: forall v a m. - MonadPretty v m => + (MonadPretty v m) => Imports -> Int -> Type v a -> @@ -133,7 +133,7 @@ prettyRaw im p tp = go im p tp case fst of Var' v | Var.name v == "()" -> - PP.parenthesizeIf (p >= 10) <$> arrows True True rest + PP.parenthesizeIf (p >= 10) <$> arrows True True rest _ -> PP.parenthesizeIf (p >= 0) <$> ((<>) <$> go im 0 fst <*> arrows False False rest) @@ -182,14 +182,14 @@ fmt = PP.withSyntax -- todo: provide sample output in comment prettySignaturesCT :: - Var v => + (Var v) => PrettyPrintEnv -> [(Referent, HashQualified Name, Type v a)] -> [Pretty ColorText] prettySignaturesCT ppe ts = map PP.syntaxToColor $ prettySignaturesST ppe ts prettySignaturesCTCollapsed :: - Var v => + (Var v) => PrettyPrintEnv -> [(Referent, HashQualified Name, Type v a)] -> Pretty ColorText @@ -199,7 +199,7 @@ prettySignaturesCTCollapsed ppe ts = $ prettySignaturesCT ppe ts prettySignaturesST :: - Var v => + (Var v) => PrettyPrintEnv -> [(Referent, HashQualified Name, Type v a)] -> [Pretty SyntaxText] @@ -215,7 +215,7 @@ prettySignaturesST ppe ts = -- todo: provide sample output in comment; different from prettySignatures' prettySignaturesAlt' :: - Var v => + (Var v) => PrettyPrintEnv -> [([HashQualified Name], Type v a)] -> [Pretty ColorText] @@ -224,7 +224,7 @@ prettySignaturesAlt' ppe ts = runPretty ppe $ ts' <- traverse f ts pure $ map PP.syntaxToColor $ PP.align ts' where - f :: MonadPretty v m => ([HashQualified Name], Type v a) -> m (Pretty SyntaxText, Pretty SyntaxText) + f :: (MonadPretty v m) => ([HashQualified Name], Type v a) -> m (Pretty SyntaxText, Pretty SyntaxText) f (names, typ) = do typ' <- pretty0 Map.empty (-1) typ let col = fmt S.TypeAscriptionColon ": " @@ -237,7 +237,7 @@ prettySignaturesAlt' ppe ts = runPretty ppe $ -- prettySignatures'' env ts = prettySignatures' env (first HQ.fromName <$> ts) prettySignaturesAlt :: - Var v => + (Var v) => PrettyPrintEnv -> [([HashQualified Name], Type v a)] -> Pretty ColorText diff --git a/parser-typechecker/src/Unison/Typechecker.hs b/parser-typechecker/src/Unison/Typechecker.hs index 9a0a0beda..712d30f2b 100644 --- a/parser-typechecker/src/Unison/Typechecker.hs +++ b/parser-typechecker/src/Unison/Typechecker.hs @@ -102,13 +102,13 @@ synthesize env t = (TypeVar.liftTerm t) in Result.hoist (pure . runIdentity) $ fmap TypeVar.lowerType result -isSubtype :: Var v => Type v loc -> Type v loc -> Bool +isSubtype :: (Var v) => Type v loc -> Type v loc -> Bool isSubtype t1 t2 = handleCompilerBug (Context.isSubtype (tvar $ void t1) (tvar $ void t2)) where tvar = TypeVar.liftType -handleCompilerBug :: Var v => Either (Context.CompilerBug v ()) a -> a +handleCompilerBug :: (Var v) => Either (Context.CompilerBug v ()) a -> a handleCompilerBug = \case Left bug -> error $ "compiler bug encountered: " ++ show bug Right b -> b @@ -129,12 +129,12 @@ handleCompilerBug = \case -- @ -- exists x. 
'{IO, Exception} x -- @ -fitsScheme :: Var v => Type v loc -> Type v loc -> Bool +fitsScheme :: (Var v) => Type v loc -> Type v loc -> Bool fitsScheme t1 t2 = handleCompilerBug (Context.fitsScheme (tvar $ void t1) (tvar $ void t2)) where tvar = TypeVar.liftType -isEqual :: Var v => Type v loc -> Type v loc -> Bool +isEqual :: (Var v) => Type v loc -> Type v loc -> Bool isEqual t1 t2 = isSubtype t1 t2 && isSubtype t2 t1 type TDNR f v loc a = @@ -166,10 +166,10 @@ typeError note = do tell $ Notes mempty [note] mempty Control.Monad.Fail.fail "" -btw :: Monad f => Context.InfoNote v loc -> ResultT (Notes v loc) f () +btw :: (Monad f) => Context.InfoNote v loc -> ResultT (Notes v loc) f () btw note = tell $ Notes mempty mempty [note] -liftResult :: Monad f => Result (Notes v loc) a -> TDNR f v loc a +liftResult :: (Monad f) => Result (Notes v loc) a -> TDNR f v loc a liftResult = lift . MaybeT . WriterT . pure . runIdentity . runResultT -- Resolve "solved blanks". If a solved blank's type and name matches the type diff --git a/parser-typechecker/src/Unison/Typechecker/Components.hs b/parser-typechecker/src/Unison/Typechecker/Components.hs index 9984b2510..ae1358e64 100644 --- a/parser-typechecker/src/Unison/Typechecker/Components.hs +++ b/parser-typechecker/src/Unison/Typechecker/Components.hs @@ -14,10 +14,10 @@ import Unison.Term (Term') import qualified Unison.Term as Term import Unison.Var (Var) -unordered :: Var v => [(v, Term' vt v a)] -> [[(v, Term' vt v a)]] +unordered :: (Var v) => [(v, Term' vt v a)] -> [[(v, Term' vt v a)]] unordered = ABT.components -ordered :: Var v => [(v, Term' vt v a)] -> [[(v, Term' vt v a)]] +ordered :: (Var v) => [(v, Term' vt v a)] -> [[(v, Term' vt v a)]] ordered = ABT.orderedComponents -- | Algorithm for minimizing cycles of a `let rec`. This can @@ -35,13 +35,14 @@ ordered = ABT.orderedComponents -- -- Fails on the left if there are duplicate definitions. minimize :: - Var v => + (Var v) => Term' vt v a -> Either (NonEmpty (v, [a])) (Maybe (Term' vt v a)) -minimize (Term.LetRecNamedAnnotatedTop' isTop ann bs e) = +minimize (Term.LetRecNamedAnnotatedTop' isTop blockAnn bs e) = let bindings = first snd <$> bs group = - map (fst . head &&& map (ABT.annotation . snd)) . groupBy ((==) `on` fst) + map (fst . head &&& map (ABT.annotation . snd)) + . groupBy ((==) `on` fst) . sortBy (compare `on` fst) grouped = group bindings @@ -70,23 +71,23 @@ minimize (Term.LetRecNamedAnnotatedTop' isTop ann bs e) = | Set.member hdv (ABT.freeVars hdb) = Term.letRec isTop - (annotationFor hdv) + blockAnn [(annotatedVar hdv, hdb)] e - | otherwise = Term.let1 isTop [(annotatedVar hdv, hdb)] e - mklet cycle@((hdv, _) : _) e = + | otherwise = Term.singleLet isTop blockAnn (hdv, hdb) e + mklet cycle@((_, _) : _) e = Term.letRec isTop - (annotationFor hdv) + blockAnn (first annotatedVar <$> cycle) e mklet [] e = e in -- The outer annotation is going to be meaningful, so we make -- sure to preserve it, whereas the annotations at intermediate Abs -- nodes aren't necessarily meaningful - Right . Just . ABT.annotate ann . foldr mklet e $ cs + Right . Just . ABT.annotate blockAnn . 
foldr mklet e $ cs minimize _ = Right Nothing minimize' :: - Var v => Term' vt v a -> Either (NonEmpty (v, [a])) (Term' vt v a) + (Var v) => Term' vt v a -> Either (NonEmpty (v, [a])) (Term' vt v a) minimize' term = fromMaybe term <$> minimize term diff --git a/parser-typechecker/src/Unison/Typechecker/Context.hs b/parser-typechecker/src/Unison/Typechecker/Context.hs index e916cf770..f3b49b6b3 100644 --- a/parser-typechecker/src/Unison/Typechecker/Context.hs +++ b/parser-typechecker/src/Unison/Typechecker/Context.hs @@ -42,10 +42,12 @@ where import Control.Lens (over, view, _3) import qualified Control.Monad.Fail as MonadFail +import Control.Monad.Fix (MonadFix (..)) import Control.Monad.State ( MonadState, StateT, evalState, + evalStateT, get, gets, put, @@ -59,6 +61,7 @@ import qualified Data.Foldable as Foldable import Data.Function (on) import Data.List import Data.List.NonEmpty (NonEmpty) +import qualified Data.List.NonEmpty as Nel import qualified Data.Map as Map import qualified Data.Sequence as Seq import Data.Sequence.NonEmpty (NESeq) @@ -81,9 +84,13 @@ import qualified Unison.DataDeclaration as DD import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Pattern (Pattern) import qualified Unison.Pattern as Pattern +import Unison.PatternMatchCoverage (checkMatch) +import Unison.PatternMatchCoverage.Class (EnumeratedConstructors (..), Pmc (..), traverseConstructors) +import qualified Unison.PatternMatchCoverage.ListPat as ListPat import Unison.Prelude import qualified Unison.PrettyPrintEnv as PPE import Unison.Reference (Reference) +import qualified Unison.Reference as Reference import Unison.Referent (Referent) import qualified Unison.Syntax.TypePrinter as TP import qualified Unison.Term as Term @@ -173,6 +180,14 @@ instance Monad (Result v loc) where CompilerBug bug es is >>= _ = CompilerBug bug es is {-# INLINE (>>=) #-} +instance MonadFix (Result v loc) where + mfix f = + let res = f theA + theA = case res of + Success _ a -> a + _ -> error "mfix Result: forced an unsuccessful value" + in res + btw' :: InfoNote v loc -> Result v loc () btw' note = Success (Seq.singleton note) () @@ -374,6 +389,9 @@ data Cause v loc | ConcatPatternWithoutConstantLength loc (Type v loc) | HandlerOfUnexpectedType loc (Type v loc) | DataEffectMismatch Unknown Reference (DataDeclaration v loc) + | UncoveredPatterns loc (NonEmpty (Pattern ())) + | RedundantPattern loc + | InaccessiblePattern loc deriving (Show) errorTerms :: ErrorNote v loc -> [Term v loc] @@ -765,6 +783,26 @@ getEffectDeclaration r = do getDataConstructorType :: (Var v, Ord loc) => ConstructorReference -> M v loc (Type v loc) getDataConstructorType = getConstructorType' Data getDataDeclaration +getDataConstructors :: (Var v) => Type v loc -> M v loc (EnumeratedConstructors (TypeVar v loc) v loc) +getDataConstructors typ + | Type.Ref' r <- typ, r == Type.booleanRef = pure BooleanType + | Type.App' (Type.Ref' r) arg <- typ, + r == Type.listRef = + let xs = + [ (ListPat.Cons, [arg]), + (ListPat.Nil, []) + ] + in pure (SequenceType xs) + | Just r <- theRef = do + decl <- getDataDeclaration r + pure $ ConstructorType [(v, ConstructorReference r i, ABT.vmap TypeVar.Universal t) | (i, (v, t)) <- zip [0 ..] 
(DD.constructors decl)] + | otherwise = pure OtherType + where + theRef = case typ of + Type.Apps' (Type.Ref' r@Reference.DerivedId {}) _targs -> Just r + Type.Ref' r@Reference.DerivedId {} -> Just r + _ -> Nothing + getEffectConstructorType :: (Var v, Ord loc) => ConstructorReference -> M v loc (Type v loc) getEffectConstructorType = getConstructorType' Effect go where @@ -1213,6 +1251,7 @@ synthesizeWanted e let outputType = existential' l B.Blank outputTypev appendContext [existential outputTypev] cwant <- checkCases scrutineeType outputType cases + ensurePatternCoverage e scrutinee scrutineeType cases want <- coalesceWanted cwant swant ctx <- getContext pure $ (apply ctx outputType, want) @@ -1220,6 +1259,61 @@ synthesizeWanted e l = loc e synthesizeWanted _e = compilerCrash PatternMatchFailure +getDataConstructorsAtType :: (Ord loc, Var v) => Type v loc -> M v loc (EnumeratedConstructors (TypeVar v loc) v loc) +getDataConstructorsAtType t0 = do + dataConstructors <- getDataConstructors t0 + res <- traverseConstructors (\v cr t -> (v,cr,) <$> fixType t) dataConstructors + pure res + where + fixType t = do + t <- ungeneralize t + let lastT = case t of + Type.Arrows' xs -> last xs + _ -> t + equate t0 lastT + applyM t + +instance (Ord loc, Var v) => Pmc (TypeVar v loc) v loc (StateT (Set v) (M v loc)) where + getConstructors = lift . getDataConstructorsAtType + getConstructorVarTypes t cref@(ConstructorReference _r cid) = do + getConstructors t >>= \case + ConstructorType cs -> case drop (fromIntegral cid) cs of + [] -> error $ show cref <> " not found in constructor list: " <> show cs + (_, _, consArgs) : _ -> case consArgs of + Type.Arrows' xs -> pure (init xs) + _ -> pure [] + BooleanType -> pure [] + OtherType -> pure [] + SequenceType {} -> pure [] + fresh = do + vs <- get + let v = Var.freshIn vs (Var.typed Var.Pattern) + put (Set.insert v vs) + pure v + +ensurePatternCoverage :: + forall v loc. 
+ (Ord loc, Var v) => + Term v loc -> + Term v loc -> + Type v loc -> + [Term.MatchCase loc (Term v loc)] -> + MT v loc (Result v loc) () +ensurePatternCoverage wholeMatch _scrutinee scrutineeType cases = do + let matchLoc = ABT.annotation wholeMatch + scrutineeType <- applyM scrutineeType + case scrutineeType of + -- Don't check coverage on ability handlers yet + Type.Apps' (Type.Ref' r) _args | r == Type.effectRef -> pure () + _ -> do + (redundant, _inaccessible, uncovered) <- flip evalStateT (ABT.freeVars wholeMatch) do + checkMatch matchLoc scrutineeType cases + let checkUncovered = case Nel.nonEmpty uncovered of + Nothing -> pure () + Just xs -> failWith (UncoveredPatterns matchLoc xs) + checkRedundant = foldr (\a b -> failWith (RedundantPattern a) *> b) (pure ()) redundant + checkUncovered *> checkRedundant + checkCases :: (Var v) => (Ord loc) => @@ -3054,3 +3148,8 @@ instance (Monad f) => Applicative (MT v loc f) where instance (Monad f) => MonadState (Env v loc) (MT v loc f) where get = MT \_ _ env -> pure (env, env) put env = MT \_ _ _ -> pure ((), env) + +instance (MonadFix f) => MonadFix (MT v loc f) where + mfix f = MT \a b c -> + let res = mfix (\ ~(wubble, _finalenv) -> runM (f wubble) a b c) + in res diff --git a/parser-typechecker/src/Unison/Typechecker/Extractor.hs b/parser-typechecker/src/Unison/Typechecker/Extractor.hs index d6e9a7046..8f98e16aa 100644 --- a/parser-typechecker/src/Unison/Typechecker/Extractor.hs +++ b/parser-typechecker/src/Unison/Typechecker/Extractor.hs @@ -6,6 +6,7 @@ import Data.List.NonEmpty (NonEmpty) import qualified Data.Set as Set import qualified Unison.Blank as B import Unison.ConstructorReference (ConstructorReference) +import Unison.Pattern (Pattern) import Unison.Prelude hiding (whenM) import qualified Unison.Term as Term import Unison.Type (Type) @@ -36,13 +37,13 @@ extract = runReader . 
runMaybeT subseqExtractor :: (C.ErrorNote v loc -> [Ranged a]) -> SubseqExtractor v loc a subseqExtractor f = SubseqExtractor' f -traceSubseq :: Show a => String -> SubseqExtractor' n a -> SubseqExtractor' n a +traceSubseq :: (Show a) => String -> SubseqExtractor' n a -> SubseqExtractor' n a traceSubseq s ex = SubseqExtractor' $ \n -> let rs = runSubseq ex n in trace (if null s then show rs else s ++ ": " ++ show rs) rs traceNote :: - Show a => String -> ErrorExtractor v loc a -> ErrorExtractor v loc a + (Show a) => String -> ErrorExtractor v loc a -> ErrorExtractor v loc a traceNote s ex = extractor $ \n -> let result = extract ex n in trace (if null s then show result else s ++ ": " ++ show result) result @@ -242,6 +243,18 @@ duplicateDefinitions = C.DuplicateDefinitions vs -> pure vs _ -> mzero +uncoveredPatterns :: ErrorExtractor v loc (loc, NonEmpty (Pattern ())) +uncoveredPatterns = + cause >>= \case + C.UncoveredPatterns matchLoc xs -> pure (matchLoc, xs) + _ -> empty + +redundantPattern :: ErrorExtractor v loc loc +redundantPattern = + cause >>= \case + C.RedundantPattern patternLoc -> pure patternLoc + _ -> empty + typeMismatch :: ErrorExtractor v loc (C.Context v loc) typeMismatch = cause >>= \case @@ -260,7 +273,7 @@ unknownSymbol = C.UnknownSymbol loc v -> pure (loc, v) _ -> mzero -unknownTerm :: Var v => ErrorExtractor v loc (loc, v, [C.Suggestion v loc], C.Type v loc) +unknownTerm :: (Var v) => ErrorExtractor v loc (loc, v, [C.Suggestion v loc], C.Type v loc) unknownTerm = cause >>= \case C.UnknownTerm loc v suggestions expectedType -> do diff --git a/parser-typechecker/src/Unison/Typechecker/TypeError.hs b/parser-typechecker/src/Unison/Typechecker/TypeError.hs index 9cfceff0c..4670ead83 100644 --- a/parser-typechecker/src/Unison/Typechecker/TypeError.hs +++ b/parser-typechecker/src/Unison/Typechecker/TypeError.hs @@ -5,6 +5,7 @@ module Unison.Typechecker.TypeError where import Data.Bifunctor (second) import Data.List.NonEmpty (NonEmpty) import qualified Unison.ABT as ABT +import Unison.Pattern (Pattern) import Unison.Prelude hiding (whenM) import Unison.Type (Type) import qualified Unison.Type as Type @@ -103,6 +104,8 @@ data TypeError v loc { defns :: NonEmpty (v, [loc]), note :: C.ErrorNote v loc } + | UncoveredPatterns loc (NonEmpty (Pattern ())) + | RedundantPattern loc | Other (C.ErrorNote v loc) deriving (Show) @@ -145,7 +148,9 @@ allErrors = unguardedCycle, unknownType, unknownTerm, - duplicateDefinitions + duplicateDefinitions, + redundantPattern, + uncoveredPatterns ] topLevelComponent :: Ex.InfoExtractor v a (TypeInfo v a) @@ -153,6 +158,16 @@ topLevelComponent = do defs <- Ex.topLevelComponent pure $ TopLevelComponent defs +redundantPattern :: Ex.ErrorExtractor v a (TypeError v a) +redundantPattern = do + ploc <- Ex.redundantPattern + pure (RedundantPattern ploc) + +uncoveredPatterns :: Ex.ErrorExtractor v a (TypeError v a) +uncoveredPatterns = do + (mloc, uncoveredCases) <- Ex.uncoveredPatterns + pure (UncoveredPatterns mloc uncoveredCases) + abilityCheckFailure :: Ex.ErrorExtractor v a (TypeError v a) abilityCheckFailure = do (ambient, requested, _ctx) <- Ex.abilityCheckFailure @@ -189,7 +204,7 @@ unknownType = do n <- Ex.errorNote pure $ UnknownType v loc n -unknownTerm :: Var v => Ex.ErrorExtractor v loc (TypeError v loc) +unknownTerm :: (Var v) => Ex.ErrorExtractor v loc (TypeError v loc) unknownTerm = do (loc, v, suggs, typ) <- Ex.unknownTerm n <- Ex.errorNote @@ -295,7 +310,7 @@ ifBody = existentialMismatch0 IfBody (Ex.inSynthesizeApp >> Ex.inIfBody) 
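-- (Illustrative sketch, not part of this diff.) The changes above add UncoveredPatterns and
-- RedundantPattern causes, plus extractors in Unison.Typechecker.Extractor written in the
-- existing `cause >>= \case` style. The Cause type also gains an InaccessiblePattern
-- constructor; an extractor for it, should one be needed, would presumably follow the same
-- shape (hypothetical, written by analogy with redundantPattern above):

inaccessiblePattern :: ErrorExtractor v loc loc
inaccessiblePattern =
  cause >>= \case
    C.InaccessiblePattern patternLoc -> pure patternLoc
    _ -> empty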
listBody = existentialMismatch0 ListBody (Ex.inSynthesizeApp >> Ex.inVector) matchBody = existentialMismatch0 CaseBody (Ex.inMatchBody >> Ex.inMatch) -applyingNonFunction :: Var v => Ex.ErrorExtractor v loc (TypeError v loc) +applyingNonFunction :: (Var v) => Ex.ErrorExtractor v loc (TypeError v loc) applyingNonFunction = do _ <- Ex.typeMismatch n <- Ex.errorNote diff --git a/parser-typechecker/src/Unison/Typechecker/TypeVar.hs b/parser-typechecker/src/Unison/Typechecker/TypeVar.hs index 6ddd6ff1b..8a76f88a6 100644 --- a/parser-typechecker/src/Unison/Typechecker/TypeVar.hs +++ b/parser-typechecker/src/Unison/Typechecker/TypeVar.hs @@ -12,12 +12,12 @@ import qualified Unison.Var as Var data TypeVar b v = Universal v | Existential b v deriving (Functor) -instance Eq v => Eq (TypeVar b v) where +instance (Eq v) => Eq (TypeVar b v) where Universal v == Universal v2 = v == v2 Existential _ v == Existential _ v2 = v == v2 _ == _ = False -instance Ord v => Ord (TypeVar b v) where +instance (Ord v) => Ord (TypeVar b v) where Universal v `compare` Universal v2 = compare v v2 Existential _ v `compare` Existential _ v2 = compare v v2 Universal _ `compare` Existential _ _ = LT @@ -27,27 +27,27 @@ underlying :: TypeVar b v -> v underlying (Universal v) = v underlying (Existential _ v) = v -instance Show v => Show (TypeVar b v) where +instance (Show v) => Show (TypeVar b v) where show (Universal v) = show v show (Existential _ v) = "'" ++ show v -instance ABT.Var v => ABT.Var (TypeVar b v) where +instance (ABT.Var v) => ABT.Var (TypeVar b v) where freshIn s v = ABT.freshIn (Set.map underlying s) <$> v -instance Var v => Var (TypeVar b v) where +instance (Var v) => Var (TypeVar b v) where typed t = Universal (Var.typed t) typeOf v = Var.typeOf (underlying v) freshId v = Var.freshId (underlying v) freshenId id v = Var.freshenId id <$> v -liftType :: Ord v => Type v a -> Type (TypeVar b v) a +liftType :: (Ord v) => Type v a -> Type (TypeVar b v) a liftType = ABT.vmap Universal -lowerType :: Ord v => Type (TypeVar b v) a -> Type v a +lowerType :: (Ord v) => Type (TypeVar b v) a -> Type v a lowerType = ABT.vmap underlying -liftTerm :: Ord v => Term v a -> Term' (TypeVar b v) v a +liftTerm :: (Ord v) => Term v a -> Term' (TypeVar b v) v a liftTerm = Term.vtmap Universal -lowerTerm :: Ord v => Term' (TypeVar b v) v a -> Term v a +lowerTerm :: (Ord v) => Term' (TypeVar b v) v a -> Term v a lowerTerm = Term.vtmap underlying diff --git a/parser-typechecker/src/Unison/UnisonFile/Names.hs b/parser-typechecker/src/Unison/UnisonFile/Names.hs index 7fc567269..7e313dcf5 100644 --- a/parser-typechecker/src/Unison/UnisonFile/Names.hs +++ b/parser-typechecker/src/Unison/UnisonFile/Names.hs @@ -24,13 +24,13 @@ import qualified Unison.Util.Relation as Relation import Unison.Var (Var) import qualified Unison.WatchKind as WK -toNames :: Var v => UnisonFile v a -> Names +toNames :: (Var v) => UnisonFile v a -> Names toNames uf = datas <> effects where datas = foldMap (DD.Names.dataDeclToNames' Name.unsafeFromVar) (Map.toList (UF.dataDeclarationsId uf)) effects = foldMap (DD.Names.effectDeclToNames' Name.unsafeFromVar) (Map.toList (UF.effectDeclarationsId uf)) -typecheckedToNames :: Var v => TypecheckedUnisonFile v a -> Names +typecheckedToNames :: (Var v) => TypecheckedUnisonFile v a -> Names typecheckedToNames uf = Names (terms <> ctors) types where terms = @@ -54,7 +54,7 @@ typecheckedToNames uf = Names (terms <> ctors) types . 
UF.hashConstructors $ uf -typecheckedUnisonFile0 :: Ord v => TypecheckedUnisonFile v a +typecheckedUnisonFile0 :: (Ord v) => TypecheckedUnisonFile v a typecheckedUnisonFile0 = TypecheckedUnisonFileId Map.empty Map.empty mempty mempty mempty -- Substitutes free type and term variables occurring in the terms of this @@ -66,7 +66,7 @@ typecheckedUnisonFile0 = TypecheckedUnisonFileId Map.empty Map.empty mempty memp -- we are done parsing, whereas `math.sqrt#abc` can be resolved immediately -- as it can't refer to a local definition. bindNames :: - Var v => + (Var v) => Names -> UnisonFile v a -> Names.ResolutionResult v a (UnisonFile v a) @@ -89,7 +89,7 @@ bindNames names (UnisonFileId d e ts ws) = do -- left. environmentFor :: forall v a. - Var v => + (Var v) => Names -> Map v (DataDeclaration v a) -> Map v (EffectDeclaration v a) -> diff --git a/parser-typechecker/src/Unison/UnisonFile/Type.hs b/parser-typechecker/src/Unison/UnisonFile/Type.hs index 42b86ca12..ad8af9307 100644 --- a/parser-typechecker/src/Unison/UnisonFile/Type.hs +++ b/parser-typechecker/src/Unison/UnisonFile/Type.hs @@ -73,7 +73,7 @@ pattern TypecheckedUnisonFile ds es tlcs wcs hts <- wcs (fmap (over _2 Reference.DerivedId) -> hts) -instance Ord v => Functor (TypecheckedUnisonFile v) where +instance (Ord v) => Functor (TypecheckedUnisonFile v) where fmap f (TypecheckedUnisonFileId ds es tlcs wcs hashTerms) = TypecheckedUnisonFileId ds' es' tlcs' wcs' hashTerms' where diff --git a/parser-typechecker/src/Unison/Util/CyclicEq.hs b/parser-typechecker/src/Unison/Util/CyclicEq.hs index b6dfd4451..71cf456bb 100644 --- a/parser-typechecker/src/Unison/Util/CyclicEq.hs +++ b/parser-typechecker/src/Unison/Util/CyclicEq.hs @@ -52,18 +52,18 @@ bothEq h1 h2 a1 a2 b1 b2 = then cyclicEq h1 h2 b1 b2 else pure False -instance CyclicEq a => CyclicEq [a] where +instance (CyclicEq a) => CyclicEq [a] where cyclicEq h1 h2 (x : xs) (y : ys) = bothEq h1 h2 x y xs ys cyclicEq _ _ [] [] = pure True cyclicEq _ _ _ _ = pure False -instance CyclicEq a => CyclicEq (S.Seq a) where +instance (CyclicEq a) => CyclicEq (S.Seq a) where cyclicEq h1 h2 xs ys = if S.length xs == S.length ys then cyclicEq h1 h2 (toList xs) (toList ys) else pure False -instance CyclicEq a => CyclicEq (Vector a) where +instance (CyclicEq a) => CyclicEq (Vector a) where cyclicEq h1 h2 xs ys = if V.length xs /= V.length ys then pure False diff --git a/parser-typechecker/src/Unison/Util/CyclicOrd.hs b/parser-typechecker/src/Unison/Util/CyclicOrd.hs index 85f151bb2..daab2527f 100644 --- a/parser-typechecker/src/Unison/Util/CyclicOrd.hs +++ b/parser-typechecker/src/Unison/Util/CyclicOrd.hs @@ -45,16 +45,16 @@ bothOrd h1 h2 a1 a2 b1 b2 = then cyclicOrd h1 h2 b1 b2 else pure b -instance CyclicOrd a => CyclicOrd [a] where +instance (CyclicOrd a) => CyclicOrd [a] where cyclicOrd h1 h2 (x : xs) (y : ys) = bothOrd h1 h2 x y xs ys cyclicOrd _ _ [] [] = pure EQ cyclicOrd _ _ [] _ = pure LT cyclicOrd _ _ _ [] = pure GT -instance CyclicOrd a => CyclicOrd (S.Seq a) where +instance (CyclicOrd a) => CyclicOrd (S.Seq a) where cyclicOrd h1 h2 xs ys = cyclicOrd h1 h2 (toList xs) (toList ys) -instance CyclicOrd a => CyclicOrd (Vector a) where +instance (CyclicOrd a) => CyclicOrd (Vector a) where cyclicOrd h1 h2 xs ys = go 0 h1 h2 xs ys where go !i !h1 !h2 !xs !ys = diff --git a/parser-typechecker/src/Unison/Util/EnumContainers.hs b/parser-typechecker/src/Unison/Util/EnumContainers.hs index 51def1b04..a5f7d7017 100644 --- a/parser-typechecker/src/Unison/Util/EnumContainers.hs +++ 
b/parser-typechecker/src/Unison/Util/EnumContainers.hs @@ -9,6 +9,7 @@ module Unison.Util.EnumContainers setSingleton, mapInsert, unionWith, + intersectionWith, hasKey, keys, keysSet, @@ -22,7 +23,9 @@ module Unison.Util.EnumContainers mapToList, (!), findMin, + interverse, traverseSet_, + traverseWithKey, setSize, ) where @@ -70,76 +73,99 @@ newtype EnumSet k = ES IS.IntSet Semigroup ) -mapFromList :: EnumKey k => [(k, a)] -> EnumMap k a +mapFromList :: (EnumKey k) => [(k, a)] -> EnumMap k a mapFromList = EM . IM.fromList . fmap (first keyToInt) -setFromList :: EnumKey k => [k] -> EnumSet k +setFromList :: (EnumKey k) => [k] -> EnumSet k setFromList = ES . IS.fromList . fmap keyToInt -setToList :: EnumKey k => EnumSet k -> [k] +setToList :: (EnumKey k) => EnumSet k -> [k] setToList (ES s) = intToKey <$> IS.toList s -mapSingleton :: EnumKey k => k -> a -> EnumMap k a +mapSingleton :: (EnumKey k) => k -> a -> EnumMap k a mapSingleton e a = EM $ IM.singleton (keyToInt e) a -setSingleton :: EnumKey k => k -> EnumSet k +setSingleton :: (EnumKey k) => k -> EnumSet k setSingleton e = ES . IS.singleton $ keyToInt e -mapInsert :: EnumKey k => k -> a -> EnumMap k a -> EnumMap k a +mapInsert :: (EnumKey k) => k -> a -> EnumMap k a -> EnumMap k a mapInsert e x (EM m) = EM $ IM.insert (keyToInt e) x m unionWith :: - EnumKey k => - EnumKey k => + (EnumKey k) => (a -> a -> a) -> EnumMap k a -> EnumMap k a -> EnumMap k a unionWith f (EM l) (EM r) = EM $ IM.unionWith f l r -keys :: EnumKey k => EnumMap k a -> [k] +intersectionWith :: + (a -> b -> c) -> + EnumMap k a -> + EnumMap k b -> + EnumMap k c +intersectionWith f (EM l) (EM r) = EM $ IM.intersectionWith f l r + +keys :: (EnumKey k) => EnumMap k a -> [k] keys (EM m) = fmap intToKey . IM.keys $ m -keysSet :: EnumKey k => EnumMap k a -> EnumSet k +keysSet :: (EnumKey k) => EnumMap k a -> EnumSet k keysSet (EM m) = ES (IM.keysSet m) -restrictKeys :: EnumKey k => EnumMap k a -> EnumSet k -> EnumMap k a +restrictKeys :: (EnumKey k) => EnumMap k a -> EnumSet k -> EnumMap k a restrictKeys (EM m) (ES s) = EM $ IM.restrictKeys m s -withoutKeys :: EnumKey k => EnumMap k a -> EnumSet k -> EnumMap k a +withoutKeys :: (EnumKey k) => EnumMap k a -> EnumSet k -> EnumMap k a withoutKeys (EM m) (ES s) = EM $ IM.withoutKeys m s -member :: EnumKey k => k -> EnumSet k -> Bool +member :: (EnumKey k) => k -> EnumSet k -> Bool member e (ES s) = IS.member (keyToInt e) s -hasKey :: EnumKey k => k -> EnumMap k a -> Bool +hasKey :: (EnumKey k) => k -> EnumMap k a -> Bool hasKey k (EM m) = IM.member (keyToInt k) m -lookup :: EnumKey k => k -> EnumMap k a -> Maybe a +lookup :: (EnumKey k) => k -> EnumMap k a -> Maybe a lookup e (EM m) = IM.lookup (keyToInt e) m -lookupWithDefault :: EnumKey k => a -> k -> EnumMap k a -> a +lookupWithDefault :: (EnumKey k) => a -> k -> EnumMap k a -> a lookupWithDefault d e (EM m) = IM.findWithDefault d (keyToInt e) m -mapWithKey :: EnumKey k => (k -> a -> b) -> EnumMap k a -> EnumMap k b +mapWithKey :: (EnumKey k) => (k -> a -> b) -> EnumMap k a -> EnumMap k b mapWithKey f (EM m) = EM $ IM.mapWithKey (f . intToKey) m -foldMapWithKey :: EnumKey k => Monoid m => (k -> a -> m) -> EnumMap k a -> m +foldMapWithKey :: (EnumKey k) => (Monoid m) => (k -> a -> m) -> EnumMap k a -> m foldMapWithKey f (EM m) = IM.foldMapWithKey (f . intToKey) m -mapToList :: EnumKey k => EnumMap k a -> [(k, a)] +mapToList :: (EnumKey k) => EnumMap k a -> [(k, a)] mapToList (EM m) = first intToKey <$> IM.toList m -(!) :: EnumKey k => EnumMap k a -> k -> a +(!) 
:: (EnumKey k) => EnumMap k a -> k -> a (!) (EM m) e = m IM.! keyToInt e -findMin :: EnumKey k => EnumSet k -> k +findMin :: (EnumKey k) => EnumSet k -> k findMin (ES s) = intToKey $ IS.findMin s traverseSet_ :: - Applicative f => EnumKey k => (k -> f ()) -> EnumSet k -> f () + (Applicative f) => (EnumKey k) => (k -> f ()) -> EnumSet k -> f () traverseSet_ f (ES s) = IS.foldr (\i r -> f (intToKey i) *> r) (pure ()) s +interverse :: + (Applicative f) => + (a -> b -> f c) -> + EnumMap k a -> + EnumMap k b -> + f (EnumMap k c) +interverse f (EM l) (EM r) = + fmap EM . traverse id $ IM.intersectionWith f l r + +traverseWithKey :: + (Applicative f) => + (EnumKey k) => + (k -> a -> f b) -> + EnumMap k a -> + f (EnumMap k b) +traverseWithKey f (EM m) = EM <$> IM.traverseWithKey (f . intToKey) m + setSize :: EnumSet k -> Int setSize (ES s) = IS.size s diff --git a/parser-typechecker/src/Unison/Util/Exception.hs b/parser-typechecker/src/Unison/Util/Exception.hs index b23746c18..a2a394d21 100644 --- a/parser-typechecker/src/Unison/Util/Exception.hs +++ b/parser-typechecker/src/Unison/Util/Exception.hs @@ -7,7 +7,7 @@ import Unison.Prelude -- License is MIT: https://github.com/snoyberg/classy-prelude/blob/ccd19f2c62882c69d5dcdd3da5c0df1031334c5a/classy-prelude/LICENSE -- Catch all exceptions except asynchronous exceptions. -tryAny :: MonadIO m => IO a -> m (Either SomeException a) +tryAny :: (MonadIO m) => IO a -> m (Either SomeException a) tryAny action = liftIO $ withAsync action waitCatch -- Catch all exceptions except asynchronous exceptions. diff --git a/parser-typechecker/src/Unison/Util/PinBoard.hs b/parser-typechecker/src/Unison/Util/PinBoard.hs index f7482f94a..36056d1db 100644 --- a/parser-typechecker/src/Unison/Util/PinBoard.hs +++ b/parser-typechecker/src/Unison/Util/PinBoard.hs @@ -57,7 +57,7 @@ import Unison.Prelude newtype PinBoard a = PinBoard (MVar (IntMap (Bucket a))) -new :: MonadIO m => m (PinBoard a) +new :: (MonadIO m) => m (PinBoard a) new = liftIO (PinBoard <$> newMVar IntMap.empty) @@ -90,7 +90,7 @@ pin (PinBoard boardVar) x = liftIO do n = hash x -debugDump :: MonadIO m => (a -> Text) -> PinBoard a -> m () +debugDump :: (MonadIO m) => (a -> Text) -> PinBoard a -> m () debugDump f (PinBoard boardVar) = liftIO do board <- readMVar boardVar contents <- (traverse . traverse) bucketToList (IntMap.toList board) @@ -129,7 +129,7 @@ bucketCompact (Bucket weaks) = bucketFromList <$> mapMaybeM (\w -> (w <$) <$> deRefWeak w) weaks -- | Look up a value in a bucket per its Eq instance. -bucketFind :: Eq a => Bucket a -> a -> IO (Maybe a) +bucketFind :: (Eq a) => Bucket a -> a -> IO (Maybe a) bucketFind bucket x = find (== x) <$> bucketToList bucket diff --git a/parser-typechecker/src/Unison/Util/Pretty/MegaParsec.hs b/parser-typechecker/src/Unison/Util/Pretty/MegaParsec.hs index 303c80f91..0e54f8363 100644 --- a/parser-typechecker/src/Unison/Util/Pretty/MegaParsec.hs +++ b/parser-typechecker/src/Unison/Util/Pretty/MegaParsec.hs @@ -42,10 +42,11 @@ prettyPrintParseError input errBundle = message = [expected] <> catMaybes [found] in P.oxfordCommasWith "." 
message
-showErrorFancy :: Parser.ShowErrorComponent e => Parser.ErrorFancy e -> String
+showErrorFancy :: (Parser.ShowErrorComponent e) => Parser.ErrorFancy e -> String
showErrorFancy (Parser.ErrorFail msg) = msg
showErrorFancy (Parser.ErrorIndentation ord ref actual) =
- "incorrect indentation (got " <> show (Parser.unPos actual)
+ "incorrect indentation (got "
+ <> show (Parser.unPos actual)
<> ", should be "
<> p
<> show (Parser.unPos ref)
diff --git a/parser-typechecker/src/Unison/Util/RefPromise.hs b/parser-typechecker/src/Unison/Util/RefPromise.hs
new file mode 100644
index 000000000..40d959ca8
--- /dev/null
+++ b/parser-typechecker/src/Unison/Util/RefPromise.hs
@@ -0,0 +1,36 @@
+module Unison.Util.RefPromise
+ ( Ticket,
+ peekTicket,
+ readForCAS,
+ casIORef,
+ Promise,
+ newPromise,
+ readPromise,
+ tryReadPromise,
+ writePromise,
+ )
+where
+
+import Control.Concurrent.MVar (MVar, newEmptyMVar, readMVar, tryPutMVar, tryReadMVar)
+import Data.Atomics (Ticket, casIORef, peekTicket, readForCAS)
+
+newtype Promise a = Promise {state :: MVar a}
+
+-- create an empty promise
+newPromise :: IO (Promise a)
+newPromise = fmap Promise newEmptyMVar
+
+-- read the value of the promise
+-- return immediately if the promise is full, block if empty
+readPromise :: Promise a -> IO a
+readPromise Promise {state} = readMVar state
+
+-- try to read the value of the promise
+-- immediately return Nothing if the promise is empty
+tryReadPromise :: Promise a -> IO (Maybe a)
+tryReadPromise Promise {state} = tryReadMVar state
+
+-- if the promise is empty, write the value, awake all readers and return True
+-- if full, ignore the write and return False
+writePromise :: Promise a -> a -> IO Bool
+writePromise Promise {state} value = tryPutMVar state value
diff --git a/parser-typechecker/src/Unison/Util/Star3.hs b/parser-typechecker/src/Unison/Util/Star3.hs
index 29d79e764..61a994ffd 100644
--- a/parser-typechecker/src/Unison/Util/Star3.hs
+++ b/parser-typechecker/src/Unison/Util/Star3.hs
@@ -201,7 +201,7 @@ deleteD2 (f, x) s = garbageCollect f (Star3 (fact s) (d1 s) d2' (d3 s))
-- | Given a possibly-invalid Star3, which may contain the given fact in its fact set that are not related to any d1,
-- d2, or d3, return a valid Star3, with this fact possibly removed.
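-- (Illustrative sketch, not part of this diff.) A minimal usage example for the Promise API
-- introduced above in the new Unison.Util.RefPromise module, assuming only the functions it
-- exports plus ordinary Control.Concurrent primitives; promiseDemo is a hypothetical name.

import Control.Concurrent (forkIO, threadDelay)
import Unison.Util.RefPromise (newPromise, readPromise, writePromise)

promiseDemo :: IO ()
promiseDemo = do
  p <- newPromise
  _ <- forkIO $ do
    threadDelay 10000                 -- pretend to do some work first
    _ <- writePromise p (42 :: Int)   -- the first write fills the promise (returns True)
    pure ()
  v <- readPromise p                  -- blocks until the promise is filled
  print v                             -- 42
  accepted <- writePromise p 43       -- the promise is already full, so the write is ignored
  print accepted                      -- False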
-garbageCollect :: Ord fact => fact -> Star3 fact d1 d2 d3 -> Star3 fact d1 d2 d3 +garbageCollect :: (Ord fact) => fact -> Star3 fact d1 d2 d3 -> Star3 fact d1 d2 d3 garbageCollect f star = star { fact = diff --git a/parser-typechecker/src/Unison/Util/TQueue.hs b/parser-typechecker/src/Unison/Util/TQueue.hs index 6d2395c75..0e3191753 100644 --- a/parser-typechecker/src/Unison/Util/TQueue.hs +++ b/parser-typechecker/src/Unison/Util/TQueue.hs @@ -8,7 +8,7 @@ import UnliftIO.STM hiding (TQueue) data TQueue a = TQueue (TVar (Seq a)) (TVar Word64) -newIO :: MonadIO m => m (TQueue a) +newIO :: (MonadIO m) => m (TQueue a) newIO = TQueue <$> newTVarIO mempty <*> newTVarIO 0 size :: TQueue a -> STM Int @@ -68,7 +68,7 @@ enqueue (TQueue v count) a = do modifyTVar' v (|> a) modifyTVar' count (+ 1) -raceIO :: MonadIO m => STM a -> STM b -> m (Either a b) +raceIO :: (MonadIO m) => STM a -> STM b -> m (Either a b) raceIO a b = liftIO do aa <- Async.async $ atomically a ab <- Async.async $ atomically b diff --git a/parser-typechecker/src/Unison/Util/Text/Pattern.hs b/parser-typechecker/src/Unison/Util/Text/Pattern.hs index ebed0da48..14abd413e 100644 --- a/parser-typechecker/src/Unison/Util/Text/Pattern.hs +++ b/parser-typechecker/src/Unison/Util/Text/Pattern.hs @@ -2,7 +2,7 @@ module Unison.Util.Text.Pattern where -import Data.Char (isDigit, isLetter, isPunctuation, isSpace) +import Data.Char (isAlphaNum, isControl, isLetter, isLower, isMark, isNumber, isPrint, isPunctuation, isSeparator, isSpace, isSymbol, isUpper) import qualified Data.Text as DT import Unison.Util.Text (Text) import qualified Unison.Util.Text as Text @@ -13,18 +13,35 @@ data Pattern | Capture Pattern -- capture all the text consumed by the inner pattern, discarding its subcaptures | Many Pattern -- zero or more repetitions (at least 1 can be written: Join [p, Many p]) | Replicate Int Int Pattern -- m to n occurrences of a pattern, optional = 0-1 - | AnyChar -- consume a single char | Eof -- succeed if given the empty text, fail otherwise | Literal Text -- succeed if input starts with the given text, advance by that text - | CharRange Char Char -- consume 1 char in the given range, or fail - | CharIn [Char] -- consume 1 char in the given set, or fail - | NotCharIn [Char] -- consume 1 char NOT in the given set, or fail - | NotCharRange Char Char -- consume 1 char NOT in the given range, or fail - | Digit -- consume 1 digit (according to Char.isDigit) - | Letter -- consume 1 letter (according to Char.isLetter) - | Space -- consume 1 space character (according to Char.isSpace) - | Punctuation -- consume 1 punctuation char (according to Char.isPunctuation) - deriving (Eq, Ord) + | Char CharPattern -- succeed if input starts with a char matching the given pattern, advance by 1 char + deriving (Show, Eq, Ord) + +data CharPattern + = Any -- any char + | Not CharPattern -- negation of the given pattern + | Union CharPattern CharPattern -- match if either pattern matches + | Intersect CharPattern CharPattern -- match if both patterns match + | CharRange Char Char -- match if char is in the given range + | CharSet [Char] -- match if char is in the given set + | CharClass CharClass -- match if char is in the given class + deriving (Show, Eq, Ord) + +data CharClass + = AlphaNum -- alphabetic or numeric characters + | Upper -- uppercase alphabetic characters + | Lower -- lowercase alphabetic characters + | Whitespace -- whitespace characters (space, tab, newline, etc.) 
+ | Control -- non-printing control characters + | Printable -- letters, numbers, punctuation, symbols, spaces + | MarkChar -- accents, diacritics, etc. + | Number -- numeric characters in any script + | Punctuation -- connectors, brackets, quotes + | Symbol -- symbols (math, currency, etc.) + | Separator -- spaces, line separators, paragraph separators + | Letter -- letters in any script + deriving (Show, Eq, Ord) -- Wrapper type. Holds a pattern together with its compilation. This is used as -- the semantic value of a unison `Pattern a`. Laziness avoids building the @@ -98,13 +115,13 @@ compile (Literal txt) !err !success = go go acc t | Text.take (Text.size txt) t == txt = success acc (Text.drop (Text.size txt) t) | otherwise = err acc t -compile AnyChar !err !success = go +compile (Char Any) !err !success = go where go acc t = case Text.drop 1 t of rem | Text.size t > Text.size rem -> success acc rem | otherwise -> err acc rem -compile (Capture (Many AnyChar)) !_ !success = \acc t -> success (pushCapture t acc) Text.empty +compile (Capture (Many (Char Any))) !_ !success = \acc t -> success (pushCapture t acc) Text.empty compile (Capture c) !err !success = go where err' _ _ acc0 t0 = err acc0 t0 @@ -122,38 +139,15 @@ compile (Join ps) !err !success = go ps let pc = compile p err psc psc = compile (Join ps) err success in pc -compile (NotCharIn cs) !err !success = go +compile (Char cp) !err !success = go where - ok = charNotInPred cs + ok = charPatternPred cp go acc t = case Text.uncons t of Just (ch, rem) | ok ch -> success acc rem _ -> err acc t -compile (CharIn cs) !err !success = go - where - ok = charInPred cs - go acc t = case Text.uncons t of - Just (ch, rem) | ok ch -> success acc rem - _ -> err acc t -compile (CharRange c1 c2) !err !success = go - where - go acc t = case Text.uncons t of - Just (ch, rem) | ch >= c1 && ch <= c2 -> success acc rem - _ -> err acc t -compile (NotCharRange c1 c2) !err !success = go - where - go acc t = case Text.uncons t of - Just (ch, rem) | not (ch >= c1 && ch <= c2) -> success acc rem - _ -> err acc t compile (Many p) !_ !success = case p of - AnyChar -> (\acc _ -> success acc Text.empty) - CharIn cs -> walker (charInPred cs) - NotCharIn cs -> walker (charNotInPred cs) - CharRange c1 c2 -> walker (\ch -> ch >= c1 && ch <= c2) - NotCharRange c1 c2 -> walker (\ch -> ch < c1 || ch > c2) - Digit -> walker isDigit - Letter -> walker isLetter - Punctuation -> walker isPunctuation - Space -> walker isSpace + Char Any -> (\acc _ -> success acc Text.empty) + Char cp -> walker (charPatternPred cp) p -> go where go = compile p success success' @@ -176,18 +170,11 @@ compile (Many p) !_ !success = case p of success acc (Text.appendUnbalanced (Text.fromText rem) t) {-# INLINE walker #-} compile (Replicate m n p) !err !success = case p of - AnyChar -> \acc t -> + Char Any -> \acc t -> if Text.size t < m then err acc t else success acc (Text.drop n t) - CharIn cs -> dropper (charInPred cs) - NotCharIn cs -> dropper (charNotInPred cs) - CharRange c1 c2 -> dropper (\ch -> ch >= c1 && c1 <= c2) - NotCharRange c1 c2 -> dropper (\ch -> ch < c1 || ch > c2) - Digit -> dropper isDigit - Letter -> dropper isLetter - Punctuation -> dropper isPunctuation - Space -> dropper isSpace + Char cp -> dropper (charPatternPred cp) _ -> try "Replicate" (go1 m) err (go2 (n - m)) where go1 0 = \_err success stk rem -> success stk rem @@ -198,26 +185,6 @@ compile (Replicate m n p) !err !success = case p of dropper ok acc t | (i, rest) <- Text.dropWhileMax ok n t, i >= m = success acc 
rest | otherwise = err acc t -compile Digit !err !success = go - where - go acc t = case Text.uncons t of - Just (ch, rem) | isDigit ch -> success acc rem - _ -> err acc t -compile Letter !err !success = go - where - go acc t = case Text.uncons t of - Just (ch, rem) | isLetter ch -> success acc rem - _ -> err acc t -compile Punctuation !err !success = go - where - go acc t = case Text.uncons t of - Just (ch, rem) | isPunctuation ch -> success acc rem - _ -> err acc t -compile Space !err !success = go - where - go acc t = case Text.uncons t of - Just (ch, rem) | isSpace ch -> success acc rem - _ -> err acc t charInPred, charNotInPred :: [Char] -> Char -> Bool charInPred [] = const False @@ -225,6 +192,29 @@ charInPred (c : chs) = let ok = charInPred chs in \ci -> ci == c || ok ci charNotInPred [] = const True charNotInPred (c : chs) = let ok = charNotInPred chs in (\ci -> ci /= c && ok ci) +charPatternPred :: CharPattern -> Char -> Bool +charPatternPred Any = const True +charPatternPred (Not cp) = let notOk = charPatternPred cp in \ci -> not (notOk ci) +charPatternPred (Union cp1 cp2) = let ok1 = charPatternPred cp1; ok2 = charPatternPred cp2 in \ci -> ok1 ci || ok2 ci +charPatternPred (Intersect cp1 cp2) = let ok1 = charPatternPred cp1; ok2 = charPatternPred cp2 in \ci -> ok1 ci && ok2 ci +charPatternPred (CharRange c1 c2) = \ci -> ci >= c1 && ci <= c2 +charPatternPred (CharSet cs) = charInPred cs +charPatternPred (CharClass cc) = charClassPred cc + +charClassPred :: CharClass -> Char -> Bool +charClassPred AlphaNum = isAlphaNum +charClassPred Upper = isUpper +charClassPred Lower = isLower +charClassPred Whitespace = isSpace +charClassPred Control = isControl +charClassPred Printable = isPrint +charClassPred MarkChar = isMark +charClassPred Number = isNumber +charClassPred Punctuation = isPunctuation +charClassPred Symbol = isSymbol +charClassPred Separator = isSeparator +charClassPred Letter = isLetter + -- runs c and if it fails, restores state to what it was before try :: String -> Compiled r -> Compiled r try msg c err success stk rem = diff --git a/parser-typechecker/src/Unison/Util/TransitiveClosure.hs b/parser-typechecker/src/Unison/Util/TransitiveClosure.hs index 666187cdd..f72a4a09e 100644 --- a/parser-typechecker/src/Unison/Util/TransitiveClosure.hs +++ b/parser-typechecker/src/Unison/Util/TransitiveClosure.hs @@ -20,7 +20,7 @@ transitiveClosure getDependencies open = go (Set.insert h closed) (toList deps ++ t) in go Set.empty (toList open) -transitiveClosure' :: Ord a => (a -> Set a) -> Set a -> Set a +transitiveClosure' :: (Ord a) => (a -> Set a) -> Set a -> Set a transitiveClosure' f as = runIdentity $ transitiveClosure (pure . f) as transitiveClosure1 :: @@ -31,5 +31,5 @@ transitiveClosure1 :: m (Set a) transitiveClosure1 f a = transitiveClosure f (Set.singleton a) -transitiveClosure1' :: Ord a => (a -> Set a) -> a -> Set a +transitiveClosure1' :: (Ord a) => (a -> Set a) -> a -> Set a transitiveClosure1' f a = runIdentity $ transitiveClosure1 (pure . 
f) a diff --git a/parser-typechecker/tests/Unison/Test/ANF.hs b/parser-typechecker/tests/Unison/Test/ANF.hs index d0a93a522..62558bb04 100644 --- a/parser-typechecker/tests/Unison/Test/ANF.hs +++ b/parser-typechecker/tests/Unison/Test/ANF.hs @@ -13,7 +13,7 @@ import qualified Unison.ABT as ABT import Unison.ABT.Normalized (Term (TAbs)) import Unison.ConstructorReference (GConstructorReference (..)) import qualified Unison.Pattern as P -import Unison.Reference (Reference(Builtin)) +import Unison.Reference (Reference (Builtin)) import Unison.Runtime.ANF as ANF import Unison.Runtime.MCode (RefNums (..), emitCombs) import qualified Unison.Term as Term @@ -38,7 +38,7 @@ simpleRefs r | r == Ty.charRef = 5 | otherwise = 100 -runANF :: Var v => ANFM v a -> a +runANF :: (Var v) => ANFM v a -> a runANF m = evalState (runReaderT m Set.empty) (0, 1, []) testANF :: String -> Test () @@ -59,7 +59,7 @@ testLift s = case cs of !_ -> ok . lamLift $ tm s -denormalize :: Var v => ANormal v -> Term.Term0 v +denormalize :: (Var v) => ANormal v -> Term.Term0 v denormalize (TVar v) = Term.var () v denormalize (TLit l) = case l of I i -> Term.int () i @@ -116,7 +116,7 @@ backReference :: Word64 -> Reference backReference _ = error "backReference" denormalizeMatch :: - Var v => Branched (ANormal v) -> [Term.MatchCase () (Term.Term0 v)] + (Var v) => Branched (ANormal v) -> [Term.MatchCase () (Term.Term0 v)] denormalizeMatch b | MatchEmpty <- b = [] | MatchIntegral m df <- b = @@ -141,7 +141,7 @@ denormalizeMatch b where (n, dbr) = denormalizeBranch br - ipat :: Integral a => Reference -> p -> a -> P.Pattern () + ipat :: (Integral a) => Reference -> p -> a -> P.Pattern () ipat r _ i | r == Ty.natRef = P.Nat () $ fromIntegral i | otherwise = P.Int () $ fromIntegral i @@ -157,7 +157,7 @@ denormalizeBranch (TAbs v br) = (n + 1, ABT.abs v dbr) denormalizeBranch tm = (0, denormalize tm) denormalizeHandler :: - Var v => + (Var v) => Map.Map Reference (EnumMap CTag ([Mem], ANormal v)) -> ANormal v -> [Term.MatchCase () (Term.Term0 v)] diff --git a/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs b/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs index c556aa404..221d9f41b 100644 --- a/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs +++ b/parser-typechecker/tests/Unison/Test/Codebase/Causal.hs @@ -1,15 +1,21 @@ {-# LANGUAGE PartialTypeSignatures #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} module Unison.Test.Codebase.Causal (test) where -import Control.Monad (replicateM_) -import Data.Functor.Identity (Identity (runIdentity)) -import Data.Int (Int64) -import Data.Set (Set) import qualified Data.Set as Set +import qualified Data.Text.Encoding as Text import EasyTest import Unison.Codebase.Causal (Causal, one) import qualified Unison.Codebase.Causal as Causal +import qualified Unison.Hash as Hash +import qualified Unison.Hashing.V2 as Hashing +import Unison.Prelude + +-- Dummy instances for this test suite. Would probably be better if they weren't orphans. +instance Hashing.ContentAddressable Int64 where contentHash = Hash.fromByteString . Text.encodeUtf8 . tShow + +instance Hashing.ContentAddressable (Set Int64) where contentHash = Hash.fromByteString . Text.encodeUtf8 . tShow test :: Test () test = @@ -33,7 +39,7 @@ test = -- $ prop_mergeCommutative {- , scope "threeWayMerge.commonAncestor" . 
expect - $ testCommonAncestor + \$ testCommonAncestor -- $ prop_mergeCommonAncestor --} scope "lca.hasLca" lcaPairTest, scope "lca.noLca" noLcaPairTest, @@ -135,13 +141,13 @@ testThreeWay = runIdentity $ threeWayMerge' oneRemoved twoRemoved -setCombine :: Applicative m => Ord a => Set a -> Set a -> m (Set a) +setCombine :: (Applicative m) => (Ord a) => Set a -> Set a -> m (Set a) setCombine a b = pure $ a <> b -setDiff :: Applicative m => Ord a => Set a -> Set a -> m (Set a, Set a) +setDiff :: (Applicative m) => (Ord a) => Set a -> Set a -> m (Set a, Set a) setDiff old new = pure (Set.difference new old, Set.difference old new) -setPatch :: Applicative m => Ord a => Set a -> (Set a, Set a) -> m (Set a) +setPatch :: (Applicative m) => (Ord a) => Set a -> (Set a, Set a) -> m (Set a) setPatch s (added, removed) = pure (added <> Set.difference s removed) -- merge x x == x, should not add a new head, and also the value at the head should be the same of course diff --git a/parser-typechecker/tests/Unison/Test/Codebase/Path.hs b/parser-typechecker/tests/Unison/Test/Codebase/Path.hs index c497ef69d..6b9c5085a 100644 --- a/parser-typechecker/tests/Unison/Test/Codebase/Path.hs +++ b/parser-typechecker/tests/Unison/Test/Codebase/Path.hs @@ -32,7 +32,8 @@ test = let s = "foo.bar.baz#abc" in scope s . expect $ isLeft $ parseSplit' wordyNameSegment s, let s = "foo.bar.+" in scope s . expect $ - isLeft $ parseSplit' wordyNameSegment s + isLeft $ + parseSplit' wordyNameSegment s ], scope "definitionNameSegment" . tests $ [ let s = "foo.bar.+" diff --git a/parser-typechecker/tests/Unison/Test/Common.hs b/parser-typechecker/tests/Unison/Test/Common.hs index 10de1a5dc..43fbff855 100644 --- a/parser-typechecker/tests/Unison/Test/Common.hs +++ b/parser-typechecker/tests/Unison/Test/Common.hs @@ -52,7 +52,7 @@ tm s = Parser.run (Parser.root TermParser.term) s parsingEnv showParseError :: - Var v => + (Var v) => String -> MPE.ParseError Parser.Input (Parser.Error v) -> String diff --git a/parser-typechecker/tests/Unison/Test/DataDeclaration.hs b/parser-typechecker/tests/Unison/Test/DataDeclaration.hs index 2e42b521e..e7fcea751 100644 --- a/parser-typechecker/tests/Unison/Test/DataDeclaration.hs +++ b/parser-typechecker/tests/Unison/Test/DataDeclaration.hs @@ -8,9 +8,9 @@ import Data.Map ((!)) import qualified Data.Map as Map import EasyTest import Text.RawString.QQ -import qualified U.Util.Hash as Hash import Unison.DataDeclaration (DataDeclaration (..), Decl) import qualified Unison.DataDeclaration as DD +import qualified Unison.Hash as Hash import qualified Unison.Hashing.V2.Convert as Hashing import Unison.Parser.Ann (Ann) import Unison.Parsers (unsafeParseFile) diff --git a/parser-typechecker/tests/Unison/Test/Syntax/FileParser.hs b/parser-typechecker/tests/Unison/Test/Syntax/FileParser.hs index 7939e7a23..525e45306 100644 --- a/parser-typechecker/tests/Unison/Test/Syntax/FileParser.hs +++ b/parser-typechecker/tests/Unison/Test/Syntax/FileParser.hs @@ -81,7 +81,7 @@ emptyWatchTest = scope "emptyWatchTest" $ expectFileParseFailure ">" expectation where - expectation :: Var e => P.Error e -> Test () + expectation :: (Var e) => P.Error e -> Test () expectation e = case e of P.EmptyWatch _ann -> ok _ -> crash "Error wasn't EmptyWatch" @@ -91,7 +91,7 @@ signatureNeedsAccompanyingBodyTest = scope "signatureNeedsAccompanyingBodyTest" $ expectFileParseFailure (unlines ["f : Nat -> Nat", "", "g a = a + 1"]) expectation where - expectation :: Var e => P.Error e -> Test () + expectation :: (Var e) => P.Error e -> Test 
() expectation e = case e of P.SignatureNeedsAccompanyingBody _ -> ok _ -> crash "Error wasn't SignatureNeedsAccompanyingBody" @@ -101,7 +101,7 @@ emptyBlockTest = scope "emptyBlockTest" $ expectFileParseFailure (unlines ["f a =", "", "> 1 + 1"]) expectation where - expectation :: Var e => P.Error e -> Test () + expectation :: (Var e) => P.Error e -> Test () expectation e = case e of P.EmptyBlock _ -> ok _ -> crash "Error wasn't EmptyBlock" @@ -111,7 +111,7 @@ expectedBlockOpenTest = scope "expectedBlockOpenTest" $ expectFileParseFailure "f a b = match a b" expectation where - expectation :: Var e => P.Error e -> Test () + expectation :: (Var e) => P.Error e -> Test () expectation e = case e of P.ExpectedBlockOpen _ _ -> ok _ -> crash "Error wasn't ExpectedBlockOpen" @@ -121,7 +121,7 @@ unknownDataConstructorTest = scope "unknownDataConstructorTest" $ expectFileParseFailure "m a = match a with A -> 1" expectation where - expectation :: Var e => P.Error e -> Test () + expectation :: (Var e) => P.Error e -> Test () expectation e = case e of P.UnknownDataConstructor _ _ -> ok _ -> crash "Error wasn't UnknownDataConstructor" @@ -131,7 +131,7 @@ unknownAbilityConstructorTest = scope "unknownAbilityConstructorTest" $ expectFileParseFailure "f e = match e with {E t -> u} -> 1" expectation where - expectation :: Var e => P.Error e -> Test () + expectation :: (Var e) => P.Error e -> Test () expectation e = case e of P.UnknownAbilityConstructor _ _ -> ok _ -> crash "Error wasn't UnknownAbilityConstructor" diff --git a/parser-typechecker/tests/Unison/Test/Term.hs b/parser-typechecker/tests/Unison/Test/Term.hs index 319db7c7c..751d73653 100644 --- a/parser-typechecker/tests/Unison/Test/Term.hs +++ b/parser-typechecker/tests/Unison/Test/Term.hs @@ -6,7 +6,7 @@ import Data.Map ((!)) import qualified Data.Map as Map import Data.Text.Encoding (encodeUtf8) import EasyTest -import qualified U.Util.Hash as Hash +import qualified Unison.Hash as Hash import qualified Unison.Reference as R import Unison.Symbol (Symbol) import qualified Unison.Term as Term diff --git a/parser-typechecker/tests/Unison/Test/Type.hs b/parser-typechecker/tests/Unison/Test/Type.hs index e2f3b0bf3..3ca27b51e 100644 --- a/parser-typechecker/tests/Unison/Test/Type.hs +++ b/parser-typechecker/tests/Unison/Test/Type.hs @@ -10,7 +10,7 @@ import qualified Unison.Var as Var infixr 1 --> -(-->) :: Ord v => Type v () -> Type v () -> Type v () +(-->) :: (Ord v) => Type v () -> Type v () -> Type v () (-->) a b = arrow () a b test :: Test () diff --git a/parser-typechecker/tests/Unison/Test/UnisonSources.hs b/parser-typechecker/tests/Unison/Test/UnisonSources.hs index 57518c473..d6f2a3f29 100644 --- a/parser-typechecker/tests/Unison/Test/UnisonSources.hs +++ b/parser-typechecker/tests/Unison/Test/UnisonSources.hs @@ -90,7 +90,7 @@ go rt files how = do files' <- liftIO files tests (makePassingTest rt how <$> files') -showNotes :: Foldable f => String -> PrintError.Env -> f Note -> String +showNotes :: (Foldable f) => String -> PrintError.Env -> f Note -> String showNotes source env = intercalateMap "\n\n" $ PrintError.renderNoteAsANSI 60 env source Path.absoluteEmpty diff --git a/parser-typechecker/tests/Unison/Test/Util/Text.hs b/parser-typechecker/tests/Unison/Test/Util/Text.hs index 04f3bf58b..081d16e4c 100644 --- a/parser-typechecker/tests/Unison/Test/Util/Text.hs +++ b/parser-typechecker/tests/Unison/Test/Util/Text.hs @@ -4,6 +4,7 @@ module Unison.Test.Util.Text where import Control.Monad import Data.List (foldl', unfoldr) +import Data.Maybe 
(isNothing) import qualified Data.Text as T import EasyTest import qualified Unison.Util.Rope as R @@ -45,8 +46,10 @@ test = scope "<>" . expect' $ Text.toText (t1s <> t2s <> t3s) == t1 <> t2 <> t3 scope "Ord" . expect' $ - (t1 <> t2 <> t3) `compare` t3 - == (t1s <> t2s <> t3s) `compare` t3s + (t1 <> t2 <> t3) + `compare` t3 + == (t1s <> t2s <> t3s) + `compare` t3s scope "take" . expect' $ Text.toText (Text.take k (t1s <> t2s)) == T.take k (t1 <> t2) scope "drop" . expect' $ @@ -102,36 +105,36 @@ test = ok, scope "patterns" $ do expect' (P.run P.Eof "" == Just ([], "")) - expect' (P.run P.AnyChar "a" == Just ([], "")) - expect' (P.run (P.CharRange 'a' 'z') "a" == Just ([], "")) - expect' (P.run (P.NotCharRange 'a' 'z') "a" == Nothing) - expect' (P.run (P.Or (P.NotCharRange 'a' 'z') P.AnyChar) "abc" == Just ([], "bc")) + expect' (P.run (P.Char P.Any) "a" == Just ([], "")) + expect' (P.run (P.Char (P.CharRange 'a' 'z')) "a" == Just ([], "")) + expect' . isNothing $ P.run (P.Char (P.Not (P.CharRange 'a' 'z'))) "a" + expect' (P.run (P.Or (P.Char (P.Not (P.CharRange 'a' 'z'))) (P.Char P.Any)) "abc" == Just ([], "bc")) -- this shows that we ignore subcaptures - expect' (P.run (P.Join [P.Capture (P.Join [P.Capture P.AnyChar, P.Capture P.AnyChar]), P.AnyChar]) "abcdef" == Just (["ab"], "def")) - expect' (P.run (P.CharIn "0123") "3ab" == Just ([], "ab")) - expect' (P.run (P.NotCharIn "0123") "a3b" == Just ([], "3b")) - expect' (P.run (P.Capture (P.NotCharIn "0123")) "a3b" == Just (["a"], "3b")) - expect' (P.run (P.Many (P.CharIn "abcd")) "babbababac123" == Just ([], "123")) - expect' (P.run (P.Capture (P.Many (P.CharIn "abcd"))) "babbababac123" == Just (["babbababac"], "123")) - expect' (P.run (P.Capture (P.Many (P.Digit))) "012345abc" == Just (["012345"], "abc")) - expect' (P.run (P.Join [P.Capture (P.Many (P.Digit)), P.Literal ",", P.Capture (P.Many P.AnyChar)]) "012345,abc" == Just (["012345", "abc"], "")) + expect' (P.run (P.Join [P.Capture (P.Join [P.Capture (P.Char P.Any), P.Capture (P.Char P.Any)]), P.Char P.Any]) "abcdef" == Just (["ab"], "def")) + expect' (P.run (P.Char (P.CharSet "0123")) "3ab" == Just ([], "ab")) + expect' (P.run (P.Char (P.Not (P.CharSet "0123"))) "a3b" == Just ([], "3b")) + expect' (P.run (P.Capture (P.Char (P.Not (P.CharSet "0123")))) "a3b" == Just (["a"], "3b")) + expect' (P.run (P.Many (P.Char (P.CharSet "abcd"))) "babbababac123" == Just ([], "123")) + expect' (P.run (P.Capture (P.Many (P.Char (P.CharSet "abcd")))) "babbababac123" == Just (["babbababac"], "123")) + expect' (P.run (P.Capture (P.Many (P.Char (P.CharClass P.Number)))) "012345abc" == Just (["012345"], "abc")) + expect' (P.run (P.Join [P.Capture (P.Many (P.Char (P.CharClass P.Number))), P.Literal ",", P.Capture (P.Many (P.Char P.Any))]) "012345,abc" == Just (["012345", "abc"], "")) expect' - ( P.run (P.Many (P.Join [P.Capture (P.Many (P.Digit)), P.Many P.Space])) "01 10 20 1123 292 110 10" + ( P.run (P.Many (P.Join [P.Capture (P.Many (P.Char (P.CharClass P.Number))), P.Many (P.Char (P.CharClass P.Whitespace))])) "01 10 20 1123 292 110 10" == Just (["01", "10", "20", "1123", "292", "110", "10"], "") ) expect' $ - let part = P.Capture (P.Replicate 1 3 (P.Digit)) + let part = P.Capture (P.Replicate 1 3 (P.Char (P.CharClass P.Number))) dpart = P.Join [P.Literal ".", part] ip = P.Join [part, P.Replicate 3 3 dpart, P.Eof] in P.run ip "127.0.0.1" == Just (["127", "0", "0", "1"], "") expect' $ - let p = P.Replicate 5 8 (P.Capture P.Digit) + let p = P.Replicate 5 8 (P.Capture (P.Char (P.CharClass P.Number))) in 
P.run p "12345" == Just (["1", "2", "3", "4", "5"], "") expect' $ - let p = P.Replicate 5 8 (P.Capture P.Digit) `P.Or` P.Join [] + let p = P.Replicate 5 8 (P.Capture (P.Char (P.CharClass P.Number))) `P.Or` P.Join [] in P.run p "1234" == Just ([], "1234") expect' $ - let p = P.Replicate 5 8 (P.Capture (P.Join [P.Digit, P.Literal "z"])) `P.Or` P.Join [] + let p = P.Replicate 5 8 (P.Capture (P.Join [P.Char (P.CharClass P.Number), P.Literal "z"])) `P.Or` P.Join [] in P.run p "1z2z3z4z5z6a" == Just (["1z", "2z", "3z", "4z", "5z"], "6a") -- https://github.com/unisonweb/unison/issues/3530 expectEqual Nothing $ @@ -154,10 +157,10 @@ test = -- this is just making sure we don't duplicate captures to our left -- when entering an `Or` node expectEqual (Just (["@"], "")) $ - let p = P.Join [P.Capture P.AnyChar, P.Or (P.Literal "c") (P.Join []), P.Literal "d"] + let p = P.Join [P.Capture (P.Char P.Any), P.Or (P.Literal "c") (P.Join []), P.Literal "d"] in P.run p "@cd" expectEqual (Just (["%", "c"], "")) $ - let p = P.Join [P.Capture P.AnyChar, (P.Or (P.Capture (P.Literal "c")) (P.Join [])), P.Literal "d"] + let p = P.Join [P.Capture (P.Char P.Any), (P.Or (P.Capture (P.Literal "c")) (P.Join [])), P.Literal "d"] in P.run p "%cd" expectEqual (Just ([""], "ac")) $ let p = P.Capture (P.Or (P.Join [P.Literal "a", P.Literal "b"]) (P.Join [])) diff --git a/parser-typechecker/unison-parser-typechecker.cabal b/parser-typechecker/unison-parser-typechecker.cabal index e7d8ac5f6..94c6ee73f 100644 --- a/parser-typechecker/unison-parser-typechecker.cabal +++ b/parser-typechecker/unison-parser-typechecker.cabal @@ -17,6 +17,10 @@ source-repository head type: git location: https://github.com/unisonweb/unison +flag arraychecks + manual: True + default: False + flag optimized manual: True default: True @@ -78,6 +82,8 @@ library Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema4To5 Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema5To6 Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema6To7 + Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema7To8 + Unison.Codebase.SqliteCodebase.Migrations.MigrateSchema8To9 Unison.Codebase.SqliteCodebase.Operations Unison.Codebase.SqliteCodebase.Paths Unison.Codebase.SqliteCodebase.SyncEphemeral @@ -92,6 +98,20 @@ library Unison.FileParsers Unison.Hashing.V2.Convert Unison.Parsers + Unison.PatternMatchCoverage + Unison.PatternMatchCoverage.Class + Unison.PatternMatchCoverage.Constraint + Unison.PatternMatchCoverage.Desugar + Unison.PatternMatchCoverage.Fix + Unison.PatternMatchCoverage.GrdTree + Unison.PatternMatchCoverage.IntervalSet + Unison.PatternMatchCoverage.ListPat + Unison.PatternMatchCoverage.Literal + Unison.PatternMatchCoverage.NormalizedConstraints + Unison.PatternMatchCoverage.PmGrd + Unison.PatternMatchCoverage.PmLit + Unison.PatternMatchCoverage.Solve + Unison.PatternMatchCoverage.UFMap Unison.PrettyPrintEnv Unison.PrettyPrintEnv.FQN Unison.PrettyPrintEnv.MonadPretty @@ -103,6 +123,7 @@ library Unison.Result Unison.Runtime.ANF Unison.Runtime.ANF.Serialize + Unison.Runtime.Array Unison.Runtime.Builtin Unison.Runtime.Debug Unison.Runtime.Decompile @@ -147,6 +168,7 @@ library Unison.Util.Logger Unison.Util.PinBoard Unison.Util.Pretty.MegaParsec + Unison.Util.RefPromise Unison.Util.Star3 Unison.Util.Text Unison.Util.Text.Pattern @@ -188,6 +210,7 @@ library , aeson , ansi-terminal , async + , atomic-primops , base , base16 >=0.2.1.0 , base64-bytestring @@ -232,6 +255,7 @@ library , mmorph , monad-validate , mtl + , murmur-hash , mutable-containers , mwc-random 
, natural-transformation @@ -277,6 +301,7 @@ library , unison-codebase-sync , unison-core , unison-core1 + , unison-hash , unison-hashing-v2 , unison-prelude , unison-pretty-printer @@ -302,6 +327,8 @@ library default-language: Haskell2010 if flag(optimized) ghc-options: -funbox-strict-fields -O2 + if flag(arraychecks) + cpp-options: -DARRAY_CHECK test-suite parser-typechecker-tests type: exitcode-stdio-1.0 @@ -370,6 +397,7 @@ test-suite parser-typechecker-tests , aeson , ansi-terminal , async + , atomic-primops , base , base16 >=0.2.1.0 , base64-bytestring @@ -417,6 +445,7 @@ test-suite parser-typechecker-tests , mmorph , monad-validate , mtl + , murmur-hash , mutable-containers , mwc-random , natural-transformation @@ -463,6 +492,7 @@ test-suite parser-typechecker-tests , unison-codebase-sync , unison-core , unison-core1 + , unison-hash , unison-hashing-v2 , unison-parser-typechecker , unison-prelude @@ -489,3 +519,5 @@ test-suite parser-typechecker-tests default-language: Haskell2010 if flag(optimized) ghc-options: -funbox-strict-fields -O2 + if flag(arraychecks) + cpp-options: -DARRAY_CHECK diff --git a/scheme-libs/chez/unison/.gitignore b/scheme-libs/chez/unison/.gitignore new file mode 100644 index 000000000..607dadb6c --- /dev/null +++ b/scheme-libs/chez/unison/.gitignore @@ -0,0 +1 @@ +builtin-generated.ss diff --git a/scheme-libs/chez/unison/cont.ss b/scheme-libs/chez/unison/cont.ss new file mode 100644 index 000000000..b5b0e7a6a --- /dev/null +++ b/scheme-libs/chez/unison/cont.ss @@ -0,0 +1,259 @@ +; This library is intended to contain the implementation of +; delimited continuations used in the semantics of abilities. + +(library (unison cont) + (export + prompt + control) + + (import (chezscheme) + (unison core)) + + ; This implementation is based on the implementation of delimited + ; continuations used in racket, and makes use of primitives added in + ; the racket fork of chez scheme. + ; + ; The overall idea is to keep track of a meta-continuation that is + ; made up of a series of captured native continuations. The native + ; continuations make part of the frames of the meta-continuation, + ; and these frames can be labeled with prompts to support + ; multi-prompt delimited continuations. The native 'current + ; continuation' makes up the portion of the meta-continuation below + ; the nearest prompt. + ; + ; The specific racket-chez feature used is #%$call-in-continuation + ; which does not seem to be available in the upstream chez. This is + ; an important feature to have, because the mechanism for obtaining + ; the native continuation in chez is call/cc, which leaves the + ; native continuation in place. However, when we capture the native + ; continuation to push it onto a frame of the meta-continuation, it + ; may actually be completely eliminated from the implicit + ; continuation, because we will only ever return to it by popping + ; the corresponding frame of the meta=continuation. + ; + ; Failure to truncate the native continuation can lead to space + ; leaks due to growing unreachable portions of it. The racket-chez + ; feature allows us to instead repeatedly replace the implicit + ; continuation with #%$null-continuation, which avoids the leak. + (define-virtual-register meta-continuation '()) + + ; A record type representing continuation prompts. + ; + ; By comparing these records for pointer equality, we can make up + ; fresh prompts whenever needed, without having to keep track of + ; some sort of supply of prompts. 
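+  ;
+  ; A hypothetical illustration of that point (p1 and p2 are made-up
+  ; bindings, not part of the library): two prompts made from the same
+  ; symbol are still distinct delimiters, because the records compare
+  ; by identity rather than by name.
+  ;
+  ;   (define p1 (make-continuation-prompt 'p))
+  ;   (define p2 (make-continuation-prompt 'p))
+  ;   (eq? p1 p2) ; => #f, so effects delimited by p1 never stop at p2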
+ (define-record-type continuation-prompt + (fields (immutable name))) + + ; A frame of the meta-continuation consists of: + ; 1. A prompt delimiting the portion of the meta-continuation in + ; front of it. + ; 2. A native continuation to resume when re-entering the given + ; frame. + (define-record-type meta-frame + (fields + (immutable prompt) + (immutable resume-k))) + + ; A convenient abbreviation for grabbing the continuation. + (define-syntax let/cc + (syntax-rules () + [(let/cc k e ...) + (identifier? #'k) + (call/cc (lambda (k) e ...))])) + + ; A wrapper around primitive operations for truncating the implicit + ; continuation. `h` should be a nullary procedure that we want to + ; execute in an empty continuation. + (define (call-in-empty-frame h) + (($primitive $call-in-continuation) + ($primitive $null-continuation) + '() ; marks + h)) + + ; Removes and returns the top frame of the meta-continuation. + ; + ; Note: this procedure assumes that the meta-continuation has + ; already been checked for emptiness, and does no checking of its + ; own. + (define (pop-frame!) + (let ([mf (car meta-continuation)]) + (set! meta-continuation (cdr meta-continuation)) + mf)) + + ; Adds a frame to the top of the meta-continuation. + (define (push-frame! fm) + (set! meta-continuation (cons fm meta-continuation))) + + ; Handles returning values up the meta-continuation. + ; + ; Note: when we replace the native continuation with the null + ; continuation, for reasons mentioned above, it's important that the + ; things we run in that null continuation actually call this to + ; return up the meta-continuation. Otherwise we will _actually_ + ; return to the null continuation, which causes a crash. + (define (yield-to-meta-continuation results) + (cond + [(null? meta-continuation) + (display "falling off end\n") + results] + [else + (let ([mf (pop-frame!)]) + (($primitive $call-in-continuation) + (meta-frame-resume-k mf) + '() + (lambda () + (if (and (pair? results) (null? (cdr results))) + (car results) + (apply values results)))))])) + + ; This operation corresponds roughly to `reset` in shift/reset + ; delimited control. It calls (h p) in a context delimited by + ; the prompt p. + ; + ; This is something of a helper function, as the actual `prompt` + ; implementation will involve making up a fresh `p`. However, + ; this common code is useful for test cases using only single + ; prompt continuations. + ; + ; Mechanically, what this does is capture the current native + ; continuation, push it on the meta-continuation with the specified + ; prompt attached, and call (h p) in an empty native continuation. + (define (call-delimited-with-prompt p h) + (let/cc k + (call-in-empty-frame + (lambda () + (let-values + ([results + (let ([fm (make-meta-frame p k)]) + (push-frame! fm) + (h p))]) + (yield-to-meta-continuation results)))))) + + ; Implements prompt for our multi-prompt prompt/control calculus. + ; + ; `prompt` makes up a fresh prompt value, and runs its body + ; delimited with that value, e.g.: + ; + ; (prompt p ...) + ; + ; where `p` is a binding for the prompt value. The above is + ; syntactic sugar for something like: + ; + ; (prompt-impl (lambda (p) ...)) + (define (prompt-impl h) + (let ([p (make-continuation-prompt 'prompt)]) + (call-delimited-with-prompt p h))) + + ; The nicer syntactic form for the above prompt implementation. + (define-syntax prompt + (syntax-rules () + [(prompt p e ...) + (prompt-impl (lambda (p) e ...))])) + + ; Removes the prompt from the first frame of a meta-continuation. 
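+  ;
+  ; For example (illustrative only): if the first frame of mc was built
+  ; as (make-meta-frame p k), the corresponding frame in the result is
+  ; (make-meta-frame #f k), so a later control effect naming p searches
+  ; past this frame. The saved native continuation k is left untouched.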
+ (define (strip-prompt mc) + (let ([mf (car mc)]) + (cons (make-meta-frame #f (meta-frame-resume-k mf)) (cdr mc)))) + + ; This funcion is used to reinstate a captured continuation. It + ; should be called with: + ; + ; k - a native continuation to be pushed before the captured + ; meta-continuation + ; cc - the captured meta-continuation segment + ; p - a prompt that should delimit cc + ; + ; `p` is used as the prompt value of the `k` frame, so shift/reset + ; can be implemented by passing the same `p` that was used when `cc` + ; was captured (as that means that any further `p` control effects + ; in `cc` do not escape their original scope). + ; + ; However, we will usually be calling with p = #f, since shallow + ; handlers correspond to control effects that are able to eliminate + ; prompts. + ; + ; Note: the captured continuation `cc` is assumed to be in reverse + ; order, so will be reversed back onto the meta-continuation. + (define (push-to-meta-continuation k cc p) + (push-frame! (make-meta-frame p k)) + (let rec ([cc cc]) + (cond + [(null? cc) #f] + [else + (push-frame! (car cc)) + (rec (cdr cc))]))) + + ; Wraps a captured continuation with a procedure that reinstates it + ; upon invocation. This should be called with: + ; + ; ok - the captured native continuation that was captured along + ; with... + ; cc - the split meta-continuation + ; p - a prompt associated with the captured continuation. This + ; will be installed as a delimiter when the captured + ; continuation is re-pushed. If no delimiting is desired, + ; simply use #f, or some dummy prompt that will not be + ; involved in actual control flow. + ; + ; Note: the captured continuation `cc` is assumed to be in reverse + ; order, so will be reversed back onto the meta-continuation. + (define (make-callable-continuation ok cc p) + (lambda vals + (let/cc nk + (($primitive $call-in-continuation) + ok + '() + (lambda () + push-to-meta-continuation nk cc p + (apply values vals)))))) + + ; Captures the meta-continuation up to the specified prompt. The + ; continuation is wrapped in a function that reinstates it when + ; called. The supplied 'body' `h` is then evaluated with the + ; captured continuation. + ; + ; This implementation is designed to support shallow ability + ; handlers. This means that we actually implement what would be + ; called (in delimited continuation literature) control0. This means + ; that: + ; + ; 1. The control operator _removes_ the prompt from the + ; meta-continuation. So any control effects referring to the + ; same prompt will only be delimited further up the + ; continuation. + ; 2. The procedure reinstating the captured continuation does not + ; install a delimiter, so said captured continuation is itself + ; a procedure that can have control effects relevant to the + ; original prompt. + ; + ; The reason for this is that shallow handlers are one-shot in a + ; corresponding way. They only handle the first effect in their + ; 'body', and handling _all_ relevant effects requires an explicitly + ; recursive handler that re-installs a handling delimiter after each + ; effect request. + (define (control-impl p h) + (assert (continuation-prompt? p)) + (let/cc k + (let rec ([cc '()] [mc meta-continuation]) + (cond + [(or (null? mc) + (eq? p (meta-frame-prompt (car mc)))) + (set! 
meta-continuation (strip-prompt mc)) + (let ([ck (make-callable-continuation k cc #f)]) + (call-in-empty-frame + (lambda () + (let-values ([results (h ck)]) + (yield-to-meta-continuation results)))))] + [else (rec (cons (car mc) cc) (cdr mc))])))) + + ; The nicer syntactic form for the control operator. + (define-syntax control + (syntax-rules () + [(control p k e ...) + (control-impl (lambda (k) e ...))])) + + ; TODO: generate this as part of the main program. + ; (define-init-registers init-regs) + ; (init-regs) + ) diff --git a/scheme-libs/chez/unison/core.ss b/scheme-libs/chez/unison/core.ss new file mode 100644 index 000000000..a8ec91c49 --- /dev/null +++ b/scheme-libs/chez/unison/core.ss @@ -0,0 +1,138 @@ +; This library implements various functions and macros that are used +; internally to the unison scheme libraries. This provides e.g. a +; measure of abstraction for the particular scheme platform. A useful +; feature of one implementation might need to be implemented on top of +; other features of another, and would go in this library. +; +; This library won't be directly imported by the generated unison +; code, so if some function is needed for those, it should be +; re-exported by (unison boot). +(library (unison core) + (export + define-virtual-register + define-init-registers + + describe-value + decode-value + + universal-compare + + fx1- + list-head + + exception->string + record-case + fluid-let + + freeze-string! + string-copy! + + freeze-bytevector! + freeze-vector! + + bytevector) + + (import (chezscheme)) + + ; Wrapper for chez scheme's virtual registers, which are top-level + ; variables that may perform better than normal variables. They are + ; limited in number, and referenced by an integer. + ; + ; This macro allows the definition of names for the virtual registers, + ; and keeps track of how many have been declared, so that a static + ; error is thrown if more are declared than are available. + ; + ; Virtual registers are thread local + (meta define virtual-register-inits '()) + + (define-syntax (define-virtual-register stx) + (syntax-case stx () + [(define-virtual-register name init) + (let ([n (length virtual-register-inits)]) + (with-syntax ([reg (datum->syntax #'define-virtual-register n)]) + (cond + [(>= n (virtual-register-count)) + (syntax-error stx + "Could not allocate a virtual register:")] + [else + (set! virtual-register-inits + (cons #'init virtual-register-inits)) + #`(define-syntax name + (identifier-syntax + [id (virtual-register reg)] + [(set! id e) (set-virtual-register! reg e)]))])))])) + + (define-syntax (define-init-registers stx) + (syntax-case stx () + [(_ name) + (with-syntax + ([(set ...) + (let rec ([l (reverse virtual-register-inits)] + [n 0]) + (cond + [(null? l) '()] + [else + (with-syntax ([reg (datum->syntax #'name n)] + [val (car l)]) + (cons #'(set-virtual-register! reg val) + (rec (cdr l) (+ 1 n))))]))]) + #'(define (name) set ... #t))])) + + ; Recovers the original function name from the partial + ; application name. 
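+  ;
+  ; This relies on Chez's inspector API (an assumption about how the
+  ; runtime records procedure names): for an object inspector i over a
+  ; procedure, (i 'code) inspects its code object, and asking that for
+  ; 'name yields the recorded name as a string, so roughly
+  ;
+  ;   (extract-name (inspect/object f)) ; => the symbol f was named with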
+ (define (extract-name i) + (string->symbol ((i 'code) 'name))) + + (define (describe-value x) + (let explain ([i (inspect/object x)]) + (case (i 'type) + [(simple) (i 'value)] + [(variable) (explain (i 'ref))] + [(procedure) + (let explain-args ([j (- (i 'length) 1)] [args '()]) + (if (< j 0) + (cons (extract-name i) args) + (explain-args + (- j 1) + (cons (explain (i 'ref j)) args))))]))) + + ; partial, data, cont, lit + (define (decode-value x) + (let reify ([i (inspect/object x)]) + (case (i 'type) + [(simple) (list 3 (i 'value))] + [(variable) (reify (i 'ref))] + [(procedure) + (let reify-args ([j (- (i 'length) 1)] [args '()]) + (if (< j 0) + (cons* 0 (extract-name i) args) + (reify-args + (- j 1) + (cons (reify (i 'ref j)) args))))]))) + + ; 0 = LT + ; 1 = EQ + ; 2 = GT + (define (universal-compare l r) + (cond + [(equal? l r) 1] + [(and (number? l) (number? r)) (if (< l r) 0 2)] + [else (raise "universal-compare: unimplemented")])) + + (define (exception->string e) + (let-values ([(port result) (open-string-output-port)]) + (display-condition e port) + (result))) + + (define (freeze-string! s) + (($primitive $string-set-immutable!) s) + s) + + (define (freeze-bytevector! bs) + (($primitive $bytevector-set-immutable!) bs) + bs) + + (define (freeze-vector! v) + (($primitive $vector-set-immutable!) v) + v) + ) diff --git a/scheme-libs/chez/unison/crypto.ss b/scheme-libs/chez/unison/crypto.ss new file mode 100644 index 000000000..264c97217 --- /dev/null +++ b/scheme-libs/chez/unison/crypto.ss @@ -0,0 +1,109 @@ + +(library (unison crypto) + (export + unison-FOp-crypto.HashAlgorithm.Sha1 + unison-FOp-crypto.hashBytes) + + (import (chezscheme) + (unison core) + (unison string) + (unison bytevector)) + + (define (capture-output fn) + (parameterize ((current-output-port (open-output-string))) + (fn) + (get-output-string (current-output-port)))) + + ; if loading the dynamic library is successful, returns true + ; otherwise, returns a lambda that will throw the original error + ; with some helpful messaging. + (define try-load-shared (lambda (name message) + (guard (x [else (lambda () + (printf "\n🚨🚨🚨 (crypto.ss) Unable to load shared library ~s 🚨🚨🚨\n---> ~a\n\nOriginal exception:\n" name message) + (raise x) + )]) + (load-shared-object name) + #t))) + + (define libcrypto (try-load-shared "libcrypto.3.dylib" "Do you have openssl installed?")) + (define libb2 (try-load-shared "libb2.dylib" "Do you have libb2 installed?")) + + ; if the "source" library was loaded, call (fn), otherwise returns a lambda + ; that will throw the original source-library-loading exception when called. 
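+  ;
+  ; Illustrative use, mirroring the definitions below (EVP_md5 is a
+  ; hypothetical example, not one of the hashes exported here):
+  ;
+  ;   (define EVP_md5
+  ;     (if-loaded libcrypto
+  ;       (lambda () (foreign-procedure "EVP_md5" () void*))))
+  ;
+  ; If libcrypto loaded, EVP_md5 is the foreign procedure; if loading
+  ; failed, it is a procedure that re-raises the original load error
+  ; (with the message above) when called.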
+ (define (if-loaded source fn) + (case source + (#t (fn)) + (else (lambda args (source))))) + + (define EVP_Digest + (if-loaded libcrypto (lambda () (foreign-procedure "EVP_Digest" + ( + u8* ; input buffer + unsigned-int ; length of input + u8* ; output buffer + boolean ; note: not a boolean, we just need to be able to pass NULL (0) + void* ; the EVP_MD* pointer, which holds the digest algorithm + boolean ; note: not a boolean, we just need to be able to pass NULL (0) + ) + ; 1 if success, 0 or -1 for failure + int)))) + + (define digest (lambda (text kind bits) + (let ([buffer (make-bytevector (/ bits 8))]) + (if (= 1 (EVP_Digest text (bytevector-length text) buffer #f kind #f)) + buffer + (error "crypto.ss digest" "libssl was unable to hash the data for some reason"))))) + + (define EVP_sha1 (if-loaded libcrypto (lambda () (foreign-procedure "EVP_sha1" () void*)))) + (define EVP_sha256 (if-loaded libcrypto (lambda () (foreign-procedure "EVP_sha256" () void*)))) + (define EVP_sha512 (if-loaded libcrypto (lambda () (foreign-procedure "EVP_sha512" () void*)))) + (define EVP_sha3_256 (if-loaded libcrypto (lambda () (foreign-procedure "EVP_sha3_256" () void*)))) + (define EVP_sha3_512 (if-loaded libcrypto (lambda () (foreign-procedure "EVP_sha3_512" () void*)))) + + (define sha1 (lambda (text) (digest text (EVP_sha1) 160))) + (define sha256 (lambda (text) (digest text (EVP_sha256) 256))) + (define sha512 (lambda (text) (digest text (EVP_sha512) 512))) + (define sha3_256 (lambda (text) (digest text (EVP_sha3_256) 256))) + (define sha3_512 (lambda (text) (digest text (EVP_sha3_512) 512))) + + (define blake2b-raw + (if-loaded libb2 (lambda () (foreign-procedure "blake2b" + ( + u8* ; output buffer + string ; input buffer + u8* ; input key + int ; output length + int ; input length + int ; key length + ) int + )))) + + (define blake2s-raw + (if-loaded libb2 (lambda () (foreign-procedure "blake2s" + ( + u8* ; output buffer + string ; input buffer + u8* ; input key + int ; output length + int ; input length + int ; key length + ) int + )))) + + (define blake2s (lambda (text size) + (let ([buffer (make-bytevector (/ size 8))]) + (if (= 0 (blake2s-raw buffer text #f (/ size 8) (string-length text) 0)) + buffer + (error "crypto.ss blake2s" "libb2 was unable to hash the data for some reason"))))) + + (define blake2b (lambda (text size) + (let ([buffer (make-bytevector (/ size 8))]) + (if (= 0 (blake2b-raw buffer text #f (/ size 8) (string-length text) 0)) + buffer + (error "crypto.ss blake2b" "libb2 was unable to hash the data for some reason"))))) + + (define (unison-FOp-crypto.HashAlgorithm.Sha1) sha1) + (define (unison-FOp-crypto.hashBytes algo text) + (algo text)) + + ) diff --git a/scheme-libs/common/unison/boot.ss b/scheme-libs/common/unison/boot.ss new file mode 100644 index 000000000..f813c379d --- /dev/null +++ b/scheme-libs/common/unison/boot.ss @@ -0,0 +1,325 @@ +; This library implements various syntactic constructs and functions +; that are used in the compilation of unison (intermediate) source to +; scheme. The intent is to provide for writing scheme definitions that +; more directly match the source, so that the compiler doesn't need to +; emit all the code necessary to fix up the difference itself. +; +; Probably the best example of this is the define-unison macro, which +; looks similar to scheme's define, but the function being defined is +; allowed to be under/over applied similar to a unison function. 
It +; has an 'arity' at which computation happens, but the function +; automatically handles being applied to fewer or more arguments than +; that arity appropriately. +#!r6rs +(library (unison boot) + (export + bytevector + control + define-unison + handle + identity + name + data + data-case + + request + request-case + sum + sum-case + unison-force) + + (import (rnrs) + (for + (only (unison core) syntax->list) + expand) + (only (srfi :28) format) + (unison core) + (unison data) + (unison cont) + (unison crypto)) + + ; Computes a symbol for automatically generated partial application + ; cases, based on number of arguments applied. The partial + ; application of `f` is (locally) named `f-partial-N` + ; (meta define (partial-symbol name m) + ; (fun-sym (symbol->string name) "partial" (number->string m))) + + ; As above, but takes a syntactic object representing the arguments + ; rather than their count. + ; (define (partial-name name us) + ; (datum->syntax name (syntax->datum name))) + + (define-syntax with-name + (syntax-rules () + [(with-name name e) (let ([name e]) name)])) + + ; function definition with slow/fast path. Slow path allows for + ; under/overapplication. Fast path is exact application. + ; + ; The intent is for the scheme compiler to be able to recognize and + ; optimize static, fast path calls itself, while still supporting + ; unison-like automatic partial application and such. + (define-syntax define-unison + (lambda (x) + (define (fast-path-symbol name) + (string->symbol + (string-append + "fast-path-" + (symbol->string name)))) + + (define (fast-path-name name) + (datum->syntax name (fast-path-symbol (syntax->datum name)))) + + ; Helper function. Turns a list of syntax objects into a + ; list-syntax object. + (define (list->syntax l) #`(#,@l)) + ; Builds partial application cases for unison functions. + ; It seems most efficient to have a case for each posible + ; under-application. + (define (build-partials name formals) + (let rec ([us formals] [acc '()]) + (syntax-case us () + [() (list->syntax (cons #`[() #,name] acc))] + [(a ... z) + (rec #'(a ...) + (cons + #`[(a ... z) + (with-name + #,(datum->syntax name (syntax->datum name)) + (lambda r (apply #,name a ... z r)))] + acc))]))) + + ; Given an overall function name, a fast path name, and a list of + ; arguments, builds the case-lambda body of a unison function that + ; enables applying to arbitrary numbers of arguments. + (define (func-cases name fast args) + (syntax-case args () + [() #`(case-lambda + [() (#,fast)] + [r (apply (#,fast) r)])] + [(a ... z) + #`(case-lambda + #,@(build-partials name #'(a ...)) + [(a ... z) (#,fast a ... z)] + [(a ... z . r) (apply (#,fast a ... z) r)])])) + + (define (func-wrap name args body) + (with-syntax ([fp (fast-path-name name)]) + #`(let ([fp (lambda (#,@args) #,@body)]) + #,(func-cases name #'fp args)))) + + (syntax-case x () + [(define-unison (name a ...) e ...) + #`(define name + #,(func-wrap #'name #'(a ...) #'(e ...)))]))) + + ; call-by-name bindings + (define-syntax name + (syntax-rules () + ((name ([v (f . args)] ...) body ...) + (let ([v (lambda r (apply f (append (list . args) r)))] + ...) + body ...)))) + + ; Wrapper that more closely matches `handle` constructs + ; + ; Note: this uses the prompt _twice_ to achieve the sort of dynamic + ; scoping we want. 
First we push an outer delimiter, then install + ; the continuation marks corresponding to the handled abilities + ; (which tells which propt to use for that ability and which + ; functions to use for each request). Then we re-delimit by the same + ; prompt. + ; + ; If we just used one delimiter, we'd have a problem. If we pushed + ; the marks _after_ the delimiter, then the continuation captured + ; when handling would contain those marks, and would effectively + ; retain the handler for requests within the continuation. If the + ; marks were outside the prompt, we'd be in a similar situation, + ; except where the handler would be automatically handling requests + ; within its own implementation (although, in both these cases we'd + ; get control errors, because we would be using the _function_ part + ; of the handler without the necessary delimiters existing on the + ; continuation). Both of these situations are wrong for _shallow_ + ; handlers. + ; + ; Instead, what we need to be able to do is capture the continuation + ; _up to_ the marks, then _discard_ the marks, and this is what the + ; multiple delimiters accomplish. There might be more efficient ways + ; to accomplish this with some specialized mark functions, but I'm + ; uncertain of what pitfalls there are with regard to that (whehter + ; they work might depend on exact frame structure of the + ; metacontinuation). + (define-syntax handle + (syntax-rules () + [(handle [r ...] h e ...) + (let ([p (make-prompt)]) + (prompt0-at p + (let ([v (let-marks (list (quote r) ...) (cons p h) + (prompt0-at p e ...))]) + (h (make-pure v)))))])) + + ; wrapper that more closely matches ability requests + (define-syntax request + (syntax-rules () + [(request r t . args) + (let ([rq (make-request (quote r) t (list . args))]) + (let ([current-mark (ref-mark (quote r))]) + (if (equal? #f current-mark) + (raise (condition + (make-error) + (make-message-condition (format "Unhandled top-level effect! ~a" (list r t . args))))) + ((cdr current-mark) rq))))])) + + ; See the explanation of `handle` for a more thorough understanding + ; of why this is doing two control operations. + ; + ; In-unison 'control' corresponds to a (shallow) handler jump, so we + ; need to capture the continuation _and_ discard some dynamic scope + ; information. The capture is accomplished via the first + ; control0-at, while the second does the discard, based on the + ; convention used in `handle`. + (define-syntax control + (syntax-rules () + [(control r k e ...) + (let ([p (car (ref-mark r))]) + (control0-at p k (control0-at p _k e ...)))])) + + (define (identity x) x) + + ; forces something that is expected to be a thunk, defined with + ; e.g. `name` above. In some cases, we might have a normal value, + ; so just do nothing in that case. + (define (unison-force x) + (if (procedure? x) (x) x)) + + (define-syntax sum-case + (lambda (stx) + (define (make-case scrut-stx) + (lambda (cur) + (with-syntax ([scrut scrut-stx]) + (syntax-case cur (else) + [(else e ...) #'(else e ...)] + [((t ...) () e ...) #'((t ...) e ...)] + [(t () e ...) #'((t) e ...)] + [((t ...) (v ...) e ...) + #'((t ...) + (let-values + ([(v ...) (apply values (sum-fields scrut))]) + e ...))] + [(t (v ...) e ...) + #'((t) + (let-values + ([(v ...) (apply values (sum-fields scrut))]) + e ...))] + [((t ...) v e ...) + (identifier? #'v) + #'((t ...) + (let ([v (sum-fields scrut)]) + e ...))] + [(t v e ...) + (identifier? 
#'v) + #'((t) + (let ([v (sum-fields scrut)]) + e ...))])))) + + (syntax-case stx () + [(sum-case scrut c ...) + (with-syntax + ([(tc ...) + (map (make-case #'scrut) (syntax->list #'(c ...)))]) + #'(case (sum-tag scrut) tc ...))]))) + + (define-syntax data-case + (lambda (stx) + (define (make-case scrut-stx) + (lambda (cur) + (with-syntax ([scrut scrut-stx]) + (syntax-case cur (else) + [(else e ...) #'(else e ...)] + [((t ...) () e ...) #'((t ...) e ...)] + [(t () e ...) #'((t) e ...)] + [((t ...) (v ...) e ...) + #'((t ...) + (let-values + ([(v ...) (apply values (data-fields scrut))]) + e ...))] + [(t (v ...) e ...) + #'((t) + (let-values + ([(v ...) (apply values (data-fields scrut))]) + e ...))] + [((t ...) v e ...) + (identifier? #'v) + #'((t ...) + (let ([v (data-fields scrut)]) + e ...))] + [(t v e ...) + (identifier? #'v) + #'((t) + (let ([v (data-fields scrut)]) + e ...))])))) + (syntax-case stx () + [(data-case scrut c ...) + (with-syntax + ([(tc ...) + (map (make-case #'scrut) (syntax->list #'(c ...)))]) + #'(case (data-tag scrut) tc ...))]))) + + (define-syntax request-case + (lambda (stx) + (define (pure-case? c) + (syntax-case c (pure) + [(pure . xs) #t] + [_ #f])) + + (define (mk-pure scrut ps) + (if (null? ps) + #`(pure-val #,scrut) + (syntax-case (car ps) (pure) + [(pure (v) e ...) + #`(let ([v (pure-val #,scrut)]) + e ...)] + [(pure vs e ...) + (raise-syntax-error + #f + "pure cases receive exactly one variable" + (car ps) + #'vs)]))) + + (define (mk-req scrut-stx) + (lambda (stx) + (syntax-case stx () + [(t vs e ...) + (with-syntax ([scrut scrut-stx]) + #'((t) (let-values + ([vs (apply values (request-fields scrut))]) + e ...)))]))) + + (define (mk-abil scrut-stx) + (lambda (stx) + (syntax-case stx () + [(t sc ...) + (let ([sub (mk-req scrut-stx)]) + (with-syntax + ([(sc ...) (map sub (syntax->list #'(sc ...)))] + [scrut scrut-stx]) + #'((t) (case (request-tag scrut) sc ...))))]))) + + (syntax-case stx () + [(request-case scrut c ...) + (let-values + ([(ps as) (partition pure-case? (syntax->list #'(c ...)))]) + (if (> 1 (length ps)) + (raise-syntax-error + #f + "multiple pure cases in request-case" + stx) + (with-syntax + ([pc (mk-pure #'scrut ps)] + [(ac ...) (map (mk-abil #'scrut) as)]) + + #'(cond + [(pure? scrut) pc] + [else (case (request-ability scrut) ac ...)]))))]))) + + ) diff --git a/scheme-libs/common/unison/bytevector.ss b/scheme-libs/common/unison/bytevector.ss new file mode 100644 index 000000000..c8656e53f --- /dev/null +++ b/scheme-libs/common/unison/bytevector.ss @@ -0,0 +1,33 @@ +; This library implements missing bytevector functionality for unison +; builtins. The main missing bits are better support for immutable +; bytevectors. Both chez and racket have support for immutable +; bytevectors, but there is no standard API for dealing with them that +; implements all the functions we'd want. This library exports the +; desired functionality on top of an unsafe in-place freeze +; re-exported from the (unison core) module. +#!r6rs +(library (unison bytevector) + (export + freeze-bytevector! + ibytevector-drop + ibytevector-take + u8-list->ibytevector) + + (import (rnrs) + (unison core)) + + (define (ibytevector-drop n bs) + (let* ([l (bytevector-length bs)] + [k (max 0 (- l n))] + [br (make-bytevector k)]) + (bytevector-copy! bs n br 0 k) + (freeze-bytevector! br))) + + (define (ibytevector-take n bs) + (let* ([sz (min n (bytevector-length bs))] + [br (make-bytevector sz)]) + (bytevector-copy! bs 0 br 0 sz) + (freeze-bytevector! 
br))) + + (define (u8-list->ibytevector l) + (freeze-bytevector! (u8-list->bytevector l)))) diff --git a/scheme-libs/common/unison/data.ss b/scheme-libs/common/unison/data.ss new file mode 100644 index 000000000..546e2b916 --- /dev/null +++ b/scheme-libs/common/unison/data.ss @@ -0,0 +1,126 @@ +;; Helpers for building data that conform to the compiler calling convention + +#!r6rs +(library (unison data) + (export + + make-data + data + data? + data-ref + data-tag + data-fields + + make-sum + sum + sum? + sum-tag + sum-fields + + make-pure + pure? + pure-val + + make-request + request? + request-ability + request-tag + request-fields + + some + none + some? + none? + option-get + right + left + right? + left? + either-get + either-get + unit + false + true + any + failure + exception) + + (import (rnrs)) + + (define-record-type (unison-data make-data data?) + (fields + (immutable ref data-ref) + (immutable tag data-tag) + (immutable fields data-fields))) + + (define (data r t . args) (make-data r t args)) + + (define-record-type (unison-sum make-sum sum?) + (fields + (immutable tag sum-tag) + (immutable fields sum-fields))) + + (define (sum t . args) (make-sum t args)) + + (define-record-type (unison-pure make-pure pure?) + (fields + (immutable val pure-val))) + + (define-record-type (unison-request make-request request?) + (fields + (immutable ability request-ability) + (immutable tag request-tag) + (immutable fields request-fields))) + + ; Option a + (define none (sum 0)) + + ; a -> Option a + (define (some a) (sum 1 a)) + + ; Option a -> Bool + (define (some? option) (eq? 1 (sum-tag option))) + + ; Option a -> Bool + (define (none? option) (eq? 0 (sum-tag option))) + + ; Option a -> a (or #f) + (define (option-get option) + (if + (some? option) + (car (sum-fields option)) + (raise "Cannot get the value of an empty option "))) + + ; # works as well + ; Unit + (define unit (sum 0)) + + ; Booleans are represented as numbers + (define false 0) + (define true 1) + + ; a -> Either b a + (define (right a) (sum 1 a)) + + ; b -> Either b a + (define (left b) (sum 0 b)) + + ; Either a b -> Boolean + (define (right? either) (eq? 1 (sum-tag either))) + + ; Either a b -> Boolean + (define (left? either) (eq? 0 (sum-tag either))) + + ; Either a b -> a | b + (define (either-get either) (car (sum-fields either))) + + ; a -> Any + (define (any a) (data 'Any 0 a)) + + ; Type -> Text -> Any -> Failure + (define (failure typeLink msg any) + (sum 0 typeLink msg any)) + + ; Type -> Text -> a ->{Exception} b + (define (exception typeLink msg a) + (failure typeLink msg (any a)))) diff --git a/scheme-libs/common/unison/primops.ss b/scheme-libs/common/unison/primops.ss new file mode 100644 index 000000000..49f725485 --- /dev/null +++ b/scheme-libs/common/unison/primops.ss @@ -0,0 +1,395 @@ +; This library implements pimitive operations that are used in +; builtins. There are two different sorts of primitive operations, but +; the difference is essentially irrelevant except for naming schemes. +; +; POps are part of a large enumeration of 'instructions' directly +; implemented in the Haskell runtime. These are referred to using the +; naming scheme `unison-POp-INST` where `INST` is the name of the +; instruction, which is (at the time of this writing) 4 letters. +; +; FOps are 'foreign' functons, which are allowed to be declared more +; flexibly in the Haskell runtime. Each such declaration associates a +; builtin to a Haskell function. 
For these, the naming shceme is +; `unison-FOp-NAME` where `NAME` is the name of the unison builtin +; associated to the declaration. +; +; Both POps and FOps are always called with exactly the right number +; of arguments, so they may be implemented as ordinary scheme +; definitions with a fixed number of arguments. By implementing the +; POp/FOp, you are expecting the associated unison function(s) to be +; implemented by code generation from the wrappers in +; Unison.Runtime.Builtin, so the POp/FOp implementation must +; take/return arguments that match what is expected in those wrappers. + +#!r6rs +(library (unison primops) + (export + ; unison-FOp-Bytes.decodeNat16be + ; unison-FOp-Bytes.decodeNat32be + ; unison-FOp-Bytes.decodeNat64be + unison-FOp-Char.toText + ; unison-FOp-Code.dependencies + ; unison-FOp-Code.serialize + unison-FOp-IO.closeFile.impl.v3 + unison-FOp-IO.openFile.impl.v3 + unison-FOp-IO.putBytes.impl.v3 + unison-FOp-Text.fromUtf8.impl.v3 + unison-FOp-Text.repeat + unison-FOp-Text.toUtf8 + ; unison-FOp-Value.serialize + unison-FOp-IO.stdHandle + unison-FOp-IO.getArgs.impl.v1 + + unison-FOp-ImmutableArray.copyTo! + unison-FOp-ImmutableArray.read + + unison-FOp-MutableArray.freeze! + unison-FOp-MutableArray.freeze + unison-FOp-MutableArray.read + unison-FOp-MutableArray.write + + unison-FOp-MutableArray.size + unison-FOp-ImmutableArray.size + + unison-FOp-MutableByteArray.size + unison-FOp-ImmutableByteArray.size + + unison-FOp-MutableByteArray.length + unison-FOp-ImmutableByteArray.length + + unison-FOp-ImmutableByteArray.copyTo! + unison-FOp-ImmutableByteArray.read8 + + unison-FOp-MutableByteArray.freeze! + unison-FOp-MutableByteArray.write8 + + unison-FOp-Scope.bytearray + unison-FOp-Scope.bytearrayOf + unison-FOp-Scope.array + unison-FOp-Scope.arrayOf + unison-FOp-Scope.ref + + unison-FOp-IO.bytearray + unison-FOp-IO.bytearrayOf + unison-FOp-IO.array + unison-FOp-IO.arrayOf + + unison-FOp-IO.ref + unison-FOp-Ref.read + unison-FOp-Ref.write + unison-FOp-Ref.readForCas + unison-FOp-Ref.Ticket.read + unison-FOp-Ref.cas + + unison-FOp-Promise.new + unison-FOp-Promise.read + unison-FOp-Promise.tryRead + unison-FOp-Promise.write + + unison-FOp-IO.delay.impl.v3 + unison-POp-FORK + unison-FOp-IO.kill.impl.v3 + unison-POp-TFRC + + unison-POp-ADDN + unison-POp-ANDN + unison-POp-BLDS + unison-POp-CATS + unison-POp-CATT + unison-POp-CMPU + unison-POp-COMN + unison-POp-CONS + unison-POp-DECI + unison-POp-DIVN + unison-POp-DRPB + unison-POp-DRPS + unison-POp-DRPT + unison-POp-EQLN + unison-POp-EQLT + unison-POp-EQLU + unison-POp-EROR + unison-POp-FTOT + unison-POp-IDXB + unison-POp-IDXS + unison-POp-IORN + unison-POp-ITOT + unison-POp-LEQN + ; unison-POp-LKUP + unison-POp-LZRO + unison-POp-MULN + unison-POp-MODN + unison-POp-NTOT + unison-POp-PAKT + unison-POp-SHLI + unison-POp-SHLN + unison-POp-SHRI + unison-POp-SHRN + unison-POp-SIZS + unison-POp-SIZT + unison-POp-SNOC + unison-POp-SUBN + unison-POp-TAKS + unison-POp-TAKT + unison-POp-TRCE + unison-POp-TTON + unison-POp-UPKT + unison-POp-XORN + unison-POp-VALU + unison-POp-VWLS + + unison-POp-UPKB + unison-POp-PAKB + unison-POp-ADDI + unison-POp-DIVI + unison-POp-EQLI + unison-POp-MODI + unison-POp-LEQI + unison-POp-POWN + unison-POp-VWRS + + unison-FOp-crypto.hashBytes + unison-FOp-crypto.hmacBytes + unison-FOp-crypto.HashAlgorithm.Sha1 + unison-FOp-crypto.HashAlgorithm.Sha2_256 + unison-FOp-crypto.HashAlgorithm.Sha2_512 + unison-FOp-crypto.HashAlgorithm.Sha3_256 + unison-FOp-crypto.HashAlgorithm.Sha3_512 + 
unison-FOp-crypto.HashAlgorithm.Blake2s_256 + unison-FOp-crypto.HashAlgorithm.Blake2b_256 + unison-FOp-crypto.HashAlgorithm.Blake2b_512 + + unison-FOp-IO.clientSocket.impl.v3 + unison-FOp-IO.closeSocket.impl.v3 + unison-FOp-IO.socketReceive.impl.v3 + unison-FOp-IO.socketSend.impl.v3 + unison-FOp-IO.socketPort.impl.v3 + unison-FOp-IO.serverSocket.impl.v3 + unison-FOp-IO.socketAccept.impl.v3 + unison-FOp-IO.listen.impl.v3 + ) + + (import (rnrs) + (unison core) + (unison data) + (unison string) + (unison crypto) + (unison data) + (unison tcp) + (unison bytevector) + (unison vector) + (unison concurrent)) + + (define unison-POp-UPKB bytevector->u8-list) + (define unison-POp-ADDI +) + (define unison-POp-DIVI /) + (define (unison-POp-EQLI a b) + (if (= a b) 1 0) + ) + (define unison-POp-MODI mod) + (define unison-POp-LEQI <=) + (define unison-POp-POWN expt) + + (define (reify-exn thunk) + (guard + (e [else + (sum 0 '() (exception->string e) e)]) + (thunk))) + + ; Core implemented primops, upon which primops-in-unison can be built. + (define (unison-POp-ADDN m n) (fx+ m n)) + (define (unison-POp-ANDN m n) (fxand m n)) + (define unison-POp-BLDS list) + (define (unison-POp-CATS l r) (append l r)) + (define (unison-POp-CATT l r) (istring-append l r)) + (define (unison-POp-CMPU l r) (universal-compare l r)) + (define (unison-POp-COMN n) (fxnot n)) + (define (unison-POp-CONS x xs) (cons x xs)) + (define (unison-POp-DECI n) (fx1- n)) + (define (unison-POp-DIVN m n) (fxdiv m n)) + (define (unison-POp-DRPB n bs) (ibytevector-drop n bs)) + (define (unison-POp-DRPS n l) + (let ([m (max 0 (min n (length l)))]) (list-tail l m))) + (define (unison-POp-DRPT n t) (istring-drop n t)) + (define (unison-POp-EQLN m n) (if (fx=? m n) 1 0)) + (define (unison-POp-EQLT s t) (if (string=? s t) 1 0)) + (define (unison-POp-EQLU x y) (if (universal-equal? x y) 1 0)) + (define (unison-POp-EROR fnm x) + (let-values ([(p g) (open-string-output-port)]) + (put-string p fnm) + (put-string p ": ") + (display x p) + (raise (g)))) + (define (unison-POp-FTOT f) (number->istring f)) + (define (unison-POp-IDXB n bs) (bytevector-u8-ref bs n)) + (define (unison-POp-IDXS n l) + (guard (x [else (sum 0)]) + (sum 1 (list-ref l n)))) + (define (unison-POp-IORN m n) (fxior m n)) + (define (unison-POp-ITOT i) (signed-number->istring i)) + (define (unison-POp-LEQN m n) (if (fx<=? m n) 1 0)) + (define (unison-POp-LZRO m) (- 64 (fxlength m))) + (define (unison-POp-MULN m n) (fx* m n)) + (define (unison-POp-MODN m n) (fxmod m n)) + (define (unison-POp-NTOT m) (number->istring m)) + (define (unison-POp-PAKB l) (u8-list->ibytevector l)) + (define (unison-POp-PAKT l) (list->istring l)) + (define (unison-POp-SHLI i k) (fxarithmetic-shift-left i k)) + (define (unison-POp-SHLN n k) (fxarithmetic-shift-left n k)) + (define (unison-POp-SHRI i k) (fxarithmetic-shift-right i k)) + (define (unison-POp-SHRN n k) (fxarithmetic-shift-right n k)) + (define (unison-POp-SIZS l) (length l)) + (define (unison-POp-SIZT t) (string-length t)) + (define (unison-POp-SNOC xs x) (append xs (list x))) + (define (unison-POp-SUBN m n) (fx- m n)) + (define (unison-POp-TAKS n s) (list-head s n)) + (define (unison-POp-TAKT n t) (istring-take n t)) + (define (unison-POp-TRCE s x) + (display s) + (display "\n") + (display x) + (display "\n") + (display (describe-value x)) + (display "\n")) + (define (unison-POp-TTON s) + (let ([mn (string->number s)]) + (if mn (sum 1 mn) (sum 0)))) + (define (unison-POp-UPKT t) (string->list t)) + (define (unison-POp-VWLS l) + (if (null? 
l) + (sum 0) + (sum 1 (car l) (cdr l)))) + (define (unison-POp-VWRS l) + (if (null? l) + (sum 0) + (let ([r (reverse l)]) + (sum 1 (reverse (cdr l)) (car l))))) + + (define (unison-POp-XORN m n) (fxxor m n)) + (define (unison-POp-VALU c) (decode-value c)) + + (define (unison-FOp-IO.putBytes.impl.v3 p bs) + (begin + (put-bytevector p bs) + (flush-output-port p) + (sum 1 #f))) + + (define (unison-FOp-Char.toText c) (istring c)) + + (define stdin (standard-input-port)) + (define stdout (standard-output-port)) + (define stderr (standard-error-port)) + + (define (unison-FOp-IO.stdHandle n) + (case n + [(0) stdin] + [(1) stdout] + [(2) stderr])) + + (define (unison-FOp-IO.getArgs.impl.v1) + (sum 1 (cdr (command-line)))) + + (define (unison-FOp-Text.fromUtf8.impl.v3 s) + (right (bytevector->string s utf-8-transcoder))) + + (define (unison-FOp-Text.toUtf8 s) + (string->bytevector s utf-8-transcoder)) + + (define (unison-FOp-IO.closeFile.impl.v3 h) + (close-input-port h)) + + (define (unison-FOp-IO.openFile.impl.v3 fn mode) + (case mode + [(0) (open-file-input-port fn)] + [(1) (open-file-output-port fn)] + [(2) (open-file-output-port fn 'no-truncate)] + [else (open-file-input/output-port fn)])) + + (define (unison-FOp-Text.repeat n t) (istring-repeat n t)) + + (define (catch-array thunk) + (reify-exn thunk)) + + (define (unison-FOp-ImmutableArray.read vec i) + (catch-array + (lambda () + (sum 1 (vector-ref vec i))))) + + (define (unison-FOp-ImmutableArray.copyTo! dst doff src soff n) + (catch-array + (lambda () + (let next ([i (fx1- n)]) + (if (< i 0) + (sum 1 #f) + (begin + (vector-set! dst (+ doff i) (vector-ref src (+ soff i))) + (next (fx1- i)))))))) + + (define unison-FOp-MutableArray.freeze! freeze-vector!) + + (define unison-FOp-MutableArray.freeze freeze-subvector) + + (define (unison-FOp-MutableArray.read src i) + (catch-array + (lambda () + (sum 1 (vector-ref src i))))) + + (define (unison-FOp-MutableArray.write dst i x) + (catch-array + (lambda () + (vector-set! dst i x) + (sum 1)))) + + (define (unison-FOp-ImmutableByteArray.copyTo! dst doff src soff n) + (catch-array + (lambda () + (bytevector-copy! src soff dst doff n) + (sum 1 #f)))) + + (define (unison-FOp-ImmutableByteArray.read8 arr i) + (catch-array + (lambda () + (sum 1 (bytevector-u8-ref arr i))))) + + (define unison-FOp-MutableByteArray.freeze! freeze-bytevector!) + + (define (unison-FOp-MutableByteArray.write8 arr i b) + (catch-array + (lambda () + (bytevector-u8-set! 
arr i b) + (sum 1)))) + + (define (unison-FOp-Scope.bytearray n) (make-bytevector n)) + (define (unison-FOp-IO.bytearray n) (make-bytevector n)) + + (define (unison-FOp-Scope.array n) (make-vector n)) + (define (unison-FOp-IO.array n) (make-vector n)) + + (define (unison-FOp-Scope.bytearrayOf b n) (make-bytevector n b)) + (define (unison-FOp-IO.bytearrayOf b n) (make-bytevector n b)) + + (define (unison-FOp-Scope.arrayOf v n) (make-vector n v)) + (define (unison-FOp-IO.arrayOf v n) (make-vector n v)) + + (define unison-FOp-MutableByteArray.length bytevector-length) + (define unison-FOp-ImmutableByteArray.length bytevector-length) + (define unison-FOp-MutableByteArray.size bytevector-length) + (define unison-FOp-ImmutableByteArray.size bytevector-length) + (define unison-FOp-MutableArray.size vector-length) + (define unison-FOp-ImmutableArray.size vector-length) + + (define (unison-POp-FORK thunk) (fork thunk)) + (define (unison-POp-TFRC thunk) (try-eval thunk)) + (define (unison-FOp-IO.delay.impl.v3 micros) (sleep micros)) + (define (unison-FOp-IO.kill.impl.v3 threadId) (kill threadId)) + (define (unison-FOp-Scope.ref a) (ref-new a)) + (define (unison-FOp-IO.ref a) (ref-new a)) + (define (unison-FOp-Ref.read ref) (ref-read ref)) + (define (unison-FOp-Ref.write ref a) (ref-write ref a)) + (define (unison-FOp-Ref.readForCas ref) (ref-read ref)) + (define (unison-FOp-Ref.Ticket.read ticket) ticket) + (define (unison-FOp-Ref.cas ref ticket value) (ref-cas ref ticket value)) + (define (unison-FOp-Promise.new) (promise-new)) + (define (unison-FOp-Promise.read promise) (promise-read promise)) + (define (unison-FOp-Promise.tryRead promise) (promise-try-read promise)) + (define (unison-FOp-Promise.write promise a) (promise-write promise a))) + diff --git a/scheme-libs/common/unison/string.ss b/scheme-libs/common/unison/string.ss new file mode 100644 index 000000000..92366c5d8 --- /dev/null +++ b/scheme-libs/common/unison/string.ss @@ -0,0 +1,57 @@ +; This library wraps some implementation-specific functionality to +; provide immutable strings. Both have mechanisms for (efficiently) +; marking strings immutable, but there is no standard API for working +; entirely in terms of immutable strings. This module takes the +; freezing function, re-exported by (unison core) and implements the +; API needed for unison. +#!r6rs +(library (unison string) + (export + istring + istring-append + istring-drop + istring-take + istring-repeat + list->istring + make-istring + number->istring + signed-number->istring + utf8-bytevector->istring + utf-8-transcoder) + + (import (rnrs) (unison core)) + + (define istring (lambda l (freeze-string! (apply string l)))) + + (define (make-istring n c) (freeze-string! (make-string n c))) + + (define (istring-repeat n s) + (let* ([k (string-length s)] + [t (make-string (* k n))]) + (let loop ([i 0]) + (if (< i n) + (begin + (string-copy! s 0 t (* i k) k) + (loop (+ i 1))) + (freeze-string! t))))) + + (define istring-append (lambda l (freeze-string! (apply string-append l)))) + + (define (istring-drop n s) (freeze-string! (substring s n (- (string-length s) n)))) + + (define (number->istring n) (freeze-string! (number->string n))) + + (define (signed-number->istring n) + (freeze-string! + (if (>= n 0) + (string-append "+" (number->string n)) + (number->string n)))) + + (define (list->istring l) (freeze-string! (list->string l))) + + (define (istring-take n s) (freeze-string! 
(substring s 0 n))) + + (define utf-8-transcoder (make-transcoder (utf-8-codec))) + + (define (utf8-bytevector->istring bs) + (freeze-string! (bytevector->string bs utf-8-transcoder)))) diff --git a/scheme-libs/common/unison/vector.ss b/scheme-libs/common/unison/vector.ss new file mode 100644 index 000000000..a489314f9 --- /dev/null +++ b/scheme-libs/common/unison/vector.ss @@ -0,0 +1,22 @@ + +#!r6rs +(library (unison vector) + (export + freeze-vector! + freeze-subvector) + + (import (rnrs) + (unison core) + (unison data)) + + (define (freeze-subvector src off len) + (let ([dst (make-vector len)]) + (let next ([i (fx1- len)]) + (if (< i 0) + (begin + (freeze-vector! dst) + (sum 1 dst)) + (begin + (vector-set! dst i (vector-ref src (+ off i))) + (next (fx1- i))))))) + ) diff --git a/scheme-libs/racket/unison/Readme.md b/scheme-libs/racket/unison/Readme.md new file mode 100644 index 000000000..a90fc4976 --- /dev/null +++ b/scheme-libs/racket/unison/Readme.md @@ -0,0 +1,21 @@ +# Racket unison! + +To load these libraries into a racket runtime, racket should be invoked like this: +```bash +$ racket -S scheme-libs/racket +Welcome to Racket v8.7 [cs]. +> (require unison/core) +> ; now you can try out the definitions in core.ss! +``` + +## crypto +NOTE: Our crypto functions require on both `libcrypto` (from openssl) and `libb2`. You may have to tell racket where to find `libb2`, by adding an entry to the hash table in your [`config.rktd` file](https://docs.racket-lang.org/raco/config-file.html). This is what I had, for an M1 mac w/ libb2 installed via Homebrew: +``` +(lib-search-dirs . (#f "/opt/homebrew/Cellar/libb2/0.98.1/lib/")) +``` + +You can then run the tests with +```bash +$ raco test scheme-libs/racket/unison/crypto.rkt +``` +On success, it has no output. diff --git a/scheme-libs/racket/unison/concurrent.ss b/scheme-libs/racket/unison/concurrent.ss new file mode 100644 index 000000000..5bd613fd8 --- /dev/null +++ b/scheme-libs/racket/unison/concurrent.ss @@ -0,0 +1,115 @@ +#!r6rs + +(library (unison concurrent) + (export + ref-new + ref-read + ref-write + ref-cas + promise-new + promise-read + promise-write + promise-try-read + fork + kill + sleep + try-eval) + + (import (rnrs) + (rnrs records syntactic) + (unison data) + (rename + (only (racket base) + box + unbox + set-box! + box-cas! + make-semaphore + semaphore-peek-evt + semaphore-post + sync/enable-break + thread + break-thread + parameterize-break + sleep + printf + with-handlers + exn:break? + exn:fail? + exn:fail:read? + exn:fail:filesystem? + exn:fail:network? + exn:fail:contract:divide-by-zero? + exn:fail:contract:non-fixnum-result?) + (box ref-new) + (unbox ref-read) + (set-box! ref-write) + (sleep sleep-secs)) + (only (racket exn) exn->string) + (only (racket unsafe ops) unsafe-struct*-cas!)) + + (define-record-type promise (fields semaphore event (mutable value))) + + (define (promise-new) + (let* ([sem (make-semaphore)] + [evt (semaphore-peek-evt sem)] + [value none]) + (make-promise sem evt value))) + + (define (promise-try-read promise) (promise-value promise)) + + (define (promise-read promise) + (let loop () + (let ([value (promise-value promise)]) + (cond + [(some? value) (option-get value)] + [else (sync/enable-break (promise-event promise)) (loop)])))) + + (define (promise-write promise new-value) + (let loop () + (let* ([value (promise-value promise)] + [cas! (lambda () (unsafe-struct*-cas! 
promise 2 value (some new-value)))] + [awake-readers (lambda () (semaphore-post (promise-semaphore promise)))]) + (cond + [(some? value) false] + [else + (let ([ok (parameterize-break #f (if (cas!) (awake-readers) false))]) + (if ok true (loop)))])))) + + (define (ref-cas ref ticket value) + (if (box-cas! ref ticket value) true false)) + + (define (sleep n) + (sleep-secs (/ n 1000000)) + (right unit)) + + ;; Swallows uncaught breaks/thread kills rather than logging them to + ;; match the behaviour of the Haskell runtime + (define (fork thunk) + (thread + (lambda () + (with-handlers ([exn:break? (lambda (x) ())]) + (thunk))))) + + (define (kill threadId) + (break-thread threadId) + (right unit)) + + (define (exn:io? e) + (or (exn:fail:read? e) + (exn:fail:filesystem? e) + (exn:fail:network? e))) + + (define (exn:arith? e) + (or (exn:fail:contract:divide-by-zero? e) + (exn:fail:contract:non-fixnum-result? e))) + + ;; TODO Replace strings with proper type links once we have them + (define (try-eval thunk) + (with-handlers + ([exn:break? (lambda (e) (exception "ThreadKilledFailure" "thread killed" ()))] + [exn:io? (lambda (e) (exception "IOFailure" (exn->string e) ()))] + [exn:arith? (lambda (e) (exception "ArithmeticFailure" (exn->string e) ()))] + [exn:fail? (lambda (e) (exception "RuntimeFailure" (exn->string e) ()))] + [(lambda (x) #t) (lambda (e) (exception "MiscFailure" "unknown exception" e))]) + (right (thunk))))) diff --git a/scheme-libs/racket/unison/cont.ss b/scheme-libs/racket/unison/cont.ss new file mode 100644 index 000000000..9e37af226 --- /dev/null +++ b/scheme-libs/racket/unison/cont.ss @@ -0,0 +1,21 @@ + +#!r6rs +(library (unison cont) + (export + make-prompt + prompt0-at + control0-at) + + (import + (rnrs) + (unison core) + + (rename + (only (racket) + make-continuation-prompt-tag) + (make-continuation-prompt-tag make-prompt)) + + (rename + (only (racket control) + prompt0-at + control0-at)))) diff --git a/scheme-libs/racket/unison/core.ss b/scheme-libs/racket/unison/core.ss new file mode 100644 index 000000000..ee0a96bb4 --- /dev/null +++ b/scheme-libs/racket/unison/core.ss @@ -0,0 +1,120 @@ +; This library implements various functions and macros that are used +; internally to the unison scheme libraries. This provides e.g. a +; measure of abstraction for the particular scheme platform. A useful +; feature of one implementation might need to be implemented on top of +; other features of another, and would go in this library. +; +; This library won't be directly imported by the generated unison +; code, so if some function is needed for those, it should be +; re-exported by (unison boot). +#!r6rs +(library (unison core) + (export + describe-value + decode-value + + universal-compare + universal-equal? + + fx1- + list-head + + syntax->list + raise-syntax-error + + exception->string + let-marks + ref-mark + + freeze-string! + string-copy! + + freeze-bytevector! + freeze-vector! + + bytevector) + + (import + (rnrs) + (rename (only (racket) + string-copy! + bytes + with-continuation-mark + continuation-mark-set-first + raise-syntax-error) + (string-copy! racket-string-copy!) + (bytes bytevector)) + (racket exn) + (racket unsafe ops) + (unison data)) + + (define (fx1- n) (fx- n 1)) + + (define (list-head l n) + (let rec ([c l] [m n]) + (cond + [(eqv? m 0) '()] + [(null? 
c) '()] + [else + (let ([sub (rec (cdr c) (- m 1))]) + (cons (car c) sub))]))) + + (define (describe-value x) '()) + (define (decode-value x) '()) + + ; 0 = LT + ; 1 = EQ + ; 2 = GT + (define (universal-compare l r) + (cond + [(equal? l r) 1] + [(and (number? l) (number? r)) (if (< l r) 0 2)] + [else (raise "universal-compare: unimplemented")])) + + (define (universal-equal? l r) + (define (pointwise ll lr) + (let ([nl (null? ll)] [nr (null? lr)]) + (cond + [(and nl nr) #t] + [(or nl nr) #f] + [else + (and (universal-equal? (car ll) (car lr)) + (pointwise (cdr ll) (cdr lr)))]))) + (cond + [(eq? l r) 1] + [(and (data? l) (data? r)) + (and + (eqv? (data-tag l) (data-tag r)) + (pointwise (data-fields l) (data-fields r)))])) + + (define exception->string exn->string) + + (define (syntax->list stx) + (syntax-case stx () + [() '()] + [(x . xs) (cons #'x (syntax->list #'xs))])) + + (define (call-with-marks rs v f) + (cond + [(null? rs) (f)] + [else + (with-continuation-mark (car rs) v + (call-with-marks (cdr rs) v f))])) + + (define-syntax let-marks + (syntax-rules () + [(let-marks ks bn e ...) + (call-with-marks ks bn (lambda () e ...))])) + + (define (ref-mark k) (continuation-mark-set-first #f k)) + + (define freeze-string! unsafe-string->immutable-string!) + (define freeze-bytevector! unsafe-bytes->immutable-bytes!) + + (define freeze-vector! unsafe-vector*->immutable-vector!) + + ; racket string-copy! has the opposite argument order convention + ; from chez. + (define (string-copy! src soff dst doff len) + (racket-string-copy! dst doff src soff len)) + ) diff --git a/scheme-libs/racket/unison/crypto.rkt b/scheme-libs/racket/unison/crypto.rkt new file mode 100644 index 000000000..16036700a --- /dev/null +++ b/scheme-libs/racket/unison/crypto.rkt @@ -0,0 +1,225 @@ +#lang racket/base +(require ffi/unsafe + ffi/unsafe/define + racket/exn + openssl/libcrypto + ) + +(provide (prefix-out unison-FOp-crypto. + (combine-out + HashAlgorithm.Sha1 + HashAlgorithm.Sha2_256 + HashAlgorithm.Sha2_512 + HashAlgorithm.Sha3_256 + HashAlgorithm.Sha3_512 + HashAlgorithm.Blake2s_256 + HashAlgorithm.Blake2b_256 + HashAlgorithm.Blake2b_512 + hashBytes + hmacBytes))) + +(define libcrypto + (with-handlers [[exn:fail? exn->string]] + (ffi-lib "libcrypto" openssl-lib-versions))) + +(define libb2 + (with-handlers [[exn:fail? exn->string]] + (ffi-lib "libb2" '("" "1")))) + +(define _EVP-pointer (_cpointer 'EVP)) + +; returns a function that, when called, either +; 1) raises an exception, if libcrypto failed to load, or +; 2) returns a pair of (_EVP-pointer bits) +(define (lc-algo name bits) + (if (string? libcrypto) + (lambda _ (raise (error 'libcrypto "~a\n~a" name libcrypto))) + (let ([getter (get-ffi-obj name libcrypto (_fun -> _EVP-pointer))]) + (lambda [] + (cons (getter) bits))))) + +(define (check v who) + (unless (= 1 v) + (error who "failed with return value ~a" v))) + +(define EVP_Digest + (if (string? libcrypto) + (lambda _ (raise (error 'libcrypto "EVP_Digest\n~a" libcrypto))) + (get-ffi-obj "EVP_Digest" libcrypto + (_fun + _pointer ; input + _int ; input-len + _pointer ; output + _pointer ; null + _EVP-pointer ; algorithm + _pointer ; null + -> (r : _int) + -> (unless (= 1 r) + (error 'EVP_Digest "failed with return value ~a" r)))))) + +(define HMAC + (if (string? 
libcrypto) + (lambda _ (raise (error 'libcrypto "HMAC\n~a" libcrypto))) + (get-ffi-obj "HMAC" libcrypto + (_fun + _EVP-pointer ; algorithm + _pointer ; key + _int ; key-len + _pointer ; input + _int ; input-len + _pointer ; md + _pointer ; null + -> _pointer ; unused + )))) + +(define (libb2-raw fn) + (if (string? libb2) + (lambda _ (raise (error 'libb2 "~a\n~a" fn libb2))) + (get-ffi-obj fn libb2 + (_fun + _pointer ; output + _pointer ; input + _pointer ; key + _int ; output-len + _int ; input-len + _int ; key-len + -> (r : _int) + -> (unless (= 0 r) + (error 'blake2 "~a failed with return value ~a" fn r)))))) + +(define blake2b-raw (libb2-raw "blake2b")) + +(define HashAlgorithm.Sha1 (lc-algo "EVP_sha1" 160)) +(define HashAlgorithm.Sha2_256 (lc-algo "EVP_sha256" 256)) +(define HashAlgorithm.Sha2_512 (lc-algo "EVP_sha512" 512)) +(define HashAlgorithm.Sha3_256 (lc-algo "EVP_sha3_256" 256)) +(define HashAlgorithm.Sha3_512 (lc-algo "EVP_sha3_512" 512)) +(define HashAlgorithm.Blake2s_256 (lc-algo "EVP_blake2s256" 256)) +(define HashAlgorithm.Blake2b_512 (lc-algo "EVP_blake2b512" 512)) + +; This one isn't provided by libcrypto, for some reason +(define (HashAlgorithm.Blake2b_256) (cons 'blake2b 256)) + +; kind is a pair of (algorithm bits) +; where algorithm is either an EVP_pointer for libcrypto functions, +; or the tag 'blake2b for libb2 function. +(define (hashBytes kind input) + (let* ([bytes (/ (cdr kind) 8)] + [output (make-bytes bytes)] + [algo (car kind)]) + (case algo + ['blake2b (blake2b-raw output input #f bytes (bytes-length input) 0)] + [else (EVP_Digest input (bytes-length input) output #f algo #f)]) + + output)) + +; Mutates and returns the first argument +(define (xor one two) + (for ([i (in-range (bytes-length one))]) + (bytes-set! one i + (bitwise-xor + (bytes-ref one i) + (bytes-ref two i)))) + one) + +; doing the blake hmac by hand. libcrypto +; supports hmac natively, so we just defer to that +(define (hmacBlake kind key input) + (let* ( + [bytes (/ (cdr kind) 8)] + [blocksize (case (car kind) ['blake2b 128] ['blake2s 64])] + + [key_ + (let ([key_ (make-bytes blocksize 0)]) + (bytes-copy! key_ 0 + (if (< blocksize (bytes-length key)) + (hashBytes kind key) + key)) + key_)] + + [opad (xor (make-bytes blocksize #x5c) key_)] + [ipad (xor (make-bytes blocksize #x36) key_)] + + [full (bytes-append + opad + (hashBytes kind (bytes-append ipad input)))]) + (hashBytes kind full))) + +(define (hmacBytes kind key input) + (case (car kind) + ['blake2b (hmacBlake kind key input)] + [else + (let* ([bytes (/ (cdr kind) 8)] + [output (make-bytes bytes)] + [algo (car kind)]) + (HMAC algo key (bytes-length key) input (bytes-length input) output #f) + output + )])) + + +; These will only be evaluated by `raco test` +(module+ test + (require rackunit + (only-in openssl/sha1 bytes->hex-string hex-string->bytes)) + + (test-case "sha1 hmac" + (check-equal? + (bytes->hex-string (hmacBytes (HashAlgorithm.Sha1) #"key" #"message")) + "2088df74d5f2146b48146caf4965377e9d0be3a4")) + + (test-case "blake2b-256 hmac" + (check-equal? + (bytes->hex-string (hmacBytes (HashAlgorithm.Blake2b_256) #"key" #"message")) + "442d98a3872d3f56220f89e2b23d0645610b37c33dd3315ef224d0e39ada6751")) + + (test-case "blake2b-512 hmac" + (check-equal? + (bytes->hex-string (hmacBytes (HashAlgorithm.Blake2b_512) #"key" #"message")) + "04e9ada930688cde75eec939782eed653073dd621d7643f813702976257cf037d325b50eedd417c01b6ad1f978fbe2980a93d27d854044e8626df6fa279d6680")) + + (test-case "blake2s-256 hmac" + (check-equal? 
+ (bytes->hex-string (hmacBytes (HashAlgorithm.Blake2s_256) #"key" #"message")) + "bba8fa28708ae80d249e317318c95c859f3f77512be23910d5094d9110454d6f")) + + (test-case "sha1 basic" + (check-equal? + (bytes->hex-string (hashBytes (HashAlgorithm.Sha1) #"")) + "da39a3ee5e6b4b0d3255bfef95601890afd80709")) + + (test-case "sha2-256 basic" + (check-equal? + (bytes->hex-string (hashBytes (HashAlgorithm.Sha2_256) #"")) + "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")) + + (test-case "sha2-512 basic" + (check-equal? + (bytes->hex-string (hashBytes (HashAlgorithm.Sha2_512) #"")) + "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e")) + + (test-case "sha3-256 basic" + (check-equal? + (bytes->hex-string (hashBytes (HashAlgorithm.Sha3_256) #"")) + "a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a")) + + (test-case "sha3-512 basic" + (check-equal? + (bytes->hex-string (hashBytes (HashAlgorithm.Sha3_512) #"")) + "a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26")) + + (test-case "blake2s_256 basic" + (check-equal? + (bytes->hex-string (hashBytes (HashAlgorithm.Blake2s_256) #"")) + "69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9")) + + (test-case "blake2b_256 basic" + (check-equal? + (bytes->hex-string (hashBytes (HashAlgorithm.Blake2b_256) #"")) + "0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8")) + + (test-case "blake2b_512 basic" + (check-equal? + (bytes->hex-string (hashBytes (HashAlgorithm.Blake2b_512) #"")) + "786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce")) + +) diff --git a/scheme-libs/racket/unison/crypto.ss b/scheme-libs/racket/unison/crypto.ss new file mode 100644 index 000000000..70003903e --- /dev/null +++ b/scheme-libs/racket/unison/crypto.ss @@ -0,0 +1,11 @@ +#!r6rs +;; stubbed out just to avoid import error, replace with real thing +(library (unison crypto) + (export + unison-FOp-crypto.HashAlgorithm.Sha1 + unison-FOp-crypto.hashBytes) + + (import (rnrs)) + + (define (unison-FOp-crypto.HashAlgorithm.Sha1) (lambda (x) x)) + (define (unison-FOp-crypto.hashBytes algo text) (algo text))) diff --git a/scheme-libs/racket/unison/tcp.rkt b/scheme-libs/racket/unison/tcp.rkt new file mode 100644 index 000000000..776033459 --- /dev/null +++ b/scheme-libs/racket/unison/tcp.rkt @@ -0,0 +1,91 @@ +; TLS primitives! Supplied by openssl (libssl) +#lang racket/base +(require racket/exn + racket/match + racket/tcp + unison/data) + +(provide + (prefix-out + unison-FOp-IO. + (combine-out + clientSocket.impl.v3 + closeSocket.impl.v3 + socketReceive.impl.v3 + socketPort.impl.v3 + serverSocket.impl.v3 + listen.impl.v3 + socketAccept.impl.v3 + socketSend.impl.v3))) + +(define (input socket) (car socket)) +(define (output socket) (car (cdr socket))) + +(define (closeSocket.impl.v3 socket) + (if (pair? socket) + (begin + (close-input-port (input socket)) + (close-output-port (output socket))) + (tcp-close socket)) + (right none)) + +(define (clientSocket.impl.v3 host port) + (with-handlers + [[exn:fail:network? (lambda (e) (exception "IOFailure" (exn->string e) '()))] + [exn:fail:contract? 
(lambda (e) (exception "InvalidArguments" (exn->string e) '()))] + [(lambda _ #t) (lambda (e) (exception "MiscFailure" "Unknown exception" e))] ] + + (let-values ([(input output) (tcp-connect host (string->number port))]) + (right (list input output))))) + +(define (socketSend.impl.v3 socket data) + (if (not (pair? socket)) + (exception "InvalidArguments" "Cannot send on a server socket") + (begin + (write-bytes data (output socket)) + (flush-output (output socket)) + (right none)))) + +(define (socketReceive.impl.v3 socket amt) + (if (not (pair? socket)) + (exception "InvalidArguments" "Cannot receive on a server socket") + (begin + (let ([buffer (make-bytes amt)]) + (read-bytes-avail! buffer (input socket)) + (right buffer))))) + +; A "connected" socket is represented as a list of (list input-port output-port), +; while a "listening" socket is just the tcp-listener itself. +(define (socketPort.impl.v3 socket) + (let-values ([(_ local-port __ ___) (tcp-addresses (if (pair? socket) (input socket) socket) #t)]) + (right local-port))) + +(define serverSocket.impl.v3 + (lambda args + (let-values ([(hostname port) + (match args + [(list _ port) (values #f port)] + [(list _ hostname port) (values hostname port)])]) + + (with-handlers + [[exn:fail:network? (lambda (e) (exception "IOFailure" (exn->string e) '()))] + [exn:fail:contract? (lambda (e) (exception "InvalidArguments" (exn->string e) '()))] + [(lambda _ #t) (lambda (e) (exception "MiscFailure" "Unknown exception" e))] ] + (let ([listener (tcp-listen (string->number port) 4 #f (if (equal? 0 hostname) #f hostname))]) + (right listener)))))) + +; NOTE: This is a no-op because racket's public TCP stack doesn't have separate operations for +; "bind" vs "listen". We've decided to have `serverSocket` do the "bind & listen", and have +; this do nothing. +; If we want ~a little better parity with the haskell implementation, we might set a flag or +; something on the listener, and error if you try to `accept` on a server socket that you haven't +; called `listen` on yet. +(define (listen.impl.v3 _listener) + (right none)) + +(define (socketAccept.impl.v3 listener) + (if (pair? listener) + (exception "InvalidArguments" "Cannot accept on a non-server socket") + (begin + (let-values ([(input output) (tcp-accept listener)]) + (right (list input output)))))) diff --git a/scheme-libs/readme.md b/scheme-libs/readme.md new file mode 100644 index 000000000..6e54181f2 --- /dev/null +++ b/scheme-libs/readme.md @@ -0,0 +1,69 @@ +This directory contains libraries necessary for building and running +unison programs via Racket Scheme. + +**Prerequisites** + +You'll need to have a couple things installed on your system: + +* [libcrypto](https://github.com/openssl/openssl) (you probably already have this installed) +* [Racket](https://racket-lang.org/), with the executable `racket` on your path somewhere +* [BLAKE2](https://github.com/BLAKE2/libb2) (you may need to install this manually) + +To run the test suite, first `stack build` (or `stack build --fast`), then: + +``` +./unison-src/builtin-tests/jit-tests.sh +``` + +OR if you want to run the same tests in interpreted mode: + +``` +./unison-src/builtin-tests/interpreter-tests.sh +``` + +The above scripts fetch and cache a copy of base and the scheme-generating libraries, and copy this directory to `$XDG_DATA_DIRECTORY/unisonlanguage/scheme-libs`. 
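
Before reaching for the full transcript scripts, the new Racket bindings can be sanity-checked directly from a plain Racket session. The sketch below is illustrative only: it assumes the libraries resolve via `racket -S scheme-libs/racket` (as in the Racket Readme above), that `libcrypto`/`libb2` were found at load time, and the file name `smoke.rkt` is just a placeholder. The expected digests are the same vectors exercised by `raco test scheme-libs/racket/unison/crypto.rkt`.

```racket
#lang racket/base
;; Illustrative smoke test for the crypto bindings (a sketch, not part of the test suite).
;; Run with: racket -S scheme-libs/racket smoke.rkt
(require unison/crypto
         (only-in openssl/sha1 bytes->hex-string))

;; SHA-256 of the empty input; prints the well-known e3b0c442... digest
;; from the crypto.rkt test cases.
(displayln
 (bytes->hex-string
  (unison-FOp-crypto.hashBytes
   (unison-FOp-crypto.HashAlgorithm.Sha2_256)
   #"")))

;; HMAC-SHA-1 of "message" under "key"; prints
;; 2088df74d5f2146b48146caf4965377e9d0be3a4.
(displayln
 (bytes->hex-string
  (unison-FOp-crypto.hmacBytes
   (unison-FOp-crypto.HashAlgorithm.Sha1)
   #"key"
   #"message")))
```

Note that each `HashAlgorithm.*` binding is a thunk; calling it yields the `(algorithm . bits)` pair that `hashBytes` and `hmacBytes` expect.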
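Similarly, the promise layer in `scheme-libs/racket/unison/concurrent.ss` publishes a value with a single compare-and-swap and wakes blocked readers through a semaphore-peek event, so a promise can be written at most once. A minimal REPL sketch of that protocol, assuming both the `racket` and `common` library directories are on the `-S` path (as in the `racket -S ...` invocation shown below) so that `(unison data)` resolves:

```racket
;; Sketch only: exercises the (unison concurrent) exports from a REPL.
;; `fork` runs the thunk on a fresh thread; `promise-read` blocks until
;; some thread has called `promise-write`.
(require (only-in unison/concurrent
                  promise-new promise-read promise-write fork))

(define p (promise-new))
(fork (lambda () (promise-write p 42)))
(promise-read p)     ; blocks until the forked write lands, then => 42
(promise-write p 43) ; refused: a promise holds at most one value
(promise-read p)     ; still => 42
```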
+ +## Iterating more quickly + +If running the above transcripts is too slow for you, here's a few things you can do instead: + +### Run without needing to bounce ucm + +First, tell UCM to load scheme files from this directory, by adding +a `SchemeLibs.Static` item to your `~/.unisonConfig`. + +``` +SchemeLibs.Static = "/path/to/unisoncode" +``` + +With this set, the compiler commands will look in `/path/to/somewhere/scheme-libs/` for the subdirectories containing the library files. + +Once that's done, you can load the testing library and tests: + +``` +.jit> load unison-src/builtin-tests/testlib.u +.jit> add +.jit> load unison-src/builtin-tests/tests.u +.jit> add +``` + +And then, without needing to bounce `ucm` every time you edit your scheme files, you can do: + +``` +.jit> run.native tests +``` + +### Run without needing to regenerate the scheme + +`run.native` produces a scheme file in `$XDG_CACHE_DIRECTORY/unisonlanguage/scheme-tmp`, so going one step further, you can grab these files and run them directly using Racket, bypassing `ucm` entirely. + +``` +~/unison » ls ~/.cache/unisonlanguage/scheme-tmp +testSuite.scm tests.scm +``` + +When running `tests.scm` directly with Racket, you'll need to add this `scheme-libs` directory and the generated builtins library to the path. + +``` +racket -S ~/.cache/unisonlanguage/scheme-libs/ -S ~/.local/share/unisonlanguage/scheme-libs/racket/ -S ~/.local/share/unisonlanguage/scheme-libs/common/ ~/.cache/unisonlanguage/scheme-tmp/tests.scm +``` \ No newline at end of file diff --git a/stack.yaml b/stack.yaml index 06d8612a8..14a6c1f26 100644 --- a/stack.yaml +++ b/stack.yaml @@ -24,11 +24,13 @@ packages: - codebase2/core - codebase2/util-serialization - codebase2/util-term +- lib/unison-hash +- lib/unison-hash-orphans-aeson +- lib/unison-hash-orphans-sqlite +- lib/unison-hashing - lib/unison-prelude - lib/unison-sqlite - lib/unison-util-base32hex -- lib/unison-util-base32hex-orphans-aeson -- lib/unison-util-base32hex-orphans-sqlite - lib/unison-util-bytes - lib/unison-util-cache - lib/unison-util-relation diff --git a/unison-cli/package.yaml b/unison-cli/package.yaml index 02ba66580..376ef2cdf 100644 --- a/unison-cli/package.yaml +++ b/unison-cli/package.yaml @@ -14,6 +14,7 @@ dependencies: - ListLike - aeson - aeson-pretty + - ansi-terminal - async - base - bytes @@ -70,9 +71,9 @@ dependencies: - unison-codebase - unison-codebase-sqlite - unison-codebase-sqlite-hashing-v2 - - unison-sqlite - unison-core - unison-core1 + - unison-hash - unison-parser-typechecker - unison-prelude - unison-pretty-printer diff --git a/unison-cli/src/Compat.hs b/unison-cli/src/Compat.hs index 550643997..2287e9ec2 100644 --- a/unison-cli/src/Compat.hs +++ b/unison-cli/src/Compat.hs @@ -8,6 +8,7 @@ import System.Mem.Weak (deRefWeak) import Unison.Prelude import qualified UnliftIO +{- ORMOLU_DISABLE -} #if defined(mingw32_HOST_OS) import qualified GHC.ConsoleHandler as WinSig #else @@ -15,13 +16,13 @@ import qualified System.Posix.Signals as Sig #endif onWindows :: Bool -onWindows = +onWindows = #if defined(mingw32_HOST_OS) True #else False #endif - +{- ORMOLU_ENABLE -} -- | Constructs a default interrupt handler which builds an interrupt handler which throws a -- UserInterrupt exception to the thread in which the setup was initially called. @@ -48,6 +49,7 @@ withInterruptHandler handler action = do -- Installs the new handler and returns an action to restore the old handlers. 
installNewHandlers :: IO (IO ()) installNewHandlers = do +{- ORMOLU_DISABLE -} #if defined(mingw32_HOST_OS) let sig_handler WinSig.ControlC = handler sig_handler WinSig.Break = handler @@ -63,3 +65,4 @@ withInterruptHandler handler action = do #endif restoreOldHandlers :: IO () -> IO () restoreOldHandlers restore = restore +{- ORMOLU_ENABLE -} diff --git a/unison-cli/src/Unison/Auth/CredentialFile.hs b/unison-cli/src/Unison/Auth/CredentialFile.hs index 4437258a0..8c2ab019b 100644 --- a/unison-cli/src/Unison/Auth/CredentialFile.hs +++ b/unison-cli/src/Unison/Auth/CredentialFile.hs @@ -3,12 +3,11 @@ module Unison.Auth.CredentialFile (atomicallyModifyCredentialsFile) where import qualified Data.Aeson as Aeson -import qualified Data.Text as Text import System.FilePath (takeDirectory, ()) import System.IO.LockFile import Unison.Auth.Types +import qualified Unison.Debug as Debug import Unison.Prelude -import qualified UnliftIO import UnliftIO.Directory lockfileConfig :: LockingParameters @@ -20,14 +19,14 @@ lockfileConfig = where sleepTimeMicros = 100_000 -- 100ms -getCredentialJSONFilePath :: MonadIO m => m FilePath +getCredentialJSONFilePath :: (MonadIO m) => m FilePath getCredentialJSONFilePath = do unisonDataDir <- getXdgDirectory XdgData "unisonlanguage" pure (unisonDataDir "credentials.json") -- | Atomically update the credential storage file. -- Creates an empty file automatically if one doesn't exist. -atomicallyModifyCredentialsFile :: MonadIO m => (Credentials -> Credentials) -> m Credentials +atomicallyModifyCredentialsFile :: (MonadIO m) => (Credentials -> Credentials) -> m Credentials atomicallyModifyCredentialsFile f = liftIO $ do credentialJSONPath <- getCredentialJSONFilePath doesFileExist credentialJSONPath >>= \case @@ -39,7 +38,12 @@ atomicallyModifyCredentialsFile f = liftIO $ do withLockFile lockfileConfig (withLockExt credentialJSONPath) $ do credentials <- Aeson.eitherDecodeFileStrict credentialJSONPath >>= \case - Left err -> UnliftIO.throwIO $ CredentialParseFailure credentialJSONPath (Text.pack err) + -- If something goes wrong, just wipe the credentials file so we're in a clean slate. + -- In the worst case the user will simply need to log in again. + Left err -> do + Debug.debugM Debug.Auth "Error decoding credentials file" err + Aeson.encodeFile credentialJSONPath emptyCredentials + pure emptyCredentials Right creds -> pure creds let newCredentials = f credentials when (newCredentials /= credentials) $ do diff --git a/unison-cli/src/Unison/Auth/CredentialManager.hs b/unison-cli/src/Unison/Auth/CredentialManager.hs index 98780efef..8729282f6 100644 --- a/unison-cli/src/Unison/Auth/CredentialManager.hs +++ b/unison-cli/src/Unison/Auth/CredentialManager.hs @@ -22,23 +22,23 @@ import qualified UnliftIO newtype CredentialManager = CredentialManager (UnliftIO.MVar Credentials) -- | Saves credentials to the active profile. -saveCredentials :: UnliftIO.MonadUnliftIO m => CredentialManager -> CodeserverId -> CodeserverCredentials -> m () +saveCredentials :: (UnliftIO.MonadUnliftIO m) => CredentialManager -> CodeserverId -> CodeserverCredentials -> m () saveCredentials credManager aud creds = do void . modifyCredentials credManager $ setCodeserverCredentials aud creds -- | Atomically update the credential storage file, and update the in-memory cache. 
-modifyCredentials :: UnliftIO.MonadUnliftIO m => CredentialManager -> (Credentials -> Credentials) -> m Credentials +modifyCredentials :: (UnliftIO.MonadUnliftIO m) => CredentialManager -> (Credentials -> Credentials) -> m Credentials modifyCredentials (CredentialManager credsVar) f = do UnliftIO.modifyMVar credsVar $ \_ -> do newCreds <- atomicallyModifyCredentialsFile f pure (newCreds, newCreds) -getCredentials :: MonadIO m => CredentialManager -> CodeserverId -> m (Either CredentialFailure CodeserverCredentials) +getCredentials :: (MonadIO m) => CredentialManager -> CodeserverId -> m (Either CredentialFailure CodeserverCredentials) getCredentials (CredentialManager credsVar) aud = do creds <- UnliftIO.readMVar credsVar pure $ getCodeserverCredentials aud creds -newCredentialManager :: MonadIO m => m CredentialManager +newCredentialManager :: (MonadIO m) => m CredentialManager newCredentialManager = do credentials <- atomicallyModifyCredentialsFile id credentialsVar <- UnliftIO.newMVar credentials diff --git a/unison-cli/src/Unison/Auth/Discovery.hs b/unison-cli/src/Unison/Auth/Discovery.hs index 4c2c07d4b..050047ffc 100644 --- a/unison-cli/src/Unison/Auth/Discovery.hs +++ b/unison-cli/src/Unison/Auth/Discovery.hs @@ -15,7 +15,7 @@ discoveryURIForCodeserver cs = let uri = codeserverToURI cs in uri {uriPath = uriPath uri <> "/.well-known/openid-configuration"} -fetchDiscoveryDoc :: MonadIO m => URI -> m (Either CredentialFailure DiscoveryDoc) +fetchDiscoveryDoc :: (MonadIO m) => URI -> m (Either CredentialFailure DiscoveryDoc) fetchDiscoveryDoc discoveryURI = liftIO . UnliftIO.try @_ @CredentialFailure $ do unauthenticatedHttpClient <- HTTP.getGlobalManager req <- HTTP.requestFromURI discoveryURI diff --git a/unison-cli/src/Unison/Auth/HTTPClient.hs b/unison-cli/src/Unison/Auth/HTTPClient.hs index 441f0a93f..10ecc746d 100644 --- a/unison-cli/src/Unison/Auth/HTTPClient.hs +++ b/unison-cli/src/Unison/Auth/HTTPClient.hs @@ -15,7 +15,7 @@ newtype AuthenticatedHttpClient = AuthenticatedHttpClient HTTP.Manager -- | Returns a new http manager which applies the appropriate Authorization header to -- any hosts our UCM is authenticated with. -newAuthenticatedHTTPClient :: MonadIO m => TokenProvider -> UCMVersion -> m AuthenticatedHttpClient +newAuthenticatedHTTPClient :: (MonadIO m) => TokenProvider -> UCMVersion -> m AuthenticatedHttpClient newAuthenticatedHTTPClient tokenProvider ucmVersion = liftIO $ do let managerSettings = HTTP.tlsManagerSettings diff --git a/unison-cli/src/Unison/Auth/Tokens.hs b/unison-cli/src/Unison/Auth/Tokens.hs index 412e2ae24..9d0a1202b 100644 --- a/unison-cli/src/Unison/Auth/Tokens.hs +++ b/unison-cli/src/Unison/Auth/Tokens.hs @@ -8,10 +8,10 @@ import Data.Time.Clock.POSIX (getPOSIXTime) import qualified Network.HTTP.Client as HTTP import qualified Network.HTTP.Client.TLS as HTTP import qualified Network.HTTP.Types as Network -import Network.URI (URI) import Unison.Auth.CredentialManager import Unison.Auth.Discovery (fetchDiscoveryDoc) import Unison.Auth.Types +import Unison.Auth.UserInfo (getUserInfo) import Unison.Prelude import Unison.Share.Types (CodeserverId) import qualified UnliftIO @@ -20,7 +20,7 @@ import Web.JWT import qualified Web.JWT as JWT -- | Checks whether a JWT access token is expired. 
-isExpired :: MonadIO m => AccessToken -> m Bool +isExpired :: (MonadIO m) => AccessToken -> m Bool isExpired accessToken = liftIO do jwt <- JWT.decode accessToken `whenNothing` (throwIO $ InvalidJWT "Failed to decode JWT") now <- getPOSIXTime @@ -40,21 +40,22 @@ newTokenProvider manager host = UnliftIO.try @_ @CredentialFailure $ do expired <- isExpired currentAccessToken if expired then do - newTokens@(Tokens {accessToken = newAccessToken}) <- throwEitherM $ performTokenRefresh discoveryURI tokens - saveCredentials manager host (codeserverCredentials discoveryURI newTokens) + discoveryDoc <- throwEitherM $ fetchDiscoveryDoc discoveryURI + newTokens@(Tokens {accessToken = newAccessToken}) <- throwEitherM $ performTokenRefresh discoveryDoc tokens + userInfo <- throwEitherM $ getUserInfo discoveryDoc newAccessToken + saveCredentials manager host (codeserverCredentials discoveryURI newTokens userInfo) pure $ newAccessToken else pure currentAccessToken -- | Don't yet support automatically refreshing tokens. -- -- Specification: https://datatracker.ietf.org/doc/html/rfc6749#section-6 -performTokenRefresh :: MonadIO m => URI -> Tokens -> m (Either CredentialFailure Tokens) -performTokenRefresh discoveryURI (Tokens {refreshToken = currentRefreshToken}) = runExceptT $ +performTokenRefresh :: (MonadIO m) => DiscoveryDoc -> Tokens -> m (Either CredentialFailure Tokens) +performTokenRefresh DiscoveryDoc {tokenEndpoint} (Tokens {refreshToken = currentRefreshToken}) = runExceptT $ case currentRefreshToken of Nothing -> throwError $ (RefreshFailure . Text.pack $ "Unable to refresh authentication, please run auth.login and try again.") Just rt -> do - DiscoveryDoc {tokenEndpoint} <- ExceptT $ fetchDiscoveryDoc discoveryURI req <- liftIO $ HTTP.requestFromURI tokenEndpoint let addFormData = HTTP.urlEncodedBody diff --git a/unison-cli/src/Unison/Auth/Types.hs b/unison-cli/src/Unison/Auth/Types.hs index 0b07dbbe4..a9efb3d33 100644 --- a/unison-cli/src/Unison/Auth/Types.hs +++ b/unison-cli/src/Unison/Auth/Types.hs @@ -15,6 +15,7 @@ module Unison.Auth.Types ProfileName, CredentialFailure (..), CodeserverCredentials (..), + UserInfo (..), getCodeserverCredentials, setCodeserverCredentials, codeserverCredentials, @@ -44,6 +45,7 @@ data CredentialFailure | RefreshFailure Text | InvalidTokenResponse URI Text | InvalidHost CodeserverURI + | FailedToFetchUserInfo URI Text deriving stock (Show, Eq) deriving anyclass (Exception) @@ -148,18 +150,45 @@ instance Aeson.FromJSON Credentials where activeProfile <- obj .: "active_profile" pure Credentials {..} +data UserInfo = UserInfo + { userId :: Text, -- E.g. U-1234-5678 + name :: Maybe Text, + handle :: Text -- The user's handle, no @ sign, e.g. "JohnSmith" + } + deriving (Show, Eq) + +instance ToJSON UserInfo where + toJSON (UserInfo userId name handle) = + Aeson.object + [ "user_id" .= userId, + "name" .= name, + "handle" .= handle + ] + +instance FromJSON UserInfo where + parseJSON = Aeson.withObject "UserInfo" $ \obj -> do + userId <- obj .: "user_id" + name <- obj .:? "name" + handle <- obj .: "handle" + pure (UserInfo {..}) + -- | Credentials for a specific codeserver data CodeserverCredentials = CodeserverCredentials { -- The most recent set of authentication tokens tokens :: Tokens, -- URI where the discovery document for this codeserver can be fetched. 
- discoveryURI :: URI + discoveryURI :: URI, + userInfo :: UserInfo } deriving (Eq) instance ToJSON CodeserverCredentials where - toJSON (CodeserverCredentials tokens discoveryURI) = - Aeson.object ["tokens" .= tokens, "discovery_uri" .= show discoveryURI] + toJSON (CodeserverCredentials tokens discoveryURI userInfo) = + Aeson.object + [ "tokens" .= tokens, + "discovery_uri" .= show discoveryURI, + "user_info" .= userInfo + ] instance FromJSON CodeserverCredentials where parseJSON = @@ -170,13 +199,14 @@ instance FromJSON CodeserverCredentials where discoveryURI <- case parseURI discoveryURIString of Nothing -> fail "discovery_uri is not a valid URI" Just uri -> pure uri + userInfo <- v .: "user_info" pure $ CodeserverCredentials {..} emptyCredentials :: Credentials emptyCredentials = Credentials mempty defaultProfileName -codeserverCredentials :: URI -> Tokens -> CodeserverCredentials -codeserverCredentials discoveryURI tokens = CodeserverCredentials {discoveryURI, tokens} +codeserverCredentials :: URI -> Tokens -> UserInfo -> CodeserverCredentials +codeserverCredentials discoveryURI tokens userInfo = CodeserverCredentials {discoveryURI, tokens, userInfo} getCodeserverCredentials :: CodeserverId -> Credentials -> Either CredentialFailure CodeserverCredentials getCodeserverCredentials host (Credentials {credentials, activeProfile}) = diff --git a/unison-cli/src/Unison/Auth/UserInfo.hs b/unison-cli/src/Unison/Auth/UserInfo.hs new file mode 100644 index 000000000..36ef184a6 --- /dev/null +++ b/unison-cli/src/Unison/Auth/UserInfo.hs @@ -0,0 +1,36 @@ +module Unison.Auth.UserInfo where + +import qualified Data.Aeson as Aeson +import qualified Data.Aeson.Types as Aeson +import qualified Data.ByteString.Lazy.Char8 as BL +import qualified Data.Text as Text +import qualified Data.Text.Encoding as Text +import qualified Network.HTTP.Client as HTTP +import qualified Network.HTTP.Client.TLS as HTTP +import Unison.Auth.Types +import Unison.Prelude + +-- | Get user info for an authenticated user. +getUserInfo :: (MonadIO m) => DiscoveryDoc -> AccessToken -> m (Either CredentialFailure UserInfo) +getUserInfo (DiscoveryDoc {userInfoEndpoint}) accessToken = liftIO $ do + unauthenticatedHttpClient <- HTTP.getGlobalManager + req <- HTTP.requestFromURI userInfoEndpoint <&> HTTP.applyBearerAuth (Text.encodeUtf8 accessToken) + resp <- HTTP.httpLbs req unauthenticatedHttpClient + case decodeUserInfo (HTTP.responseBody resp) of + Left err -> pure . Left $ FailedToFetchUserInfo userInfoEndpoint (Text.pack err) + Right userInfo -> pure . Right $ userInfo + +decodeUserInfo :: BL.ByteString -> Either String UserInfo +decodeUserInfo bs = do + obj <- Aeson.eitherDecode bs + flip Aeson.parseEither obj $ + Aeson.withObject "UserInfo" $ \o -> do + userId <- o Aeson..: "sub" + name <- o Aeson..:? "name" + handle <- o Aeson..: "handle" + pure + UserInfo + { userId, + name, + handle + } diff --git a/unison-cli/src/Unison/Cli/Monad.hs b/unison-cli/src/Unison/Cli/Monad.hs index 2d6210e0d..40b88ee51 100644 --- a/unison-cli/src/Unison/Cli/Monad.hs +++ b/unison-cli/src/Unison/Cli/Monad.hs @@ -1,5 +1,4 @@ {-# LANGUAGE DataKinds #-} -{-# LANGUAGE DefaultSignatures #-} -- | The main CLI monad. 
module Unison.Cli.Monad @@ -193,7 +192,7 @@ data LoopState = LoopState instance {-# OVERLAPS #-} - Functor f => + (Functor f) => IsLabel "currentPath" ((Path.Absolute -> f Path.Absolute) -> (LoopState -> f LoopState)) where fromLabel :: (Path.Absolute -> f Path.Absolute) -> (LoopState -> f LoopState) diff --git a/unison-cli/src/Unison/Cli/MonadUtils.hs b/unison-cli/src/Unison/Cli/MonadUtils.hs index 41985cc88..4a018a5d0 100644 --- a/unison-cli/src/Unison/Cli/MonadUtils.hs +++ b/unison-cli/src/Unison/Cli/MonadUtils.hs @@ -114,7 +114,7 @@ import UnliftIO.STM -- .unisonConfig things -- | Lookup a config value by key. -getConfig :: Configurator.Configured a => Text -> Cli (Maybe a) +getConfig :: (Configurator.Configured a) => Text -> Cli (Maybe a) getConfig key = do Cli.Env {config} <- ask liftIO (Configurator.lookup config key) @@ -322,7 +322,7 @@ stepAtM :: stepAtM cause = stepManyAtM @[] cause . pure stepManyAt :: - Foldable f => + (Foldable f) => Text -> f (Path, Branch0 IO -> Branch0 IO) -> Cli () @@ -331,7 +331,7 @@ stepManyAt reason actions = do syncRoot reason stepManyAt' :: - Foldable f => + (Foldable f) => Text -> f (Path, Branch0 IO -> Cli (Branch0 IO)) -> Cli Bool @@ -341,7 +341,7 @@ stepManyAt' reason actions = do pure res stepManyAtNoSync' :: - Foldable f => + (Foldable f) => f (Path, Branch0 IO -> Cli (Branch0 IO)) -> Cli Bool stepManyAtNoSync' actions = do @@ -352,14 +352,14 @@ stepManyAtNoSync' actions = do -- Like stepManyAt, but doesn't update the last saved root stepManyAtNoSync :: - Foldable f => + (Foldable f) => f (Path, Branch0 IO -> Branch0 IO) -> Cli () stepManyAtNoSync actions = void . modifyRootBranch $ Branch.stepManyAt actions stepManyAtM :: - Foldable f => + (Foldable f) => Text -> f (Path, Branch0 IO -> IO (Branch0 IO)) -> Cli () @@ -368,7 +368,7 @@ stepManyAtM reason actions = do syncRoot reason stepManyAtMNoSync :: - Foldable f => + (Foldable f) => f (Path, Branch0 IO -> IO (Branch0 IO)) -> Cli () stepManyAtMNoSync actions = do diff --git a/unison-cli/src/Unison/Cli/NamesUtils.hs b/unison-cli/src/Unison/Cli/NamesUtils.hs index 20eeeea0f..6486db16d 100644 --- a/unison-cli/src/Unison/Cli/NamesUtils.hs +++ b/unison-cli/src/Unison/Cli/NamesUtils.hs @@ -51,7 +51,7 @@ basicNames' nameScoping = do -- | Produce a `Names` needed to display all the hashes used in the given file. displayNames :: - Var v => + (Var v) => TypecheckedUnisonFile v a -> Cli NamesWithHistory displayNames unisonFile = diff --git a/unison-cli/src/Unison/Cli/TypeCheck.hs b/unison-cli/src/Unison/Cli/TypeCheck.hs index 382ad2e41..8105d3542 100644 --- a/unison-cli/src/Unison/Cli/TypeCheck.hs +++ b/unison-cli/src/Unison/Cli/TypeCheck.hs @@ -51,7 +51,7 @@ typecheck ambient names sourceName source = (fst source) typecheckHelper :: - MonadIO m => + (MonadIO m) => Codebase IO Symbol Ann -> IO Parser.UniqueName -> [Type Symbol Ann] -> diff --git a/unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs b/unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs index c9d61df37..c059d5f40 100644 --- a/unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs +++ b/unison-cli/src/Unison/Codebase/Editor/AuthorInfo.hs @@ -23,7 +23,7 @@ import UnliftIO (liftIO) data AuthorInfo v a = AuthorInfo {guid, author, copyrightHolder :: (Reference.Id, Term v a, Type v a)} -createAuthorInfo :: forall m v a. MonadIO m => Var v => a -> Text -> m (AuthorInfo v a) +createAuthorInfo :: forall m v a. (MonadIO m) => (Var v) => a -> Text -> m (AuthorInfo v a) createAuthorInfo a t = createAuthorInfo' . 
unpack <$> liftIO (getRandomBytes 32) where createAuthorInfo' :: [Word8] -> AuthorInfo v a diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs index 389477d20..05a3da7ea 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput.hs @@ -1,3 +1,6 @@ +{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} + +{-# HLINT ignore "Use tuple-section" #-} module Unison.Codebase.Editor.HandleInput ( loop, ) @@ -36,8 +39,14 @@ import System.Directory getXdgDirectory, ) import System.Environment (withArgs) +import System.Exit (ExitCode (..)) import System.FilePath (()) -import System.Process (callCommand, readCreateProcess, shell) +import System.Process + ( callProcess, + readCreateProcess, + readCreateProcessWithExitCode, + shell, + ) import qualified Text.Megaparsec as P import qualified U.Codebase.Branch.Diff as V2Branch import qualified U.Codebase.Causal as V2Causal @@ -46,9 +55,6 @@ import qualified U.Codebase.Reference as V2 (Reference) import qualified U.Codebase.Reflog as Reflog import qualified U.Codebase.Sqlite.Operations as Ops import qualified U.Codebase.Sqlite.Queries as Queries -import qualified U.Util.Hash as Hash -import U.Util.Hash32 (Hash32) -import qualified U.Util.Hash32 as Hash32 import qualified Unison.ABT as ABT import qualified Unison.Builtin as Builtin import qualified Unison.Builtin.Decls as DD @@ -94,6 +100,7 @@ import Unison.Codebase.Editor.RemoteRepo ( ReadGitRemoteNamespace (..), ReadRemoteNamespace (..), ReadShareRemoteNamespace (..), + ShareUserHandle (..), WriteGitRemotePath (..), WriteGitRepo, WriteRemotePath (..), @@ -113,6 +120,7 @@ import qualified Unison.Codebase.Metadata as Metadata import Unison.Codebase.Patch (Patch (..)) import qualified Unison.Codebase.Patch as Patch import Unison.Codebase.Path (Path, Path' (..)) +import qualified Unison.Codebase.Path as HQSplit' import qualified Unison.Codebase.Path as Path import qualified Unison.Codebase.Path.Parse as Path import Unison.Codebase.PushBehavior (PushBehavior) @@ -136,8 +144,12 @@ import qualified Unison.CommandLine.InputPatterns as IP import qualified Unison.CommandLine.InputPatterns as InputPatterns import Unison.ConstructorReference (GConstructorReference (..)) import qualified Unison.DataDeclaration as DD +import qualified Unison.Hash as Hash +import Unison.Hash32 (Hash32) +import qualified Unison.Hash32 as Hash32 import qualified Unison.HashQualified as HQ import qualified Unison.HashQualified' as HQ' +import qualified Unison.HashQualified' as HashQualified import qualified Unison.Hashing.V2.Convert as Hashing import Unison.LabeledDependency (LabeledDependency) import qualified Unison.LabeledDependency as LD @@ -164,8 +176,6 @@ import qualified Unison.Reference as Reference import Unison.Referent (Referent) import qualified Unison.Referent as Referent import qualified Unison.Result as Result -import Unison.Runtime.IOSource (isTest) -import qualified Unison.Runtime.IOSource as DD import qualified Unison.Runtime.IOSource as IOSource import Unison.Server.Backend (ShallowListEntry (..)) import qualified Unison.Server.Backend as Backend @@ -181,7 +191,7 @@ import Unison.Share.Types (codeserverBaseURL) import qualified Unison.ShortHash as SH import qualified Unison.Sqlite as Sqlite import Unison.Symbol (Symbol) -import qualified Unison.Sync.Types as Share (Path (..), hashJWTHash) +import qualified Unison.Sync.Types as Share import qualified Unison.Syntax.HashQualified as HQ (fromString, 
toString, toText, unsafeFromString) import qualified Unison.Syntax.Lexer as L import qualified Unison.Syntax.Name as Name (toString, toVar, unsafeFromString, unsafeFromVar) @@ -271,7 +281,7 @@ loop e = do go (ann, kind, _hash, _uneval, eval, isHit) = (ann, kind, eval, isHit) when (not (null e')) do Cli.respond $ Evaluated text ppe bindings e' - #latestTypecheckedFile .= (Just unisonFile) + #latestTypecheckedFile .= Just unisonFile case e of Left (IncomingRootBranch hashes) -> Cli.time "IncomingRootBranch" do @@ -344,44 +354,6 @@ loop e = do names <- displayNames uf ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl names Cli.respond $ Typechecked (Text.pack sourceName) ppe sr uf - - delete :: - DeleteOutput -> - ((Path.Absolute, HQ'.HQSegment) -> Cli (Set Referent)) -> -- compute matching terms - ((Path.Absolute, HQ'.HQSegment) -> Cli (Set Reference)) -> -- compute matching types - Path.HQSplit' -> - Cli () - delete doutput getTerms getTypes hq' = do - hq <- Cli.resolveSplit' hq' - terms <- getTerms hq - types <- getTypes hq - when (Set.null terms && Set.null types) (Cli.returnEarly (NameNotFound hq')) - -- Mitchell: stripping hash seems wrong here... - resolvedPath <- Path.convert <$> Cli.resolveSplit' (HQ'.toName <$> hq') - rootNames <- Branch.toNames <$> Cli.getRootBranch0 - let name = Path.unsafeToName (Path.unsplit resolvedPath) - toRel :: Ord ref => Set ref -> R.Relation Name ref - toRel = R.fromList . fmap (name,) . toList - -- these names are relative to the root - toDelete = Names (toRel terms) (toRel types) - endangerments <- Cli.runTransaction (getEndangeredDependents toDelete rootNames) - if null endangerments - then do - let makeDeleteTermNames = map (BranchUtil.makeDeleteTermName resolvedPath) . Set.toList $ terms - let makeDeleteTypeNames = map (BranchUtil.makeDeleteTypeName resolvedPath) . Set.toList $ types - before <- Cli.getRootBranch0 - description <- inputDescription input - Cli.stepManyAt description (makeDeleteTermNames ++ makeDeleteTypeNames) - case doutput of - DeleteOutput'Diff -> do - after <- Cli.getRootBranch0 - (ppe, diff) <- diffHelper before after - Cli.respondNumbered (ShowDiffAfterDeleteDefinitions ppe diff) - DeleteOutput'NoDiff -> do - Cli.respond Success - else do - ppeDecl <- currentPrettyPrintEnvDecl Backend.Within - Cli.respondNumbered (CantDeleteDefinitions ppeDecl endangerments) in Cli.time "InputPattern" case input of ApiI -> do Cli.Env {serverBaseUrl} <- ask @@ -445,7 +417,12 @@ loop e = do description <- inputDescription input dest <- Cli.resolvePath' dest0 ok <- Cli.updateAtM description dest (const $ pure srcb) - Cli.respond if ok then Success else BranchEmpty src0 + Cli.respond + if ok + then Success + else BranchEmpty case src0 of + Left hash -> WhichBranchEmptyHash hash + Right path -> WhichBranchEmptyPath path MergeLocalBranchI src0 dest0 mergeMode -> do description <- inputDescription input srcb <- Cli.expectBranchAtPath' src0 @@ -490,15 +467,16 @@ loop e = do headb <- getBranch headRepo mergedb <- liftIO (Branch.merge'' (Codebase.lca codebase) Branch.RegularMerge baseb headb) squashedb <- liftIO (Branch.merge'' (Codebase.lca codebase) Branch.SquashMerge headb baseb) - -- Perform all child updates in a single step. 
Cli.updateAt description destAbs $ Branch.step \destBranch0 -> - destBranch0 & Branch.children - %~ ( \childMap -> - childMap & at "base" ?~ baseb - & at "head" ?~ headb - & at "merged" ?~ mergedb - & at "squashed" ?~ squashedb - ) + destBranch0 + & Branch.children + %~ ( \childMap -> + childMap + & at "base" ?~ baseb + & at "head" ?~ headb + & at "merged" ?~ mergedb + & at "squashed" ?~ squashedb + ) let base = snoc dest0 "base" head = snoc dest0 "head" merged = snoc dest0 "merged" @@ -869,9 +847,9 @@ loop e = do ] Cli.respond Success DeleteI dtarget -> case dtarget of - DeleteTarget'TermOrType doutput hq -> delete doutput Cli.getTermsAt Cli.getTypesAt hq - DeleteTarget'Type doutput hq -> delete doutput (const (pure Set.empty)) Cli.getTypesAt hq - DeleteTarget'Term doutput hq -> delete doutput Cli.getTermsAt (const (pure Set.empty)) hq + DeleteTarget'TermOrType doutput hqs -> delete input doutput Cli.getTermsAt Cli.getTypesAt hqs + DeleteTarget'Type doutput hqs -> delete input doutput (const (pure Set.empty)) Cli.getTypesAt hqs + DeleteTarget'Term doutput hqs -> delete input doutput Cli.getTermsAt (const (pure Set.empty)) hqs DeleteTarget'Patch src' -> do _ <- Cli.expectPatchAt src' description <- inputDescription input @@ -885,12 +863,10 @@ loop e = do if hasConfirmed || insistence == Force then do description <- inputDescription input - Cli.stepAt - description - (Path.empty, const Branch.empty0) + Cli.updateRoot Branch.empty description Cli.respond DeletedEverything else Cli.respond DeleteEverythingConfirmation - DeleteTarget'Branch insistence (Just p) -> do + DeleteTarget'Branch insistence (Just p@(parentPath, childName)) -> do branch <- Cli.expectBranchAtPath' (Path.unsplit' p) description <- inputDescription input absPath <- Cli.resolveSplit' p @@ -900,7 +876,7 @@ loop e = do (Branch.toNames (Branch.head branch)) afterDelete <- do rootNames <- Branch.toNames <$> Cli.getRootBranch0 - endangerments <- Cli.runTransaction (getEndangeredDependents toDelete rootNames) + endangerments <- Cli.runTransaction (getEndangeredDependents toDelete Set.empty rootNames) case (null endangerments, insistence) of (True, _) -> pure (Cli.respond Success) (False, Force) -> do @@ -912,8 +888,12 @@ loop e = do ppeDecl <- currentPrettyPrintEnvDecl Backend.Within Cli.respondNumbered $ CantDeleteNamespace ppeDecl endangerments Cli.returnEarlyWithoutOutput - Cli.stepAt description $ - BranchUtil.makeDeleteBranch (Path.convert absPath) + parentPathAbs <- Cli.resolvePath' parentPath + -- We have to modify the parent in order to also wipe out the history at the + -- child. 
+ Cli.updateAt description parentPathAbs \parentBranch -> + parentBranch + & Branch.modifyAt (Path.singleton childName) \_ -> Branch.empty afterDelete DisplayI outputLoc names' -> do currentBranch0 <- Cli.getCurrentBranch0 @@ -1103,6 +1083,7 @@ loop e = do Cli.LoadError -> Cli.returnEarly $ SourceLoadFailed path Cli.LoadSuccess contents -> pure contents loadUnisonFile (Text.pack path) contents + ClearI -> Cli.respond ClearScreen AddI requestedNames -> do description <- inputDescription input let vars = Set.map Name.toVar requestedNames @@ -1185,44 +1166,10 @@ loop e = do whenJustM (liftIO (Runtime.compileTo runtime codeLookup ppe ref (output <> ".uc"))) \err -> Cli.returnEarly (EvaluationFailure err) CompileSchemeI output main -> doCompileScheme output main - ExecuteSchemeI main -> doRunAsScheme main + ExecuteSchemeI main args -> doRunAsScheme main args GenSchemeLibsI -> doGenerateSchemeBoot True Nothing - FetchSchemeCompilerI -> doFetchCompiler - IOTestI main -> do - Cli.Env {codebase, runtime} <- ask - -- todo - allow this to run tests from scratch file, using addRunMain - let testType = Runtime.ioTestType runtime - parseNames <- (`NamesWithHistory.NamesWithHistory` mempty) <$> basicParseNames - ppe <- suffixifiedPPE parseNames - -- use suffixed names for resolving the argument to display - let oks results = - [ (r, msg) - | (r, Term.List' ts) <- results, - Term.App' (Term.Constructor' (ConstructorReference ref cid)) (Term.Text' msg) <- toList ts, - cid == DD.okConstructorId && ref == DD.testResultRef - ] - fails results = - [ (r, msg) - | (r, Term.List' ts) <- results, - Term.App' (Term.Constructor' (ConstructorReference ref cid)) (Term.Text' msg) <- toList ts, - cid == DD.failConstructorId && ref == DD.testResultRef - ] - - results = NamesWithHistory.lookupHQTerm main parseNames - ref <- do - let noMain = Cli.returnEarly $ NoMainFunction (HQ.toString main) ppe [testType] - case toList results of - [Referent.Ref ref] -> do - Cli.runTransaction (loadTypeOfTerm codebase (Referent.Ref ref)) >>= \case - Just typ | Typechecker.isSubtype typ testType -> pure ref - _ -> noMain - _ -> noMain - let a = ABT.annotation tm - tm = DD.forceTerm a a (Term.ref a ref) - -- Don't cache IO tests - tm' <- evalUnisonTerm False ppe False tm - Cli.respond $ TestResults Output.NewlyComputed ppe True True (oks [(ref, tm')]) (fails [(ref, tm')]) - + FetchSchemeCompilerI name -> doFetchCompiler name + IOTestI main -> handleIOTest main -- UpdateBuiltinsI -> do -- stepAt updateBuiltins -- checkTodo @@ -1317,7 +1264,7 @@ loop e = do Cli.Env {codebase} <- ask path <- maybe Cli.getCurrentPath Cli.resolvePath' namespacePath' Cli.getMaybeBranchAt path >>= \case - Nothing -> Cli.respond $ BranchEmpty (Right (Path.absoluteToPath' path)) + Nothing -> Cli.respond $ BranchEmpty (WhichBranchEmptyPath (Path.absoluteToPath' path)) Just b -> do externalDependencies <- Cli.runTransaction (NamespaceDependencies.namespaceDependencies codebase (Branch.head b)) @@ -1344,7 +1291,7 @@ loop e = do let seen h = State.gets (Set.member h) set h = State.modify (Set.insert h) getCausal b = (Branch.headHash b, pure $ Branch._history b) - goCausal :: forall m. Monad m => [(CausalHash, m (Branch.UnwrappedBranch m))] -> StateT (Set CausalHash) m () + goCausal :: forall m. 
(Monad m) => [(CausalHash, m (Branch.UnwrappedBranch m))] -> StateT (Set CausalHash) m () goCausal [] = pure () goCausal ((h, mc) : queue) = do ifM (seen h) (goCausal queue) do @@ -1352,7 +1299,7 @@ loop e = do Causal.One h _bh b -> goBranch h b mempty queue Causal.Cons h _bh b tail -> goBranch h b [fst tail] (tail : queue) Causal.Merge h _bh b (Map.toList -> tails) -> goBranch h b (map fst tails) (tails ++ queue) - goBranch :: forall m. Monad m => CausalHash -> Branch0 m -> [CausalHash] -> [(CausalHash, m (Branch.UnwrappedBranch m))] -> StateT (Set CausalHash) m () + goBranch :: forall m. (Monad m) => CausalHash -> Branch0 m -> [CausalHash] -> [(CausalHash, m (Branch.UnwrappedBranch m))] -> StateT (Set CausalHash) m () goBranch h b (Set.fromList -> causalParents) queue = case b of Branch0 terms0 types0 children0 patches0 _ _ _ _ _ _ _ -> let wrangleMetadata :: (Ord r, Ord n) => Metadata.Star r n -> r -> (r, (Set n, Set Metadata.Value)) @@ -1369,7 +1316,9 @@ loop e = do set h goCausal (map getCausal (Foldable.toList children0) ++ queue) prettyDump (h, Output.DN.DumpNamespace terms types patches children causalParents) = - P.lit "Namespace " <> P.shown h <> P.newline + P.lit "Namespace " + <> P.shown h + <> P.newline <> ( P.indentN 2 $ P.linesNonEmpty [ Monoid.unlessM (null causalParents) $ P.lit "Causal Parents:" <> P.newline <> P.indentN 2 (P.lines (map P.shown $ Set.toList causalParents)), @@ -1426,7 +1375,7 @@ loop e = do UpdateBuiltinsI -> Cli.respond NotImplemented QuitI -> Cli.haltRepl GistI input -> handleGist input - AuthLoginI -> authLogin (Codeserver.resolveCodeserver RemoteRepo.DefaultCodeserver) + AuthLoginI -> void $ authLogin (Codeserver.resolveCodeserver RemoteRepo.DefaultCodeserver) VersionI -> do Cli.Env {ucmVersion} <- ask Cli.respond $ PrintVersion ucmVersion @@ -1490,24 +1439,24 @@ inputDescription input = pure ("copy.patch " <> src <> " " <> dest) DeleteI dtarget -> do case dtarget of - DeleteTarget'TermOrType DeleteOutput'NoDiff thing0 -> do - thing <- hqs' thing0 - pure ("delete " <> thing) - DeleteTarget'TermOrType DeleteOutput'Diff thing0 -> do - thing <- hqs' thing0 - pure ("delete.verbose " <> thing) - DeleteTarget'Term DeleteOutput'NoDiff thing0 -> do - thing <- hqs' thing0 - pure ("delete.term " <> thing) - DeleteTarget'Term DeleteOutput'Diff thing0 -> do - thing <- hqs' thing0 - pure ("delete.term.verbose " <> thing) + DeleteTarget'TermOrType DeleteOutput'NoDiff things0 -> do + thing <- traverse hqs' things0 + pure ("delete " <> Text.intercalate " " thing) + DeleteTarget'TermOrType DeleteOutput'Diff things0 -> do + thing <- traverse hqs' things0 + pure ("delete.verbose " <> Text.intercalate " " thing) + DeleteTarget'Term DeleteOutput'NoDiff things0 -> do + thing <- traverse hqs' things0 + pure ("delete.term " <> Text.intercalate " " thing) + DeleteTarget'Term DeleteOutput'Diff things0 -> do + thing <- traverse hqs' things0 + pure ("delete.term.verbose " <> Text.intercalate " " thing) DeleteTarget'Type DeleteOutput'NoDiff thing0 -> do - thing <- hqs' thing0 - pure ("delete.type " <> thing) + thing <- traverse hqs' thing0 + pure ("delete.type " <> Text.intercalate " " thing) DeleteTarget'Type DeleteOutput'Diff thing0 -> do - thing <- hqs' thing0 - pure ("delete.type.verbose " <> thing) + thing <- traverse hqs' thing0 + pure ("delete.type.verbose " <> Text.intercalate " " thing) DeleteTarget'Branch Try opath0 -> do opath <- ops' opath0 pure ("delete.namespace " <> opath) @@ -1557,10 +1506,15 @@ inputDescription input = MergeBuiltinsI -> pure "builtins.merge" 
MergeIOBuiltinsI -> pure "builtins.mergeio" MakeStandaloneI out nm -> pure ("compile " <> Text.pack out <> " " <> HQ.toText nm) - ExecuteSchemeI nm -> pure ("run.native " <> HQ.toText nm) + ExecuteSchemeI nm args -> + pure $ + "run.native " + <> HQ.toText nm + <> " " + <> Text.unwords (fmap Text.pack args) CompileSchemeI fi nm -> pure ("compile.native " <> HQ.toText nm <> " " <> Text.pack fi) GenSchemeLibsI -> pure "compile.native.genlibs" - FetchSchemeCompilerI -> pure "compile.native.fetch" + FetchSchemeCompilerI name -> pure ("compile.native.fetch" <> Text.pack name) PullRemoteBranchI orepo dest0 _syncMode pullMode _ -> do dest <- p' dest0 let command = @@ -1627,6 +1581,7 @@ inputDescription input = ListDependentsI {} -> wat ListEditsI {} -> wat LoadI {} -> wat + ClearI {} -> pure "clear" NamesI {} -> wat NamespaceDependenciesI {} -> wat PopBranchI {} -> wat @@ -1871,6 +1826,64 @@ handleDiffNamespaceToPatch description input = do makeTypeEdit (Conversions.reference2to1 -> oldRef) newRefs = Set.asSingleton newRefs <&> \newRef -> (oldRef, TypeEdit.Replace (Conversions.reference2to1 newRef)) +handleIOTest :: HQ.HashQualified Name -> Cli () +handleIOTest main = do + Cli.Env {codebase, runtime} <- ask + + let testType = Runtime.ioTestType runtime + parseNames <- (`NamesWithHistory.NamesWithHistory` mempty) <$> basicParseNames + -- use suffixed names for resolving the argument to display + ppe <- suffixifiedPPE parseNames + let oks results = + [ (r, msg) + | (r, Term.List' ts) <- results, + Term.App' (Term.Constructor' (ConstructorReference ref cid)) (Term.Text' msg) <- toList ts, + cid == DD.okConstructorId && ref == DD.testResultRef + ] + fails results = + [ (r, msg) + | (r, Term.List' ts) <- results, + Term.App' (Term.Constructor' (ConstructorReference ref cid)) (Term.Text' msg) <- toList ts, + cid == DD.failConstructorId && ref == DD.testResultRef + ] + + matches <- + Cli.label \returnMatches -> do + -- First, look at the terms in the latest typechecked file for a name-match. + whenJustM Cli.getLatestTypecheckedFile \typecheckedFile -> do + whenJust (HQ.toName main) \mainName -> + whenJust (Map.lookup (Name.toVar mainName) (UF.hashTermsId typecheckedFile)) \(ref, _wk, _term, typ) -> + returnMatches [(Reference.fromId ref, typ)] + + -- Then, if we get here (because nothing in the scratch file matched), look at the terms in the codebase. + Cli.runTransaction do + forMaybe (Set.toList (NamesWithHistory.lookupHQTerm main parseNames)) \ref0 -> + runMaybeT do + ref <- MaybeT (pure (Referent.toTermReference ref0)) + typ <- MaybeT (loadTypeOfTerm codebase (Referent.Ref ref)) + pure (ref, typ) + + ref <- + case matches of + [] -> Cli.returnEarly (NoMainFunction (HQ.toString main) ppe [testType]) + [(ref, typ)] -> + if Typechecker.isSubtype typ testType + then pure ref + else Cli.returnEarly (BadMainFunction "io.test" (HQ.toString main) typ ppe [testType]) + _ -> do + hashLength <- Cli.runTransaction Codebase.hashLength + let labeledDependencies = + matches + & map (\(ref, _typ) -> LD.termRef ref) + & Set.fromList + Cli.returnEarly (LabeledReferenceAmbiguous hashLength main labeledDependencies) + + let a = ABT.annotation tm + tm = DD.forceTerm a a (Term.ref a ref) + -- Don't cache IO tests + tm' <- evalUnisonTerm False ppe False tm + Cli.respond $ TestResults Output.NewlyComputed ppe True True (oks [(ref, tm')]) (fails [(ref, tm')]) + -- | Handle a @push@ command. 
handlePushRemoteBranch :: PushRemoteBranchInput -> Cli () handlePushRemoteBranch PushRemoteBranchInput {maybeRemoteRepo = mayRepo, localPath = path, pushBehavior, syncMode} = @@ -1961,53 +1974,49 @@ handlePushToUnisonShare :: WriteShareRemotePath -> Path.Absolute -> PushBehavior handlePushToUnisonShare remote@WriteShareRemotePath {server, repo, path = remotePath} localPath behavior = do let codeserver = Codeserver.resolveCodeserver server let baseURL = codeserverBaseURL codeserver - let sharePath = Share.Path (repo Nel.:| pathToSegments remotePath) - ensureAuthenticatedWithCodeserver codeserver - - Cli.Env {authHTTPClient, codebase} <- ask + let sharePath = Share.Path (shareUserHandleToText repo Nel.:| pathToSegments remotePath) + _userInfo <- ensureAuthenticatedWithCodeserver codeserver -- doesn't handle the case where a non-existent path is supplied localCausalHash <- Cli.runTransaction (Ops.loadCausalHashAtPath (pathToSegments (Path.unabsolute localPath))) & onNothingM do Cli.returnEarly (EmptyPush . Path.absoluteToPath' $ localPath) - let checkAndSetPush :: Maybe Hash32 -> IO (Either (Share.SyncError Share.CheckAndSetPushError) ()) + let checkAndSetPush :: Maybe Hash32 -> Cli () checkAndSetPush remoteHash = - withEntitiesUploadedProgressCallback \uploadedCallback -> - if Just (Hash32.fromHash (unCausalHash localCausalHash)) == remoteHash - then pure (Right ()) - else - Share.checkAndSetPush - authHTTPClient - baseURL - (Codebase.withConnectionIO codebase) - sharePath - remoteHash - localCausalHash - uploadedCallback + when (Just (Hash32.fromHash (unCausalHash localCausalHash)) /= remoteHash) do + let push = + Cli.with withEntitiesUploadedProgressCallback \uploadedCallback -> do + Share.checkAndSetPush + baseURL + sharePath + remoteHash + localCausalHash + uploadedCallback + push & onLeftM (pushError ShareErrorCheckAndSetPush) case behavior of PushBehavior.ForcePush -> do maybeHashJwt <- - Cli.ioE (Share.getCausalHashByPath authHTTPClient baseURL sharePath) \err -> - Cli.returnEarly (Output.ShareError (ShareErrorGetCausalHashByPath err)) - Cli.ioE (checkAndSetPush (Share.hashJWTHash <$> maybeHashJwt)) (pushError ShareErrorCheckAndSetPush) + Share.getCausalHashByPath baseURL sharePath & onLeftM \err0 -> + (Cli.returnEarly . 
Output.ShareError) case err0 of + Share.SyncError err -> ShareErrorGetCausalHashByPath err + Share.TransportError err -> ShareErrorTransport err + checkAndSetPush (Share.hashJWTHash <$> maybeHashJwt) Cli.respond (ViewOnShare remote) PushBehavior.RequireEmpty -> do - Cli.ioE (checkAndSetPush Nothing) (pushError ShareErrorCheckAndSetPush) + checkAndSetPush Nothing Cli.respond (ViewOnShare remote) PushBehavior.RequireNonEmpty -> do - let push :: IO (Either (Share.SyncError Share.FastForwardPushError) ()) - push = do - withEntitiesUploadedProgressCallback \uploadedCallback -> + let push :: Cli (Either (Share.SyncError Share.FastForwardPushError) ()) + push = + Cli.with withEntitiesUploadedProgressCallback \uploadedCallback -> Share.fastForwardPush - authHTTPClient baseURL - (Codebase.withConnectionIO codebase) sharePath localCausalHash uploadedCallback - Cli.ioE push (pushError ShareErrorFastForwardPush) + push & onLeftM (pushError ShareErrorFastForwardPush) Cli.respond (ViewOnShare remote) where pathToSegments :: Path -> [Text] @@ -2078,8 +2087,24 @@ handleShowDefinition outputLoc showDefinitionScope inputQuery = do Cli.runTransaction (Backend.definitionsBySuffixes codebase nameSearch includeCycles query) outputPath <- getOutputPath when (not (null types && null terms)) do - let ppe = PPED.biasTo (mapMaybe HQ.toName inputQuery) unbiasedPPE - Cli.respond (DisplayDefinitions outputPath ppe types terms) + -- We need an 'isTest' check in the output layer, so it can prepend "test>" to tests in a scratch file. Since we + -- currently have the whole branch in memory, we just use that to make our predicate, but this could/should get this + -- information from the database instead, once it's efficient to do so. + isTest <- do + branch <- Cli.getCurrentBranch0 + pure \ref -> + branch + & Branch.deepTermMetadata + & Metadata.hasMetadataWithType' (Referent.fromTermReference ref) IOSource.isTestReference + Cli.respond $ + DisplayDefinitions + DisplayDefinitionsOutput + { isTest, + outputFile = outputPath, + prettyPrintEnv = PPED.biasTo (mapMaybe HQ.toName inputQuery) unbiasedPPE, + terms, + types + } when (not (null misses)) (Cli.respond (SearchTermsNotFound misses)) for_ outputPath \p -> do -- We set latestFile to be programmatically generated, if we @@ -2115,7 +2140,7 @@ handleTest TestInput {includeLibNamespace, showFailures, showSuccesses} = do branch <- Cli.getCurrentBranch0 branch & Branch.deepTermMetadata - & R4.restrict34d12 isTest + & R4.restrict34d12 IOSource.isTest & (if includeLibNamespace then id else R.filterRan (not . isInLibNamespace)) & R.dom & pure @@ -2251,23 +2276,15 @@ importRemoteShareBranch rrn@(ReadShareRemoteNamespace {server, repo, path}) = do let codeserver = Codeserver.resolveCodeserver server let baseURL = codeserverBaseURL codeserver -- Auto-login to share if pulling from a non-public path - when (not $ RemoteRepo.isPublic rrn) $ ensureAuthenticatedWithCodeserver codeserver - let shareFlavoredPath = Share.Path (repo Nel.:| coerce @[NameSegment] @[Text] (Path.toList path)) - Cli.Env {authHTTPClient, codebase} <- ask - let pull :: IO (Either (Share.SyncError Share.PullError) CausalHash) - pull = - withEntitiesDownloadedProgressCallback \downloadedCallback -> - Share.pull - authHTTPClient - baseURL - (Codebase.withConnectionIO codebase) - shareFlavoredPath - downloadedCallback + when (not $ RemoteRepo.isPublic rrn) . 
void $ ensureAuthenticatedWithCodeserver codeserver + let shareFlavoredPath = Share.Path (shareUserHandleToText repo Nel.:| coerce @[NameSegment] @[Text] (Path.toList path)) + Cli.Env {codebase} <- ask causalHash <- - Cli.ioE pull \err0 -> - (Cli.returnEarly . Output.ShareError) case err0 of - Share.SyncError err -> Output.ShareErrorPull err - Share.TransportError err -> Output.ShareErrorTransport err + Cli.with withEntitiesDownloadedProgressCallback \downloadedCallback -> + Share.pull baseURL shareFlavoredPath downloadedCallback & onLeftM \err0 -> + (Cli.returnEarly . Output.ShareError) case err0 of + Share.SyncError err -> Output.ShareErrorPull err + Share.TransportError err -> Output.ShareErrorTransport err liftIO (Codebase.getBranchForHash codebase causalHash) & onNothingM do error $ reportBug "E412939" "`pull` \"succeeded\", but I can't find the result in the codebase. (This is a bug.)" where @@ -2510,7 +2527,7 @@ searchResultsFor ns terms types = searchBranchScored :: forall score. - Ord score => + (Ord score) => Names -> (Name -> Name -> Maybe score) -> [HQ.HashQualified Name] -> @@ -2602,8 +2619,8 @@ compilerPath = Path.Path' {Path.unPath' = Left abs} rootPath = Path.Path {Path.toSeq = Seq.fromList segs} abs = Path.Absolute {Path.unabsolute = rootPath} -doFetchCompiler :: Cli () -doFetchCompiler = +doFetchCompiler :: String -> Cli () +doFetchCompiler username = inputDescription pullInput >>= doPullRemoteBranch repo @@ -2616,7 +2633,7 @@ doFetchCompiler = ns = ReadShareRemoteNamespace { server = RemoteRepo.DefaultCodeserver, - repo = "dolio", + repo = ShareUserHandle (Text.pack username), path = Path.fromList $ NameSegment <$> ["public", "internal", "trunk"] } @@ -2633,7 +2650,7 @@ doFetchCompiler = ensureCompilerExists :: Cli () ensureCompilerExists = Cli.branchExistsAtPath' compilerPath - >>= flip unless doFetchCompiler + >>= flip unless (doFetchCompiler "unison") getCacheDir :: Cli String getCacheDir = liftIO $ getXdgDirectory XdgCache "unisonlanguage" @@ -2689,7 +2706,7 @@ typecheckAndEval ppe tm = do | Typechecker.fitsScheme ty mty -> () <$ evalUnisonTerm False ppe False tm | otherwise -> - Cli.returnEarly $ BadMainFunction rendered ty ppe [mty] + Cli.returnEarly $ BadMainFunction "run" rendered ty ppe [mty] Result.Result notes Nothing -> do currentPath <- Cli.getCurrentPath let tes = [err | Result.TypeError err <- toList notes] @@ -2698,49 +2715,96 @@ typecheckAndEval ppe tm = do a = External rendered = P.toPlainUnbroken $ TP.pretty ppe tm -ensureSchemeExists :: Cli () -ensureSchemeExists = +ensureSchemeExists :: SchemeBackend -> Cli () +ensureSchemeExists bk = liftIO callScheme >>= \case True -> pure () False -> Cli.returnEarly (PrintMessage msg) where - msg = - P.lines - [ "I can't seem to call scheme. See", - "", - P.indentN - 2 - "https://github.com/cisco/ChezScheme/blob/main/BUILDING", - "", - "for how to install Chez Scheme." - ] + msg = case bk of + Racket -> + P.lines + [ "I can't seem to call racket. See", + "", + P.indentN + 2 + "https://download.racket-lang.org/", + "", + "for how to install Racket." + ] + Chez -> + P.lines + [ "I can't seem to call scheme. See", + "", + P.indentN + 2 + "https://github.com/cisco/ChezScheme/blob/main/BUILDING", + "", + "for how to install Chez Scheme." 
+ ] + cmd = case bk of + Racket -> "racket -l- raco help" + Chez -> "scheme -q" callScheme = - catch - (True <$ readCreateProcess (shell "scheme -q") "") - (\(_ :: IOException) -> pure False) + readCreateProcessWithExitCode (shell cmd) "" >>= \case + (ExitSuccess, _, _) -> pure True + (ExitFailure _, _, _) -> pure False -runScheme :: String -> Cli () -runScheme file = do - ensureSchemeExists +racketOpts :: FilePath -> FilePath -> [String] -> [String] +racketOpts gendir statdir args = libs ++ args + where + includes = [gendir, statdir "common", statdir "racket"] + libs = concatMap (\dir -> ["-S", dir]) includes + +chezOpts :: FilePath -> FilePath -> [String] -> [String] +chezOpts gendir statdir args = + "-q" : opt ++ libs ++ ["--script"] ++ args + where + includes = [gendir, statdir "common", statdir "chez"] + libs = ["--libdirs", List.intercalate ":" includes] + opt = ["--optimize-level", "3"] + +data SchemeBackend = Racket | Chez + +runScheme :: SchemeBackend -> String -> [String] -> Cli () +runScheme bk file args = do + ensureSchemeExists bk gendir <- getSchemeGenLibDir statdir <- getSchemeStaticLibDir - let includes = gendir ++ ":" ++ statdir - lib = "--libdirs " ++ includes - opt = "--optimize-level 3" - cmd = "scheme -q " ++ opt ++ " " ++ lib ++ " --script " ++ file + let cmd = case bk of Racket -> "racket"; Chez -> "scheme" + opts = case bk of + Racket -> racketOpts gendir statdir (file : args) + Chez -> chezOpts gendir statdir (file : args) success <- liftIO $ - (True <$ callCommand cmd) `catch` \(_ :: IOException) -> - pure False + (True <$ callProcess cmd opts) + `catch` \(_ :: IOException) -> pure False unless success $ Cli.returnEarly (PrintMessage "Scheme evaluation failed.") -buildScheme :: String -> String -> Cli () -buildScheme main file = do - ensureSchemeExists +buildScheme :: SchemeBackend -> String -> String -> Cli () +buildScheme bk main file = do + ensureSchemeExists bk statDir <- getSchemeStaticLibDir genDir <- getSchemeGenLibDir + build genDir statDir main file + where + build + | Racket <- bk = buildRacket + | Chez <- bk = buildChez + +buildRacket :: String -> String -> String -> String -> Cli () +buildRacket genDir statDir main file = + let args = ["-l", "raco", "--", "exe", "-o", main, file] + opts = racketOpts genDir statDir args + in void . liftIO $ + catch + (True <$ callProcess "racket" opts) + (\(_ :: IOException) -> pure False) + +buildChez :: String -> String -> String -> String -> Cli () +buildChez genDir statDir main file = do let cmd = shell "scheme -q --optimize-level 3" void . 
liftIO $ readCreateProcess cmd (build statDir genDir) where @@ -2762,17 +2826,17 @@ buildScheme main file = do ++ lns gd gen ++ [surround file] -doRunAsScheme :: HQ.HashQualified Name -> Cli () -doRunAsScheme main = do - fullpath <- generateSchemeFile (HQ.toString main) main - runScheme fullpath +doRunAsScheme :: HQ.HashQualified Name -> [String] -> Cli () +doRunAsScheme main args = do + fullpath <- generateSchemeFile True (HQ.toString main) main + runScheme Racket fullpath args doCompileScheme :: String -> HQ.HashQualified Name -> Cli () doCompileScheme out main = - generateSchemeFile out main >>= buildScheme out + generateSchemeFile True out main >>= buildScheme Racket out -generateSchemeFile :: String -> HQ.HashQualified Name -> Cli String -generateSchemeFile out main = do +generateSchemeFile :: Bool -> String -> HQ.HashQualified Name -> Cli String +generateSchemeFile exec out main = do (comp, ppe) <- resolveMainRef main ensureCompilerExists doGenerateSchemeBoot False $ Just ppe @@ -2788,7 +2852,7 @@ generateSchemeFile out main = do fpc = Term.constructor a fprf fp = Term.app a fpc outTm tm :: Term Symbol Ann - tm = Term.apps' sscm [toCmp, fp] + tm = Term.apps' sscm [Term.boolean a exec, toCmp, fp] typecheckAndEval ppe tm pure fullpath where @@ -2819,6 +2883,8 @@ doPullRemoteBranch mayRepo path syncMode pullMode verbosity description = do Cli.ioE (Codebase.importRemoteBranch codebase repo syncMode preprocess) \err -> Cli.returnEarly (Output.GitError err) ReadRemoteNamespaceShare repo -> importRemoteShareBranch repo + when (Branch.isEmpty0 (Branch.head remoteBranch)) do + Cli.respond (PulledEmptyBranch ns) let unchangedMsg = PullAlreadyUpToDate ns path destAbs <- Cli.resolvePath' path let printDiffPath = if Verbosity.isSilent verbosity then Nothing else Just path @@ -2865,24 +2931,118 @@ loadPropagateDiffDefaultPatch inputDescription maybeDest0 dest = do (ppe, diff) <- diffHelper original (Branch.head patched) Cli.respondNumbered (ShowDiffAfterMergePropagate dest0 dest patchPath ppe diff) +delete :: + Input -> + DeleteOutput -> + ((Path.Absolute, HQ'.HQSegment) -> Cli (Set Referent)) -> -- compute matching terms + ((Path.Absolute, HQ'.HQSegment) -> Cli (Set Reference)) -> -- compute matching types + [Path.HQSplit'] -> -- targets for deletion + Cli () +delete input doutput getTerms getTypes hqs' = do + -- persists the original hash qualified entity for error reporting + typesTermsTuple <- + traverse + ( \hq -> do + absolute <- Cli.resolveSplit' hq + types <- getTypes absolute + terms <- getTerms absolute + return (hq, types, terms) + ) + hqs' + let notFounds = List.filter (\(_, types, terms) -> Set.null terms && Set.null types) typesTermsTuple + -- if there are any entities which cannot be deleted because they don't exist, short circuit. + if not $ null notFounds + then do + let toName :: [(Path.HQSplit', Set Reference, Set referent)] -> [Name] + toName notFounds = + mapMaybe (\(split, _, _) -> Path.toName' $ HashQualified.toName (HQSplit'.unsplitHQ' split)) notFounds + Cli.returnEarly $ NamesNotFound (toName notFounds) + else do + checkDeletes typesTermsTuple doutput input + +checkDeletes :: [(Path.HQSplit', Set Reference, Set Referent)] -> DeleteOutput -> Input -> Cli () +checkDeletes typesTermsTuples doutput inputs = do + let toSplitName :: + (Path.HQSplit', Set Reference, Set Referent) -> + Cli (Path.Split, Name, Set Reference, Set Referent) + toSplitName hq = do + resolvedPath <- Path.convert <$> Cli.resolveSplit' (HQ'.toName <$> hq ^. 
_1)
+          return (resolvedPath, Path.unsafeToName (Path.unsplit resolvedPath), hq ^. _2, hq ^. _3)
+  -- get the splits and names with terms and types
+  splitsNames <- traverse toSplitName typesTermsTuples
+  let toRel :: (Ord ref) => Set ref -> Name -> R.Relation Name ref
+      toRel setRef name = R.fromList (fmap (name,) (toList setRef))
+  let toDelete = fmap (\(_, names, types, terms) -> Names (toRel terms names) (toRel types names)) splitsNames
+  -- make sure endangered is completely contained in paths
+  rootNames <- Branch.toNames <$> Cli.getRootBranch0
+  -- get only once for the entire deletion set
+  let allTermsToDelete :: Set LabeledDependency
+      allTermsToDelete = Set.unions (fmap Names.labeledReferences toDelete)
+  -- get the endangered dependencies for each entity to delete
+  endangered <-
+    Cli.runTransaction $
+      traverse
+        ( \targetToDelete ->
+            getEndangeredDependents targetToDelete (allTermsToDelete) rootNames
+        )
+        toDelete
+  -- If the overall dependency map is not completely empty, abort deletion
+  let endangeredDeletions = List.filter (\m -> not $ null m || Map.foldr (\s b -> null s || b) False m) endangered
+  if null endangeredDeletions
+    then do
+      let deleteTypesTerms =
+            splitsNames
+              >>= ( \(split, _, types, terms) ->
+                      (map (BranchUtil.makeDeleteTypeName split) . Set.toList $ types)
+                        ++ (map (BranchUtil.makeDeleteTermName split) . Set.toList $ terms)
+                  )
+      before <- Cli.getRootBranch0
+      description <- inputDescription inputs
+      Cli.stepManyAt description deleteTypesTerms
+      case doutput of
+        DeleteOutput'Diff -> do
+          after <- Cli.getRootBranch0
+          (ppe, diff) <- diffHelper before after
+          Cli.respondNumbered (ShowDiffAfterDeleteDefinitions ppe diff)
+        DeleteOutput'NoDiff -> do
+          Cli.respond Success
+    else do
+      ppeDecl <- currentPrettyPrintEnvDecl Backend.Within
+      let combineRefs = List.foldl (Map.unionWith NESet.union) Map.empty endangeredDeletions
+      Cli.respondNumbered (CantDeleteDefinitions ppeDecl combineRefs)
+
 -- | Goal: When deleting, we might be removing the last name of a given definition (i.e. the
 -- definition is going "extinct"). In this case we may wish to take some action or warn the
 -- user about these "endangered" definitions which would now contain unnamed references.
+-- The argument `otherDesiredDeletions` is included in this function because the user might want to
+-- delete a term and all its dependencies in one command, so we give this function access to
+-- the full set of entities that the user wishes to delete.
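-- A standalone sketch (editor's illustration) of the set arithmetic the
-- comment above describes, using plain Data.Set in place of
-- Names/LabeledDependency; the function and argument names are simplified
-- stand-ins, not Unison's.
import Data.Set (Set)
import qualified Data.Set as Set

-- References that lose their last name once the requested deletions happen.
extinct :: Ord ref => Set ref -> Set ref -> Set ref
extinct refsToDelete remainingRefs = refsToDelete `Set.difference` remainingRefs

-- Dependents that survive the deletion themselves (and are not slated for
-- deletion by the same command) but point at something going extinct.
endangered :: Ord ref => Set ref -> Set ref -> Set ref -> Set ref
endangered dependentsOfExtinct remainingRefs otherDesiredDeletions =
  dependentsOfExtinct `Set.intersection` (remainingRefs `Set.difference` otherDesiredDeletions)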
getEndangeredDependents :: - -- | Which names we want to delete + -- | Prospective target for deletion Names -> + -- | All entities we want to delete (including the target) + Set LabeledDependency -> -- | All names from the root branch Names -> -- | map from references going extinct to the set of endangered dependents Sqlite.Transaction (Map LabeledDependency (NESet LabeledDependency)) -getEndangeredDependents namesToDelete rootNames = do +getEndangeredDependents targetToDelete otherDesiredDeletions rootNames = do + -- names of terms left over after target deletion let remainingNames :: Names - remainingNames = rootNames `Names.difference` namesToDelete - refsToDelete, remainingRefs, extinct :: Set LabeledDependency - refsToDelete = Names.labeledReferences namesToDelete - remainingRefs = Names.labeledReferences remainingNames -- left over after delete - extinct = refsToDelete `Set.difference` remainingRefs -- deleting and not left over - accumulateDependents :: LabeledDependency -> Sqlite.Transaction (Map LabeledDependency (Set LabeledDependency)) + remainingNames = rootNames `Names.difference` targetToDelete + -- target refs for deletion + let refsToDelete :: Set LabeledDependency + refsToDelete = Names.labeledReferences targetToDelete + -- refs left over after deleting target + let remainingRefs :: Set LabeledDependency + remainingRefs = Names.labeledReferences remainingNames + -- remove the other targets for deletion from the remaining terms + let remainingRefsWithoutOtherTargets :: Set LabeledDependency + remainingRefsWithoutOtherTargets = Set.difference remainingRefs otherDesiredDeletions + -- deleting and not left over + let extinct :: Set LabeledDependency + extinct = refsToDelete `Set.difference` remainingRefs + let accumulateDependents :: LabeledDependency -> Sqlite.Transaction (Map LabeledDependency (Set LabeledDependency)) accumulateDependents ld = let ref = LD.fold id Referent.toReference ld in Map.singleton ld . Set.map LD.termRef <$> Codebase.dependents Queries.ExcludeOwnComponent ref @@ -2895,7 +3055,7 @@ getEndangeredDependents namesToDelete rootNames = do let extinctToEndangered :: Map LabeledDependency (NESet LabeledDependency) extinctToEndangered = allDependentsOfExtinct & Map.mapMaybe \endangeredDeps -> - let remainingEndangered = endangeredDeps `Set.intersection` remainingRefs + let remainingEndangered = endangeredDeps `Set.intersection` remainingRefsWithoutOtherTargets in NESet.nonEmptySet remainingEndangered pure extinctToEndangered @@ -2962,7 +3122,7 @@ docsI srcLoc prettyPrintNames src = codebaseByMetadata :: Cli () codebaseByMetadata = do - (ppe, out) <- getLinks srcLoc src (Left $ Set.fromList [DD.docRef, DD.doc2Ref]) + (ppe, out) <- getLinks srcLoc src (Left $ Set.fromList [DD.docRef, IOSource.doc2Ref]) case out of [] -> codebaseByName [(_name, ref, _tm)] -> do @@ -3048,7 +3208,7 @@ parseType input src = do Type.bindNames Name.unsafeFromVar mempty (NamesWithHistory.currentNames names) (Type.generalizeLowercase mempty typ) & onLeft \errs -> Cli.returnEarly (ParseResolutionFailures src (toList errs)) -getTermsIncludingHistorical :: Monad m => Path.HQSplit -> Branch0 m -> Cli (Set Referent) +getTermsIncludingHistorical :: (Monad m) => Path.HQSplit -> Branch0 m -> Cli (Set Referent) getTermsIncludingHistorical (p, hq) b = case Set.toList refs of [] -> case hq of HQ'.HashQualified n hs -> do @@ -3070,7 +3230,7 @@ data GetTermResult -- -- Otherwise, returns `Nothing`. 
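-- A small sketch (editor's illustration) of the filtering step in
-- extinctToEndangered above: drop extinct references with no surviving
-- dependents, and record non-emptiness in the type via NESet
-- (Data.Set.NonEmpty from nonempty-containers, as used in the diff).
-- The type variables here are generic stand-ins.
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Set.NonEmpty (NESet)
import qualified Data.Set.NonEmpty as NESet

keepEndangered :: (Ord k, Ord v) => Set v -> Map k (Set v) -> Map k (NESet v)
keepEndangered surviving dependentsOfExtinct =
  Map.mapMaybe
    (\deps -> NESet.nonEmptySet (deps `Set.intersection` surviving))
    dependentsOfExtinct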
addWatch :: - Var v => + (Var v) => String -> Maybe (TypecheckedUnisonFile v Ann) -> Maybe (v, TypecheckedUnisonFile v Ann) @@ -3124,7 +3284,7 @@ getTerm main = mainType <- Runtime.mainType <$> view #runtime basicPrettyPrintNames <- getBasicPrettyPrintNames ppe <- suffixifiedPPE (NamesWithHistory.NamesWithHistory basicPrettyPrintNames mempty) - Cli.returnEarly $ BadMainFunction main ty ppe [mainType] + Cli.returnEarly $ BadMainFunction "run" main ty ppe [mainType] GetTermSuccess x -> pure x getTerm' :: String -> Cli GetTermResult @@ -3182,7 +3342,7 @@ createWatcherFile v tm typ = [(magicMainWatcherString, [(v2, tm, typ)])] executePPE :: - Var v => + (Var v) => TypecheckedUnisonFile v a -> Cli PPE.PrettyPrintEnv executePPE unisonFile = @@ -3212,7 +3372,7 @@ hqNameQuery query = do -- | Select a definition from the given branch. -- Returned names will match the provided 'Position' type. -fuzzySelectDefinition :: MonadIO m => Position -> Branch0 m0 -> m (Maybe [HQ.HashQualified Name]) +fuzzySelectDefinition :: (MonadIO m) => Position -> Branch0 m0 -> m (Maybe [HQ.HashQualified Name]) fuzzySelectDefinition pos searchBranch0 = liftIO do let termsAndTypes = Relation.dom (Names.hashQualifyTermsRelation (Relation.swap $ Branch.deepTerms searchBranch0)) @@ -3226,7 +3386,7 @@ fuzzySelectDefinition pos searchBranch0 = liftIO do -- | Select a namespace from the given branch. -- Returned Path's will match the provided 'Position' type. -fuzzySelectNamespace :: MonadIO m => Position -> Branch0 m0 -> m (Maybe [Path']) +fuzzySelectNamespace :: (MonadIO m) => Position -> Branch0 m0 -> m (Maybe [Path']) fuzzySelectNamespace pos searchBranch0 = liftIO do let intoPath' :: Path -> Path' intoPath' = case pos of diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput/AuthLogin.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput/AuthLogin.hs index fd92266ee..e8da16ead 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput/AuthLogin.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput/AuthLogin.hs @@ -25,14 +25,17 @@ import Unison.Auth.CredentialManager (getCredentials, saveCredentials) import Unison.Auth.Discovery (discoveryURIForCodeserver, fetchDiscoveryDoc) import Unison.Auth.Types ( Code, + CodeserverCredentials (..), CredentialFailure (..), DiscoveryDoc (..), OAuthState, PKCEChallenge, PKCEVerifier, - Tokens, + Tokens (..), + UserInfo, codeserverCredentials, ) +import Unison.Auth.UserInfo (getUserInfo) import Unison.Cli.Monad (Cli) import qualified Unison.Cli.Monad as Cli import qualified Unison.Codebase.Editor.Output as Output @@ -46,23 +49,21 @@ ucmOAuthClientID = "ucm" -- | Checks if the user has valid auth for the given codeserver, -- and runs through an authentication flow if not. -ensureAuthenticatedWithCodeserver :: CodeserverURI -> Cli () +ensureAuthenticatedWithCodeserver :: CodeserverURI -> Cli UserInfo ensureAuthenticatedWithCodeserver codeserverURI = do Cli.Env {credentialManager} <- ask getCredentials credentialManager (codeserverIdFromCodeserverURI codeserverURI) >>= \case - Right _ -> pure () + Right (CodeserverCredentials {userInfo}) -> pure userInfo Left _ -> authLogin codeserverURI -- | Direct the user through an authentication flow with the given server and store the credentials in the provided -- credential manager. 
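-- A sketch (editor's illustration) of the shape of
-- ensureAuthenticatedWithCodeserver above: reuse saved credentials when
-- present, otherwise fall back to the interactive authLogin flow declared just
-- below. CachedCreds and the monad m are simplified stand-ins, not Unison's types.
newtype CachedCreds user = CachedCreds {cachedUser :: user}

ensureAuthenticated ::
  Monad m =>
  m (Either err (CachedCreds user)) -> -- look up saved credentials
  m user -> -- interactive login flow
  m user
ensureAuthenticated lookupCreds loginFlow = do
  cached <- lookupCreds
  case cached of
    Right creds -> pure (cachedUser creds)
    Left _ -> loginFlow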
-authLogin :: CodeserverURI -> Cli () +authLogin :: CodeserverURI -> Cli UserInfo authLogin host = do Cli.Env {credentialManager} <- ask httpClient <- liftIO HTTP.getGlobalManager let discoveryURI = discoveryURIForCodeserver host - doc@(DiscoveryDoc {authorizationEndpoint, tokenEndpoint}) <- - Cli.ioE (fetchDiscoveryDoc discoveryURI) \err -> do - Cli.returnEarly (Output.CredentialFailureMsg err) + doc@(DiscoveryDoc {authorizationEndpoint, tokenEndpoint}) <- bailOnFailure (fetchDiscoveryDoc discoveryURI) Debug.debugM Debug.Auth "Discovery Doc" doc authResultVar <- liftIO (newEmptyMVar @(Either CredentialFailure Tokens)) -- The redirect_uri depends on the port, so we need to spin up the server first, but @@ -71,45 +72,53 @@ authLogin host = do -- and it all works out fine. redirectURIVar <- liftIO newEmptyMVar (verifier, challenge, state) <- generateParams - let codeHandler code mayNextURI = do + let codeHandler :: (Code -> Maybe URI -> (Response -> IO ResponseReceived) -> IO ResponseReceived) + codeHandler code mayNextURI respond = do redirectURI <- readMVar redirectURIVar result <- exchangeCode httpClient tokenEndpoint code verifier redirectURI - putMVar authResultVar result - case result of + respReceived <- case result of Left err -> do Debug.debugM Debug.Auth "Auth Error" err - pure $ Wai.responseLBS internalServerError500 [] "Something went wrong, please try again." + respond $ Wai.responseLBS internalServerError500 [] "Something went wrong, please try again." Right _ -> case mayNextURI of - Nothing -> pure $ Wai.responseLBS found302 [] "Authorization successful. You may close this page and return to UCM." + Nothing -> respond $ Wai.responseLBS found302 [] "Authorization successful. You may close this page and return to UCM." Just nextURI -> - pure $ + respond $ Wai.responseLBS found302 [("LOCATION", BSC.pack $ show @URI nextURI)] "Authorization successful. You may close this page and return to UCM." - tokens <- + -- Wait until we've responded to the browser before putting the result, + -- otherwise the server will shut down prematurely. + putMVar authResultVar result + pure respReceived + tokens@(Tokens {accessToken}) <- Cli.with (Warp.withApplication (pure $ authTransferServer codeHandler)) \port -> do let redirectURI = "http://localhost:" <> show port <> "/redirect" liftIO (putMVar redirectURIVar redirectURI) let authorizationKickoff = authURI authorizationEndpoint redirectURI state challenge void . liftIO $ Web.openBrowser (show authorizationKickoff) Cli.respond . Output.InitiateAuthFlow $ authorizationKickoff - Cli.ioE (readMVar authResultVar) \err -> do - Cli.returnEarly (Output.CredentialFailureMsg err) + bailOnFailure (readMVar authResultVar) + userInfo <- bailOnFailure (getUserInfo doc accessToken) let codeserverId = codeserverIdFromCodeserverURI host - let creds = codeserverCredentials discoveryURI tokens + let creds = codeserverCredentials discoveryURI tokens userInfo liftIO (saveCredentials credentialManager codeserverId creds) Cli.respond Output.Success + pure userInfo + where + bailOnFailure action = Cli.ioE action \err -> do + Cli.returnEarly (Output.CredentialFailureMsg err) -- | A server in the format expected for a Wai Application -- This is a temporary server which is spun up only until we get a code back from the -- auth server. 
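-- A sketch (editor's illustration) of the ordering fix in codeHandler above:
-- the redirect handler now sends the HTTP response *before* signalling the
-- main thread, so the temporary server (authTransferServer below) is not torn
-- down while the browser is still waiting. The `respond` argument and the
-- result/response types are placeholders, not the real Wai types.
import Control.Concurrent.MVar (MVar, putMVar)

handleRedirect :: MVar result -> (response -> IO receipt) -> result -> response -> IO receipt
handleRedirect done respond result response = do
  receipt <- respond response -- the browser gets its page first
  putMVar done result -- only then unblock the thread waiting on `done`
  pure receipt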
-authTransferServer :: (Code -> Maybe URI -> IO Response) -> Request -> (Response -> IO ResponseReceived) -> IO ResponseReceived +authTransferServer :: (Code -> Maybe URI -> (Response -> IO ResponseReceived) -> IO ResponseReceived) -> Request -> (Response -> IO ResponseReceived) -> IO ResponseReceived authTransferServer callback req respond = case (requestMethod req, pathInfo req, getQueryParams req) of ("GET", ["redirect"], (Just code, maybeNextURI)) -> do - callback code maybeNextURI >>= respond + callback code maybeNextURI respond _ -> respond (responseLBS status404 [] "Not Found") where getQueryParams req = do @@ -137,7 +146,7 @@ addQueryParam key val uri = newParam = (key, Just val) in uri {uriQuery = BSC.unpack $ renderQuery True (existingQuery <> [newParam])} -generateParams :: MonadIO m => m (PKCEVerifier, PKCEChallenge, OAuthState) +generateParams :: (MonadIO m) => m (PKCEVerifier, PKCEChallenge, OAuthState) generateParams = liftIO $ do verifier <- BE.convertToBase @ByteString BE.Base64URLUnpadded <$> getRandomBytes 50 let digest = Crypto.hashWith Crypto.SHA256 verifier @@ -147,7 +156,7 @@ generateParams = liftIO $ do -- | Exchange an authorization code for tokens. exchangeCode :: - MonadIO m => + (MonadIO m) => HTTP.Manager -> URI -> Code -> diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput/MetadataUtils.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput/MetadataUtils.hs index 3e1d8d62d..d988843be 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput/MetadataUtils.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput/MetadataUtils.hs @@ -78,7 +78,7 @@ manageLinks :: [Path.HQSplit'] -> [HQ.HashQualified Name] -> ( forall r. - Ord r => + (Ord r) => (r, Metadata.Type, Metadata.Value) -> Branch.Star r NameSegment -> Branch.Star r NameSegment diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput/TermResolution.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput/TermResolution.hs index f2e8c1e1f..459d1e28d 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput/TermResolution.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput/TermResolution.hs @@ -120,5 +120,5 @@ resolveMainRef main = do lookupTermRefWithType codebase main >>= \case [(rf, ty)] | Typechecker.fitsScheme ty mainType -> pure (rf, ppe) - | otherwise -> Cli.returnEarly (BadMainFunction smain ty ppe [mainType]) + | otherwise -> Cli.returnEarly (BadMainFunction "main" smain ty ppe [mainType]) _ -> Cli.returnEarly (NoMainFunction smain ppe [mainType]) diff --git a/unison-cli/src/Unison/Codebase/Editor/HandleInput/Update.hs b/unison-cli/src/Unison/Codebase/Editor/HandleInput/Update.hs index f55a5cfad..2f65ebe67 100644 --- a/unison-cli/src/Unison/Codebase/Editor/HandleInput/Update.hs +++ b/unison-cli/src/Unison/Codebase/Editor/HandleInput/Update.hs @@ -53,7 +53,7 @@ import qualified Unison.Reference as Reference import Unison.Referent (Referent) import qualified Unison.Referent as Referent import qualified Unison.Result as Result -import Unison.Runtime.IOSource (isTest) +import qualified Unison.Runtime.IOSource as IOSource import qualified Unison.Sqlite as Sqlite import Unison.Symbol (Symbol) import qualified Unison.Syntax.Name as Name (toVar, unsafeFromVar) @@ -171,7 +171,7 @@ handleUpdate input optionalPatch requestedNames = do step1 p (_, r, r') = Patch.updateType r (TypeEdit.Replace r') p step2 p (_, r, r') = Patch.updateTerm typing r (TermEdit.Replace r' (typing r r')) p (p, seg) = Path.toAbsoluteSplit currentPath' patchPath - updatePatches :: Monad m => Branch0 m -> 
m (Branch0 m) + updatePatches :: (Monad m) => Branch0 m -> m (Branch0 m) updatePatches = Branch.modifyPatches seg updatePatch pure (updatePatch ye'ol'Patch, updatePatches, p) @@ -202,7 +202,8 @@ handleUpdate input optionalPatch requestedNames = do Cli.syncRoot case patchPath of Nothing -> "update.nopatch" Just p -> - p & Path.unsplit' + p + & Path.unsplit' & Path.resolve @_ @_ @Path.Absolute currentPath' & tShow @@ -564,7 +565,7 @@ getSlurpResultForUpdate requestedNames slurpCheckNames = do pure slurp1 -rewriteTermReferences :: Ord v => Map TermReference TermReferenceId -> Term v a -> Term v a +rewriteTermReferences :: (Ord v) => Map TermReference TermReferenceId -> Term v a -> Term v a rewriteTermReferences mapping = ABT.rebuildUp \term -> case term of @@ -577,7 +578,7 @@ rewriteTermReferences mapping = -- updates the namespace for adding `slurp` doSlurpAdds :: forall m. - Monad m => + (Monad m) => SlurpComponent -> TypecheckedUnisonFile Symbol Ann -> (Branch0 m -> Branch0 m) @@ -589,7 +590,7 @@ doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions) SC.terms slurp <> UF.constructorsForDecls (SC.types slurp) uf names = UF.typecheckedToNames uf tests = Set.fromList $ fst <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf) - (isTestType, isTestValue) = isTest + (isTestType, isTestValue) = IOSource.isTest md v = if Set.member v tests then Metadata.singleton isTestType isTestValue @@ -621,7 +622,7 @@ doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions) errorMissingVar v = error $ "expected to find " ++ show v ++ " in " ++ show uf doSlurpUpdates :: - Monad m => + (Monad m) => [(Name, TypeReference, TypeReference)] -> [(Name, TermReference, TermReference)] -> [(Name, Referent)] -> diff --git a/unison-cli/src/Unison/Codebase/Editor/Input.hs b/unison-cli/src/Unison/Codebase/Editor/Input.hs index 8f1a860ae..a3d97a215 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Input.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Input.hs @@ -129,6 +129,7 @@ data Input | ResolveTypeNameI Path.HQSplit' | -- edits stuff: LoadI (Maybe FilePath) + | ClearI | AddI (Set Name) | PreviewAddI (Set Name) | UpdateI OptionalPatch (Set Name) @@ -155,13 +156,13 @@ data Input | -- make a standalone binary file MakeStandaloneI String (HQ.HashQualified Name) | -- execute an IO thunk using scheme - ExecuteSchemeI (HQ.HashQualified Name) + ExecuteSchemeI (HQ.HashQualified Name) [String] | -- compile to a scheme file CompileSchemeI String (HQ.HashQualified Name) | -- generate scheme libraries GenSchemeLibsI - | -- fetch scheme compiler - FetchSchemeCompilerI + | -- fetch scheme compiler from a given username + FetchSchemeCompilerI String | TestI TestInput | -- metadata -- `link metadata definitions` (adds metadata to all of `definitions`) @@ -269,9 +270,9 @@ data DeleteOutput deriving stock (Eq, Show) data DeleteTarget - = DeleteTarget'TermOrType DeleteOutput Path.HQSplit' - | DeleteTarget'Term DeleteOutput Path.HQSplit' - | DeleteTarget'Type DeleteOutput Path.HQSplit' + = DeleteTarget'TermOrType DeleteOutput [Path.HQSplit'] + | DeleteTarget'Term DeleteOutput [Path.HQSplit'] + | DeleteTarget'Type DeleteOutput [Path.HQSplit'] | DeleteTarget'Branch Insistence (Maybe Path.Split') | DeleteTarget'Patch Path.Split' deriving stock (Eq, Show) diff --git a/unison-cli/src/Unison/Codebase/Editor/Output.hs b/unison-cli/src/Unison/Codebase/Editor/Output.hs index fb67f9ef7..7c8cf0519 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Output.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Output.hs @@ -1,7 
+1,7 @@ -{-# LANGUAGE PatternSynonyms #-} - module Unison.Codebase.Editor.Output ( Output (..), + DisplayDefinitionsOutput (..), + WhichBranchEmpty (..), NumberedOutput (..), NumberedArgs, ListDetailed, @@ -54,7 +54,7 @@ import Unison.Parser.Ann (Ann) import Unison.Prelude import qualified Unison.PrettyPrintEnv as PPE import qualified Unison.PrettyPrintEnvDecl as PPE -import Unison.Reference (Reference) +import Unison.Reference (Reference, TermReference) import qualified Unison.Reference as Reference import Unison.Referent (Referent) import Unison.Server.Backend (ShallowListEntry (..)) @@ -121,9 +121,19 @@ data Output | SourceLoadFailed String | -- No main function, the [Type v Ann] are the allowed types NoMainFunction String PPE.PrettyPrintEnv [Type Symbol Ann] - | -- Main function found, but has improper type - BadMainFunction String (Type Symbol Ann) PPE.PrettyPrintEnv [Type Symbol Ann] - | BranchEmpty (Either ShortCausalHash Path') + | -- | Function found, but has improper type + -- Note: the constructor name is misleading here; we weren't necessarily looking for a "main". + BadMainFunction + String + -- ^ what we were trying to do (e.g. "run", "io.test") + String + -- ^ name of function + (Type Symbol Ann) + -- ^ bad type of function + PPE.PrettyPrintEnv + [Type Symbol Ann] + -- ^ acceptable type(s) of function + | BranchEmpty WhichBranchEmpty | BranchNotEmpty Path' | LoadPullRequest ReadRemoteNamespace ReadRemoteNamespace Path' Path' Path' Path' | CreatedNewBranch Path.Absolute @@ -146,6 +156,7 @@ data Output | BranchNotFound Path' | EmptyPush Path' | NameNotFound Path.HQSplit' + | NamesNotFound [Name] | PatchNotFound Path.Split' | TypeNotFound Path.HQSplit' | TermNotFound Path.HQSplit' @@ -203,11 +214,7 @@ data Output | Typechecked SourceName PPE.PrettyPrintEnv SlurpResult (UF.TypecheckedUnisonFile Symbol Ann) | DisplayRendered (Maybe FilePath) (P.Pretty P.ColorText) | -- "display" definitions, possibly to a FilePath on disk (e.g. editing) - DisplayDefinitions - (Maybe FilePath) - PPE.PrettyPrintEnvDecl - (Map Reference (DisplayObject () (Decl Symbol Ann))) - (Map Reference (DisplayObject (Type Symbol Ann) (Term Symbol Ann))) + DisplayDefinitions DisplayDefinitionsOutput | TestIncrementalOutputStart PPE.PrettyPrintEnv (Int, Int) Reference (Term Symbol Ann) | TestIncrementalOutputEnd PPE.PrettyPrintEnv (Int, Int) Reference (Term Symbol Ann) | TestResults @@ -277,6 +284,21 @@ data Output | IntegrityCheck IntegrityResult | DisplayDebugNameDiff NameChanges | DisplayDebugCompletions [Completion.Completion] + | ClearScreen + | PulledEmptyBranch ReadRemoteNamespace + +data DisplayDefinitionsOutput = DisplayDefinitionsOutput + { isTest :: TermReference -> Bool, + outputFile :: Maybe FilePath, + prettyPrintEnv :: PPE.PrettyPrintEnvDecl, + terms :: Map Reference (DisplayObject (Type Symbol Ann) (Term Symbol Ann)), + types :: Map Reference (DisplayObject () (Decl Symbol Ann)) + } + +-- | A branch was empty. But how do we refer to that branch? 
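-- A toy illustration (editor's sketch, not part of this change) of the
-- refactor above: DisplayDefinitions used to take several positional
-- arguments of similar shapes, which are easy to mix up at call sites;
-- bundling them into a record with named fields (DisplayDefinitionsOutput)
-- makes construction self-describing. Report below is a made-up stand-in.
import Data.Map (Map)
import qualified Data.Map as Map

data Report k a b = Report
  { outputFile :: Maybe FilePath,
    terms :: Map k a,
    types :: Map k b,
    isTest :: k -> Bool -- cf. the isTest predicate threaded through above
  }

emptyReport :: Report k a b
emptyReport =
  Report
    { outputFile = Nothing,
      terms = Map.empty,
      types = Map.empty,
      isTest = const False
    }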
+data WhichBranchEmpty + = WhichBranchEmptyHash ShortCausalHash + | WhichBranchEmptyPath Path' data ShareError = ShareErrorCheckAndSetPush Sync.CheckAndSetPushError @@ -344,6 +366,7 @@ isFailure o = case o of BadNamespace {} -> True BranchNotFound {} -> True NameNotFound {} -> True + NamesNotFound _ -> True PatchNotFound {} -> True TypeNotFound {} -> True TypeNotFound' {} -> True @@ -369,7 +392,7 @@ isFailure o = case o of EvaluationFailure {} -> True Evaluated {} -> False Typechecked {} -> False - DisplayDefinitions _ _ m1 m2 -> null m1 && null m2 + DisplayDefinitions DisplayDefinitionsOutput {terms, types} -> null terms && null types DisplayRendered {} -> False TestIncrementalOutputStart {} -> False TestIncrementalOutputEnd {} -> False @@ -424,6 +447,8 @@ isFailure o = case o of ViewOnShare {} -> False DisplayDebugCompletions {} -> False DisplayDebugNameDiff {} -> False + ClearScreen -> False + PulledEmptyBranch {} -> False isNumberedFailure :: NumberedOutput -> Bool isNumberedFailure = \case diff --git a/unison-cli/src/Unison/Codebase/Editor/Output/BranchDiff.hs b/unison-cli/src/Unison/Codebase/Editor/Output/BranchDiff.hs index b231559f4..c468cd0c4 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Output/BranchDiff.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Output/BranchDiff.hs @@ -67,7 +67,8 @@ data BranchDiffOutput v a = BranchDiffOutput isEmpty :: BranchDiffOutput v a -> Bool isEmpty BranchDiffOutput {..} = - null updatedTypes && null updatedTerms + null updatedTypes + && null updatedTerms && null newTypeConflicts && null newTermConflicts && null resolvedTypeConflicts @@ -172,7 +173,7 @@ type PatchDisplay = (Name, P.PatchDiff) toOutput :: forall m v a. - Monad m => + (Monad m) => (Referent -> m (Maybe (Type v a))) -> (Reference -> m (Maybe (DeclOrBuiltin v a))) -> Int -> @@ -194,7 +195,7 @@ toOutput -- any of the old references associated with the name -- removes: not-attached metadata that had been attached to any of -- the old references associated with the name - getNewMetadataDiff :: Ord r => Bool -> DiffSlice r -> Name -> Set r -> r -> MetadataDiff Metadata.Value + getNewMetadataDiff :: (Ord r) => Bool -> DiffSlice r -> Name -> Set r -> r -> MetadataDiff Metadata.Value getNewMetadataDiff hidePropagatedMd s n rs_old r_new = let old_metadatas :: [Set Metadata.Value] = toList . R.toMultimap . R.restrictDom rs_old . 
R3.lookupD2 n $ @@ -215,7 +216,7 @@ toOutput -- must not have been removed and the name must not have been removed or added -- or updated 😅 -- "getMetadataUpdates" = a defn has been updated via change of metadata - getMetadataUpdates :: Ord r => DiffSlice r -> Map Name (Set r, Set r) + getMetadataUpdates :: (Ord r) => DiffSlice r -> Map Name (Set r, Set r) getMetadataUpdates s = Map.fromList [ (n, (Set.singleton r, Set.singleton r)) -- the reference is unchanged @@ -237,7 +238,7 @@ toOutput v /= isPropagatedValue ] - let isSimpleUpdate, isNewConflict, isResolvedConflict :: Eq r => (Set r, Set r) -> Bool + let isSimpleUpdate, isNewConflict, isResolvedConflict :: (Eq r) => (Set r, Set r) -> Bool isSimpleUpdate (old, new) = Set.size old == 1 && Set.size new == 1 isNewConflict (_old, new) = Set.size new > 1 -- should already be the case that old /= new isResolvedConflict (old, new) = Set.size old > 1 && Set.size new == 1 @@ -384,7 +385,8 @@ toOutput for typeAdds $ \(r, nsmd) -> do hqmds :: [(HashQualified Name, [MetadataDisplay v a])] <- for nsmd $ \(n, mdRefs) -> - (,) <$> pure (Names.hqTypeName hqLen names2 n r) + (,) + <$> pure (Names.hqTypeName hqLen names2 n r) <*> fillMetadata ppe mdRefs (hqmds,r,) <$> declOrBuiltin r @@ -401,7 +403,8 @@ toOutput ] for termAdds $ \(r, nsmd) -> do hqmds <- for nsmd $ \(n, mdRefs) -> - (,) <$> pure (Names.hqTermName hqLen names2 n r) + (,) + <$> pure (Names.hqTermName hqLen names2 n r) <*> fillMetadata ppe mdRefs (hqmds,r,) <$> typeOf r @@ -413,18 +416,22 @@ toOutput removedTypes :: [RemovedTypeDisplay v a] <- let typeRemoves :: [(Reference, [Name])] = sortOn snd $ - Map.toList . fmap toList . R.toMultimap . BranchDiff.tallremoves $ typesDiff + Map.toList . fmap toList . R.toMultimap . BranchDiff.tallremoves $ + typesDiff in for typeRemoves $ \(r, ns) -> - (,,) <$> pure ((\n -> Names.hqTypeName hqLen names1 n r) <$> ns) + (,,) + <$> pure ((\n -> Names.hqTypeName hqLen names1 n r) <$> ns) <*> pure r <*> declOrBuiltin r removedTerms :: [RemovedTermDisplay v a] <- let termRemoves :: [(Referent, [Name])] = sortOn snd $ - Map.toList . fmap toList . R.toMultimap . BranchDiff.tallremoves $ termsDiff + Map.toList . fmap toList . R.toMultimap . 
BranchDiff.tallremoves $ + termsDiff in for termRemoves $ \(r, ns) -> - (,,) <$> pure ((\n -> Names.hqTermName hqLen names1 n r) <$> ns) + (,,) + <$> pure ((\n -> Names.hqTermName hqLen names1 n r) <$> ns) <*> pure r <*> typeOf r @@ -436,7 +443,8 @@ toOutput let renamedTerm :: Map Referent (Set Name, Set Name) -> m [RenameTermDisplay v a] renamedTerm renames = for (sortOn snd $ Map.toList renames) $ \(r, (ol'names, new'names)) -> - (,,,) <$> pure r + (,,,) + <$> pure r <*> typeOf r <*> pure (Set.map (\n -> Names.hqTermName hqLen names1 n r) ol'names) <*> pure (Set.map (\n -> Names.hqTermName hqLen names2 n r) new'names) @@ -444,7 +452,8 @@ toOutput let renamedType :: Map Reference (Set Name, Set Name) -> m [RenameTypeDisplay v a] renamedType renames = for (sortOn snd $ Map.toList renames) $ \(r, (ol'names, new'names)) -> - (,,,) <$> pure r + (,,,) + <$> pure r <*> declOrBuiltin r <*> pure (Set.map (\n -> Names.hqTypeName hqLen names1 n r) ol'names) <*> pure (Set.map (\n -> Names.hqTypeName hqLen names2 n r) new'names) @@ -472,13 +481,13 @@ toOutput renamedTerms } where - fillMetadata :: Traversable t => PPE.PrettyPrintEnv -> t Metadata.Value -> m (t (MetadataDisplay v a)) + fillMetadata :: (Traversable t) => PPE.PrettyPrintEnv -> t Metadata.Value -> m (t (MetadataDisplay v a)) fillMetadata ppe = traverse $ -- metadata values are all terms \(Referent.Ref -> mdRef) -> let name = PPE.termName ppe mdRef in (name,mdRef,) <$> typeOf mdRef - getMetadata :: Ord r => r -> Name -> R3.Relation3 r Name Metadata.Value -> Set Metadata.Value + getMetadata :: (Ord r) => r -> Name -> R3.Relation3 r Name Metadata.Value -> Set Metadata.Value getMetadata r n = R.lookupDom n . R3.lookupD1 r - getAddedMetadata :: Ord r => r -> Name -> BranchDiff.DiffSlice r -> Set Metadata.Value + getAddedMetadata :: (Ord r) => r -> Name -> BranchDiff.DiffSlice r -> Set Metadata.Value getAddedMetadata r n slice = getMetadata r n $ BranchDiff.taddedMetadata slice diff --git a/unison-cli/src/Unison/Codebase/Editor/Propagate.hs b/unison-cli/src/Unison/Codebase/Editor/Propagate.hs index 9f6897540..61a56d03e 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Propagate.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Propagate.hs @@ -559,7 +559,7 @@ typecheckFile :: Codebase m Symbol Ann -> [Type Symbol Ann] -> UF.UnisonFile Symbol Ann -> - Sqlite.Transaction (Result.Result (Seq (Result.Note Symbol Ann)) (Either Names (UF.TypecheckedUnisonFile Symbol Ann))) + Sqlite.Transaction (Result.Result (Seq (Result.Note Symbol Ann)) (Either x (UF.TypecheckedUnisonFile Symbol Ann))) typecheckFile codebase ambient file = do typeLookup <- Codebase.typeLookupForDependencies codebase (UF.dependencies file) pure . fmap Right $ synthesizeFile' ambient (typeLookup <> Builtin.typeLookup) file @@ -582,7 +582,7 @@ unhashTypeComponent' h = where reshuffle (r, (v, decl)) = (v, (r, decl)) -applyDeprecations :: Applicative m => Patch -> Branch0 m -> Branch0 m +applyDeprecations :: (Applicative m) => Patch -> Branch0 m -> Branch0 m applyDeprecations patch = deleteDeprecatedTerms deprecatedTerms . deleteDeprecatedTypes deprecatedTypes @@ -604,7 +604,7 @@ applyDeprecations patch = -- | Things in the patch are not marked as propagated changes, but every other -- definition that is created by the `Edits` which is passed in is marked as -- a propagated change. -applyPropagate :: forall m. Applicative m => Patch -> Edits Symbol -> Branch0 m -> Branch0 m +applyPropagate :: forall m. 
(Applicative m) => Patch -> Edits Symbol -> Branch0 m -> Branch0 m applyPropagate patch Edits {newTerms, termReplacements, typeReplacements, constructorReplacements} = do let termTypes = Map.map (Hashing.typeToReference . snd) newTerms -- recursively update names and delete deprecated definitions @@ -647,7 +647,7 @@ applyPropagate patch Edits {newTerms, termReplacements, typeReplacements, constr Star3.replaceFacts replaceType typeEdits _types updateMetadatas :: - Ord r => + (Ord r) => Star3.Star3 r NameSegment Metadata.Type (Metadata.Type, Metadata.Value) -> Star3.Star3 r NameSegment Metadata.Type (Metadata.Type, Metadata.Value) updateMetadatas s = Star3.mapD3 go s @@ -694,7 +694,7 @@ applyPropagate patch Edits {newTerms, termReplacements, typeReplacements, constr -- 2. Are not themselves edited in the given patch. -- 3. Pass the given predicate. computeDirty :: - Monad m => + (Monad m) => (Reference -> m (Set Reference)) -> -- eg Codebase.dependents codebase Patch -> (Reference -> Bool) -> diff --git a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs index 837bd474a..b1866a64f 100644 --- a/unison-cli/src/Unison/Codebase/Editor/Slurp.hs +++ b/unison-cli/src/Unison/Codebase/Editor/Slurp.hs @@ -22,8 +22,8 @@ import Unison.Prelude import Unison.Referent (Referent) import qualified Unison.Referent as Referent import qualified Unison.Referent' as Referent -import qualified Unison.Syntax.Name as Name (toText, unsafeFromVar) import Unison.Symbol (Symbol) +import qualified Unison.Syntax.Name as Name (toText, unsafeFromVar) import qualified Unison.UnisonFile as UF import qualified Unison.UnisonFile.Names as UF import qualified Unison.Util.Map as Map @@ -156,12 +156,12 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars = \case constructorsUnderConsideration = Map.toList (UF.dataDeclarationsId' uf) <> (fmap . fmap . fmap) DD.toDataDecl (Map.toList (UF.effectDeclarationsId' uf)) - & filter (\(typeV, _) -> Set.member (TypeVar typeV) involvedVars) - & concatMap (\(_typeV, (_refId, decl)) -> DD.constructors' decl) - & fmap - ( \(_ann, v, _typ) -> Name.unsafeFromVar v - ) - & Set.fromList + & filter (\(typeV, _) -> Set.member (TypeVar typeV) involvedVars) + & concatMap (\(_typeV, (_refId, decl)) -> DD.constructors' decl) + & fmap + ( \(_ann, v, _typ) -> Name.unsafeFromVar v + ) + & Set.fromList deprecatedConstructors :: Set Name deprecatedConstructors = @@ -215,7 +215,7 @@ computeSelfStatuses vars varReferences codebaseNames = [r] | LD.referent r == ld -> Duplicated _ -> Updated -computeDepStatuses :: Ord k => Map k (Set k) -> Map k DefnStatus -> Map k DepStatus +computeDepStatuses :: (Ord k) => Map k (Set k) -> Map k DefnStatus -> Map k DepStatus computeDepStatuses varDeps selfStatuses = selfStatuses & Map.mapWithKey \name status -> do varDeps @@ -456,11 +456,11 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames selfStatu (Rel.mapRan Referent.Ref $ Names.types fileNames) (SC.types duplicates) - varFromName :: Var v => Name -> v + varFromName :: (Var v) => Name -> v varFromName name = Var.named (Name.toText name) -- | Sort out a set of variables by whether it is a term or type. 
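-- A self-contained sketch (editor's illustration) of the foldMap-based
-- partitioning that partitionVars below performs: each tagged variable maps
-- to a one-sided component, and the Monoid instance unions the pieces.
-- Tagged and Component are simplified stand-ins for TaggedVar/SlurpComponent.
import Data.Set (Set)
import qualified Data.Set as Set

data Tagged v = IsType v | IsTerm v

data Component v = Component {typeVars :: Set v, termVars :: Set v}

instance Ord v => Semigroup (Component v) where
  Component a b <> Component c d = Component (Set.union a c) (Set.union b d)

instance Ord v => Monoid (Component v) where
  mempty = Component Set.empty Set.empty

partitionTagged :: (Ord v, Foldable f) => f (Tagged v) -> Component v
partitionTagged = foldMap go
  where
    go (IsType v) = Component (Set.singleton v) Set.empty
    go (IsTerm v) = Component Set.empty (Set.singleton v)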
-partitionVars :: Foldable f => f TaggedVar -> SlurpComponent +partitionVars :: (Foldable f) => f TaggedVar -> SlurpComponent partitionVars = foldMap ( \case diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs index e9ec534b8..348a88b36 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpComponent.hs @@ -126,7 +126,7 @@ closeWithDependencies uf inputs = seenDefns {ctors = constructorDeps} typeNames :: Map Reference Symbol typeNames = invert (fst <$> UF.dataDeclarations' uf) <> invert (fst <$> UF.effectDeclarations' uf) - invert :: forall k v. Ord k => Ord v => Map k v -> Map v k + invert :: forall k v. (Ord k) => (Ord v) => Map k v -> Map v k invert m = Map.fromList (swap <$> Map.toList m) fromTypes :: Set Symbol -> SlurpComponent diff --git a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs index 0e55966ab..e91b935fe 100644 --- a/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs +++ b/unison-cli/src/Unison/Codebase/Editor/SlurpResult.hs @@ -115,7 +115,7 @@ prettyStatus s = case s of type IsPastTense = Bool -prettyVar :: Var v => v -> P.Pretty P.ColorText +prettyVar :: (Var v) => v -> P.Pretty P.ColorText prettyVar = P.text . Var.name aliasesToShow :: Int @@ -197,8 +197,8 @@ pretty isPast ppe sr = Just (_, _, _, ty) -> ( plus <> P.bold (prettyVar v), Just $ ": " <> P.indentNAfterNewline 2 (TP.pretty ppe ty) - ) : - ((,Nothing) <$> aliases) + ) + : ((,Nothing) <$> aliases) where aliases = fmap (P.indentN 2) . aliasesMessage . Map.lookup v $ termAlias sr ok _ _ sc | null (SC.terms sc) && null (SC.types sc) = mempty diff --git a/unison-cli/src/Unison/Codebase/Editor/TodoOutput.hs b/unison-cli/src/Unison/Codebase/Editor/TodoOutput.hs index 3639dfd5f..56e67825c 100644 --- a/unison-cli/src/Unison/Codebase/Editor/TodoOutput.hs +++ b/unison-cli/src/Unison/Codebase/Editor/TodoOutput.hs @@ -34,7 +34,7 @@ data TodoOutput v a = TodoOutput editConflicts :: Patch } -labeledDependencies :: Ord v => TodoOutput v a -> Set LabeledDependency +labeledDependencies :: (Ord v) => TodoOutput v a -> Set LabeledDependency labeledDependencies TodoOutput {..} = Set.fromList ( -- term refs diff --git a/unison-cli/src/Unison/Codebase/Editor/UriParser.hs b/unison-cli/src/Unison/Codebase/Editor/UriParser.hs index e1420459b..cd01f605e 100644 --- a/unison-cli/src/Unison/Codebase/Editor/UriParser.hs +++ b/unison-cli/src/Unison/Codebase/Editor/UriParser.hs @@ -1,5 +1,3 @@ -{-# LANGUAGE OverloadedStrings #-} - module Unison.Codebase.Editor.UriParser ( repoPath, writeGitRepo, @@ -17,12 +15,14 @@ import qualified Data.Text as Text import Data.Void import qualified Text.Megaparsec as P import qualified Text.Megaparsec.Char as C +import qualified U.Util.Base32Hex as Base32Hex import Unison.Codebase.Editor.RemoteRepo ( ReadGitRemoteNamespace (..), ReadGitRepo (..), ReadRemoteNamespace (..), ReadShareRemoteNamespace (..), ShareCodeserver (DefaultCodeserver), + ShareUserHandle (..), WriteGitRemotePath (..), WriteGitRepo (..), WriteRemotePath (..), @@ -31,9 +31,7 @@ import Unison.Codebase.Editor.RemoteRepo import Unison.Codebase.Path (Path (..)) import qualified Unison.Codebase.Path as Path import Unison.Codebase.ShortCausalHash (ShortCausalHash (..)) -import qualified Unison.Hash as Hash import Unison.NameSegment (NameSegment (..)) -import qualified Unison.NameSegment as NameSegment import Unison.Prelude import qualified 
Unison.Syntax.Lexer import qualified Unison.Util.Pretty as P @@ -91,7 +89,7 @@ writeShareRemotePath = P.label "write share remote path" $ WriteShareRemotePath <$> pure DefaultCodeserver - <*> (NameSegment.toText <$> nameSegment) + <*> shareUserHandle <*> (Path.fromList <$> P.many (C.char '.' *> nameSegment)) -- >>> P.parseMaybe readShareRemoteNamespace ".unisonweb.base._releases.M4" @@ -104,9 +102,21 @@ readShareRemoteNamespace = do ReadShareRemoteNamespace <$> pure DefaultCodeserver -- <*> sch <- P.optional shortBranchHash - <*> (NameSegment.toText <$> nameSegment) + <*> shareUserHandle <*> (Path.fromList <$> P.many (C.char '.' *> nameSegment)) +-- | We're lax in our share user rules here, Share is the source of truth +-- for this stuff and can provide better error messages if required. +-- +-- >>> P.parseMaybe shareUserHandle "unison" +-- Just (ShareUserHandle {shareUserHandleToText = "unison"}) +-- +-- >>> P.parseMaybe shareUserHandle "unison-1337" +-- Just (ShareUserHandle {shareUserHandleToText = "unison-1337"}) +shareUserHandle :: P ShareUserHandle +shareUserHandle = do + ShareUserHandle . Text.pack <$> P.some (P.satisfy \c -> isAlphaNum c || c == '-' || c == '_') + -- >>> P.parseMaybe readGitRemoteNamespace "git(user@server:project.git:branch)#asdf" -- >>> P.parseMaybe readGitRemoteNamespace "git(user@server:project.git:branch)#asdf." -- >>> P.parseMaybe readGitRemoteNamespace "git(user@server:project.git:branch)" @@ -299,7 +309,8 @@ parseGitProtocol = parseHostInfo :: P HostInfo parseHostInfo = P.label "parseHostInfo" $ - HostInfo <$> parseHost + HostInfo + <$> parseHost <*> ( P.optional $ do void $ C.char ':' P.takeWhile1P (Just "digits") isDigit @@ -355,7 +366,8 @@ absolutePath = do nameSegment :: P NameSegment nameSegment = NameSegment . Text.pack - <$> ( (:) <$> P.satisfy Unison.Syntax.Lexer.wordyIdStartChar + <$> ( (:) + <$> P.satisfy Unison.Syntax.Lexer.wordyIdStartChar <*> P.many (P.satisfy Unison.Syntax.Lexer.wordyIdChar) ) @@ -368,4 +380,4 @@ shortCausalHash :: P ShortCausalHash shortCausalHash = P.label "short causal hash" $ do void $ C.char '#' ShortCausalHash - <$> P.takeWhile1P (Just "base32hex chars") (`elem` Hash.validBase32HexChars) + <$> P.takeWhile1P (Just "base32hex chars") (`elem` Base32Hex.validChars) diff --git a/unison-cli/src/Unison/Codebase/Editor/VersionParser.hs b/unison-cli/src/Unison/Codebase/Editor/VersionParser.hs index fc1c935e3..62d3d0e1a 100644 --- a/unison-cli/src/Unison/Codebase/Editor/VersionParser.hs +++ b/unison-cli/src/Unison/Codebase/Editor/VersionParser.hs @@ -42,6 +42,6 @@ defaultBaseLib = fmap makeNS $ release <|> unknown makeNS t = ReadShareRemoteNamespace { server = DefaultCodeserver, - repo = "unison", + repo = ShareUserHandle "unison", path = "public" Path.:< "base" Path.:< Path.fromText t } diff --git a/unison-cli/src/Unison/Codebase/TranscriptParser.hs b/unison-cli/src/Unison/Codebase/TranscriptParser.hs index 545860f63..4728921f0 100644 --- a/unison-cli/src/Unison/Codebase/TranscriptParser.hs +++ b/unison-cli/src/Unison/Codebase/TranscriptParser.hs @@ -177,7 +177,7 @@ type TranscriptRunner = withTranscriptRunner :: forall m r. - UnliftIO.MonadUnliftIO m => + (UnliftIO.MonadUnliftIO m) => UCMVersion -> Maybe FilePath -> (TranscriptRunner -> m r) -> @@ -334,7 +334,9 @@ run dir stanzas codebase runtime sbRuntime config ucmVersion baseURL = UnliftIO. pure $ Right QuitI Just (s, idx) -> do putStr $ - "\r⚙️ Processing stanza " ++ show idx ++ " of " + "\r⚙️ Processing stanza " + ++ show idx + ++ " of " ++ show (length stanzas) ++ "." 
IO.hFlush IO.stdout diff --git a/unison-cli/src/Unison/CommandLine.hs b/unison-cli/src/Unison/CommandLine.hs index fcaf55bc6..cec8da4f9 100644 --- a/unison-cli/src/Unison/CommandLine.hs +++ b/unison-cli/src/Unison/CommandLine.hs @@ -79,7 +79,7 @@ watchFileSystem q dir = do warnNote :: String -> String warnNote s = "⚠️ " <> s -backtick :: IsString s => P.Pretty s -> P.Pretty s +backtick :: (IsString s) => P.Pretty s -> P.Pretty s backtick s = P.group ("`" <> s <> "`") tip :: (ListLike s Char, IsString s) => P.Pretty s -> P.Pretty s @@ -173,11 +173,11 @@ prompt = "> " -- `plural [] "cat" "cats" = "cats"` -- `plural ["meow"] "cat" "cats" = "cat"` -- `plural ["meow", "meow"] "cat" "cats" = "cats"` -plural :: Foldable f => f a -> b -> b -> b +plural :: (Foldable f) => f a -> b -> b -> b plural items one other = case toList items of [_] -> one _ -> other -plural' :: Integral a => a -> b -> b -> b +plural' :: (Integral a) => a -> b -> b -> b plural' 1 one _other = one plural' _ _one other = other diff --git a/unison-cli/src/Unison/CommandLine/Completion.hs b/unison-cli/src/Unison/CommandLine/Completion.hs index 66c10cff8..15984121f 100644 --- a/unison-cli/src/Unison/CommandLine/Completion.hs +++ b/unison-cli/src/Unison/CommandLine/Completion.hs @@ -64,7 +64,7 @@ import Prelude hiding (readFile, writeFile) -- | A completion func for use with Haskeline haskelineTabComplete :: - MonadIO m => + (MonadIO m) => Map String IP.InputPattern -> Codebase m v a -> AuthenticatedHttpClient -> @@ -92,7 +92,7 @@ data CompletionType -- | The empty completor. noCompletions :: - MonadIO m => + (MonadIO m) => String -> Codebase m v a -> AuthenticatedHttpClient -> @@ -339,14 +339,14 @@ fixupCompletion q cs@(h : t) = else cs sharePathCompletion :: - MonadIO m => + (MonadIO m) => AuthenticatedHttpClient -> String -> m [Completion] sharePathCompletion = shareCompletion (NESet.singleton NamespaceCompletion) shareCompletion :: - MonadIO m => + (MonadIO m) => NESet CompletionType -> AuthenticatedHttpClient -> String -> @@ -391,7 +391,7 @@ shareCompletion completionTypes authHTTPClient str = ) & pure -fetchShareNamespaceInfo :: MonadIO m => AuthenticatedHttpClient -> Text -> Path.Path -> m (Maybe NamespaceListing) +fetchShareNamespaceInfo :: (MonadIO m) => AuthenticatedHttpClient -> Text -> Path.Path -> m (Maybe NamespaceListing) fetchShareNamespaceInfo (AuthenticatedHttpClient httpManager) userHandle path = runMaybeT do let uri = (Share.codeserverToURI Codeserver.defaultCodeserver) @@ -406,7 +406,7 @@ fetchShareNamespaceInfo (AuthenticatedHttpClient httpManager) userHandle path = resp <- either (const empty) pure $ fullResp MaybeT . pure . 
Aeson.decode @Server.NamespaceListing $ HTTP.responseBody resp -searchUsers :: MonadIO m => AuthenticatedHttpClient -> Text -> m [Text] +searchUsers :: (MonadIO m) => AuthenticatedHttpClient -> Text -> m [Text] searchUsers _ "" = pure [] searchUsers (AuthenticatedHttpClient httpManager) userHandlePrefix = fromMaybe [] <$> runMaybeT do diff --git a/unison-cli/src/Unison/CommandLine/DisplayValues.hs b/unison-cli/src/Unison/CommandLine/DisplayValues.hs index e1f817b39..506071956 100644 --- a/unison-cli/src/Unison/CommandLine/DisplayValues.hs +++ b/unison-cli/src/Unison/CommandLine/DisplayValues.hs @@ -39,7 +39,7 @@ import Unison.Var (Var) type Pretty = P.Pretty P.ColorText displayTerm :: - Monad m => + (Monad m) => PPE.PrettyPrintEnvDecl -> (Reference -> m (Maybe (Term Symbol ()))) -> (Referent -> m (Maybe (Type Symbol ()))) -> @@ -62,7 +62,7 @@ displayTerm = displayTerm' False type ElideUnit = Bool displayTerm' :: - Monad m => + (Monad m) => ElideUnit -> PPE.PrettyPrintEnvDecl -> (Reference -> m (Maybe (Term Symbol ()))) -> @@ -75,16 +75,16 @@ displayTerm' elideUnit pped terms typeOf eval types = \case tm@(Term.Apps' (Term.Constructor' (ConstructorReference typ _)) _) | typ == DD.docRef -> displayDoc pped terms typeOf eval types tm | typ == DD.doc2Ref -> do - -- Pretty.get (doc.formatConsole tm) - let tm' = - Term.app - () - (Term.ref () DD.prettyGetRef) - (Term.app () (Term.ref () DD.doc2FormatConsoleRef) tm) - tm <- eval tm' - case tm of - Nothing -> pure $ errMsg tm' - Just tm -> displayTerm pped terms typeOf eval types tm + -- Pretty.get (doc.formatConsole tm) + let tm' = + Term.app + () + (Term.ref () DD.prettyGetRef) + (Term.app () (Term.ref () DD.doc2FormatConsoleRef) tm) + tm <- eval tm' + case tm of + Nothing -> pure $ errMsg tm' + Just tm -> displayTerm pped terms typeOf eval types tm | typ == DD.prettyAnnotatedRef -> displayPretty pped terms typeOf eval types tm tm@(Term.Constructor' (ConstructorReference typ _)) | typ == DD.prettyAnnotatedRef -> displayPretty pped terms typeOf eval types tm @@ -110,7 +110,7 @@ displayTerm' elideUnit pped terms typeOf eval types = \case -- Pretty.Annotated ann (Either SpecialForm ConsoleText) displayPretty :: forall m. - Monad m => + (Monad m) => PPE.PrettyPrintEnvDecl -> (Reference -> m (Maybe (Term Symbol ()))) -> (Referent -> m (Maybe (Type Symbol ()))) -> diff --git a/unison-cli/src/Unison/CommandLine/Globbing.hs b/unison-cli/src/Unison/CommandLine/Globbing.hs index a52c82b24..5bb2b3176 100644 --- a/unison-cli/src/Unison/CommandLine/Globbing.hs +++ b/unison-cli/src/Unison/CommandLine/Globbing.hs @@ -74,7 +74,8 @@ expandGlobToNameSegments targets branch globPath = matchingTypes = matchingNamesInStar predicate (Branch._types branch) matchingNamesInStar :: (NameSegment -> Bool) -> Branch.Star a NameSegment -> [[NameSegment]] matchingNamesInStar predicate star = - star & Star3.d1 + star + & Star3.d1 & Relation.ran & Set.toList & filter predicate diff --git a/unison-cli/src/Unison/CommandLine/InputPattern.hs b/unison-cli/src/Unison/CommandLine/InputPattern.hs index be1a7108b..6962eebfe 100644 --- a/unison-cli/src/Unison/CommandLine/InputPattern.hs +++ b/unison-cli/src/Unison/CommandLine/InputPattern.hs @@ -51,7 +51,7 @@ data ArgumentType = ArgumentType -- | Generate completion suggestions for this argument type suggestions :: forall m v a. 
- MonadIO m => + (MonadIO m) => String -> Codebase m v a -> AuthenticatedHttpClient -> @@ -86,7 +86,8 @@ argType ip i = go (i, argTypes ip) -- The argument list spec is invalid if something follows a vararg go args = error $ - "Input pattern " <> show (patternName ip) + "Input pattern " + <> show (patternName ip) <> " has an invalid argument list: " <> show args diff --git a/unison-cli/src/Unison/CommandLine/InputPatterns.hs b/unison-cli/src/Unison/CommandLine/InputPatterns.hs index 111e7afe4..67e87b68f 100644 --- a/unison-cli/src/Unison/CommandLine/InputPatterns.hs +++ b/unison-cli/src/Unison/CommandLine/InputPatterns.hs @@ -168,6 +168,24 @@ load = _ -> Left (I.help load) ) +clear :: InputPattern +clear = + InputPattern + "clear" + [] + I.Visible + [] + ( P.wrapColumn2 + [ ( makeExample' clear, + "Clears the screen." + ) + ] + ) + ( \case + [] -> pure $ Input.ClearI + _ -> Left (I.help clear) + ) + add :: InputPattern add = InputPattern @@ -588,7 +606,7 @@ renameType = "`rename.type` takes two arguments, like `rename.type oldname newname`." ) -deleteGen :: Maybe String -> String -> (Path.HQSplit' -> DeleteTarget) -> InputPattern +deleteGen :: Maybe String -> String -> ([Path.HQSplit'] -> DeleteTarget) -> InputPattern deleteGen suffix target mkTarget = let cmd = maybe "delete" ("delete." <>) suffix info = @@ -613,11 +631,10 @@ deleteGen suffix target mkTarget = [(OnePlus, exactDefinitionTermQueryArg)] info ( \case - [query] -> first fromString $ do - p <- Path.parseHQSplit' query - pure $ Input.DeleteI (mkTarget p) - _ -> - Left . P.warnCallout $ P.wrap warn + [] -> Left . P.warnCallout $ P.wrap warn + queries -> first fromString $ do + paths <- traverse Path.parseHQSplit' queries + pure $ Input.DeleteI (mkTarget paths) ) delete :: InputPattern @@ -1007,29 +1024,30 @@ resetRoot = _ -> Left (I.help resetRoot) ) -pullSilent :: InputPattern -pullSilent = - pullImpl "pull.silent" Verbosity.Silent Input.PullWithHistory "without listing the merged entities" - pull :: InputPattern -pull = pullImpl "pull" Verbosity.Default Input.PullWithHistory "" +pull = + pullImpl "pull" ["pull.silent"] Verbosity.Silent Input.PullWithHistory "without listing the merged entities" + +pullVerbose :: InputPattern +pullVerbose = pullImpl "pull.verbose" [] Verbosity.Verbose Input.PullWithHistory "and lists the merged entities" pullWithoutHistory :: InputPattern pullWithoutHistory = pullImpl "pull.without-history" - Verbosity.Default + [] + Verbosity.Silent Input.PullWithoutHistory "without including the remote's history. This usually results in smaller codebase sizes." -pullImpl :: String -> Verbosity -> Input.PullMode -> P.Pretty CT.ColorText -> InputPattern -pullImpl name verbosity pullMode addendum = do +pullImpl :: String -> [String] -> Verbosity -> Input.PullMode -> P.Pretty CT.ColorText -> InputPattern +pullImpl name aliases verbosity pullMode addendum = do self where self = InputPattern name - [] + aliases I.Visible [(Optional, remoteNamespaceArg), (Optional, namespaceArg)] ( P.lines @@ -1079,8 +1097,10 @@ pullExhaustive = [(Required, remoteNamespaceArg), (Optional, namespaceArg)] ( P.lines [ P.wrap $ - "The " <> makeExample' pullExhaustive <> "command can be used in place of" - <> makeExample' pull + "The " + <> makeExample' pullExhaustive + <> "command can be used in place of" + <> makeExample' pullVerbose <> "to complete namespaces" <> "which were pulled incompletely due to a bug in UCM" <> "versions M1l and earlier. It may be extra slow!" 
@@ -1088,15 +1108,15 @@ pullExhaustive = ) ( \case [] -> - Right $ Input.PullRemoteBranchI Nothing Path.relativeEmpty' SyncMode.Complete Input.PullWithHistory Verbosity.Default + Right $ Input.PullRemoteBranchI Nothing Path.relativeEmpty' SyncMode.Complete Input.PullWithHistory Verbosity.Verbose [url] -> do ns <- parseReadRemoteNamespace "remote-namespace" url - Right $ Input.PullRemoteBranchI (Just ns) Path.relativeEmpty' SyncMode.Complete Input.PullWithHistory Verbosity.Default + Right $ Input.PullRemoteBranchI (Just ns) Path.relativeEmpty' SyncMode.Complete Input.PullWithHistory Verbosity.Verbose [url, path] -> do ns <- parseReadRemoteNamespace "remote-namespace" url p <- first fromString $ Path.parsePath' path - Right $ Input.PullRemoteBranchI (Just ns) p SyncMode.Complete Input.PullWithHistory Verbosity.Default - _ -> Left (I.help pull) + Right $ Input.PullRemoteBranchI (Just ns) p SyncMode.Complete Input.PullWithHistory Verbosity.Verbose + _ -> Left (I.help pullVerbose) ) debugTabCompletion :: InputPattern @@ -1267,7 +1287,9 @@ pushExhaustive = [(Required, remoteNamespaceArg), (Optional, namespaceArg)] ( P.lines [ P.wrap $ - "The " <> makeExample' pushExhaustive <> "command can be used in place of" + "The " + <> makeExample' pushExhaustive + <> "command can be used in place of" <> makeExample' push <> "to repair remote namespaces" <> "which were pushed incompletely due to a bug in UCM" @@ -1537,19 +1559,20 @@ viewReflog = edit :: InputPattern edit = InputPattern - "edit" - [] - I.Visible - [(OnePlus, definitionQueryArg)] - ( P.lines - [ "`edit foo` prepends the definition of `foo` to the top of the most " - <> "recently saved file.", - "`edit` without arguments invokes a search to select a definition for editing, which requires that `fzf` can be found within your PATH." - ] - ) - ( fmap (Input.ShowDefinitionI Input.LatestFileLocation Input.ShowDefinitionLocal) - . traverse parseHashQualifiedName - ) + { patternName = "edit", + aliases = [], + visibility = I.Visible, + argTypes = [(OnePlus, definitionQueryArg)], + help = + P.lines + [ "`edit foo` prepends the definition of `foo` to the top of the most " + <> "recently saved file.", + "`edit` without arguments invokes a search to select a definition for editing, which requires that `fzf` can be found within your PATH." + ], + parse = + fmap (Input.ShowDefinitionI Input.LatestFileLocation Input.ShowDefinitionLocal) + . traverse parseHashQualifiedName + } topicNameArg :: ArgumentType topicNameArg = @@ -1642,7 +1665,8 @@ helpTopicsMap = testCacheMsg = P.callout "🎈" . P.lines $ [ P.wrap $ - "Unison caches the results of " <> P.blue "test>" + "Unison caches the results of " + <> P.blue "test>" <> "watch expressions. Since these expressions are pure and" <> "always yield the same result when evaluated, there's no need" <> "to run them more than once!", @@ -1654,7 +1678,8 @@ helpTopicsMap = pathnamesMsg = P.callout "\129488" . 
P.lines $ [ P.wrap $ - "There are two kinds of namespaces," <> P.group (P.blue "absolute" <> ",") + "There are two kinds of namespaces," + <> P.group (P.blue "absolute" <> ",") <> "such as" <> P.group ("(" <> P.blue ".foo.bar") <> "or" @@ -1677,12 +1702,15 @@ helpTopicsMap = P.indentN 2 $ P.green "x" <> " = 41", "", P.wrap $ - "then doing an" <> P.blue "add" + "then doing an" + <> P.blue "add" <> "will create the definition with the absolute name" <> P.group (P.blue ".foo.bar.x" <> " = 41"), "", P.wrap $ - "and you can refer to" <> P.green "x" <> "by its absolute name " + "and you can refer to" + <> P.green "x" + <> "by its absolute name " <> P.blue ".foo.bar.x" <> "elsewhere" <> "in your code. For instance:", @@ -2084,20 +2112,20 @@ saveExecuteResult = ioTest :: InputPattern ioTest = InputPattern - "io.test" - ["test.io"] - I.Visible - [(Required, exactDefinitionTermQueryArg)] - ( P.wrapColumn2 - [ ( "`io.test mytest`", - "Runs `!mytest`, where `mytest` is a delayed test that can use the `IO` and `Exception` abilities. Note: `mytest` must already be added to the codebase." - ) - ] - ) - ( \case + { patternName = "io.test", + aliases = ["test.io"], + visibility = I.Visible, + argTypes = [(Required, exactDefinitionTermQueryArg)], + help = + P.wrapColumn2 + [ ( "`io.test mytest`", + "Runs `!mytest`, where `mytest` is a delayed test that can use the `IO` and `Exception` abilities." + ) + ], + parse = \case [thing] -> fmap Input.IOTestI $ parseHashQualifiedName thing _ -> Left $ showPatternHelp ioTest - ) + } makeStandalone :: InputPattern makeStandalone = @@ -2128,14 +2156,14 @@ runScheme = I.Visible [(Required, exactDefinitionTermQueryArg)] ( P.wrapColumn2 - [ ( makeExample runScheme ["main"], + [ ( makeExample runScheme ["main", "args"], "Executes !main using native compilation via scheme." ) ] ) ( \case - [main] -> - Input.ExecuteSchemeI <$> parseHashQualifiedName main + (main : args) -> + flip Input.ExecuteSchemeI args <$> parseHashQualifiedName main _ -> Left $ showPatternHelp runScheme ) @@ -2201,12 +2229,16 @@ fetchScheme = <> "is run\ \ if the library is not already in the standard location\ \ (unison.internal). However, this command will force\ - \ a pull even if the library already exists." + \ a pull even if the library already exists. You can also specify\ + \ a username to pull from (the default is `unison`) to use an alternate\ + \ implementation of the scheme compiler. It will attempt to fetch\ + \ [username].public.internal.trunk for use." 
) ] ) ( \case - [] -> pure Input.FetchSchemeCompilerI + [] -> pure (Input.FetchSchemeCompilerI "unison") + [name] -> pure (Input.FetchSchemeCompilerI name) _ -> Left $ showPatternHelp fetchScheme ) @@ -2323,6 +2355,7 @@ validInputs = [ help, helpTopics, load, + clear, add, previewAdd, update, @@ -2340,9 +2373,9 @@ validInputs = push, pushCreate, pushForce, - pull, + pullVerbose, pullWithoutHistory, - pullSilent, + pull, pushExhaustive, pullExhaustive, createPullRequest, @@ -2574,10 +2607,11 @@ explainRemote pushPull = where gitRepo = PushPull.fold @(P.Pretty P.ColorText) "git@github.com:" "https://github.com/" pushPull -showErrorFancy :: P.ShowErrorComponent e => P.ErrorFancy e -> String +showErrorFancy :: (P.ShowErrorComponent e) => P.ErrorFancy e -> String showErrorFancy (P.ErrorFail msg) = msg showErrorFancy (P.ErrorIndentation ord ref actual) = - "incorrect indentation (got " <> show (P.unPos actual) + "incorrect indentation (got " + <> show (P.unPos actual) <> ", should be " <> p <> show (P.unPos ref) diff --git a/unison-cli/src/Unison/CommandLine/Main.hs b/unison-cli/src/Unison/CommandLine/Main.hs index 32662a89c..9eb5f4a2b 100644 --- a/unison-cli/src/Unison/CommandLine/Main.hs +++ b/unison-cli/src/Unison/CommandLine/Main.hs @@ -15,7 +15,7 @@ import qualified Data.Text as Text import qualified Data.Text.Lazy.IO as Text.Lazy import qualified Ki import qualified System.Console.Haskeline as Line -import System.IO (hPutStrLn, stderr) +import System.IO (hGetEcho, hPutStrLn, hSetEcho, stderr, stdin) import System.IO.Error (isDoesNotExistError) import Text.Pretty.Simple (pShow) import qualified U.Codebase.Sqlite.Operations as Operations @@ -143,8 +143,12 @@ main dir welcome initialPath config initialInputs runtime sbRuntime codebase ser credentialManager <- newCredentialManager let tokenProvider = AuthN.newTokenProvider credentialManager authHTTPClient <- AuthN.newAuthenticatedHTTPClient tokenProvider ucmVersion + initialEcho <- hGetEcho stdin + let restoreEcho = (\currentEcho -> when (currentEcho /= initialEcho) $ hSetEcho stdin initialEcho) let getInput :: Cli.LoopState -> IO Input getInput loopState = do + currentEcho <- hGetEcho stdin + liftIO $ restoreEcho currentEcho getUserInput codebase authHTTPClient diff --git a/unison-cli/src/Unison/CommandLine/OutputMessages.hs b/unison-cli/src/Unison/CommandLine/OutputMessages.hs index 99f167bfa..c64e6c75a 100644 --- a/unison-cli/src/Unison/CommandLine/OutputMessages.hs +++ b/unison-cli/src/Unison/CommandLine/OutputMessages.hs @@ -30,6 +30,7 @@ import qualified Network.HTTP.Types as Http import Network.URI (URI) import qualified Network.URI.Encode as URI import qualified Servant.Client as Servant +import qualified System.Console.ANSI as ANSI import qualified System.Console.Haskeline.Completion as Completion import System.Directory ( canonicalizePath, @@ -42,24 +43,38 @@ import U.Codebase.HashTags (CausalHash (..)) import U.Codebase.Sqlite.DbId (SchemaVersion (SchemaVersion)) import U.Util.Base32Hex (Base32Hex) import qualified U.Util.Base32Hex as Base32Hex -import qualified U.Util.Hash as Hash -import U.Util.Hash32 (Hash32) -import qualified U.Util.Hash32 as Hash32 import qualified Unison.ABT as ABT import qualified Unison.Auth.Types as Auth import qualified Unison.Builtin.Decls as DD import Unison.Codebase.Editor.DisplayObject (DisplayObject (BuiltinObject, MissingObject, UserObject)) import qualified Unison.Codebase.Editor.Input as Input import Unison.Codebase.Editor.Output + ( DisplayDefinitionsOutput (..), + NumberedArgs, + NumberedOutput 
(..), + Output (..), + ShareError + ( ShareErrorCheckAndSetPush, + ShareErrorFastForwardPush, + ShareErrorGetCausalHashByPath, + ShareErrorPull, + ShareErrorTransport + ), + TestReportStats (CachedTests, NewlyComputed), + UndoFailureReason (CantUndoPastMerge, CantUndoPastStart), + WhichBranchEmpty (..), + ) import qualified Unison.Codebase.Editor.Output as E import qualified Unison.Codebase.Editor.Output.BranchDiff as OBD import qualified Unison.Codebase.Editor.Output.PushPull as PushPull import Unison.Codebase.Editor.RemoteRepo ( ReadGitRepo, ReadRemoteNamespace, + ShareUserHandle (..), WriteGitRepo, WriteRemotePath (..), WriteShareRemotePath (..), + shareUserHandleToText, ) import qualified Unison.Codebase.Editor.RemoteRepo as RemoteRepo import qualified Unison.Codebase.Editor.SlurpResult as SlurpResult @@ -87,6 +102,8 @@ import Unison.ConstructorReference (GConstructorReference (..)) import qualified Unison.ConstructorType as CT import qualified Unison.DataDeclaration as DD import qualified Unison.Hash as Hash +import Unison.Hash32 (Hash32) +import qualified Unison.Hash32 as Hash32 import qualified Unison.HashQualified as HQ import qualified Unison.HashQualified' as HQ' import Unison.LabeledDependency as LD @@ -112,7 +129,7 @@ import Unison.PrintError printNoteWithSource, renderCompilerBug, ) -import Unison.Reference (Reference) +import Unison.Reference (Reference, TermReference) import qualified Unison.Reference as Reference import Unison.Referent (Referent) import qualified Unison.Referent as Referent @@ -125,12 +142,14 @@ import qualified Unison.Share.Sync as Share import Unison.Share.Sync.Types (CodeserverTransportError (..)) import qualified Unison.ShortHash as SH import qualified Unison.ShortHash as ShortHash +import Unison.Symbol (Symbol) import qualified Unison.Sync.Types as Share import qualified Unison.Syntax.DeclPrinter as DeclPrinter import qualified Unison.Syntax.HashQualified as HQ (toString, toText, unsafeFromVar) import qualified Unison.Syntax.Name as Name (toString, toText) import Unison.Syntax.NamePrinter - ( prettyHashQualified, + ( SyntaxText, + prettyHashQualified, prettyHashQualified', prettyLabeledDependency, prettyName, @@ -218,7 +237,8 @@ notifyNumbered o = case o of p, "", tip $ - "You can use " <> IP.makeExample' IP.todo + "You can use " + <> IP.makeExample' IP.todo <> "to see if this generated any work to do in this namespace" <> "and " <> IP.makeExample' IP.test @@ -236,7 +256,8 @@ notifyNumbered o = case o of ( \p -> P.lines [ P.wrap $ - "Here's what's changed in " <> prettyPath' dest' + "Here's what's changed in " + <> prettyPath' dest' <> "after applying the patch at " <> P.group (prettyPath' patchPath' <> ":"), "", @@ -327,7 +348,9 @@ notifyNumbered o = case o of [ p, "", tip $ - "Add" <> prettyName "License" <> "values for" + "Add" + <> prettyName "License" + <> "values for" <> prettyName (Name.fromSegment authorNS) <> "under" <> P.group (prettyPath' authorPath' <> ".") @@ -412,7 +435,8 @@ notifyNumbered o = case o of P.indentN 2 $ prettyDiff diff ] ex = - "Use" <> IP.makeExample IP.history ["#som3n4m3space"] + "Use" + <> IP.makeExample IP.history ["#som3n4m3space"] <> "to view history starting from a given namespace hash." DeletedDespiteDependents ppeDecl endangerments -> ( P.warnCallout $ @@ -430,7 +454,8 @@ notifyNumbered o = case o of undoTip :: P.Pretty P.ColorText undoTip = tip $ - "You can use" <> IP.makeExample' IP.undo + "You can use" + <> IP.makeExample' IP.undo <> "or" <> IP.makeExample' IP.viewReflog <> "to undo this change." 
@@ -545,7 +570,8 @@ notifyUser dir o = case o of pure . P.warnCallout . P.wrap - $ "Cannot save the last run result into" <> P.backticked (P.string (Name.toString name)) + $ "Cannot save the last run result into" + <> P.backticked (P.string (Name.toString name)) <> "because that name conflicts with a name in the scratch file." NoLastRunResult -> pure @@ -608,7 +634,8 @@ notifyUser dir o = case o of P.wrap $ "Now might be a good time to make a backup of your codebase. 😬", "", P.wrap $ - "After that, you might try using the" <> makeExample' IP.forkLocal + "After that, you might try using the" + <> makeExample' IP.forkLocal <> "command to inspect the namespaces listed above, and decide which" <> "one you want as your root." <> "You can also use" @@ -658,8 +685,7 @@ notifyUser dir o = case o of [prettyReadRemoteNamespace baseNS, prettyPath' squashedPath] <> "to push the changes." ] - DisplayDefinitions outputLoc ppe types terms -> - displayDefinitions outputLoc ppe types terms + DisplayDefinitions output -> displayDefinitions output DisplayRendered outputLoc pp -> displayRendered outputLoc pp TestResults stats ppe _showSuccess _showFailures oks fails -> case stats of @@ -687,7 +713,8 @@ notifyUser dir o = case o of cache = P.bold "Cached test results " <> "(`help testcache` to learn more)" TestIncrementalOutputStart ppe (n, total) r _src -> do putPretty' $ - P.shown (total - n) <> " tests left to run, current test: " + P.shown (total - n) + <> " tests left to run, current test: " <> P.syntaxToColor (prettyHashQualified (PPE.termName ppe $ Referent.Ref r)) pure mempty TestIncrementalOutputEnd _ppe (_n, _total) _r result -> do @@ -708,7 +735,8 @@ notifyUser dir o = case o of MetadataMissingType ppe ref -> pure . P.fatalCallout . P.lines $ [ P.wrap $ - "The metadata value " <> P.red (prettyTermName ppe ref) + "The metadata value " + <> P.red (prettyTermName ppe ref) <> "is missing a type signature in the codebase.", "", P.wrap $ @@ -717,7 +745,8 @@ notifyUser dir o = case o of <> "are being deleted external to UCM." ] MetadataAmbiguous hq _ppe [] -> - pure . P.warnCallout + pure + . P.warnCallout . P.wrap $ "I couldn't find any metadata matching " <> P.syntaxToColor (prettyHashQualified hq) @@ -768,6 +797,11 @@ notifyUser dir o = case o of pure . P.warnCallout $ "I don't know about that patch." NameNotFound _ -> pure . P.warnCallout $ "I don't know about that name." + NamesNotFound hqs -> + pure $ + P.warnCallout "The following names were not found in the codebase. Check your spelling." + <> P.newline + <> (P.syntaxToColor $ P.indent " " (P.lines (fmap prettyName hqs))) TermNotFound _ -> pure . P.warnCallout $ "I don't know about that term." TypeNotFound _ -> @@ -780,7 +814,7 @@ notifyUser dir o = case o of pure . P.warnCallout $ "A patch by that name already exists." BranchEmpty b -> pure . P.warnCallout . P.wrap $ - P.group (either P.shown prettyPath' b) <> "is an empty namespace." + P.group (prettyWhichBranchEmpty b) <> "is an empty namespace." BranchNotEmpty path -> pure . P.warnCallout $ P.lines @@ -794,19 +828,20 @@ notifyUser dir o = case o of pure . P.callout "😶" $ P.lines [ P.wrap $ - "I looked for a function" <> P.backticked (P.string main) + "I looked for a function" + <> P.backticked (P.string main) <> "in the most recently typechecked file and codebase but couldn't find one. It has to have the type:", "", P.indentN 2 $ P.lines [P.string main <> " : " <> TypePrinter.pretty ppe t | t <- ts] ] - BadMainFunction main ty ppe ts -> + BadMainFunction what main ty ppe ts -> pure . 
P.callout "😶" $ P.lines [ P.string "I found this function:", "", P.indentN 2 $ P.string main <> " : " <> TypePrinter.pretty ppe ty, "", - P.wrap $ P.string "but in order for me to" <> P.backticked (P.string "run") <> "it needs be a subtype of:", + P.wrap $ P.string "but in order for me to" <> P.backticked (P.string what) <> "it needs be a subtype of:", "", P.indentN 2 $ P.lines [P.string main <> " : " <> TypePrinter.pretty ppe t | t <- ts] ] @@ -859,7 +894,9 @@ notifyUser dir o = case o of DeletedEverything -> pure . P.wrap . P.lines $ [ "Okay, I deleted everything except the history.", - "Use " <> IP.makeExample' IP.undo <> " to undo, or " + "Use " + <> IP.makeExample' IP.undo + <> " to undo, or " <> IP.makeExample' IP.mergeBuiltins <> " to restore the absolute " <> "basics to the current path." @@ -867,7 +904,8 @@ notifyUser dir o = case o of DeleteEverythingConfirmation -> pure . P.warnCallout . P.lines $ [ "Are you sure you want to clear away everything?", - "You could use " <> IP.makeExample' IP.cd + "You could use " + <> IP.makeExample' IP.cd <> " to switch to a new namespace instead." ] DeleteBranchConfirmation _uniqueDeletions -> error "todo" @@ -965,12 +1003,12 @@ notifyUser dir o = case o of then P.lit "nothing to show" else numberedEntries ppe entries where - numberedEntries :: Var v => PPE.PrettyPrintEnv -> [ShallowListEntry v a] -> Pretty + numberedEntries :: (Var v) => PPE.PrettyPrintEnv -> [ShallowListEntry v a] -> Pretty numberedEntries ppe entries = (P.column3 . fmap f) ([(1 :: Integer) ..] `zip` fmap (formatEntry ppe) entries) where f (i, (p1, p2)) = (P.hiBlack . fromString $ show i <> ".", p1, p2) - formatEntry :: Var v => PPE.PrettyPrintEnv -> ShallowListEntry v a -> (Pretty, Pretty) + formatEntry :: (Var v) => PPE.PrettyPrintEnv -> ShallowListEntry v a -> (Pretty, Pretty) formatEntry ppe = \case ShallowTermEntry termEntry -> ( P.syntaxToColor . prettyHashQualified' . fmap Name.fromSegment . Backend.termEntryHQName $ termEntry, @@ -1044,11 +1082,11 @@ notifyUser dir o = case o of let prettyBindings = P.bracket . P.lines $ - P.wrap "The watch expression(s) reference these definitions:" : - "" : - [ P.syntaxToColor $ TermPrinter.prettyBinding ppe (HQ.unsafeFromVar v) b - | (v, b) <- bindings - ] + P.wrap "The watch expression(s) reference these definitions:" + : "" + : [ P.syntaxToColor $ TermPrinter.prettyBinding ppe (HQ.unsafeFromVar v) b + | (v, b) <- bindings + ] prettyWatches = P.sep "\n\n" @@ -1072,7 +1110,7 @@ notifyUser dir o = case o of where terms = R.dom termNamespace types = R.dom typeNamespace - showConflicts :: Foldable f => Pretty -> f Name -> Pretty + showConflicts :: (Foldable f) => Pretty -> f Name -> Pretty showConflicts thingsName things = if (null things) then mempty @@ -1150,12 +1188,14 @@ notifyUser dir o = case o of <> P.backticked' (P.string localPath) "." 
CodebaseRequiresMigration (SchemaVersion fromSv) (SchemaVersion toSv) -> do P.wrap $ - "The specified codebase codebase is on version " <> P.shown fromSv + "The specified codebase codebase is on version " + <> P.shown fromSv <> " but needs to be on version " <> P.shown toSv UnrecognizedSchemaVersion repo localPath (SchemaVersion v) -> P.wrap $ - "I don't know how to interpret schema version " <> P.shown v + "I don't know how to interpret schema version " + <> P.shown v <> "in the repository at" <> prettyReadGitRepo repo <> "in the cache directory at" @@ -1176,12 +1216,18 @@ notifyUser dir o = case o of <> P.group (P.shown e) CloneException repo msg -> P.wrap $ - "I couldn't clone the repository at" <> prettyReadGitRepo repo <> ";" + "I couldn't clone the repository at" + <> prettyReadGitRepo repo + <> ";" <> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg CopyException srcRepoPath destPath msg -> P.wrap $ - "I couldn't copy the repository at" <> P.string srcRepoPath <> "into" <> P.string destPath <> ";" + "I couldn't copy the repository at" + <> P.string srcRepoPath + <> "into" + <> P.string destPath + <> ";" <> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg PushNoOp repo -> @@ -1189,7 +1235,9 @@ notifyUser dir o = case o of "The repository at" <> prettyWriteGitRepo repo <> "is already up-to-date." PushException repo msg -> P.wrap $ - "I couldn't push to the repository at" <> prettyWriteGitRepo repo <> ";" + "I couldn't push to the repository at" + <> prettyWriteGitRepo repo + <> ";" <> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg RemoteRefNotFound repo ref -> @@ -1214,7 +1262,8 @@ notifyUser dir o = case o of PushDestinationHasNewStuff repo -> P.callout "⏸" . P.lines $ [ P.wrap $ - "The repository at" <> prettyWriteGitRepo repo + "The repository at" + <> prettyWriteGitRepo repo <> "has some changes I don't know about.", "", P.wrap $ "Try" <> pull <> "to merge these changes locally, then" <> push <> "again." @@ -1232,12 +1281,13 @@ notifyUser dir o = case o of CouldntLoadRootBranch repo hash -> P.wrap $ "I couldn't load the designated root hash" - <> P.group ("(" <> P.text (Hash.base32Hex $ unCausalHash hash) <> ")") + <> P.group ("(" <> P.text (Hash.toBase32HexText $ unCausalHash hash) <> ")") <> "from the repository at" <> prettyReadGitRepo repo CouldntLoadSyncedBranch ns h -> P.wrap $ - "I just finished importing the branch" <> P.red (P.shown h) + "I just finished importing the branch" + <> P.red (P.shown h) <> "from" <> P.red (prettyReadRemoteNamespace (RemoteRepo.ReadRemoteNamespaceGit ns)) <> "but now I can't find it." @@ -1249,13 +1299,15 @@ notifyUser dir o = case o of <> prettyReadGitRepo repo NoRemoteNamespaceWithHash repo sch -> P.wrap $ - "The repository at" <> prettyReadGitRepo repo + "The repository at" + <> prettyReadGitRepo repo <> "doesn't contain a namespace with the hash prefix" <> (P.blue . P.text . SCH.toText) sch RemoteNamespaceHashAmbiguous repo sch hashes -> P.lines [ P.wrap $ - "The namespace hash" <> prettySCH sch + "The namespace hash" + <> prettySCH sch <> "at" <> prettyReadGitRepo repo <> "is ambiguous." @@ -1282,12 +1334,13 @@ notifyUser dir o = case o of case (new, old) of ([], []) -> error "BustedBuiltins busted, as there were no busted builtins." ([], old) -> - P.wrap ("This codebase includes some builtins that are considered deprecated. Use the " <> makeExample' IP.updateBuiltins <> " command when you're ready to work on eliminating them from your codebase:") : - "" : - fmap (P.text . 
Reference.toText) old + P.wrap ("This codebase includes some builtins that are considered deprecated. Use the " <> makeExample' IP.updateBuiltins <> " command when you're ready to work on eliminating them from your codebase:") + : "" + : fmap (P.text . Reference.toText) old (new, []) -> - P.wrap ("This version of Unison provides builtins that are not part of your codebase. Use " <> makeExample' IP.updateBuiltins <> " to add them:") : - "" : fmap (P.text . Reference.toText) new + P.wrap ("This version of Unison provides builtins that are not part of your codebase. Use " <> makeExample' IP.updateBuiltins <> " to add them:") + : "" + : fmap (P.text . Reference.toText) new (new@(_ : _), old@(_ : _)) -> [ P.wrap ( "Sorry and/or good news! This version of Unison supports a different set of builtins than this codebase uses. You can use " @@ -1348,18 +1401,21 @@ notifyUser dir o = case o of <> prettyAbsolute p <> "in .unisonConfig", P.wrap $ - "The value I found was" <> (P.backticked . P.blue . P.text) url + "The value I found was" + <> (P.backticked . P.blue . P.text) url <> "but I encountered the following error when trying to parse it:", "", P.string err, "", P.wrap $ - "Type" <> P.backticked ("help " <> PushPull.fold "push" "pull" pp) + "Type" + <> P.backticked ("help " <> PushPull.fold "push" "pull" pp) <> "for more information." ] NoBranchWithHash _h -> pure . P.callout "😶" $ - P.wrap $ "I don't know of a namespace with that hash." + P.wrap $ + "I don't know of a namespace with that hash." NotImplemented -> pure $ P.wrap "That's not implemented yet. Sorry! 😬" BranchAlreadyExists p -> pure . P.wrap $ @@ -1410,7 +1466,9 @@ notifyUser dir o = case o of HashAmbiguous h rs -> pure . P.callout "\129300" . P.lines $ [ P.wrap $ - "The hash" <> prettyShortHash h <> "is ambiguous." + "The hash" + <> prettyShortHash h + <> "is ambiguous." <> "Did you mean one of these hashes?", "", P.indentN 2 $ P.lines (P.shown <$> Set.toList rs), @@ -1420,7 +1478,9 @@ notifyUser dir o = case o of BranchHashAmbiguous h rs -> pure . P.callout "\129300" . P.lines $ [ P.wrap $ - "The namespace hash" <> prettySCH h <> "is ambiguous." + "The namespace hash" + <> prettySCH h + <> "is ambiguous." <> "Did you mean one of these hashes?", "", P.indentN 2 $ P.lines (prettySCH <$> Set.toList rs), @@ -1500,26 +1560,31 @@ notifyUser dir o = case o of PullAlreadyUpToDate ns dest -> pure . P.callout "😶" $ P.wrap $ - prettyPath' dest <> "was already up-to-date with" + prettyPath' dest + <> "was already up-to-date with" <> P.group (prettyReadRemoteNamespace ns <> ".") PullSuccessful ns dest -> pure . P.okCallout $ P.wrap $ - "✅ Successfully updated" <> prettyPath' dest <> "from" + "Successfully updated" + <> prettyPath' dest + <> "from" <> P.group (prettyReadRemoteNamespace ns <> ".") MergeOverEmpty dest -> pure . P.okCallout $ P.wrap $ - "✅ Successfully pulled into newly created namespace " <> P.group (prettyPath' dest <> ".") + "Successfully pulled into newly created namespace " <> P.group (prettyPath' dest <> ".") MergeAlreadyUpToDate src dest -> pure . P.callout "😶" $ P.wrap $ - prettyPath' dest <> "was already up-to-date with" + prettyPath' dest + <> "was already up-to-date with" <> P.group (prettyPath' src <> ".") PreviewMergeAlreadyUpToDate src dest -> pure . P.callout "😶" $ P.wrap $ - prettyPath' dest <> "is already up-to-date with" + prettyPath' dest + <> "is already up-to-date with" <> P.group (prettyPath' src <> ".") DumpNumberedArgs args -> pure . 
P.numberedList $ fmap P.string args NoConflictsOrEdits -> @@ -1533,10 +1598,12 @@ notifyUser dir o = case o of Nothing -> go (renderLine head [] : output) queue Just tails -> go (renderLine head tails : output) (queue ++ tails) where - renderHash = take 10 . Text.unpack . Hash.base32Hex . unCausalHash + renderHash = take 10 . Text.unpack . Hash.toBase32HexText . unCausalHash renderLine head tail = - (renderHash head) ++ "|" ++ intercalateMap " " renderHash tail - ++ case Map.lookup (Hash.base32Hex . unCausalHash $ head) tags of + (renderHash head) + ++ "|" + ++ intercalateMap " " renderHash tail + ++ case Map.lookup (Hash.toBase32HexText . unCausalHash $ head) tags of Just t -> "|tag: " ++ t Nothing -> "" -- some specific hashes that we want to label in the output @@ -1588,17 +1655,19 @@ notifyUser dir o = case o of if names == mempty && missing == mempty then c (prettyLabeledDependency hqLength ld) <> " doesn't have any dependencies." else - "Dependencies of " <> c (prettyLabeledDependency hqLength ld) <> ":\n\n" + "Dependencies of " + <> c (prettyLabeledDependency hqLength ld) + <> ":\n\n" <> (P.indentN 2 (P.numberedColumn2Header num pairs)) where num n = P.hiBlack $ P.shown n <> "." header = (P.hiBlack "Reference", P.hiBlack "Name") pairs = - header : - ( fmap (first c . second c) $ - [(p $ Reference.toShortHash r, prettyName n) | (n, r) <- names] - ++ [(p $ Reference.toShortHash r, "(no name available)") | r <- toList missing] - ) + header + : ( fmap (first c . second c) $ + [(p $ Reference.toShortHash r, prettyName n) | (n, r) <- names] + ++ [(p $ Reference.toShortHash r, "(no name available)") | r <- toList missing] + ) p = prettyShortHash . SH.take hqLength c = P.syntaxToColor ListNamespaceDependencies _ppe _path Empty -> pure $ "This namespace has no external dependencies." @@ -1662,7 +1731,8 @@ notifyUser dir o = case o of Auth.ReauthRequired host -> P.lines [ "Authentication for host " <> P.red (P.shown host) <> " is required.", - "Run " <> IP.makeExample IP.help [IP.patternName IP.authLogin] + "Run " + <> IP.makeExample IP.help [IP.patternName IP.authLogin] <> " to learn how." ] Auth.CredentialParseFailure fp txt -> @@ -1692,6 +1762,12 @@ notifyUser dir o = case o of [ "Failed to parse a URI from the hostname: " <> P.shown host <> ".", "Host names should NOT include a schema or path." ] + Auth.FailedToFetchUserInfo userInfoEndpoint err -> + P.lines + [ "Failed to parse the response from user info endpoint: " <> P.shown userInfoEndpoint, + P.text err, + "Please `auth.login` then try again, if this error persists please file a bug report and include the above error message." + ] PrintVersion ucmVersion -> pure (P.text ucmVersion) ShareError x -> (pure . 
P.fatalCallout) case x of ShareErrorCheckAndSetPush e -> case e of @@ -1729,9 +1805,9 @@ notifyUser dir o = case o of (Share.FastForwardPushErrorNoWritePermission sharePath) -> noWritePermission sharePath (Share.FastForwardPushErrorServerMissingDependencies hashes) -> missingDependencies hashes ShareErrorPull e -> case e of - (Share.PullErrorGetCausalHashByPath err) -> handleGetCausalHashByPathError err - (Share.PullErrorNoHistoryAtPath sharePath) -> + Share.PullErrorNoHistoryAtPath sharePath -> P.wrap $ P.text "The server didn't find anything at" <> prettySharePath sharePath + Share.PullErrorNoReadPermission sharePath -> noReadPermission sharePath ShareErrorGetCausalHashByPath err -> handleGetCausalHashByPathError err ShareErrorTransport te -> case te of DecodeFailure msg resp -> @@ -1847,6 +1923,13 @@ notifyUser dir o = case o of else "" in (isCompleteTxt, P.string (Completion.replacement comp)) ) + ClearScreen -> do + ANSI.clearScreen + ANSI.setCursorPosition 0 0 + pure mempty + PulledEmptyBranch remote -> + pure . P.warnCallout . P.wrap $ + P.group (prettyReadRemoteNamespace remote) <> "has some history, but is currently empty." where _nameChange _cmd _pastTenseCmd _oldName _newName _r = error "todo" expectedEmptyPushDest writeRemotePath = @@ -1867,7 +1950,7 @@ notifyUser dir o = case o of ( WriteRemotePathShare WriteShareRemotePath { server = RemoteRepo.DefaultCodeserver, - repo = Share.unRepoName (Share.pathRepoName sharePath), + repo = ShareUserHandle $ Share.unRepoName (Share.pathRepoName sharePath), path = Path.fromList (coerce @[Text] @[NameSegment] (Share.pathCodebasePath sharePath)) } ) @@ -1908,7 +1991,7 @@ prettyShareLink WriteShareRemotePath {repo, path} = Path.toList path & fmap (URI.encodeText . NameSegment.toText) & Text.intercalate "/" - in P.green . P.text $ shareOrigin <> "/@" <> repo <> "/p/code/latest/namespaces/" <> encodedPath + in P.green . P.text $ shareOrigin <> "/@" <> shareUserHandleToText repo <> "/p/code/latest/namespaces/" <> encodedPath prettyFilePath :: FilePath -> Pretty prettyFilePath fp = @@ -1931,22 +2014,22 @@ prettyRelative = P.blue . P.shown prettyAbsolute :: Path.Absolute -> Pretty prettyAbsolute = P.blue . P.shown -prettySCH :: IsString s => ShortCausalHash -> P.Pretty s +prettySCH :: (IsString s) => ShortCausalHash -> P.Pretty s prettySCH hash = P.group $ "#" <> P.text (SCH.toText hash) -prettyCausalHash :: IsString s => CausalHash -> P.Pretty s +prettyCausalHash :: (IsString s) => CausalHash -> P.Pretty s prettyCausalHash hash = P.group $ "#" <> P.text (Hash.toBase32HexText . unCausalHash $ hash) -prettyBase32Hex :: IsString s => Base32Hex -> P.Pretty s +prettyBase32Hex :: (IsString s) => Base32Hex -> P.Pretty s prettyBase32Hex = P.text . Base32Hex.toText -prettyBase32Hex# :: IsString s => Base32Hex -> P.Pretty s +prettyBase32Hex# :: (IsString s) => Base32Hex -> P.Pretty s prettyBase32Hex# b = P.group $ "#" <> prettyBase32Hex b -prettyHash :: IsString s => Hash.Hash -> P.Pretty s +prettyHash :: (IsString s) => Hash.Hash -> P.Pretty s prettyHash = prettyBase32Hex# . Hash.toBase32Hex -prettyHash32 :: IsString s => Hash32 -> P.Pretty s +prettyHash32 :: (IsString s) => Hash32 -> P.Pretty s prettyHash32 = prettyBase32Hex# . 
Hash32.toBase32Hex formatMissingStuff :: @@ -1967,8 +2050,8 @@ formatMissingStuff terms types = ) displayDefinitions' :: - Var v => - Ord a1 => + (Var v) => + (Ord a1) => PPED.PrettyPrintEnvDecl -> Map Reference.Reference (DisplayObject () (DD.Decl v a1)) -> Map Reference.Reference (DisplayObject (Type v a1) (Term v a1)) -> @@ -2001,7 +2084,9 @@ displayDefinitions' ppe0 types terms = P.syntaxToColor $ P.sep "\n\n" (prettyTyp builtin n = P.wrap $ "--" <> prettyHashQualified n <> " is built-in." missing n r = P.wrap - ( "-- The name " <> prettyHashQualified n <> " is assigned to the " + ( "-- The name " + <> prettyHashQualified n + <> " is assigned to the " <> "reference " <> fromString (show r ++ ",") <> "which is missing from the codebase." @@ -2034,24 +2119,18 @@ displayRendered outputLoc pp = P.indentN 2 pp ] -displayDefinitions :: - Var v => - Ord a1 => - Maybe FilePath -> - PPED.PrettyPrintEnvDecl -> - Map Reference.Reference (DisplayObject () (DD.Decl v a1)) -> - Map Reference.Reference (DisplayObject (Type v a1) (Term v a1)) -> - IO Pretty -displayDefinitions _outputLoc _ppe types terms - | Map.null types && Map.null terms = pure $ P.callout "😶" "No results to display." -displayDefinitions outputLoc ppe types terms = - maybe displayOnly scratchAndDisplay outputLoc +displayDefinitions :: DisplayDefinitionsOutput -> IO Pretty +displayDefinitions DisplayDefinitionsOutput {isTest, outputFile, prettyPrintEnv = ppe, terms, types} = + if Map.null types && Map.null terms + then pure $ P.callout "😶" "No results to display." + else maybe displayOnly scratchAndDisplay outputFile where - displayOnly = pure code + ppeDecl = PPED.unsuffixifiedPPE ppe + displayOnly = pure (code (const False)) scratchAndDisplay path = do path' <- canonicalizePath path - prependToFile code path' - pure (message code path') + prependToFile (code isTest) path' + pure (message (code (const False)) path') where prependToFile code path = do existingContents <- do @@ -2075,50 +2154,69 @@ displayDefinitions outputLoc ppe types terms = P.indentN 2 code, "", P.wrap $ - "You can edit them there, then do" <> makeExample' IP.update + "You can edit them there, then do" + <> makeExample' IP.update <> "to replace the definitions currently in this namespace." ] - code = - P.syntaxToColor $ P.sep "\n\n" (prettyTypes <> prettyTerms) + + code :: (TermReference -> Bool) -> Pretty + code isTest = + P.syntaxToColor $ P.sep "\n\n" (prettyTypes <> prettyTerms isTest) + + prettyTypes :: [P.Pretty SyntaxText] + prettyTypes = + types + & Map.toList + & map (\(ref, dt) -> (PPE.typeName ppeDecl ref, ref, dt)) + & List.sortBy (\(n0, _, _) (n1, _, _) -> Name.compareAlphabetical n0 n1) + & map prettyType + + prettyTerms :: (TermReference -> Bool) -> [P.Pretty SyntaxText] + prettyTerms isTest = + terms + & Map.toList + & map (\(ref, dt) -> (PPE.termName ppeDecl (Referent.Ref ref), ref, dt)) + & List.sortBy (\(n0, _, _) (n1, _, _) -> Name.compareAlphabetical n0 n1) + & map (\t -> prettyTerm (isTest (t ^. 
_2)) t) + + prettyTerm :: + Bool -> + (HQ.HashQualified Name, Reference, DisplayObject (Type Symbol Ann) (Term Symbol Ann)) -> + P.Pretty SyntaxText + prettyTerm isTest (n, r, dt) = + case dt of + MissingObject r -> missing n r + BuiltinObject typ -> + (if isJust outputFile then P.indent "-- " else id) $ + P.hang + ("builtin " <> prettyHashQualified n <> " :") + (TypePrinter.prettySyntax (ppeBody n r) typ) + UserObject tm -> + if isTest + then WK.TestWatch <> "> " <> TermPrinter.prettyBindingWithoutTypeSignature (ppeBody n r) n tm + else TermPrinter.prettyBinding (ppeBody n r) n tm where ppeBody n r = PPE.biasTo (maybeToList $ HQ.toName n) $ PPE.declarationPPE ppe r - ppeDecl = PPED.unsuffixifiedPPE ppe - prettyTerms = - terms - & Map.toList - & map (\(ref, dt) -> (PPE.termName ppeDecl (Referent.Ref ref), ref, dt)) - & List.sortBy (\(n0, _, _) (n1, _, _) -> Name.compareAlphabetical n0 n1) - & map go - prettyTypes = - types - & Map.toList - & map (\(ref, dt) -> (PPE.typeName ppeDecl ref, ref, dt)) - & List.sortBy (\(n0, _, _) (n1, _, _) -> Name.compareAlphabetical n0 n1) - & map go2 - go (n, r, dt) = - case dt of - MissingObject r -> missing n r - BuiltinObject typ -> - (if isJust outputLoc then P.indent "-- " else id) $ - P.hang - ("builtin " <> prettyHashQualified n <> " :") - (TypePrinter.prettySyntax (ppeBody n r) typ) - UserObject tm -> TermPrinter.prettyBinding (ppeBody n r) n tm - go2 (n, r, dt) = - case dt of - MissingObject r -> missing n r - BuiltinObject _ -> builtin n - UserObject decl -> DeclPrinter.prettyDecl (PPED.biasTo (maybeToList $ HQ.toName n) $ PPE.declarationPPEDecl ppe r) r n decl - builtin n = P.wrap $ "--" <> prettyHashQualified n <> " is built-in." - missing n r = - P.wrap - ( "-- The name " <> prettyHashQualified n <> " is assigned to the " - <> "reference " - <> fromString (show r ++ ",") - <> "which is missing from the codebase." - ) - <> P.newline - <> tip "You might need to repair the codebase manually." + + prettyType :: (HQ.HashQualified Name, Reference, DisplayObject () (DD.Decl Symbol Ann)) -> P.Pretty SyntaxText + prettyType (n, r, dt) = + case dt of + MissingObject r -> missing n r + BuiltinObject _ -> builtin n + UserObject decl -> DeclPrinter.prettyDecl (PPED.biasTo (maybeToList $ HQ.toName n) $ PPE.declarationPPEDecl ppe r) r n decl + + builtin n = P.wrap $ "--" <> prettyHashQualified n <> " is built-in." + missing n r = + P.wrap + ( "-- The name " + <> prettyHashQualified n + <> " is assigned to the " + <> "reference " + <> fromString (show r ++ ",") + <> "which is missing from the codebase." + ) + <> P.newline + <> tip "You might need to repair the codebase manually." displayTestResults :: Bool -> -- whether to show the tip @@ -2151,7 +2249,8 @@ displayTestResults showTip ppe oksUnsorted failsUnsorted = then mempty else tip $ - "Use " <> P.blue ("view " <> P.text (fst $ head (fails ++ oks))) + "Use " + <> P.blue ("view " <> P.text (fst $ head (fails ++ oks))) <> "to view the source of a test." in if null oks && null fails then "😶 No tests available." 
@@ -2164,7 +2263,7 @@ displayTestResults showTip ppe oksUnsorted failsUnsorted = ] unsafePrettyTermResultSig' :: - Var v => + (Var v) => PPE.PrettyPrintEnv -> SR'.TermResult' v a -> Pretty @@ -2177,7 +2276,7 @@ unsafePrettyTermResultSig' ppe = \case -- -- #5v5UtREE1fTiyTsTK2zJ1YNqfiF25SkfUnnji86Lms#0 -- Optional.None, Maybe.Nothing : Maybe a unsafePrettyTermResultSigFull' :: - Var v => + (Var v) => PPE.PrettyPrintEnv -> SR'.TermResult' v a -> Pretty @@ -2186,7 +2285,8 @@ unsafePrettyTermResultSigFull' ppe = \case P.lines [ P.hiBlack "-- " <> greyHash (HQ.fromReferent r), P.group $ - P.commas (fmap greyHash $ hq : map HQ'.toHQ (toList aliases)) <> " : " + P.commas (fmap greyHash $ hq : map HQ'.toHQ (toList aliases)) + <> " : " <> P.syntaxToColor (TypePrinter.prettySyntax ppe typ), mempty ] @@ -2194,7 +2294,7 @@ unsafePrettyTermResultSigFull' ppe = \case where greyHash = styleHashQualified' id P.hiBlack -prettyTypeResultHeader' :: Var v => SR'.TypeResult' v a -> Pretty +prettyTypeResultHeader' :: (Var v) => SR'.TypeResult' v a -> Pretty prettyTypeResultHeader' (SR'.TypeResult' name dt r _aliases) = prettyDeclTriple (name, r, dt) @@ -2202,20 +2302,20 @@ prettyTypeResultHeader' (SR'.TypeResult' name dt r _aliases) = -- -- #5v5UtREE1fTiyTsTK2zJ1YNqfiF25SkfUnnji86Lms -- type Optional -- type Maybe -prettyTypeResultHeaderFull' :: Var v => SR'.TypeResult' v a -> Pretty +prettyTypeResultHeaderFull' :: (Var v) => SR'.TypeResult' v a -> Pretty prettyTypeResultHeaderFull' (SR'.TypeResult' name dt r aliases) = P.lines stuff <> P.newline where stuff = - (P.hiBlack "-- " <> greyHash (HQ.fromReference r)) : - fmap - (\name -> prettyDeclTriple (name, r, dt)) - (name : map HQ'.toHQ (toList aliases)) + (P.hiBlack "-- " <> greyHash (HQ.fromReference r)) + : fmap + (\name -> prettyDeclTriple (name, r, dt)) + (name : map HQ'.toHQ (toList aliases)) where greyHash = styleHashQualified' id P.hiBlack prettyDeclTriple :: - Var v => + (Var v) => (HQ.HashQualified Name, Reference.Reference, DisplayObject () (DD.Decl v a)) -> Pretty prettyDeclTriple (name, _, displayDecl) = case displayDecl of @@ -2224,7 +2324,7 @@ prettyDeclTriple (name, _, displayDecl) = case displayDecl of UserObject decl -> P.syntaxToColor $ DeclPrinter.prettyDeclHeader name decl prettyDeclPair :: - Var v => + (Var v) => PPE.PrettyPrintEnv -> (Reference, DisplayObject () (DD.Decl v a)) -> Pretty @@ -2278,7 +2378,10 @@ renderNameConflicts ppe conflictedNames = do n <- addNumberedArg (HQ.toString hash) pure $ formatNum n <> (P.blue . P.syntaxToColor . prettyHashQualified $ hash) pure . P.wrap $ - ( "The " <> thingKind <> " " <> P.green (prettyName name) + ( "The " + <> thingKind + <> " " + <> P.green (prettyName name) <> " has conflicting definitions:" ) `P.hang` P.lines prettyConflicts @@ -2289,7 +2392,8 @@ renderEditConflicts ppe Patch {..} = do formattedConflicts <- for editConflicts formatConflict pure . Monoid.unlessM (null editConflicts) . P.callout "❓" . P.sep "\n\n" $ [ P.wrap $ - "These" <> P.bold "definitions were edited differently" + "These" + <> P.bold "definitions were edited differently" <> "in namespaces that have been merged into this one." <> "You'll have to tell me what to use as the new definition:", P.indentN 2 (P.lines formattedConflicts) @@ -2312,12 +2416,14 @@ renderEditConflicts ppe Patch {..} = do replacedType <- numberedHQName (PPE.typeName ppe r) replacements <- for [PPE.typeName ppe r | TypeEdit.Replace r <- es] numberedHQName pure . 
P.wrap $ - "The type" <> replacedType <> "was" + "The type" + <> replacedType + <> "was" <> ( if TypeEdit.Deprecate `elem` es then "deprecated and also replaced with" else "replaced with" ) - `P.hang` P.lines replacements + `P.hang` P.lines replacements formatTermEdits :: (Reference.TermReference, Set TermEdit.TermEdit) -> Numbered Pretty @@ -2325,12 +2431,14 @@ renderEditConflicts ppe Patch {..} = do replacedTerm <- numberedHQName (PPE.termName ppe (Referent.Ref r)) replacements <- for [PPE.termName ppe (Referent.Ref r) | TermEdit.Replace r _ <- es] numberedHQName pure . P.wrap $ - "The term" <> replacedTerm <> "was" + "The term" + <> replacedTerm + <> "was" <> ( if TermEdit.Deprecate `elem` es then "deprecated and also replaced with" else "replaced with" ) - `P.hang` P.lines replacements + `P.hang` P.lines replacements formatConflict :: Either (Reference, Set TypeEdit.TypeEdit) @@ -2354,7 +2462,7 @@ runNumbered m = let (a, (_, args)) = State.runState m (0, mempty) in (a, Foldable.toList args) -todoOutput :: Var v => PPED.PrettyPrintEnvDecl -> TO.TodoOutput v a -> (Pretty, NumberedArgs) +todoOutput :: (Var v) => PPED.PrettyPrintEnvDecl -> TO.TodoOutput v a -> (Pretty, NumberedArgs) todoOutput ppe todo = runNumbered do conflicts <- todoConflicts edits <- todoEdits @@ -2423,7 +2531,8 @@ todoOutput ppe todo = runNumbered do pure $ Monoid.unlessM (TO.noEdits todo) . P.callout "🚧" . P.sep "\n\n" . P.nonEmpty $ [ P.wrap - ( "The namespace has" <> fromString (show (TO.todoScore todo)) + ( "The namespace has" + <> fromString (show (TO.todoScore todo)) <> "transitive dependent(s) left to upgrade." <> "Your edit frontier is the dependents of these definitions:" ), @@ -2439,12 +2548,12 @@ todoOutput ppe todo = runNumbered do unscore (_score, b, c) = (b, c) listOfDefinitions :: - Var v => Input.FindScope -> PPE.PrettyPrintEnv -> E.ListDetailed -> [SR'.SearchResult' v a] -> IO Pretty + (Var v) => Input.FindScope -> PPE.PrettyPrintEnv -> E.ListDetailed -> [SR'.SearchResult' v a] -> IO Pretty listOfDefinitions fscope ppe detailed results = pure $ listOfDefinitions' fscope ppe detailed results listOfLinks :: - Var v => PPE.PrettyPrintEnv -> [(HQ.HashQualified Name, Maybe (Type v a))] -> IO Pretty + (Var v) => PPE.PrettyPrintEnv -> [(HQ.HashQualified Name, Maybe (Type v a))] -> IO Pretty listOfLinks _ [] = pure . P.callout "😶" . P.wrap $ "No results. Try using the " @@ -2459,7 +2568,8 @@ listOfLinks ppe results = ], "", tip $ - "Try using" <> IP.makeExample IP.display ["1"] + "Try using" + <> IP.makeExample IP.display ["1"] <> "to display the first result or" <> IP.makeExample IP.view ["1"] <> "to view its source." @@ -2476,7 +2586,7 @@ data ShowNumbers = ShowNumbers | HideNumbers -- numbered args showDiffNamespace :: forall v. - Var v => + (Var v) => ShowNumbers -> PPE.PrettyPrintEnv -> Input.AbsBranchId -> @@ -2920,7 +3030,7 @@ noResults fscope = <> "can be used to search outside the current namespace." listOfDefinitions' :: - Var v => + (Var v) => Input.FindScope -> PPE.PrettyPrintEnv -> -- for printing types of terms :-\ E.ListDetailed -> @@ -2932,24 +3042,23 @@ listOfDefinitions' fscope ppe detailed results = else P.lines . P.nonEmpty - $ prettyNumberedResults : - [ formatMissingStuff termsWithMissingTypes missingTypes, - Monoid.unlessM (null missingBuiltins) - . 
bigproblem - $ P.wrap - "I encountered an inconsistency in the codebase; these definitions refer to built-ins that this version of unison doesn't know about:" - `P.hang` P.column2 - ( (P.bold "Name", P.bold "Built-in") - -- : ("-", "-") - : - fmap - ( bimap - (P.syntaxToColor . prettyHashQualified) - (P.text . Referent.toText) + $ prettyNumberedResults + : [ formatMissingStuff termsWithMissingTypes missingTypes, + Monoid.unlessM (null missingBuiltins) + . bigproblem + $ P.wrap + "I encountered an inconsistency in the codebase; these definitions refer to built-ins that this version of unison doesn't know about:" + `P.hang` P.column2 + ( (P.bold "Name", P.bold "Built-in") + -- : ("-", "-") + : fmap + ( bimap + (P.syntaxToColor . prettyHashQualified) + (P.text . Referent.toText) + ) + missingBuiltins ) - missingBuiltins - ) - ] + ] where prettyNumberedResults = P.numberedList prettyResults -- todo: group this by namespace @@ -2984,7 +3093,7 @@ listOfDefinitions' fscope ppe detailed results = _ -> [] watchPrinter :: - Var v => + (Var v) => Text -> PPE.PrettyPrintEnv -> Ann -> @@ -3116,8 +3225,8 @@ prettyDiff diff = "", P.indentN 2 $ P.column2 $ - (P.hiBlack "Original name", P.hiBlack "New name") : - [(prettyName n, prettyName n2) | (n, n2) <- moved] + (P.hiBlack "Original name", P.hiBlack "New name") + : [(prettyName n, prettyName n2) | (n, n2) <- moved] ] else mempty, if not $ null copied @@ -3127,10 +3236,10 @@ prettyDiff diff = "", P.indentN 2 $ P.column2 $ - (P.hiBlack "Original name", P.hiBlack "New name(s)") : - [ (prettyName n, P.sep " " (prettyName <$> ns)) - | (n, ns) <- copied - ] + (P.hiBlack "Original name", P.hiBlack "New name(s)") + : [ (prettyName n, P.sep " " (prettyName <$> ns)) + | (n, ns) <- copied + ] ] else mempty ] @@ -3157,6 +3266,12 @@ prettyWriteGitRepo RemoteRepo.WriteGitRepo {url} = P.blue (P.text url) -- RemoteRepo.WriteRepoGit RemoteRepo.WriteGitRepo {url} -> P.blue (P.text url) -- RemoteRepo.WriteRepoShare s -> P.blue (P.text (RemoteRepo.printShareRepo s)) +-- | Pretty-print a 'WhichBranchEmpty'. +prettyWhichBranchEmpty :: WhichBranchEmpty -> Pretty +prettyWhichBranchEmpty = \case + WhichBranchEmptyHash hash -> P.shown hash + WhichBranchEmptyPath path -> prettyPath' path + isTestOk :: Term v Ann -> Bool isTestOk tm = case tm of Term.List' ts -> all isSuccess ts @@ -3219,7 +3334,7 @@ endangeredDependentsTable ppeDecl m = -- | Displays a full, non-truncated Branch.CausalHash to a string, e.g. #abcdef displayBranchHash :: CausalHash -> String -displayBranchHash = ("#" <>) . Text.unpack . Hash.base32Hex . unCausalHash +displayBranchHash = ("#" <>) . Text.unpack . Hash.toBase32HexText . 
unCausalHash prettyHumanReadableTime :: UTCTime -> UTCTime -> Pretty prettyHumanReadableTime now time = diff --git a/unison-cli/src/Unison/LSP.hs b/unison-cli/src/Unison/LSP.hs index 0f511ab60..54f8cba34 100644 --- a/unison-cli/src/Unison/LSP.hs +++ b/unison-cli/src/Unison/LSP.hs @@ -7,7 +7,10 @@ module Unison.LSP where import Colog.Core (LogAction (LogAction)) import qualified Colog.Core as Colog +import Compat (onWindows) import Control.Monad.Reader +import Data.ByteString.Builder.Extra (defaultChunkSize) +import Data.Char (toLower) import GHC.IO.Exception (ioe_errno) import qualified Ki import qualified Language.LSP.Logging as LSP @@ -17,8 +20,8 @@ import Language.LSP.Types.SMethodMap import qualified Language.LSP.Types.SMethodMap as SMM import Language.LSP.VFS import qualified Network.Simple.TCP as TCP -import Network.Socket (socketToHandle) import System.Environment (lookupEnv) +import System.IO (hPutStrLn) import Unison.Codebase import Unison.Codebase.Branch (Branch) import qualified Unison.Codebase.Path as Path @@ -49,17 +52,27 @@ getLspPort = fromMaybe "5757" <$> lookupEnv "UNISON_LSP_PORT" -- | Spawn an LSP server on the configured port. spawnLsp :: Codebase IO Symbol Ann -> Runtime Symbol -> STM (Branch IO) -> STM (Path.Absolute) -> IO () -spawnLsp codebase runtime latestBranch latestPath = TCP.withSocketsDo do - lspPort <- getLspPort - UnliftIO.handleIO (handleFailure lspPort) $ do - TCP.serve (TCP.Host "127.0.0.1") lspPort $ \(sock, _sockaddr) -> do - Ki.scoped \scope -> do - sockHandle <- socketToHandle sock ReadWriteMode - -- currently we have an independent VFS for each LSP client since each client might have - -- different un-saved state for the same file. - initVFS $ \vfs -> do - vfsVar <- newMVar vfs - void $ runServerWithHandles lspServerLogger lspClientLogger sockHandle sockHandle (serverDefinition vfsVar codebase runtime scope latestBranch latestPath) +spawnLsp codebase runtime latestBranch latestPath = + ifEnabled . TCP.withSocketsDo $ do + lspPort <- getLspPort + UnliftIO.handleIO (handleFailure lspPort) $ do + TCP.serve (TCP.Host "127.0.0.1") lspPort $ \(sock, _sockaddr) -> do + Ki.scoped \scope -> do + -- If the socket is closed, reading/writing will throw an exception, + -- but since the socket is closed, this connection will be shutting down + -- immediately anyways, so we just ignore it. + let clientInput = handleAny (\_ -> pure "") do + -- The server will be in the process of shutting down if the socket is closed, + -- so just return empty input in the meantime. + fromMaybe "" <$> TCP.recv sock defaultChunkSize + let clientOutput output = handleAny (\_ -> pure ()) do + TCP.sendLazy sock output + + -- currently we have an independent VFS for each LSP client since each client might have + -- different un-saved state for the same file. 
+ initVFS $ \vfs -> do + vfsVar <- newMVar vfs + void $ runServerWith lspServerLogger lspClientLogger clientInput clientOutput (serverDefinition vfsVar codebase runtime scope latestBranch latestPath) where handleFailure :: String -> IOException -> IO () handleFailure lspPort ioerr = @@ -75,6 +88,14 @@ spawnLsp codebase runtime latestBranch latestPath = TCP.withSocketsDo do lspServerLogger = Colog.filterBySeverity Colog.Error Colog.getSeverity $ Colog.cmap (fmap tShow) (LogAction print) -- Where to send logs that occur after a client connects lspClientLogger = Colog.cmap (fmap tShow) LSP.defaultClientLogger + ifEnabled :: IO () -> IO () + ifEnabled runServer = do + -- Default LSP to disabled on Windows unless explicitly enabled + lookupEnv "UNISON_LSP_ENABLED" >>= \case + Just (fmap toLower -> "false") -> pure () + Just (fmap toLower -> "true") -> runServer + Just x -> hPutStrLn stderr $ "Invalid value for UNISON_LSP_ENABLED, expected 'true' or 'false' but found: " <> x + Nothing -> when (not onWindows) runServer serverDefinition :: MVar VFS -> diff --git a/unison-cli/src/Unison/LSP/Completion.hs b/unison-cli/src/Unison/LSP/Completion.hs index 4f9ba2acd..9af847168 100644 --- a/unison-cli/src/Unison/LSP/Completion.hs +++ b/unison-cli/src/Unison/LSP/Completion.hs @@ -50,7 +50,7 @@ completionHandler :: RequestMessage 'TextDocumentCompletion -> (Either ResponseE completionHandler m respond = respond . maybe (Right $ InL mempty) (Right . InR) =<< runMaybeT do let fileUri = (m ^. params . textDocument . uri) - (range, prefix) <- VFS.completionPrefix (m ^. params) + (range, prefix) <- VFS.completionPrefix (m ^. params . textDocument . uri) (m ^. params . position) ppe <- PPED.suffixifiedPPE <$> lift globalPPED codebaseCompletions <- lift getCodebaseCompletions Config {maxCompletions} <- lift getConfig diff --git a/unison-cli/src/Unison/LSP/Conversions.hs b/unison-cli/src/Unison/LSP/Conversions.hs index 5538d28e3..fb1a1a7c5 100644 --- a/unison-cli/src/Unison/LSP/Conversions.hs +++ b/unison-cli/src/Unison/LSP/Conversions.hs @@ -3,18 +3,18 @@ module Unison.LSP.Conversions where import qualified Data.IntervalMap.Interval as Interval import Language.LSP.Types import Unison.LSP.Orphans () -import qualified Unison.Syntax.Lexer as Lex import Unison.Parser.Ann (Ann) import qualified Unison.Parser.Ann as Ann +import Unison.Prelude +import qualified Unison.Syntax.Lexer as Lex import qualified Unison.Util.Range as Range rangeToInterval :: Range -> Interval.Interval Position -rangeToInterval (Range start end) - -- Selections are are open on the right-side - | start == end = Interval.ClosedInterval start end - -- Ranges of a single 'point' need to be closed for some interval map operations to work as - -- intended (E.g. intersecting). 
- | otherwise = Interval.IntervalCO start end +rangeToInterval (Range start end) = + Interval.ClosedInterval start end + +annToInterval :: Ann -> Maybe (Interval.Interval Position) +annToInterval ann = annToRange ann <&> rangeToInterval uToLspPos :: Lex.Pos -> Position uToLspPos uPos = @@ -23,6 +23,12 @@ uToLspPos uPos = _character = fromIntegral $ Lex.column uPos - 1 } +lspToUPos :: Position -> Lex.Pos +lspToUPos lspPos = + Lex.Pos + (fromIntegral $ _line lspPos + 1) -- 1 indexed vs 0 indexed + (fromIntegral $ _character lspPos + 1) + uToLspRange :: Range.Range -> Range uToLspRange (Range.Range start end) = Range (uToLspPos start) (uToLspPos end) diff --git a/unison-cli/src/Unison/LSP/Diagnostics.hs b/unison-cli/src/Unison/LSP/Diagnostics.hs index 0a90ac32e..5d7552a04 100644 --- a/unison-cli/src/Unison/LSP/Diagnostics.hs +++ b/unison-cli/src/Unison/LSP/Diagnostics.hs @@ -25,7 +25,7 @@ uToLspRange :: Range.Range -> Range uToLspRange (Range.Range start end) = Range (uToLspPos start) (uToLspPos end) reportDiagnostics :: - Foldable f => + (Foldable f) => Uri -> Maybe FileVersion -> -- | Note, it's important to still send an empty list of diagnostics if there aren't any diff --git a/unison-cli/src/Unison/LSP/FileAnalysis.hs b/unison-cli/src/Unison/LSP/FileAnalysis.hs index 070e9b3a1..87a9b71e4 100644 --- a/unison-cli/src/Unison/LSP/FileAnalysis.hs +++ b/unison-cli/src/Unison/LSP/FileAnalysis.hs @@ -66,13 +66,13 @@ import Unison.WatchKind (pattern TestWatch) import UnliftIO (atomically, modifyTVar', readTVar, readTVarIO, writeTVar) -- | Lex, parse, and typecheck a file. -checkFile :: HasUri d Uri => d -> Lsp (Maybe FileAnalysis) +checkFile :: (HasUri d Uri) => d -> Lsp (Maybe FileAnalysis) checkFile doc = runMaybeT $ do let fileUri = doc ^. uri (fileVersion, contents) <- VFS.getFileContents fileUri parseNames <- lift getParseNames let sourceName = getUri $ doc ^. uri - let lexedSource@(srcText, _tokens) = (contents, L.lexer (Text.unpack sourceName) (Text.unpack contents)) + let lexedSource@(srcText, tokens) = (contents, L.lexer (Text.unpack sourceName) (Text.unpack contents)) let ambientAbilities = [] cb <- asks codebase let generateUniqueName = Parser.uniqueBase32Namegen <$> Random.getSystemDRG @@ -92,6 +92,7 @@ checkFile doc = runMaybeT $ do & foldMap (\(RangedCodeAction {_codeActionRanges, _codeAction}) -> (,_codeAction) <$> _codeActionRanges) & toRangeMap let fileSummary = mkFileSummary parsedFile typecheckedFile + let tokenMap = getTokenMap tokens let fileAnalysis = FileAnalysis {diagnostics = diagnosticRanges, codeActions = codeActionRanges, fileSummary, ..} pure $ fileAnalysis @@ -150,12 +151,14 @@ mkFileSummary parsed typechecked = case (parsed, typechecked) of where declsRefMap :: (Ord v, Ord r) => Map v (r, a) -> Map r (Map v a) declsRefMap m = - m & Map.toList + m + & Map.toList & fmap (\(v, (r, a)) -> (r, Map.singleton v a)) & Map.fromListWith (<>) termsRefMap :: (Ord v, Ord r) => Map v (r, a, b) -> Map r (Map v (a, b)) termsRefMap m = - m & Map.toList + m + & Map.toList & fmap (\(v, (r, a, b)) -> (r, Map.singleton v (a, b))) & Map.fromListWith (<>) -- Gets the user provided type annotation for a term if there is one. 
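
-- [Editor's sketch] The `tokenMap` wired into `checkFile` above (and built by `getTokenMap`
-- in the next hunk) indexes the lexer's tokens by their source span, presumably so that
-- later handlers can look up the lexeme sitting under a cursor position. Below is a minimal,
-- self-contained illustration of that lookup, assuming hypothetical `Pos`, `Span`, and
-- `Lexeme` stand-in types; it is not the PR's actual `Data.IntervalMap`-based code.

-- Hypothetical stand-ins for the lexer's position, span, and token payload types.
-- Deriving Ord on Pos gives lexicographic (line, then column) ordering, which is what
-- positional containment checks need.
data Pos = Pos {line :: Int, col :: Int} deriving (Eq, Ord, Show)

data Span = Span {spanStart :: Pos, spanEnd :: Pos} deriving (Eq, Show)

data Lexeme = Identifier String | Keyword String deriving (Show)

-- A toy "token map": each token's span paired with its lexeme.
type TokenMap = [(Span, Lexeme)]

-- Find the first lexeme whose span contains the given position
-- (start inclusive, end exclusive).
lexemeAt :: Pos -> TokenMap -> Maybe Lexeme
lexemeAt p tm =
  case [lx | (Span s e, lx) <- tm, s <= p, p < e] of
    lx : _ -> Just lx
    [] -> Nothing

-- Example: the identifier "foo" spans columns 1-4 on line 1; the cursor sits at column 2.
main :: IO ()
main = do
  let tm = [(Span (Pos 1 1) (Pos 1 4), Identifier "foo")]
  print (lexemeAt (Pos 1 2) tm) -- Just (Identifier "foo")
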
@@ -188,12 +191,21 @@ fileAnalysisWorker = forever do for freshlyCheckedFiles \(FileAnalysis {fileUri, fileVersion, diagnostics}) -> do reportDiagnostics fileUri (Just fileVersion) $ fold diagnostics -analyseFile :: Foldable f => Uri -> Text -> f (Note Symbol Ann) -> Lsp ([Diagnostic], [RangedCodeAction]) +analyseFile :: (Foldable f) => Uri -> Text -> f (Note Symbol Ann) -> Lsp ([Diagnostic], [RangedCodeAction]) analyseFile fileUri srcText notes = do pped <- PPED.suffixifiedPPE <$> LSP.globalPPED analyseNotes fileUri pped (Text.unpack srcText) notes -analyseNotes :: Foldable f => Uri -> PrettyPrintEnv -> String -> f (Note Symbol Ann) -> Lsp ([Diagnostic], [RangedCodeAction]) +getTokenMap :: [L.Token L.Lexeme] -> IM.IntervalMap Position L.Lexeme +getTokenMap tokens = + tokens + & mapMaybe + ( \token -> + IM.singleton <$> (annToInterval $ Parser.ann token) <*> pure (L.payload token) + ) + & fold + +analyseNotes :: (Foldable f) => Uri -> PrettyPrintEnv -> String -> f (Note Symbol Ann) -> Lsp ([Diagnostic], [RangedCodeAction]) analyseNotes fileUri ppe src notes = do currentPath <- getCurrentPath flip foldMapM notes \note -> case note of @@ -222,9 +234,31 @@ analyseNotes fileUri ppe src notes = do (_v, locs) <- toList defns (r, rs) <- withNeighbours (locs >>= aToR) pure (r, ("duplicate definition",) <$> rs) - TypeError.Other e -> do - Debug.debugM Debug.LSP "No Diagnostic configured for type error: " e - empty + TypeError.RedundantPattern loc -> singleRange loc + TypeError.UncoveredPatterns loc _pats -> singleRange loc + -- These type errors don't have custom type error conversions, but some + -- still have valid diagnostics. + TypeError.Other e@(Context.ErrorNote {cause}) -> case cause of + Context.PatternArityMismatch loc _typ _numArgs -> singleRange loc + Context.HandlerOfUnexpectedType loc _typ -> singleRange loc + Context.TypeMismatch {} -> shouldHaveBeenHandled e + Context.IllFormedType {} -> shouldHaveBeenHandled e + Context.UnknownSymbol loc _ -> singleRange loc + Context.UnknownTerm loc _ _ _ -> singleRange loc + Context.AbilityCheckFailure {} -> shouldHaveBeenHandled e + Context.AbilityEqFailure {} -> shouldHaveBeenHandled e + Context.EffectConstructorWrongArgCount {} -> shouldHaveBeenHandled e + Context.MalformedEffectBind {} -> shouldHaveBeenHandled e + Context.DuplicateDefinitions {} -> shouldHaveBeenHandled e + Context.UnguardedLetRecCycle {} -> shouldHaveBeenHandled e + Context.ConcatPatternWithoutConstantLength loc _ -> singleRange loc + Context.DataEffectMismatch _ _ decl -> singleRange $ DD.annotation decl + Context.UncoveredPatterns loc _ -> singleRange loc + Context.RedundantPattern loc -> singleRange loc + Context.InaccessiblePattern loc -> singleRange loc + shouldHaveBeenHandled e = do + Debug.debugM Debug.LSP "This diagnostic should have been handled by a previous case but was not" e + empty diags = noteDiagnostic currentPath note ranges -- Sort on match accuracy first, then name. 
codeActions <- case cause of diff --git a/unison-cli/src/Unison/LSP/Hover.hs b/unison-cli/src/Unison/LSP/Hover.hs index bdf09dac1..e218dc368 100644 --- a/unison-cli/src/Unison/LSP/Hover.hs +++ b/unison-cli/src/Unison/LSP/Hover.hs @@ -8,39 +8,124 @@ import Control.Monad.Reader import qualified Data.Text as Text import Language.LSP.Types import Language.LSP.Types.Lens -import qualified Unison.Codebase.Path as Path +import qualified Unison.ABT as ABT +import qualified Unison.HashQualified as HQ +import Unison.LSP.FileAnalysis (ppedForFile) +import qualified Unison.LSP.Queries as LSPQ import Unison.LSP.Types -import Unison.LSP.VFS +import qualified Unison.LSP.VFS as VFS +import qualified Unison.LabeledDependency as LD +import Unison.Parser.Ann (Ann) +import qualified Unison.Pattern as Pattern import Unison.Prelude -import qualified Unison.Server.Backend as Backend -import qualified Unison.Server.Syntax as Server -import qualified Unison.Server.Types as Backend -import qualified Unison.Syntax.HashQualified as HQ (fromText) +import qualified Unison.PrettyPrintEnvDecl as PPED +import qualified Unison.Reference as Reference +import Unison.Symbol (Symbol) +import qualified Unison.Syntax.DeclPrinter as DeclPrinter +import qualified Unison.Syntax.Name as Name +import qualified Unison.Syntax.TypePrinter as TypePrinter +import qualified Unison.Term as Term +import qualified Unison.Util.Pretty as Pretty --- | Rudimentary hover handler +-- | Hover help handler -- --- TODO: Add docs, use FileAnalysis to select hover target. +-- TODO: +-- * Add docs +-- * Resolve fqn on hover hoverHandler :: RequestMessage 'TextDocumentHover -> (Either ResponseError (ResponseResult 'TextDocumentHover) -> Lsp ()) -> Lsp () hoverHandler m respond = respond . Right =<< runMaybeT do - let p = (m ^. params) - txtIdentifier <- identifierAtPosition p - hqIdentifier <- MaybeT . pure $ HQ.fromText txtIdentifier - cb <- asks codebase - rt <- asks runtime - results <- MaybeT . fmap eitherToMaybe $ (lspBackend $ Backend.prettyDefinitionsForHQName Path.empty Nothing Nothing (Backend.Suffixify True) rt cb hqIdentifier) - let termResults = formatTermDefinition <$> toList (Backend.termDefinitions results) - let typeResults = formatTypeDefinition <$> toList (Backend.typeDefinitions results) - let markup = Text.intercalate "\n\n---\n\n" $ termResults <> typeResults + let pos = (m ^. params . position) + hoverTxt <- hoverInfo (m ^. params . textDocument . 
uri) pos pure $ Hover - { _contents = HoverContents (MarkupContent MkPlainText markup), + { _contents = HoverContents (MarkupContent MkMarkdown hoverTxt), _range = Nothing -- TODO add range info } - where - formatTermDefinition :: Backend.TermDefinition -> Text - formatTermDefinition (Backend.TermDefinition {bestTermName, signature}) = - bestTermName <> " : " <> Text.pack (Server.toPlain signature) - formatTypeDefinition :: Backend.TypeDefinition -> Text - formatTypeDefinition (Backend.TypeDefinition {bestTypeName}) = bestTypeName +hoverInfo :: Uri -> Position -> MaybeT Lsp Text +hoverInfo uri pos = + markdownify <$> (hoverInfoForRef <|> hoverInfoForLiteral) + where + markdownify :: Text -> Text + markdownify rendered = Text.unlines ["```unison", rendered, "```"] + prettyWidth :: Pretty.Width + prettyWidth = 40 + hoverInfoForRef :: MaybeT Lsp Text + hoverInfoForRef = do + symAtCursor <- VFS.identifierAtPosition uri pos + ref <- LSPQ.refAtPosition uri pos + pped <- lift $ ppedForFile uri + case ref of + LD.TypeReference (Reference.Builtin {}) -> pure (symAtCursor <> " : ") + LD.TypeReference ref@(Reference.DerivedId refId) -> do + nameAtCursor <- MaybeT . pure $ Name.fromText symAtCursor + decl <- LSPQ.getTypeDeclaration uri refId + let typ = Text.pack . Pretty.toPlain prettyWidth . Pretty.syntaxToColor $ DeclPrinter.prettyDecl pped ref (HQ.NameOnly nameAtCursor) decl + pure typ + LD.TermReferent ref -> do + typ <- LSPQ.getTypeOfReferent uri ref + let renderedType = Text.pack $ TypePrinter.prettyStr (Just prettyWidth) (PPED.suffixifiedPPE pped) typ + pure (symAtCursor <> " : " <> renderedType) + hoverInfoForLiteral :: MaybeT Lsp Text + hoverInfoForLiteral = do + LSPQ.nodeAtPosition uri pos >>= \case + LSPQ.TermNode term -> do + typ <- hoistMaybe $ builtinTypeForTermLiterals term + pure (": " <> typ) + LSPQ.TypeNode {} -> empty + LSPQ.PatternNode pat -> do + typ <- hoistMaybe $ builtinTypeForPatternLiterals pat + pure (": " <> typ) + + hoistMaybe :: Maybe a -> MaybeT Lsp a + hoistMaybe = MaybeT . pure + +-- | Get the type for term literals. 
+builtinTypeForTermLiterals :: Term.Term Symbol Ann -> Maybe Text +builtinTypeForTermLiterals term = + case ABT.out term of + ABT.Tm f -> case f of + Term.Int {} -> Just "Int" + Term.Nat {} -> Just "Nat" + Term.Float {} -> Just "Float" + Term.Boolean {} -> Just "Boolean" + Term.Text {} -> Just "Text" + Term.Char {} -> Just "Char" + Term.Blank {} -> Nothing + Term.Ref {} -> Nothing + Term.Constructor {} -> Nothing + Term.Request {} -> Nothing + Term.Handle {} -> Nothing + Term.App {} -> Nothing + Term.Ann {} -> Nothing + Term.List {} -> Nothing + Term.If {} -> Nothing + Term.And {} -> Nothing + Term.Or {} -> Nothing + Term.Lam {} -> Nothing + Term.LetRec {} -> Nothing + Term.Let {} -> Nothing + Term.Match {} -> Nothing + Term.TermLink {} -> Nothing + Term.TypeLink {} -> Nothing + ABT.Var {} -> Nothing + ABT.Cycle {} -> Nothing + ABT.Abs {} -> Nothing + +builtinTypeForPatternLiterals :: Pattern.Pattern Ann -> Maybe Text +builtinTypeForPatternLiterals = \case + Pattern.Unbound _ -> Nothing + Pattern.Var _ -> Nothing + Pattern.Boolean _ _ -> Just "Boolean" + Pattern.Int _ _ -> Just "Int" + Pattern.Nat _ _ -> Just "Nat" + Pattern.Float _ _ -> Just "Float" + Pattern.Text _ _ -> Just "Text" + Pattern.Char _ _ -> Just "Char" + Pattern.Constructor _ _ _ -> Nothing + Pattern.As _ _ -> Nothing + Pattern.EffectPure _ _ -> Nothing + Pattern.EffectBind _ _ _ _ -> Nothing + Pattern.SequenceLiteral _ _ -> Nothing + Pattern.SequenceOp _ _ _ _ -> Nothing diff --git a/unison-cli/src/Unison/LSP/NotificationHandlers.hs b/unison-cli/src/Unison/LSP/NotificationHandlers.hs index 85264c360..138dec5ca 100644 --- a/unison-cli/src/Unison/LSP/NotificationHandlers.hs +++ b/unison-cli/src/Unison/LSP/NotificationHandlers.hs @@ -9,7 +9,7 @@ import Unison.LSP.Types initializedHandler :: NotificationMessage 'Initialized -> Lsp () initializedHandler _ = pure () -withDebugging :: Show m => (m -> Lsp ()) -> (m -> Lsp ()) +withDebugging :: (Show m) => (m -> Lsp ()) -> (m -> Lsp ()) withDebugging handler message = do Debug.debugM Debug.LSP "Notification" message handler message diff --git a/unison-cli/src/Unison/LSP/Queries.hs b/unison-cli/src/Unison/LSP/Queries.hs index 4e855f450..1c66632bb 100644 --- a/unison-cli/src/Unison/LSP/Queries.hs +++ b/unison-cli/src/Unison/LSP/Queries.hs @@ -1,25 +1,34 @@ +{-# LANGUAGE DataKinds #-} + -- | Rewrites of some codebase queries, but which check the scratch file for info first. 
module Unison.LSP.Queries - ( refInTerm, + ( getTypeOfReferent, + getTypeDeclaration, + refAtPosition, + nodeAtPosition, + refInTerm, refInType, findSmallestEnclosingNode, findSmallestEnclosingType, refInDecl, - getTypeOfReferent, - getTypeDeclaration, + SourceNode (..), ) where import Control.Lens +import qualified Control.Lens as Lens import Control.Monad.Reader +import Data.Generics.Product (field) import Language.LSP.Types import qualified Unison.ABT as ABT +import qualified Unison.Builtin.Decls as Builtins import qualified Unison.Codebase as Codebase import Unison.ConstructorReference (GConstructorReference (..)) import qualified Unison.ConstructorType as CT import Unison.DataDeclaration (Decl) import qualified Unison.DataDeclaration as DD -import Unison.LSP.FileAnalysis +import Unison.LSP.Conversions (lspToUPos) +import Unison.LSP.FileAnalysis (getFileSummary) import Unison.LSP.Orphans () import Unison.LSP.Types import Unison.LabeledDependency @@ -27,17 +36,41 @@ import qualified Unison.LabeledDependency as LD import Unison.Lexer.Pos (Pos (..)) import Unison.Parser.Ann (Ann) import qualified Unison.Parser.Ann as Ann +import qualified Unison.Pattern as Pattern import Unison.Prelude import Unison.Reference (TypeReference) import qualified Unison.Reference as Reference import Unison.Referent (Referent) import qualified Unison.Referent as Referent import Unison.Symbol (Symbol) +import Unison.Syntax.Parser (ann) import Unison.Term (MatchCase (MatchCase), Term) import qualified Unison.Term as Term import Unison.Type (Type) import qualified Unison.Type as Type +-- | Returns a reference to whatever the symbol at the given position refers to. +refAtPosition :: Uri -> Position -> MaybeT Lsp LabeledDependency +refAtPosition uri pos = do + findInNode <|> findInDecl + where + findInNode :: MaybeT Lsp LabeledDependency + findInNode = + nodeAtPosition uri pos >>= \case + TermNode term -> hoistMaybe $ refInTerm term + TypeNode typ -> hoistMaybe $ fmap TypeReference (refInType typ) + PatternNode pat -> hoistMaybe $ refInPattern pat + findInDecl :: MaybeT Lsp LabeledDependency + findInDecl = + LD.TypeReference <$> do + let uPos = lspToUPos pos + (FileSummary {dataDeclsBySymbol, effectDeclsBySymbol}) <- getFileSummary uri + ( altMap (hoistMaybe . refInDecl uPos . Right . snd) dataDeclsBySymbol + <|> altMap (hoistMaybe . refInDecl uPos . Left . snd) effectDeclsBySymbol + ) + hoistMaybe :: Maybe a -> MaybeT Lsp a + hoistMaybe = MaybeT . pure + -- | Gets the type of a reference from either the parsed file or the codebase. getTypeOfReferent :: Uri -> Referent -> MaybeT Lsp (Type Symbol Ann) getTypeOfReferent fileUri ref = do @@ -123,42 +156,135 @@ refInType typ = case ABT.out typ of ABT.Cycle _r -> Nothing ABT.Abs _v _r -> Nothing +-- Returns the reference a given type node refers to, if any. 
+refInPattern :: Pattern.Pattern a -> Maybe LabeledDependency +refInPattern = \case + Pattern.Unbound {} -> Nothing + Pattern.Var {} -> Nothing + Pattern.Boolean {} -> Nothing + Pattern.Int {} -> Nothing + Pattern.Nat {} -> Nothing + Pattern.Float {} -> Nothing + Pattern.Text {} -> Nothing + Pattern.Char {} -> Nothing + Pattern.Constructor _loc conRef _ -> Just (LD.ConReference conRef CT.Data) + Pattern.As _loc _pat -> Nothing + Pattern.EffectPure {} -> Nothing + Pattern.EffectBind _loc conRef _ _ -> Just (LD.ConReference conRef CT.Effect) + Pattern.SequenceLiteral {} -> Nothing + Pattern.SequenceOp {} -> Nothing + +data SourceNode a + = TermNode (Term Symbol a) + | TypeNode (Type Symbol a) + | PatternNode (Pattern.Pattern a) + deriving stock (Eq, Show) + +instance Functor SourceNode where + fmap f (TermNode t) = TermNode (Term.amap f t) + fmap f (TypeNode t) = TypeNode (fmap f t) + fmap f (PatternNode t) = PatternNode (fmap f t) + -- | Find the the node in a term which contains the specified position, but none of its -- children contain that position. -findSmallestEnclosingNode :: Pos -> Term Symbol Ann -> Maybe (Either (Term Symbol Ann) (Type Symbol Ann)) +findSmallestEnclosingNode :: Pos -> Term Symbol Ann -> Maybe (SourceNode Ann) findSmallestEnclosingNode pos term - | not (ABT.annotation term `Ann.contains` pos) = Nothing - | otherwise = (<|> Just (Left term)) $ do - case ABT.out term of - ABT.Tm f -> case f of - Term.Int {} -> Just (Left term) - Term.Nat {} -> Just (Left term) - Term.Float {} -> Just (Left term) - Term.Boolean {} -> Just (Left term) - Term.Text {} -> Just (Left term) - Term.Char {} -> Just (Left term) - Term.Blank {} -> Just (Left term) - Term.Ref {} -> Just (Left term) - Term.Constructor {} -> Just (Left term) - Term.Request {} -> Just (Left term) - Term.Handle a b -> findSmallestEnclosingNode pos a <|> findSmallestEnclosingNode pos b - Term.App a b -> findSmallestEnclosingNode pos a <|> findSmallestEnclosingNode pos b - Term.Ann a typ -> findSmallestEnclosingNode pos a <|> (Right <$> findSmallestEnclosingType pos typ) - Term.List xs -> altSum (findSmallestEnclosingNode pos <$> xs) - Term.If cond a b -> findSmallestEnclosingNode pos cond <|> findSmallestEnclosingNode pos a <|> findSmallestEnclosingNode pos b - Term.And l r -> findSmallestEnclosingNode pos l <|> findSmallestEnclosingNode pos r - Term.Or l r -> findSmallestEnclosingNode pos l <|> findSmallestEnclosingNode pos r - Term.Lam a -> findSmallestEnclosingNode pos a - Term.LetRec _isTop xs y -> altSum (findSmallestEnclosingNode pos <$> xs) <|> findSmallestEnclosingNode pos y - Term.Let _isTop a b -> findSmallestEnclosingNode pos a <|> findSmallestEnclosingNode pos b - Term.Match a cases -> - findSmallestEnclosingNode pos a - <|> altSum (cases <&> \(MatchCase _pat grd body) -> altSum (findSmallestEnclosingNode pos <$> grd) <|> findSmallestEnclosingNode pos body) - Term.TermLink {} -> Just (Left term) - Term.TypeLink {} -> Just (Left term) - ABT.Var _v -> Just (Left term) - ABT.Cycle r -> findSmallestEnclosingNode pos r - ABT.Abs _v r -> findSmallestEnclosingNode pos r + | annIsFilePosition (ABT.annotation term) && not (ABT.annotation term `Ann.contains` pos) = Nothing + | Just r <- cleanImplicitUnit term = findSmallestEnclosingNode pos r + | otherwise = do + -- For leaf nodes we require that they be an in-file position, not Intrinsic or + -- external. 
+ -- In some rare cases it's possible for an External/Intrinsic node to have children that + -- ARE in the file, so we need to make sure we still crawl their children. + let guardInFile = guard (annIsFilePosition (ABT.annotation term)) + let bestChild = case ABT.out term of + ABT.Tm f -> case f of + Term.Int {} -> guardInFile *> Just (TermNode term) + Term.Nat {} -> guardInFile *> Just (TermNode term) + Term.Float {} -> guardInFile *> Just (TermNode term) + Term.Boolean {} -> guardInFile *> Just (TermNode term) + Term.Text {} -> guardInFile *> Just (TermNode term) + Term.Char {} -> guardInFile *> Just (TermNode term) + Term.Blank {} -> guardInFile *> Just (TermNode term) + Term.Ref {} -> guardInFile *> Just (TermNode term) + Term.Constructor {} -> guardInFile *> Just (TermNode term) + Term.Request {} -> guardInFile *> Just (TermNode term) + Term.Handle a b -> findSmallestEnclosingNode pos a <|> findSmallestEnclosingNode pos b + Term.App a b -> + -- We crawl the body of the App first because the annotations for certain + -- lambda syntaxes get a bit squirrelly. + -- Specifically Tuple constructor apps will have an annotation which spans the + -- whole tuple, e.g. the annotation of the tuple constructor for `(1, 2)` will + -- cover ALL of `(1, 2)`, so we check the body of the tuple app first to see + -- if the cursor is on 1 or 2 before falling back on the annotation of the + -- 'function' of the app. + findSmallestEnclosingNode pos b <|> findSmallestEnclosingNode pos a + Term.Ann a typ -> findSmallestEnclosingNode pos a <|> (TypeNode <$> findSmallestEnclosingType pos typ) + Term.List xs -> altSum (findSmallestEnclosingNode pos <$> xs) + Term.If cond a b -> findSmallestEnclosingNode pos cond <|> findSmallestEnclosingNode pos a <|> findSmallestEnclosingNode pos b + Term.And l r -> findSmallestEnclosingNode pos l <|> findSmallestEnclosingNode pos r + Term.Or l r -> findSmallestEnclosingNode pos l <|> findSmallestEnclosingNode pos r + Term.Lam a -> findSmallestEnclosingNode pos a + Term.LetRec _isTop xs y -> altSum (findSmallestEnclosingNode pos <$> xs) <|> findSmallestEnclosingNode pos y + Term.Let _isTop a b -> findSmallestEnclosingNode pos a <|> findSmallestEnclosingNode pos b + Term.Match a cases -> + findSmallestEnclosingNode pos a + <|> altSum (cases <&> \(MatchCase pat grd body) -> ((PatternNode <$> findSmallestEnclosingPattern pos pat) <|> (grd >>= findSmallestEnclosingNode pos) <|> findSmallestEnclosingNode pos body)) + Term.TermLink {} -> guardInFile *> Just (TermNode term) + Term.TypeLink {} -> guardInFile *> Just (TermNode term) + ABT.Var _v -> guardInFile *> Just (TermNode term) + ABT.Cycle r -> findSmallestEnclosingNode pos r + ABT.Abs _v r -> findSmallestEnclosingNode pos r + let fallback = if annIsFilePosition (ABT.annotation term) then Just (TermNode term) else Nothing + bestChild <|> fallback + where + -- tuples always end in an implicit unit, but it's annotated with the span of the whole + -- tuple, which is problematic, so we need to detect and remove implicit tuples. + -- We can detect them because we know that the last element of a tuple is always its + -- implicit unit. 
+ cleanImplicitUnit :: Term Symbol Ann -> Maybe (Term Symbol Ann) + cleanImplicitUnit = \case + ABT.Tm' (Term.App (ABT.Tm' (Term.App (ABT.Tm' (Term.Constructor (ConstructorReference ref 0))) x)) trm) + | ref == Builtins.pairRef && Term.amap (const ()) trm == Builtins.unitTerm () -> Just x + _ -> Nothing + +findSmallestEnclosingPattern :: Pos -> Pattern.Pattern Ann -> Maybe (Pattern.Pattern Ann) +findSmallestEnclosingPattern pos pat + | Just validTargets <- cleanImplicitUnit pat = findSmallestEnclosingPattern pos validTargets + | annIsFilePosition (ann pat) && not (ann pat `Ann.contains` pos) = Nothing + | otherwise = do + -- For leaf nodes we require that they be an in-file position, not Intrinsic or + -- external. + -- In some rare cases it's possible for an External/Intrinsic node to have children that + -- ARE in the file, so we need to make sure we still crawl their children. + let guardInFile = guard (annIsFilePosition (ann pat)) + let bestChild = case pat of + Pattern.Unbound {} -> guardInFile *> Just pat + Pattern.Var {} -> guardInFile *> Just pat + Pattern.Boolean {} -> guardInFile *> Just pat + Pattern.Int {} -> guardInFile *> Just pat + Pattern.Nat {} -> guardInFile *> Just pat + Pattern.Float {} -> guardInFile *> Just pat + Pattern.Text {} -> guardInFile *> Just pat + Pattern.Char {} -> guardInFile *> Just pat + Pattern.Constructor _loc _conRef pats -> altSum (findSmallestEnclosingPattern pos <$> pats) + Pattern.As _loc p -> findSmallestEnclosingPattern pos p + Pattern.EffectPure _loc p -> findSmallestEnclosingPattern pos p + Pattern.EffectBind _loc _conRef pats p -> altSum (findSmallestEnclosingPattern pos <$> pats) <|> findSmallestEnclosingPattern pos p + Pattern.SequenceLiteral _loc pats -> altSum (findSmallestEnclosingPattern pos <$> pats) + Pattern.SequenceOp _loc p1 _op p2 -> findSmallestEnclosingPattern pos p1 <|> findSmallestEnclosingPattern pos p2 + let fallback = if annIsFilePosition (ann pat) then Just pat else Nothing + bestChild <|> fallback + where + -- tuple patterns always end in an implicit unit, but it's annotated with the span of the whole + -- tuple, which is problematic, so we need to detect and remove implicit tuples. + -- We can detect them because we know that the last element of a tuple is always its + -- implicit unit. + cleanImplicitUnit :: Pattern.Pattern Ann -> Maybe (Pattern.Pattern Ann) + cleanImplicitUnit = \case + (Pattern.Constructor _loc (ConstructorReference conRef 0) [pat1, Pattern.Constructor _ (ConstructorReference mayUnitRef 0) _]) + | conRef == Builtins.pairRef && mayUnitRef == Builtins.unitRef -> Just pat1 + _ -> Nothing -- | Find the the node in a type which contains the specified position, but none of its -- children contain that position. @@ -166,21 +292,32 @@ findSmallestEnclosingNode pos term -- that a position references. 
findSmallestEnclosingType :: Pos -> Type Symbol Ann -> Maybe (Type Symbol Ann) findSmallestEnclosingType pos typ - | not (ABT.annotation typ `Ann.contains` pos) = Nothing - | otherwise = (<|> Just typ) $ do - case ABT.out typ of - ABT.Tm f -> case f of - Type.Ref {} -> Just typ - Type.Arrow a b -> findSmallestEnclosingType pos a <|> findSmallestEnclosingType pos b - Type.Effect a b -> findSmallestEnclosingType pos a <|> findSmallestEnclosingType pos b - Type.App a b -> findSmallestEnclosingType pos a <|> findSmallestEnclosingType pos b - Type.Forall r -> findSmallestEnclosingType pos r - Type.Ann a _kind -> findSmallestEnclosingType pos a - Type.Effects es -> altSum (findSmallestEnclosingType pos <$> es) - Type.IntroOuter a -> findSmallestEnclosingType pos a - ABT.Var _v -> Just typ - ABT.Cycle r -> findSmallestEnclosingType pos r - ABT.Abs _v r -> findSmallestEnclosingType pos r + | annIsFilePosition (ABT.annotation typ) && not (ABT.annotation typ `Ann.contains` pos) = Nothing + | otherwise = do + -- For leaf nodes we require that they be an in-file position, not Intrinsic or + -- external. + -- In some rare cases it's possible for an External/Intrinsic node to have children that + -- ARE in the file, so we need to make sure we still crawl their children. + let guardInFile = guard (annIsFilePosition (ABT.annotation typ)) + let bestChild = case ABT.out typ of + ABT.Tm f -> case f of + Type.Ref {} -> guardInFile *> Just typ + Type.Arrow a b -> findSmallestEnclosingType pos a <|> findSmallestEnclosingType pos b + Type.Effect effs rhs -> + -- There's currently a bug in the annotations for effects which cause them to + -- span larger than they should. As a workaround for now we just make sure to + -- search the RHS before the effects. + findSmallestEnclosingType pos rhs <|> findSmallestEnclosingType pos effs + Type.App a b -> findSmallestEnclosingType pos a <|> findSmallestEnclosingType pos b + Type.Forall r -> findSmallestEnclosingType pos r + Type.Ann a _kind -> findSmallestEnclosingType pos a + Type.Effects es -> altSum (findSmallestEnclosingType pos <$> es) + Type.IntroOuter a -> findSmallestEnclosingType pos a + ABT.Var _v -> guardInFile *> Just typ + ABT.Cycle r -> findSmallestEnclosingType pos r + ABT.Abs _v r -> findSmallestEnclosingType pos r + let fallback = if annIsFilePosition (ABT.annotation typ) then Just typ else Nothing + bestChild <|> fallback -- | Returns the type reference the given position applies to within a Decl, if any. -- @@ -193,3 +330,42 @@ refInDecl p (DD.asDataDecl -> dd) = typeNode <- findSmallestEnclosingType p typ ref <- refInType typeNode pure ref + +-- | Returns the ABT node at the provided position. +-- Does not return Decl nodes. +nodeAtPosition :: Uri -> Position -> MaybeT Lsp (SourceNode Ann) +nodeAtPosition uri (lspToUPos -> pos) = do + (FileSummary {termsBySymbol, testWatchSummary, exprWatchSummary}) <- getFileSummary uri + + let (trms, typs) = termsBySymbol & foldMap \(_ref, trm, mayTyp) -> ([trm], toList mayTyp) + ( altMap (hoistMaybe . findSmallestEnclosingNode pos . removeInferredTypeAnnotations) trms + <|> altMap (hoistMaybe . findSmallestEnclosingNode pos . removeInferredTypeAnnotations) (testWatchSummary ^.. folded . _3) + <|> altMap (hoistMaybe . findSmallestEnclosingNode pos . removeInferredTypeAnnotations) (exprWatchSummary ^.. folded . _3) + <|> altMap (fmap TypeNode . hoistMaybe . findSmallestEnclosingType pos) typs + ) + where + hoistMaybe :: Maybe a -> MaybeT Lsp a + hoistMaybe = MaybeT . 
pure + +annIsFilePosition :: Ann -> Bool +annIsFilePosition = \case + Ann.Intrinsic -> False + Ann.External -> False + Ann.Ann {} -> True + +-- | Okay, so currently during synthesis in typechecking the typechecker adds `Ann` nodes +-- to the term specifying types of subterms. This is a problem because we the types in these +-- Ann nodes are just tagged with the full `Ann` from the term it was inferred for, even +-- though none of these types exist in the file, and at a glance we can't tell whether a type +-- is inferred or user-specified. +-- +-- So for now we crawl the term and remove any Ann nodes from within. The downside being you +-- can no longer hover on Type signatures within a term, but the benefit is that hover +-- actually works. +removeInferredTypeAnnotations :: (Ord v) => Term.Term v Ann -> Term.Term v Ann +removeInferredTypeAnnotations = + Lens.transformOf (field @"out" . traversed) \case + ABT.Term {out = ABT.Tm (Term.Ann trm typ)} + -- If the type's annotation is identical to the term's annotation, then this must be an inferred type + | ABT.annotation typ == ABT.annotation trm -> trm + t -> t diff --git a/unison-cli/src/Unison/LSP/Types.hs b/unison-cli/src/Unison/LSP/Types.hs index 2bda4eb3a..6ea38bfcd 100644 --- a/unison-cli/src/Unison/LSP/Types.hs +++ b/unison-cli/src/Unison/LSP/Types.hs @@ -16,6 +16,7 @@ import qualified Data.Aeson as Aeson import qualified Data.ByteString.Lazy.Char8 as BSC import qualified Data.HashMap.Strict as HM import Data.IntervalMap.Lazy (IntervalMap) +import qualified Data.IntervalMap.Lazy as IM import qualified Data.Map as Map import qualified Data.Set as Set import qualified Data.Text as Text @@ -110,6 +111,7 @@ data FileAnalysis = FileAnalysis { fileUri :: Uri, fileVersion :: FileVersion, lexedSource :: LexedSource, + tokenMap :: IM.IntervalMap Position Lexer.Lexeme, parsedFile :: Maybe (UF.UnisonFile Symbol Ann), typecheckedFile :: Maybe (UF.TypecheckedUnisonFile Symbol Ann), notes :: Seq (Note Symbol Ann), diff --git a/unison-cli/src/Unison/LSP/VFS.hs b/unison-cli/src/Unison/LSP/VFS.hs index bd3038141..a2b5ca685 100644 --- a/unison-cli/src/Unison/LSP/VFS.hs +++ b/unison-cli/src/Unison/LSP/VFS.hs @@ -9,7 +9,6 @@ import qualified Colog.Core as Colog import Control.Lens import Control.Monad.Reader import Control.Monad.State -import Data.Char import qualified Data.Map as Map import qualified Data.Set as Set import Data.Set.Lens (setOf) @@ -18,12 +17,13 @@ import qualified Data.Text.Utf16.Rope as Rope import Data.Tuple (swap) import qualified Language.LSP.Logging as LSP import Language.LSP.Types -import Language.LSP.Types.Lens (HasCharacter (character), HasParams (params), HasPosition (position), HasTextDocument (textDocument), HasUri (uri)) +import Language.LSP.Types.Lens (HasCharacter (character), HasParams (params), HasTextDocument (textDocument), HasUri (uri)) import qualified Language.LSP.Types.Lens as LSP import Language.LSP.VFS as VFS hiding (character) import Unison.LSP.Orphans () import Unison.LSP.Types import Unison.Prelude +import qualified Unison.Syntax.Lexer as Lexer import UnliftIO -- | Some VFS combinators require Monad State, this provides it in a transactionally safe @@ -62,32 +62,28 @@ markAllFilesDirty = do markFilesDirty $ Map.keys (vfs ^. vfsMap) -- | Returns the name or symbol which the provided position is contained in. 
-identifierAtPosition :: (HasPosition p Position, HasTextDocument p TextDocumentIdentifier) => p -> MaybeT Lsp Text -identifierAtPosition p = do - identifierSplitAtPosition p <&> \(before, after) -> (before <> after) +identifierAtPosition :: Uri -> Position -> MaybeT Lsp Text +identifierAtPosition uri pos = do + identifierSplitAtPosition uri pos <&> \(before, after) -> (before <> after) -- | Returns the prefix and suffix of the symbol which the provided position is contained in. -identifierSplitAtPosition :: (HasPosition p Position, HasTextDocument p docId, HasUri docId Uri) => p -> MaybeT Lsp (Text, Text) -identifierSplitAtPosition p = do - vf <- getVirtualFile (p ^. textDocument . uri) - PosPrefixInfo {fullLine, cursorPos} <- MaybeT (VFS.getCompletionPrefix (p ^. position) vf) +identifierSplitAtPosition :: Uri -> Position -> MaybeT Lsp (Text, Text) +identifierSplitAtPosition uri pos = do + vf <- getVirtualFile uri + PosPrefixInfo {fullLine, cursorPos} <- MaybeT (VFS.getCompletionPrefix pos vf) let (before, after) = Text.splitAt (cursorPos ^. character . to fromIntegral) fullLine - pure $ (Text.takeWhileEnd isIdentifierChar before, Text.takeWhile isIdentifierChar after) + pure (Text.takeWhileEnd isIdentifierChar before, Text.takeWhile isIdentifierChar after) where - -- TODO: Should probably use something from the Lexer here - isIdentifierChar = \case - c - | isSpace c -> False - | elem c ("[]()`'\"" :: String) -> False - | otherwise -> True + isIdentifierChar c = + Lexer.wordyIdChar c || Lexer.symbolyIdChar c -- | Returns the prefix of the symbol at the provided location, and the range that prefix -- spans. -completionPrefix :: (HasPosition p Position, HasTextDocument p docId, HasUri docId Uri) => p -> MaybeT Lsp (Range, Text) -completionPrefix p = do - (before, _) <- identifierSplitAtPosition p - let posLine = p ^. position . LSP.line - let posChar = (p ^. position . LSP.character) +completionPrefix :: Uri -> Position -> MaybeT Lsp (Range, Text) +completionPrefix uri pos = do + (before, _) <- identifierSplitAtPosition uri pos + let posLine = pos ^. LSP.line + let posChar = pos ^. 
LSP.character let range = mkRange posLine (posChar - fromIntegral (Text.length before)) posLine posChar pure (range, before) diff --git a/unison-cli/src/Unison/Share/Sync.hs b/unison-cli/src/Unison/Share/Sync.hs index 14b00ed31..a4e8e6f56 100644 --- a/unison-cli/src/Unison/Share/Sync.hs +++ b/unison-cli/src/Unison/Share/Sync.hs @@ -2,9 +2,7 @@ {-# LANGUAGE TypeOperators #-} module Unison.Share.Sync - ( -- * High-level API - - -- ** Get causal hash by path + ( -- ** Get causal hash by path getCausalHashByPath, GetCausalHashByPathError (..), @@ -22,6 +20,7 @@ where import Control.Concurrent.STM import Control.Monad.Except +import Control.Monad.Reader (ask) import Control.Monad.Trans.Reader (ReaderT, runReaderT) import qualified Control.Monad.Trans.Reader as Reader import qualified Data.Foldable as Foldable (find) @@ -49,10 +48,13 @@ import qualified Servant.Client as Servant import U.Codebase.HashTags (CausalHash) import qualified U.Codebase.Sqlite.Queries as Q import U.Codebase.Sqlite.V2.HashHandle (v2HashHandle) -import U.Util.Hash32 (Hash32) import Unison.Auth.HTTPClient (AuthenticatedHttpClient) import qualified Unison.Auth.HTTPClient as Auth +import Unison.Cli.Monad (Cli) +import qualified Unison.Cli.Monad as Cli +import qualified Unison.Codebase as Codebase import qualified Unison.Debug as Debug +import Unison.Hash32 (Hash32) import Unison.Prelude import Unison.Share.Sync.Types import qualified Unison.Sqlite as Sqlite @@ -60,8 +62,6 @@ import qualified Unison.Sync.API as Share (API) import Unison.Sync.Common (causalHashToHash32, entityToTempEntity, expectEntity, hash32ToCausalHash) import qualified Unison.Sync.Types as Share import Unison.Util.Monoid (foldMapM) -import qualified UnliftIO -import UnliftIO.Exception (throwIO) ------------------------------------------------------------------------------------------------------------------------ -- Pile of constants @@ -83,12 +83,8 @@ maxSimultaneousPushWorkers = 5 -- This flavor of push takes the expected state of the server, and the desired state we want to set; if our expectation -- is off, we won't proceed with the push. checkAndSetPush :: - -- | The HTTP client to use for Unison Share requests. - AuthenticatedHttpClient -> -- | The Unison Share URL. BaseUrl -> - -- | SQLite-connection-making function, for writing entities we pull. - (forall a. (Sqlite.Connection -> IO a) -> IO a) -> -- | The repo+path to push to. Share.Path -> -- | The hash that we expect this repo+path to be at on Unison Share. If not, we'll get back a hash mismatch error. @@ -98,42 +94,54 @@ checkAndSetPush :: CausalHash -> -- | Callback that's given a number of entities we just uploaded. (Int -> IO ()) -> - IO (Either (SyncError CheckAndSetPushError) ()) -checkAndSetPush httpClient unisonShareUrl connect path expectedHash causalHash uploadedCallback = catchSyncErrors do - -- Maybe the server already has this causal; try just setting its remote path. Commonly, it will respond that it needs - -- this causal (UpdatePathMissingDependencies). - updatePath >>= \case - Share.UpdatePathSuccess -> pure (Right ()) - Share.UpdatePathHashMismatch mismatch -> pure (Left (CheckAndSetPushErrorHashMismatch mismatch)) - Share.UpdatePathMissingDependencies (Share.NeedDependencies dependencies) -> do - -- Upload the causal and all of its dependencies. 
- uploadEntities httpClient unisonShareUrl connect (Share.pathRepoName path) dependencies uploadedCallback >>= \case - False -> pure (Left (CheckAndSetPushErrorNoWritePermission path)) - True -> - -- After uploading the causal and all of its dependencies, try setting the remote path again. - updatePath <&> \case - Share.UpdatePathSuccess -> Right () - -- Between the initial updatePath attempt and this one, someone else managed to update the path. That's ok; - -- we still managed to upload our causal, but the push has indeed failed overall. - Share.UpdatePathHashMismatch mismatch -> Left (CheckAndSetPushErrorHashMismatch mismatch) - -- Unexpected, but possible: we thought we uploaded all we needed to, yet the server still won't accept our - -- causal. Bug in the client because we didn't upload enough? Bug in the server because we weren't told to - -- upload some dependency? Who knows. - Share.UpdatePathMissingDependencies (Share.NeedDependencies dependencies) -> - Left (CheckAndSetPushErrorServerMissingDependencies dependencies) - Share.UpdatePathNoWritePermission _ -> Left (CheckAndSetPushErrorNoWritePermission path) - Share.UpdatePathNoWritePermission _ -> pure (Left (CheckAndSetPushErrorNoWritePermission path)) - where - updatePath :: IO Share.UpdatePathResponse - updatePath = - httpUpdatePath - httpClient - unisonShareUrl - Share.UpdatePathRequest - { path, - expectedHash, - newHash = causalHashToHash32 causalHash - } + Cli (Either (SyncError CheckAndSetPushError) ()) +checkAndSetPush unisonShareUrl path expectedHash causalHash uploadedCallback = do + Cli.Env {authHTTPClient} <- ask + + Cli.label \done -> do + let failed :: SyncError CheckAndSetPushError -> Cli void + failed = done . Left + + let updatePath :: Cli Share.UpdatePathResponse + updatePath = do + liftIO request & onLeftM \err -> failed (TransportError err) + where + request :: IO (Either CodeserverTransportError Share.UpdatePathResponse) + request = + httpUpdatePath + authHTTPClient + unisonShareUrl + Share.UpdatePathRequest + { path, + expectedHash, + newHash = causalHashToHash32 causalHash + } + + -- Maybe the server already has this causal; try just setting its remote path. Commonly, it will respond that it + -- needs this causal (UpdatePathMissingDependencies). + updatePath >>= \case + Share.UpdatePathSuccess -> pure (Right ()) + Share.UpdatePathHashMismatch mismatch -> pure (Left (SyncError (CheckAndSetPushErrorHashMismatch mismatch))) + Share.UpdatePathMissingDependencies (Share.NeedDependencies dependencies) -> do + -- Upload the causal and all of its dependencies. + uploadEntities unisonShareUrl (Share.pathRepoName path) dependencies uploadedCallback & onLeftM \err -> + failed $ + err <&> \case + UploadEntitiesNoWritePermission -> CheckAndSetPushErrorNoWritePermission path + + -- After uploading the causal and all of its dependencies, try setting the remote path again. + updatePath >>= \case + Share.UpdatePathSuccess -> pure (Right ()) + -- Between the initial updatePath attempt and this one, someone else managed to update the path. That's ok; + -- we still managed to upload our causal, but the push has indeed failed overall. + Share.UpdatePathHashMismatch mismatch -> failed (SyncError (CheckAndSetPushErrorHashMismatch mismatch)) + -- Unexpected, but possible: we thought we uploaded all we needed to, yet the server still won't accept our + -- causal. Bug in the client because we didn't upload enough? Bug in the server because we weren't told to + -- upload some dependency? Who knows. 
+ Share.UpdatePathMissingDependencies (Share.NeedDependencies dependencies) -> + failed (SyncError (CheckAndSetPushErrorServerMissingDependencies dependencies)) + Share.UpdatePathNoWritePermission _ -> failed (SyncError (CheckAndSetPushErrorNoWritePermission path)) + Share.UpdatePathNoWritePermission _ -> failed (SyncError (CheckAndSetPushErrorNoWritePermission path)) -- | Perform a fast-forward push (initially of just a causal hash, but ultimately all of its dependencies that the -- server is missing, too) to Unison Share. @@ -141,86 +149,105 @@ checkAndSetPush httpClient unisonShareUrl connect path expectedHash causalHash u -- This flavor of push provides the server with a chain of causal hashes leading from its current state to our desired -- state. fastForwardPush :: - -- | The HTTP client to use for Unison Share requests. - AuthenticatedHttpClient -> -- | The Unison Share URL. BaseUrl -> - -- | SQLite-connection-making function, for writing entities we pull. - (forall a. (Sqlite.Connection -> IO a) -> IO a) -> -- | The repo+path to push to. Share.Path -> -- | The hash of our local causal to push. CausalHash -> -- | Callback that's given a number of entities we just uploaded. (Int -> IO ()) -> - IO (Either (SyncError FastForwardPushError) ()) -fastForwardPush httpClient unisonShareUrl connect path localHeadHash uploadedCallback = catchSyncErrors do - getCausalHashByPath httpClient unisonShareUrl path >>= \case - Left (GetCausalHashByPathErrorNoReadPermission _) -> pure (Left (FastForwardPushErrorNoReadPermission path)) - Right Nothing -> pure (Left (FastForwardPushErrorNoHistory path)) - Right (Just (Share.hashJWTHash -> remoteHeadHash)) -> do - let doLoadCausalSpineBetween = do - -- (Temporary?) optimization - perform the "is ancestor?" check within sqlite before reconstructing the - -- actual path. - let isBefore :: Sqlite.Transaction Bool - isBefore = do - maybeHashIds <- - runMaybeT $ - (,) - <$> MaybeT (Q.loadCausalHashIdByCausalHash (hash32ToCausalHash remoteHeadHash)) - <*> MaybeT (Q.loadCausalHashIdByCausalHash localHeadHash) - case maybeHashIds of - Nothing -> pure False - Just (remoteHeadHashId, localHeadHashId) -> Q.before remoteHeadHashId localHeadHashId - isBefore >>= \case - False -> pure Nothing - True -> loadCausalSpineBetween remoteHeadHash (causalHashToHash32 localHeadHash) - (connect \conn -> Sqlite.runTransaction conn doLoadCausalSpineBetween) >>= \case + Cli (Either (SyncError FastForwardPushError) ()) +fastForwardPush unisonShareUrl path localHeadHash uploadedCallback = do + Cli.label \done -> do + let succeeded :: Cli void + succeeded = + done (Right ()) + + let failed :: SyncError FastForwardPushError -> Cli void + failed = done . Left + + remoteHeadHash <- + getCausalHashByPath unisonShareUrl path >>= \case + Left (TransportError err) -> failed (TransportError err) + Left (SyncError (GetCausalHashByPathErrorNoReadPermission _)) -> + failed (SyncError (FastForwardPushErrorNoReadPermission path)) + Right Nothing -> failed (SyncError (FastForwardPushErrorNoHistory path)) + Right (Just remoteHeadHash) -> pure (Share.hashJWTHash remoteHeadHash) + + let doLoadCausalSpineBetween = do + -- (Temporary?) optimization - perform the "is ancestor?" check within sqlite before reconstructing the + -- actual path. 
+ let isBefore :: Sqlite.Transaction Bool + isBefore = do + maybeHashIds <- + runMaybeT $ + (,) + <$> MaybeT (Q.loadCausalHashIdByCausalHash (hash32ToCausalHash remoteHeadHash)) + <*> MaybeT (Q.loadCausalHashIdByCausalHash localHeadHash) + case maybeHashIds of + Nothing -> pure False + Just (remoteHeadHashId, localHeadHashId) -> Q.before remoteHeadHashId localHeadHashId + isBefore >>= \case + False -> pure Nothing + True -> loadCausalSpineBetween remoteHeadHash (causalHashToHash32 localHeadHash) + + let doUpload :: List.NonEmpty CausalHash -> Cli () + -- Maybe we could save round trips here by including the tail (or the head *and* the tail) as "extra hashes", + -- but we don't have that API yet. So, we only upload the head causal entity (which we don't even know for sure + -- the server doesn't have yet), and will (eventually) end up uploading the casuals in the tail that the server + -- needs. + doUpload (headHash :| _tailHashes) = do + request & onLeftM \err -> + failed $ + err <&> \case + UploadEntitiesNoWritePermission -> (FastForwardPushErrorNoWritePermission path) + where + request = + uploadEntities + unisonShareUrl + (Share.pathRepoName path) + (NESet.singleton (causalHashToHash32 headHash)) + uploadedCallback + + localInnerHashes <- + Cli.runTransaction doLoadCausalSpineBetween >>= \case -- After getting the remote causal hash, we can tell from a local computation that this wouldn't be a -- fast-forward push, so we don't bother trying - just report the error now. - Nothing -> pure (Left (FastForwardPushErrorNotFastForward path)) + Nothing -> failed (SyncError (FastForwardPushErrorNotFastForward path)) -- The path from remote-to-local, excluding local, was empty. So, remote == local; there's nothing to push. - Just [] -> pure (Right ()) - Just (_ : localInnerHashes0) -> do - -- drop remote hash - let localInnerHashes = map hash32ToCausalHash localInnerHashes0 - doUpload (localHeadHash :| localInnerHashes) >>= \case - False -> pure (Left (FastForwardPushErrorNoWritePermission path)) - True -> do - let doFastForwardPath = - httpFastForwardPath - httpClient - unisonShareUrl - Share.FastForwardPathRequest - { expectedHash = remoteHeadHash, - hashes = - causalHashToHash32 <$> List.NonEmpty.fromList (localInnerHashes ++ [localHeadHash]), - path - } - doFastForwardPath <&> \case - Share.FastForwardPathSuccess -> Right () - Share.FastForwardPathMissingDependencies (Share.NeedDependencies dependencies) -> - Left (FastForwardPushErrorServerMissingDependencies dependencies) - -- Weird: someone must have force-pushed no history here, or something. We observed a history at - -- this path but moments ago! - Share.FastForwardPathNoHistory -> Left (FastForwardPushErrorNoHistory path) - Share.FastForwardPathNoWritePermission _ -> Left (FastForwardPushErrorNoWritePermission path) - Share.FastForwardPathNotFastForward _ -> Left (FastForwardPushErrorNotFastForward path) - Share.FastForwardPathInvalidParentage (Share.InvalidParentage parent child) -> - Left (FastForwardPushInvalidParentage parent child) - where - doUpload :: List.NonEmpty CausalHash -> IO Bool - -- Maybe we could save round trips here by including the tail (or the head *and* the tail) as "extra hashes", but we - -- don't have that API yet. So, we only upload the head causal entity (which we don't even know for sure the server - -- doesn't have yet), and will (eventually) end up uploading the casuals in the tail that the server needs. 
- doUpload (headHash :| _tailHashes) = - uploadEntities - httpClient - unisonShareUrl - connect - (Share.pathRepoName path) - (NESet.singleton (causalHashToHash32 headHash)) - uploadedCallback + Just [] -> succeeded + -- drop remote hash + Just (_ : localInnerHashes) -> pure (map hash32ToCausalHash localInnerHashes) + + doUpload (localHeadHash :| localInnerHashes) + + let doFastForwardPath :: Cli Share.FastForwardPathResponse + doFastForwardPath = do + Cli.Env {authHTTPClient} <- ask + let request = + httpFastForwardPath + authHTTPClient + unisonShareUrl + Share.FastForwardPathRequest + { expectedHash = remoteHeadHash, + hashes = + causalHashToHash32 <$> List.NonEmpty.fromList (localInnerHashes ++ [localHeadHash]), + path + } + liftIO request & onLeftM \err -> failed (TransportError err) + + doFastForwardPath >>= \case + Share.FastForwardPathSuccess -> succeeded + Share.FastForwardPathMissingDependencies (Share.NeedDependencies dependencies) -> + failed (SyncError (FastForwardPushErrorServerMissingDependencies dependencies)) + -- Weird: someone must have force-pushed no history here, or something. We observed a history at + -- this path but moments ago! + Share.FastForwardPathNoHistory -> failed (SyncError (FastForwardPushErrorNoHistory path)) + Share.FastForwardPathNoWritePermission _ -> failed (SyncError (FastForwardPushErrorNoWritePermission path)) + Share.FastForwardPathNotFastForward _ -> failed (SyncError (FastForwardPushErrorNotFastForward path)) + Share.FastForwardPathInvalidParentage (Share.InvalidParentage parent child) -> + failed (SyncError (FastForwardPushInvalidParentage parent child)) -- Return a list (in oldest-to-newest order) of hashes along the causal spine that connects the given arguments, -- excluding the newest hash (second argument). @@ -262,7 +289,7 @@ data Step a -- we'd return -- -- Just [] -dagbfs :: forall a m. Monad m => (a -> Bool) -> (a -> m [a]) -> a -> m (Maybe [a]) +dagbfs :: forall a m. (Monad m) => (a -> Bool) -> (a -> m [a]) -> a -> m (Maybe [a]) dagbfs goal children = let -- The loop state: all distinct paths from the root to the frontier (not including the root, because it's implied, -- as an input to this function), in reverse order, with the invariant that we haven't found a goal state yet. @@ -356,55 +383,82 @@ dagbfs goal children = ------------------------------------------------------------------------------------------------------------------------ -- Pull +data DownloadEntitiesError + = DownloadEntitiesNoReadPermission + pull :: - -- | The HTTP client to use for Unison Share requests. - AuthenticatedHttpClient -> -- | The Unison Share URL. BaseUrl -> - -- | SQLite-connection-making function, for writing entities we pull. - (forall a. (Sqlite.Connection -> IO a) -> IO a) -> -- | The repo+path to pull from. Share.Path -> -- | Callback that's given a number of entities we just downloaded. (Int -> IO ()) -> - IO (Either (SyncError PullError) CausalHash) -pull httpClient unisonShareUrl connect repoPath downloadedCallback = catchSyncErrors do - getCausalHashByPath httpClient unisonShareUrl repoPath >>= \case - Left err -> pure (Left (PullErrorGetCausalHashByPath err)) - -- There's nothing at the remote path, so there's no causal to pull. 
- Right Nothing -> pure (Left (PullErrorNoHistoryAtPath repoPath)) - Right (Just hashJwt) -> do - let hash = Share.hashJWTHash hashJwt - maybeTempEntities <- - connect \conn -> - Sqlite.runTransaction conn (Q.entityLocation hash) >>= \case - Just Q.EntityInMainStorage -> pure Nothing - Just Q.EntityInTempStorage -> pure (Just (NESet.singleton hash)) - Nothing -> do - Share.DownloadEntitiesSuccess entities <- + Cli (Either (SyncError PullError) CausalHash) +pull unisonShareUrl repoPath downloadedCallback = do + Cli.Env {authHTTPClient, codebase} <- ask + + Cli.label \done -> do + let failed :: SyncError PullError -> Cli void + failed = done . Left + + hashJwt <- + getCausalHashByPath unisonShareUrl repoPath >>= \case + Left err -> failed (getCausalHashByPathErrorToPullError <$> err) + -- There's nothing at the remote path, so there's no causal to pull. + Right Nothing -> failed (SyncError (PullErrorNoHistoryAtPath repoPath)) + Right (Just hashJwt) -> pure hashJwt + + let hash = Share.hashJWTHash hashJwt + + maybeTempEntities <- + Cli.runTransaction (Q.entityLocation hash) >>= \case + Just Q.EntityInMainStorage -> pure Nothing + Just Q.EntityInTempStorage -> pure (Just (NESet.singleton hash)) + Nothing -> do + let request = httpDownloadEntities - httpClient + authHTTPClient unisonShareUrl Share.DownloadEntitiesRequest {repoName, hashes = NESet.singleton hashJwt} - tempEntities <- insertEntities conn entities - downloadedCallback 1 - pure (NESet.nonEmptySet tempEntities) - whenJust maybeTempEntities \tempEntities -> - completeTempEntities - httpClient - unisonShareUrl - connect - repoName - downloadedCallback - tempEntities - -- Since we may have just inserted and then deleted many temp entities, we attempt to recover some disk space by - -- vacuuming after each pull. If the vacuum fails due to another open transaction on this connection, that's ok, - -- we'll try vacuuming again next pull. - _success <- connect Sqlite.vacuum - pure (Right (hash32ToCausalHash hash)) + entities <- + liftIO request >>= \case + Left err -> failed (TransportError err) + Right (Share.DownloadEntitiesNoReadPermission _) -> + failed (SyncError (PullErrorNoReadPermission repoPath)) + Right (Share.DownloadEntitiesSuccess entities) -> pure entities + tempEntities <- Cli.runTransaction (insertEntities entities) + liftIO (downloadedCallback 1) + pure (NESet.nonEmptySet tempEntities) + + whenJust maybeTempEntities \tempEntities -> do + let doCompleteTempEntities = + completeTempEntities + authHTTPClient + unisonShareUrl + ( \action -> + Codebase.withConnection codebase \conn -> + action (Sqlite.runTransaction conn) + ) + repoName + downloadedCallback + tempEntities + liftIO doCompleteTempEntities & onLeftM \err -> + failed $ + err <&> \case + DownloadEntitiesNoReadPermission -> PullErrorNoReadPermission repoPath + + -- Since we may have just inserted and then deleted many temp entities, we attempt to recover some disk space by + -- vacuuming after each pull. If the vacuum fails due to another open transaction on this connection, that's ok, + -- we'll try vacuuming again next pull. 
+ _success <- liftIO (Codebase.withConnection codebase Sqlite.vacuum) + pure (Right (hash32ToCausalHash hash)) where repoName = Share.pathRepoName repoPath +getCausalHashByPathErrorToPullError :: GetCausalHashByPathError -> PullError +getCausalHashByPathErrorToPullError = \case + GetCausalHashByPathErrorNoReadPermission path -> PullErrorNoReadPermission path + type WorkerCount = TVar Int @@ -423,20 +477,21 @@ recordNotWorking sem = -- What the dispatcher is to do data DispatcherJob = DispatcherForkWorker (NESet Share.HashJWT) + | DispatcherReturnEarlyBecauseDownloaderFailed (SyncError DownloadEntitiesError) | DispatcherDone --- | Finish downloading entities from Unison Share. Returns the total number of entities downloaded. +-- | Finish downloading entities from Unison Share (or return the first failure to download something). -- -- Precondition: the entities were *already* downloaded at some point in the past, and are now sitting in the -- `temp_entity` table, waiting for their dependencies to arrive so they can be flushed to main storage. completeTempEntities :: AuthenticatedHttpClient -> BaseUrl -> - (forall a. (Sqlite.Connection -> IO a) -> IO a) -> + (forall a. ((forall x. Sqlite.Transaction x -> IO x) -> IO a) -> IO a) -> Share.RepoName -> (Int -> IO ()) -> NESet Hash32 -> - IO () + IO (Either (SyncError DownloadEntitiesError) ()) completeTempEntities httpClient unisonShareUrl connect repoName downloadedCallback initialNewTempEntities = do -- The set of hashes we still need to download hashesVar <- newTVarIO Set.empty @@ -453,34 +508,43 @@ completeTempEntities httpClient unisonShareUrl connect repoName downloadedCallba -- How many workers (downloader / inserter / elaborator) are currently doing stuff. workerCount <- newWorkerCount + -- The first download error seen by a downloader, if any. + downloaderFailedVar <- newEmptyTMVarIO + -- Kick off the cycle of inserter->elaborator->dispatcher->downloader by giving the elaborator something to do atomically (writeTQueue newTempEntitiesQueue (Set.empty, Just initialNewTempEntities)) Ki.scoped \scope -> do Ki.fork_ scope (inserter entitiesQueue newTempEntitiesQueue workerCount) Ki.fork_ scope (elaborator hashesVar uninsertedHashesVar newTempEntitiesQueue workerCount) - dispatcher hashesVar uninsertedHashesVar entitiesQueue newTempEntitiesQueue workerCount + dispatcher hashesVar uninsertedHashesVar entitiesQueue newTempEntitiesQueue workerCount downloaderFailedVar where -- Dispatcher thread: "dequeue" from `hashesVar`, fork one-shot downloaders. 
-- - -- We stop when all of the following are true: + -- We stop when either all of the following are true: -- -- - There are no outstanding workers (downloaders, inserter, elaboraror) -- - The inserter thread doesn't have any outstanding work enqueued (in `entitiesQueue`) -- - The elaborator thread doesn't have any outstanding work enqueued (in `newTempEntitiesQueue`) + -- + -- Or: + -- + -- - Some downloader failed to download something dispatcher :: TVar (Set Share.HashJWT) -> TVar (Set Share.HashJWT) -> TQueue (NESet Share.HashJWT, NEMap Hash32 (Share.Entity Text Hash32 Share.HashJWT)) -> TQueue (Set Share.HashJWT, Maybe (NESet Hash32)) -> WorkerCount -> - IO () - dispatcher hashesVar uninsertedHashesVar entitiesQueue newTempEntitiesQueue workerCount = + TMVar (SyncError DownloadEntitiesError) -> + IO (Either (SyncError DownloadEntitiesError) ()) + dispatcher hashesVar uninsertedHashesVar entitiesQueue newTempEntitiesQueue workerCount downloaderFailedVar = Ki.scoped \scope -> - let loop :: IO () + let loop :: IO (Either (SyncError DownloadEntitiesError) ()) loop = - atomically (dispatchWorkMode <|> checkIfDoneMode) >>= \case - DispatcherDone -> pure () + atomically (checkIfDownloaderFailedMode <|> dispatchWorkMode <|> checkIfDoneMode) >>= \case + DispatcherDone -> pure (Right ()) + DispatcherReturnEarlyBecauseDownloaderFailed err -> pure (Left err) DispatcherForkWorker hashes -> do atomically do -- Limit number of simultaneous downloaders (plus 2, for inserter and elaborator) @@ -491,10 +555,17 @@ completeTempEntities httpClient unisonShareUrl connect repoName downloadedCallba -- nothing more for the dispatcher to do, when in fact a downloader thread just hasn't made it as -- far as recording its own existence recordWorking workerCount - _ <- Ki.fork @() scope (downloader entitiesQueue workerCount hashes) + _ <- + Ki.fork @() scope do + downloader entitiesQueue workerCount hashes & onLeftM \err -> + void (atomically (tryPutTMVar downloaderFailedVar err)) loop in loop where + checkIfDownloaderFailedMode :: STM DispatcherJob + checkIfDownloaderFailedMode = + DispatcherReturnEarlyBecauseDownloaderFailed <$> readTMVar downloaderFailedVar + dispatchWorkMode :: STM DispatcherJob dispatchWorkMode = do hashes <- readTVar hashesVar @@ -513,22 +584,26 @@ completeTempEntities httpClient unisonShareUrl connect repoName downloadedCallba isEmptyTQueue newTempEntitiesQueue >>= check pure DispatcherDone - -- Downloader thread: download entities, enqueue to `entitiesQueue` + -- Downloader thread: download entities, (if successful) enqueue to `entitiesQueue` downloader :: TQueue (NESet Share.HashJWT, NEMap Hash32 (Share.Entity Text Hash32 Share.HashJWT)) -> WorkerCount -> NESet Share.HashJWT -> - IO () + IO (Either (SyncError DownloadEntitiesError) ()) downloader entitiesQueue workerCount hashes = do - Share.DownloadEntitiesSuccess entities <- - httpDownloadEntities - httpClient - unisonShareUrl - Share.DownloadEntitiesRequest {repoName, hashes} - downloadedCallback (NESet.size hashes) - atomically do - writeTQueue entitiesQueue (hashes, entities) - recordNotWorking workerCount + httpDownloadEntities httpClient unisonShareUrl Share.DownloadEntitiesRequest {repoName, hashes} >>= \case + Left err -> do + atomically (recordNotWorking workerCount) + pure (Left (TransportError err)) + Right (Share.DownloadEntitiesNoReadPermission _) -> do + atomically (recordNotWorking workerCount) + pure (Left (SyncError DownloadEntitiesNoReadPermission)) + Right (Share.DownloadEntitiesSuccess entities) -> do + 
downloadedCallback (NESet.size hashes) + atomically do + writeTQueue entitiesQueue (hashes, entities) + recordNotWorking workerCount + pure (Right ()) -- Inserter thread: dequeue from `entitiesQueue`, insert entities, enqueue to `newTempEntitiesQueue` inserter :: @@ -537,7 +612,7 @@ completeTempEntities httpClient unisonShareUrl connect repoName downloadedCallba WorkerCount -> IO Void inserter entitiesQueue newTempEntitiesQueue workerCount = - connect \conn -> + connect \runTransaction -> forever do (hashJwts, entities) <- atomically do @@ -545,7 +620,7 @@ completeTempEntities httpClient unisonShareUrl connect repoName downloadedCallba recordWorking workerCount pure entities newTempEntities0 <- - Sqlite.runTransaction conn do + runTransaction do NEMap.toList entities & foldMapM \(hash, entity) -> upsertEntitySomewhere hash entity <&> \case Q.EntityInMainStorage -> Set.empty @@ -562,7 +637,7 @@ completeTempEntities httpClient unisonShareUrl connect repoName downloadedCallba WorkerCount -> IO Void elaborator hashesVar uninsertedHashesVar newTempEntitiesQueue workerCount = - connect \conn -> + connect \runTransaction -> forever do maybeNewTempEntities <- atomically do @@ -580,7 +655,7 @@ completeTempEntities httpClient unisonShareUrl connect repoName downloadedCallba recordWorking workerCount pure (Just newTempEntities) whenJust maybeNewTempEntities \newTempEntities -> do - newElaboratedHashes <- Sqlite.runTransaction conn (elaborateHashes newTempEntities) + newElaboratedHashes <- runTransaction (elaborateHashes newTempEntities) atomically do uninsertedHashes <- readTVar uninsertedHashesVar hashes0 <- readTVar hashesVar @@ -589,85 +664,99 @@ completeTempEntities httpClient unisonShareUrl connect repoName downloadedCallba -- | Insert entities into the database, and return the subset that went into temp storage (`temp_entitiy`) rather than -- of main storage (`object` / `causal`) due to missing dependencies. -insertEntities :: Sqlite.Connection -> NEMap Hash32 (Share.Entity Text Hash32 Share.HashJWT) -> IO (Set Hash32) -insertEntities conn entities = - Sqlite.runTransaction conn do - NEMap.toList entities & foldMapM \(hash, entity) -> - upsertEntitySomewhere hash entity <&> \case - Q.EntityInMainStorage -> Set.empty - Q.EntityInTempStorage -> Set.singleton hash +insertEntities :: NEMap Hash32 (Share.Entity Text Hash32 Share.HashJWT) -> Sqlite.Transaction (Set Hash32) +insertEntities entities = + NEMap.toList entities & foldMapM \(hash, entity) -> + upsertEntitySomewhere hash entity <&> \case + Q.EntityInMainStorage -> Set.empty + Q.EntityInTempStorage -> Set.singleton hash ------------------------------------------------------------------------------------------------------------------------ -- Get causal hash by path -- | Get the causal hash of a path hosted on Unison Share. getCausalHashByPath :: - -- | The HTTP client to use for Unison Share requests. - AuthenticatedHttpClient -> -- | The Unison Share URL. 
BaseUrl -> Share.Path -> - IO (Either GetCausalHashByPathError (Maybe Share.HashJWT)) -getCausalHashByPath httpClient unisonShareUrl repoPath = - httpGetCausalHashByPath httpClient unisonShareUrl (Share.GetCausalHashByPathRequest repoPath) <&> \case - Share.GetCausalHashByPathSuccess maybeHashJwt -> Right maybeHashJwt - Share.GetCausalHashByPathNoReadPermission _ -> Left (GetCausalHashByPathErrorNoReadPermission repoPath) + Cli (Either (SyncError GetCausalHashByPathError) (Maybe Share.HashJWT)) +getCausalHashByPath unisonShareUrl repoPath = do + Cli.Env {authHTTPClient} <- ask + liftIO (httpGetCausalHashByPath authHTTPClient unisonShareUrl (Share.GetCausalHashByPathRequest repoPath)) <&> \case + Left err -> Left (TransportError err) + Right (Share.GetCausalHashByPathSuccess maybeHashJwt) -> Right maybeHashJwt + Right (Share.GetCausalHashByPathNoReadPermission _) -> + Left (SyncError (GetCausalHashByPathErrorNoReadPermission repoPath)) ------------------------------------------------------------------------------------------------------------------------ -- Upload entities data UploadDispatcherJob - = UploadDispatcherReturnFailure + = UploadDispatcherReturnFailure (SyncError UploadEntitiesError) | UploadDispatcherForkWorkerWhenAvailable (NESet Hash32) | UploadDispatcherForkWorker (NESet Hash32) | UploadDispatcherDone +data UploadEntitiesError + = UploadEntitiesNoWritePermission + -- | Upload a set of entities to Unison Share. If the server responds that it cannot yet store any hash(es) due to -- missing dependencies, send those dependencies too, and on and on, until the server stops responding that it's missing -- anything. -- -- Returns true on success, false on failure (because the user does not have write permission). uploadEntities :: - AuthenticatedHttpClient -> BaseUrl -> - (forall a. (Sqlite.Connection -> IO a) -> IO a) -> Share.RepoName -> NESet Hash32 -> (Int -> IO ()) -> - IO Bool -uploadEntities httpClient unisonShareUrl connect repoName hashes0 uploadedCallback = do - hashesVar <- newTVarIO (NESet.toSet hashes0) - -- Semantically, this is the set of hashes we've uploaded so far, but we do delete from it when it's safe to, so it - -- doesn't grow unbounded. It's used to filter out hashes that would be duplicate uploads: the server, when responding - -- to any particular upload request, may declare that it still needs some hashes that we're in the process of - -- uploading from another thread. - dedupeVar <- newTVarIO Set.empty - nextWorkerIdVar <- newTVarIO 0 - workersVar <- newTVarIO Set.empty - workerFailedVar <- newEmptyTMVarIO + Cli (Either (SyncError UploadEntitiesError) ()) +uploadEntities unisonShareUrl repoName hashes0 uploadedCallback = do + Cli.Env {authHTTPClient, codebase} <- ask - Ki.scoped \scope -> - dispatcher scope hashesVar dedupeVar nextWorkerIdVar workersVar workerFailedVar + liftIO do + hashesVar <- newTVarIO (NESet.toSet hashes0) + -- Semantically, this is the set of hashes we've uploaded so far, but we do delete from it when it's safe to, so it + -- doesn't grow unbounded. It's used to filter out hashes that would be duplicate uploads: the server, when + -- responding to any particular upload request, may declare that it still needs some hashes that we're in the + -- process of uploading from another thread. 
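-- The dispatcher/worker shape that `completeTempEntities` and `uploadEntities` share
-- boils down to: one-shot workers report the first failure with `tryPutTMVar`, and the
-- dispatcher races `readTMVar` against its "more work" / "all done" conditions inside a
-- single `atomically`. A minimal standalone sketch of that shape, assuming plain
-- `forkIO` workers and an invented `fakeUpload` action in place of the real HTTP calls
-- and `ki`-scoped threads:

import Control.Applicative ((<|>))
import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.STM
import Control.Monad (forM_, void)

-- | Invented stand-in for an upload request; chunk 4 pretends to be rejected.
fakeUpload :: Int -> IO (Either String ())
fakeUpload n = do
  threadDelay (1000 * n)
  pure (if n == 4 then Left ("no write permission for chunk " <> show n) else Right ())

runWorkers :: [Int] -> IO (Either String ())
runWorkers jobs = do
  failedVar <- newEmptyTMVarIO
  pendingVar <- newTVarIO (length jobs)
  forM_ jobs $ \job ->
    forkIO $ do
      result <- fakeUpload job
      case result of
        Left err -> void (atomically (tryPutTMVar failedVar err)) -- first failure wins
        Right () -> atomically (modifyTVar' pendingVar (subtract 1))
  -- Dispatcher: return the first reported failure, otherwise wait until every worker
  -- has finished successfully.
  atomically $
    (Left <$> readTMVar failedVar)
      <|> (readTVar pendingVar >>= check . (== 0) >> pure (Right ()))

main :: IO ()
main = runWorkers [1 .. 8] >>= print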
+ dedupeVar <- newTVarIO Set.empty + nextWorkerIdVar <- newTVarIO 0 + workersVar <- newTVarIO Set.empty + workerFailedVar <- newEmptyTMVarIO + + Ki.scoped \scope -> + dispatcher + scope + authHTTPClient + (Codebase.runTransaction codebase) + hashesVar + dedupeVar + nextWorkerIdVar + workersVar + workerFailedVar where dispatcher :: Ki.Scope -> + AuthenticatedHttpClient -> + (forall a. Sqlite.Transaction a -> IO a) -> TVar (Set Hash32) -> TVar (Set Hash32) -> TVar Int -> TVar (Set Int) -> - TMVar () -> - IO Bool - dispatcher scope hashesVar dedupeVar nextWorkerIdVar workersVar workerFailedVar = do + TMVar (SyncError UploadEntitiesError) -> + IO (Either (SyncError UploadEntitiesError) ()) + dispatcher scope httpClient runTransaction hashesVar dedupeVar nextWorkerIdVar workersVar workerFailedVar = do loop where - loop :: IO Bool + loop :: IO (Either (SyncError UploadEntitiesError) ()) loop = doJob [checkForFailureMode, dispatchWorkMode, checkIfDoneMode] - doJob :: [STM UploadDispatcherJob] -> IO Bool + doJob :: [STM UploadDispatcherJob] -> IO (Either (SyncError UploadEntitiesError) ()) doJob jobs = atomically (asum jobs) >>= \case - UploadDispatcherReturnFailure -> pure False + UploadDispatcherReturnFailure err -> pure (Left err) UploadDispatcherForkWorkerWhenAvailable hashes -> doJob [forkWorkerMode hashes, checkForFailureMode] UploadDispatcherForkWorker hashes -> do workerId <- @@ -678,14 +767,14 @@ uploadEntities httpClient unisonShareUrl connect repoName hashes0 uploadedCallba pure workerId _ <- Ki.fork @() scope do - worker hashesVar dedupeVar workersVar workerFailedVar workerId hashes + worker httpClient runTransaction hashesVar dedupeVar workersVar workerFailedVar workerId hashes loop - UploadDispatcherDone -> pure True + UploadDispatcherDone -> pure (Right ()) checkForFailureMode :: STM UploadDispatcherJob checkForFailureMode = do - () <- readTMVar workerFailedVar - pure UploadDispatcherReturnFailure + err <- readTMVar workerFailedVar + pure (UploadDispatcherReturnFailure err) dispatchWorkMode :: STM UploadDispatcherJob dispatchWorkMode = do @@ -708,25 +797,35 @@ uploadEntities httpClient unisonShareUrl connect repoName hashes0 uploadedCallba when (not (Set.null workers)) retry pure UploadDispatcherDone - worker :: TVar (Set Hash32) -> TVar (Set Hash32) -> TVar (Set Int) -> TMVar () -> Int -> NESet Hash32 -> IO () - worker hashesVar dedupeVar workersVar workerFailedVar workerId hashes = do + worker :: + AuthenticatedHttpClient -> + (forall a. 
Sqlite.Transaction a -> IO a) -> + TVar (Set Hash32) -> + TVar (Set Hash32) -> + TVar (Set Int) -> + TMVar (SyncError UploadEntitiesError) -> + Int -> + NESet Hash32 -> + IO () + worker httpClient runTransaction hashesVar dedupeVar workersVar workerFailedVar workerId hashes = do entities <- fmap NEMap.fromAscList do - connect \conn -> - Sqlite.runTransaction conn do - for (NESet.toAscList hashes) \hash -> do - entity <- expectEntity hash - pure (hash, entity) + runTransaction do + for (NESet.toAscList hashes) \hash -> do + entity <- expectEntity hash + pure (hash, entity) result <- httpUploadEntities httpClient unisonShareUrl Share.UploadEntitiesRequest {entities, repoName} <&> \case - Share.UploadEntitiesNeedDependencies (Share.NeedDependencies moreHashes) -> Right (NESet.toSet moreHashes) - Share.UploadEntitiesNoWritePermission _ -> Left () - Share.UploadEntitiesHashMismatchForEntity _ -> error "hash mismatch; fixme" - Share.UploadEntitiesSuccess -> Right Set.empty + Left err -> Left (TransportError err) + Right (Share.UploadEntitiesNeedDependencies (Share.NeedDependencies moreHashes)) -> + Right (NESet.toSet moreHashes) + Right (Share.UploadEntitiesNoWritePermission _) -> Left (SyncError UploadEntitiesNoWritePermission) + Right (Share.UploadEntitiesHashMismatchForEntity _) -> error "hash mismatch; fixme" + Right Share.UploadEntitiesSuccess -> Right Set.empty case result of - Left () -> void (atomically (tryPutTMVar workerFailedVar ())) + Left err -> void (atomically (tryPutTMVar workerFailedVar err)) Right moreHashes -> do uploadedCallback (NESet.size hashes) maybeYoungestWorkerThatWasAlive <- @@ -815,11 +914,31 @@ upsertEntitySomewhere hash entity = ------------------------------------------------------------------------------------------------------------------------ -- HTTP calls -httpGetCausalHashByPath :: Auth.AuthenticatedHttpClient -> BaseUrl -> Share.GetCausalHashByPathRequest -> IO Share.GetCausalHashByPathResponse -httpFastForwardPath :: Auth.AuthenticatedHttpClient -> BaseUrl -> Share.FastForwardPathRequest -> IO Share.FastForwardPathResponse -httpUpdatePath :: Auth.AuthenticatedHttpClient -> BaseUrl -> Share.UpdatePathRequest -> IO Share.UpdatePathResponse -httpDownloadEntities :: Auth.AuthenticatedHttpClient -> BaseUrl -> Share.DownloadEntitiesRequest -> IO Share.DownloadEntitiesResponse -httpUploadEntities :: Auth.AuthenticatedHttpClient -> BaseUrl -> Share.UploadEntitiesRequest -> IO Share.UploadEntitiesResponse +httpGetCausalHashByPath :: + Auth.AuthenticatedHttpClient -> + BaseUrl -> + Share.GetCausalHashByPathRequest -> + IO (Either CodeserverTransportError Share.GetCausalHashByPathResponse) +httpFastForwardPath :: + Auth.AuthenticatedHttpClient -> + BaseUrl -> + Share.FastForwardPathRequest -> + IO (Either CodeserverTransportError Share.FastForwardPathResponse) +httpUpdatePath :: + Auth.AuthenticatedHttpClient -> + BaseUrl -> + Share.UpdatePathRequest -> + IO (Either CodeserverTransportError Share.UpdatePathResponse) +httpDownloadEntities :: + Auth.AuthenticatedHttpClient -> + BaseUrl -> + Share.DownloadEntitiesRequest -> + IO (Either CodeserverTransportError Share.DownloadEntitiesResponse) +httpUploadEntities :: + Auth.AuthenticatedHttpClient -> + BaseUrl -> + Share.UploadEntitiesRequest -> + IO (Either CodeserverTransportError Share.UploadEntitiesResponse) ( httpGetCausalHashByPath, httpFastForwardPath, httpUpdatePath, @@ -843,14 +962,14 @@ httpUploadEntities :: Auth.AuthenticatedHttpClient -> BaseUrl -> Share.UploadEnt go httpUploadEntities ) where - hoist :: 
Servant.ClientM a -> ReaderT Servant.ClientEnv IO a + hoist :: Servant.ClientM a -> ReaderT Servant.ClientEnv (ExceptT CodeserverTransportError IO) a hoist m = do clientEnv <- Reader.ask liftIO (Servant.runClientM m clientEnv) >>= \case Right a -> pure a Left err -> do Debug.debugLogM Debug.Sync (show err) - throwIO case err of + throwError case err of Servant.FailureResponse _req resp -> case HTTP.statusCode $ Servant.responseStatusCode resp of 401 -> Unauthenticated (Servant.baseUrl clientEnv) @@ -867,25 +986,18 @@ httpUploadEntities :: Auth.AuthenticatedHttpClient -> BaseUrl -> Share.UploadEnt Servant.ConnectionError _ -> UnreachableCodeserver (Servant.baseUrl clientEnv) go :: - (req -> ReaderT Servant.ClientEnv IO resp) -> + (req -> ReaderT Servant.ClientEnv (ExceptT CodeserverTransportError IO) resp) -> Auth.AuthenticatedHttpClient -> BaseUrl -> req -> - IO resp + IO (Either CodeserverTransportError resp) go f (Auth.AuthenticatedHttpClient httpClient) unisonShareUrl req = - runReaderT - (f req) - (Servant.mkClientEnv httpClient unisonShareUrl) - { Servant.makeClientRequest = \url request -> - -- Disable client-side timeouts - (Servant.defaultMakeClientRequest url request) - { Http.Client.responseTimeout = Http.Client.responseTimeoutNone - } - } - -catchSyncErrors :: IO (Either e a) -> IO (Either (SyncError e) a) -catchSyncErrors action = - UnliftIO.try @_ @CodeserverTransportError action >>= \case - Left te -> pure (Left . TransportError $ te) - Right (Left e) -> pure . Left . SyncError $ e - Right (Right a) -> pure $ Right a + (Servant.mkClientEnv httpClient unisonShareUrl) + { Servant.makeClientRequest = \url request -> + -- Disable client-side timeouts + (Servant.defaultMakeClientRequest url request) + { Http.Client.responseTimeout = Http.Client.responseTimeoutNone + } + } + & runReaderT (f req) + & runExceptT diff --git a/unison-cli/src/Unison/Share/Sync/Types.hs b/unison-cli/src/Unison/Share/Sync/Types.hs index 3d9903457..ba52eabf7 100644 --- a/unison-cli/src/Unison/Share/Sync/Types.hs +++ b/unison-cli/src/Unison/Share/Sync/Types.hs @@ -1,11 +1,17 @@ -{-# LANGUAGE DeriveAnyClass #-} - -- | Types used by the UCM client during sync. -module Unison.Share.Sync.Types where +module Unison.Share.Sync.Types + ( CheckAndSetPushError (..), + CodeserverTransportError (..), + FastForwardPushError (..), + GetCausalHashByPathError (..), + PullError (..), + SyncError (..), + ) +where import Data.Set.NonEmpty (NESet) import qualified Servant.Client as Servant -import U.Util.Hash32 (Hash32) +import Unison.Hash32 (Hash32) import Unison.Prelude import qualified Unison.Sync.Types as Share @@ -29,9 +35,8 @@ data FastForwardPushError -- | An error occurred while pulling code from Unison Share. data PullError - = -- | An error occurred while resolving a repo+path to a causal hash. - PullErrorGetCausalHashByPath GetCausalHashByPathError - | PullErrorNoHistoryAtPath Share.Path + = PullErrorNoHistoryAtPath Share.Path + | PullErrorNoReadPermission Share.Path deriving (Show) -- | An error occurred when getting causal hash by path. @@ -57,3 +62,4 @@ data CodeserverTransportError data SyncError e = TransportError CodeserverTransportError | SyncError e + deriving stock (Functor) diff --git a/unison-cli/tests/Unison/Test/GitSync.hs b/unison-cli/tests/Unison/Test/GitSync.hs index 93bb5f285..82d8b9eab 100644 --- a/unison-cli/tests/Unison/Test/GitSync.hs +++ b/unison-cli/tests/Unison/Test/GitSync.hs @@ -35,20 +35,20 @@ writeTranscriptOutput = False test :: Test () test = scope "gitsync22" . 
tests $ - fastForwardPush : - nonFastForwardPush : - destroyedRemote : - flip - map - [(Ucm.CodebaseFormat2, "sc")] - \(fmt, name) -> - scope name $ - tests - [ pushPullTest - "pull-over-deleted-namespace" - fmt - ( \repo -> - [i| + fastForwardPush + : nonFastForwardPush + : destroyedRemote + : flip + map + [(Ucm.CodebaseFormat2, "sc")] + \(fmt, name) -> + scope name $ + tests + [ pushPullTest + "pull-over-deleted-namespace" + fmt + ( \repo -> + [i| ```unison:hide x = 1 ``` @@ -57,9 +57,9 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```unison:hide child.y = 2 ``` @@ -71,12 +71,12 @@ test = .> pull git(${repo}) child ``` |] - ), - pushPullTest - "pull.without-history" - fmt - ( \repo -> - [i| + ), + pushPullTest + "pull.without-history" + fmt + ( \repo -> + [i| ```unison:hide child.x = 1 ``` @@ -102,9 +102,9 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| Should be able to pull the branch from the remote without its history. Note that this only tests that the pull succeeds, since (at time of writing) we don't track/test transcript output for these tests in the unison repo. @@ -113,12 +113,12 @@ test = .> history .child ``` |] - ), - pushPullTest - "push-over-deleted-namespace" - fmt - ( \repo -> - [i| + ), + pushPullTest + "push-over-deleted-namespace" + fmt + ( \repo -> + [i| ```unison:hide child.x = 1 y = 2 @@ -129,9 +129,9 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```unison:hide child.z = 3 ``` @@ -142,12 +142,12 @@ test = .> push.create git(${repo}).child child ``` |] - ), - pushPullTest - "typeAlias" - fmt - ( \repo -> - [i| + ), + pushPullTest + "typeAlias" + fmt + ( \repo -> + [i| ```ucm .> alias.type ##Nat builtin.Nat .> history @@ -155,9 +155,9 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull git(${repo}) ``` @@ -166,12 +166,12 @@ test = x = 3 ``` |] - ), - pushPullTest - "topLevelTerm" - fmt - ( \repo -> - [i| + ), + pushPullTest + "topLevelTerm" + fmt + ( \repo -> + [i| ```unison:hide y = 3 ``` @@ -181,9 +181,9 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull git(${repo}) .> find @@ -192,12 +192,12 @@ test = > y ``` |] - ), - pushPullTest - "metadataForTerm" - fmt - ( \repo -> - [i| + ), + pushPullTest + "metadataForTerm" + fmt + ( \repo -> + [i| ```unison:hide doc = "y is the number 3" y = 3 @@ -211,20 +211,20 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull git(${repo}) .> links y ``` |] - ), - pushPullTest - "metadataForType" - fmt - ( \repo -> - [i| + ), + pushPullTest + "metadataForType" + fmt + ( \repo -> + [i| ```unison:hide doc = "Nat means natural number" ``` @@ -235,20 +235,20 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull git(${repo}) .> links Nat ``` |] - ), - pushPullTest - "subNamespace" - fmt - ( \repo -> - [i| + ), + pushPullTest + "subNamespace" + fmt + ( \repo -> + [i| ```ucm .> alias.type ##Nat builtin.Nat ``` @@ -261,9 +261,9 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull.silent git(${repo}) .> find @@ -272,12 +272,12 @@ test = > a.b.C.C a.b.d ``` |] - ), - pushPullTest - "accessPatch" - fmt - ( \repo -> - [i| + ), + pushPullTest + "accessPatch" + fmt + ( \repo -> + [i| ```ucm .> alias.type ##Nat builtin.Nat ``` @@ -302,20 +302,20 @@ test = 
.> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull.silent git(${repo}) .> view.patch patch ``` |] - ), - pushPullTest - "history" - fmt - ( \repo -> - [i| + ), + pushPullTest + "history" + fmt + ( \repo -> + [i| ```unison foo = 3 ``` @@ -331,9 +331,9 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull git(${repo}) .> history @@ -341,19 +341,19 @@ test = .> history ``` |] -- Not sure why this hash is here. - -- Is it to test `reset-root`? - -- Or to notice a change in hashing? - -- Or to test that two distinct points of history were pulled? - -- It would be great to not need the explicit hash here, - -- since it does change periodically. - -- Though, I guess that should also be rare, so maybe this is fine. - ), - pushPullTest - "one-term" - fmt - -- simplest-author - ( \repo -> - [i| + -- Is it to test `reset-root`? + -- Or to notice a change in hashing? + -- Or to test that two distinct points of history were pulled? + -- It would be great to not need the explicit hash here, + -- since it does change periodically. + -- Though, I guess that should also be rare, so maybe this is fine. + ), + pushPullTest + "one-term" + fmt + -- simplest-author + ( \repo -> + [i| ```unison c = 3 ``` @@ -363,10 +363,10 @@ test = .myLib> push.create git(${repo}) ``` |] - ) - -- simplest-user - ( \repo -> - [i| + ) + -- simplest-user + ( \repo -> + [i| ```ucm .yourLib> pull git(${repo}) ``` @@ -374,13 +374,13 @@ test = > c ``` |] - ), - pushPullTest - "one-type" - fmt - -- simplest-author - ( \repo -> - [i| + ), + pushPullTest + "one-type" + fmt + -- simplest-author + ( \repo -> + [i| ```unison structural type Foo = Foo ``` @@ -390,10 +390,10 @@ test = .myLib> push.create git(${repo}) ``` |] - ) - -- simplest-user - ( \repo -> - [i| + ) + -- simplest-user + ( \repo -> + [i| ```ucm .yourLib> pull git(${repo}) ``` @@ -401,12 +401,12 @@ test = > Foo.Foo ``` |] - ), - pushPullTest - "patching" - fmt - ( \repo -> - [i| + ), + pushPullTest + "patching" + fmt + ( \repo -> + [i| ```ucm .myLib> alias.term ##Nat.+ + ``` @@ -429,9 +429,9 @@ test = .workaround1552.myLib> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .myApp> pull git(${repo}).v1 external.yourLib .myApp> alias.term ##Nat.* * @@ -454,13 +454,13 @@ test = > greatApp ``` |] - ), - -- TODO: remove the alias.type .defns.A A line once patch syncing is fixed - pushPullTest - "lightweightPatch" - fmt - ( \repo -> - [i| + ), + -- TODO: remove the alias.type .defns.A A line once patch syncing is fixed + pushPullTest + "lightweightPatch" + fmt + ( \repo -> + [i| ```ucm .> builtins.merge ``` @@ -478,21 +478,21 @@ test = .patches> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> builtins.merge .> pull git(${repo}) patches .> view.patch patches.patch ``` |] - ), - watchPushPullTest - "test-watches" - fmt - ( \repo -> - [i| + ), + watchPushPullTest + "test-watches" + fmt + ( \repo -> + [i| ```ucm .> builtins.merge ``` @@ -504,60 +504,60 @@ test = .> push.create git(${repo}) ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull git(${repo}) ``` |] - ) - ( \cb -> do - Codebase.runTransaction cb do - void . fmap (fromJust . 
sequence) $ - traverse (Codebase.getWatch cb TestWatch) - =<< Codebase.watches TestWatch - ), - gistTest fmt, - pushPullBranchesTests fmt, - pushPullTest - "fix2068_a_" - fmt - -- this triggers - {- - gitsync22.sc.fix2068(a) EXCEPTION!!!: Called SqliteCodebase.setNamespaceRoot on unknown causal hash CausalHash (fromBase32Hex "codddvgt1ep57qpdkhe2j4pe1ehlpi5iitcrludtb8ves1aaqjl453onvfphqg83vukl7bbrj49itceqfob2b3alf47u4vves5s7pog") - CallStack (from HasCallStack): - error, called at src/Unison/Codebase/SqliteCodebase.hs:1072:17 in unison-parser-typechecker-0.0.0-6U6boimwb8GAC5qrhLfs8h:Unison.Codebase.SqliteCodebase - -} - ( \repo -> - [i| + ) + ( \cb -> do + Codebase.runTransaction cb do + void . fmap (fromJust . sequence) $ + traverse (Codebase.getWatch cb TestWatch) + =<< Codebase.watches TestWatch + ), + gistTest fmt, + pushPullBranchesTests fmt, + pushPullTest + "fix2068_a_" + fmt + -- this triggers + {- + gitsync22.sc.fix2068(a) EXCEPTION!!!: Called SqliteCodebase.setNamespaceRoot on unknown causal hash CausalHash (fromBase32Hex "codddvgt1ep57qpdkhe2j4pe1ehlpi5iitcrludtb8ves1aaqjl453onvfphqg83vukl7bbrj49itceqfob2b3alf47u4vves5s7pog") + CallStack (from HasCallStack): + error, called at src/Unison/Codebase/SqliteCodebase.hs:1072:17 in unison-parser-typechecker-0.0.0-6U6boimwb8GAC5qrhLfs8h:Unison.Codebase.SqliteCodebase + -} + ( \repo -> + [i| ```ucm .> alias.type ##Nat builtin.Nat2 .> alias.type ##Int builtin.Int2 .> push.create git(${repo}).foo.bar ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull git(${repo}) pulled .> view pulled.foo.bar.builtin.Nat2 .> view pulled.foo.bar.builtin.Int2 ``` |] - ), - pushPullTest - "fix2068_b_" - fmt - -- this triggers - {- - - gitsync22.sc.fix2068(b) EXCEPTION!!!: I couldn't find the hash ndn6fa85ggqtbgffqhd4d3bca2d08pgp3im36oa8k6p257aid90ovjq75htmh7lmg7akaqneva80ml1o21iscjmp9n1uc3lmqgg9rgg that I just synced to the cached copy of /private/var/folders/6m/p3szds2j67d8vwmxr51yrf5c0000gn/T/git-simple-1047398c149d3d5c/repo.git in "/Users/pchiusano/.cache/unisonlanguage/gitfiles/$x2F$private$x2F$var$x2F$folders$x2F$6m$x2F$p3szds2j67d8vwmxr51yrf5c0000gn$x2F$T$x2F$git-simple-1047398c149d3d5c$x2F$repo$dot$git". - CallStack (from HasCallStack): - error, called at src/Unison/Codebase/SqliteCodebase.hs:1046:13 in unison-parser-typechecker-0.0.0-6U6boimwb8GAC5qrhLfs8h:Unison.Codebase.SqliteCodebase - -} - ( \repo -> - [i| + ), + pushPullTest + "fix2068_b_" + fmt + -- this triggers + {- + - gitsync22.sc.fix2068(b) EXCEPTION!!!: I couldn't find the hash ndn6fa85ggqtbgffqhd4d3bca2d08pgp3im36oa8k6p257aid90ovjq75htmh7lmg7akaqneva80ml1o21iscjmp9n1uc3lmqgg9rgg that I just synced to the cached copy of /private/var/folders/6m/p3szds2j67d8vwmxr51yrf5c0000gn/T/git-simple-1047398c149d3d5c/repo.git in "/Users/pchiusano/.cache/unisonlanguage/gitfiles/$x2F$private$x2F$var$x2F$folders$x2F$6m$x2F$p3szds2j67d8vwmxr51yrf5c0000gn$x2F$T$x2F$git-simple-1047398c149d3d5c$x2F$repo$dot$git". 
+ CallStack (from HasCallStack): + error, called at src/Unison/Codebase/SqliteCodebase.hs:1046:13 in unison-parser-typechecker-0.0.0-6U6boimwb8GAC5qrhLfs8h:Unison.Codebase.SqliteCodebase + -} + ( \repo -> + [i| ```ucm .> alias.type ##Nat builtin.Nat2 .> alias.type ##Int builtin.Int2 @@ -565,17 +565,17 @@ test = .> push.create git(${repo}).foo.bar ``` |] - ) - ( \repo -> - [i| + ) + ( \repo -> + [i| ```ucm .> pull git(${repo}) pulled .> view pulled.foo.bar.builtin.Nat2 .> view pulled.foo.bar.builtin.Int2 ``` |] - ) - ] + ) + ] pushPullTest :: String -> CodebaseFormat -> (FilePath -> Transcript) -> (FilePath -> Transcript) -> Test () pushPullTest name fmt authorScript userScript = scope name do diff --git a/unison-cli/tests/Unison/Test/LSP.hs b/unison-cli/tests/Unison/Test/LSP.hs index 48dc236f4..95b7aaeda 100644 --- a/unison-cli/tests/Unison/Test/LSP.hs +++ b/unison-cli/tests/Unison/Test/LSP.hs @@ -4,7 +4,6 @@ module Unison.Test.LSP (test) where import qualified Crypto.Random as Random -import Data.Bifunctor (bimap) import Data.List.Extra (firstJust) import Data.String.Here.Uninterpolated (here) import Data.Text @@ -12,37 +11,58 @@ import qualified Data.Text as Text import EasyTest import qualified System.IO.Temp as Temp import qualified Unison.ABT as ABT +import Unison.Builtin.Decls (unitRef) import qualified Unison.Cli.TypeCheck as Typecheck import Unison.Codebase (Codebase) import qualified Unison.Codebase.Init as Codebase.Init import qualified Unison.Codebase.SqliteCodebase as SC +import Unison.ConstructorReference (GConstructorReference (..)) import qualified Unison.LSP.Queries as LSPQ import qualified Unison.Lexer.Pos as Lexer import Unison.Parser.Ann (Ann (..)) +import qualified Unison.Parser.Ann as Ann +import qualified Unison.Pattern as Pattern import Unison.Prelude import qualified Unison.Reference as Reference import qualified Unison.Result as Result import Unison.Symbol (Symbol) import qualified Unison.Syntax.Lexer as L import qualified Unison.Syntax.Parser as Parser -import Unison.Term (Term) import qualified Unison.Term as Term -import Unison.Type (Type) import qualified Unison.Type as Type import qualified Unison.UnisonFile as UF +import Unison.Util.Monoid (foldMapM) test :: Test () -test = - scope "annotations" . tests . fmap makeNodeSelectionTest $ +test = do + scope "annotations" $ + tests + [ refFinding, + annotationNesting + ] + +trm :: Term.F Symbol () () (ABT.Term (Term.F Symbol () ()) Symbol ()) -> LSPQ.SourceNode () +trm = LSPQ.TermNode . ABT.tm + +typ :: Type.F (ABT.Term Type.F Symbol ()) -> LSPQ.SourceNode () +typ = LSPQ.TypeNode . ABT.tm + +pat :: Pattern.Pattern () -> LSPQ.SourceNode () +pat = LSPQ.PatternNode + +-- | Test that we can find the correct reference for a given cursor position. +refFinding :: Test () +refFinding = + scope "refs" . tests . 
fmap makeNodeSelectionTest $ [ ( "Binary Op lhs", [here|term = tr^ue && false|], True, - Left (Term.Boolean True) + trm (Term.Boolean True) ), ( "Binary Op rhs", [here|term = true && fa^lse|], True, - Left (Term.Boolean False) + trm (Term.Boolean False) ), ( "Custom Op lhs", [here| @@ -50,7 +70,7 @@ a &&& b = a && b term = tr^ue &&& false |], True, - Left (Term.Boolean True) + trm (Term.Boolean True) ), ( "Simple type annotation on non-typechecking file", [here| @@ -59,7 +79,7 @@ term : Thi^ng term = "this won't typecheck" |], False, - Right (Type.Ref (Reference.unsafeFromText "#6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0")) + typ (Type.Ref (Reference.unsafeFromText "#6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0")) ), ( "Simple type annotation on typechecking file", [here| @@ -68,7 +88,151 @@ term : Thi^ng term = This |], True, - Right (Type.Ref (Reference.unsafeFromText "#6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0")) + typ (Type.Ref (Reference.unsafeFromText "#6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0")) + ), + ( "Test annotations within bindings for do-block elements", + [here| +term = do + first = false + second = tr^ue + first && second + |], + True, + trm (Term.Boolean True) + ), + ( "Test annotations within bindings for let-block elements", + [here| +term = let + first = false + second = tr^ue + first && second + |], + True, + trm (Term.Boolean True) + ), + ( "Test annotations within actions for let-block elements", + [here| +term = let + first = false + first && tr^ue + |], + True, + trm (Term.Boolean True) + ), + ( "Test annotations for blocks with destructuring binds", + [here| +structural type Identity a = Identity a +term = let + (Identity a) = Identity tr^ue + a + |], + True, + trm (Term.Boolean True) + ), + ( "Test annotations for destructuring tuples (they have a special parser)", + [here| +term = let + (true, fal^se) + |], + True, + trm (Term.Boolean False) + ), + ( "Test annotations within pattern binds", + [here| +term = let + (third, (^)) = (false, ()) + true + |], + True, + pat (Pattern.Constructor () (ConstructorReference unitRef 0) []) + ), + ( "Test annotations for types with arrows", + [here| +structural type Thing = This | That + +term : Thing -> Thing -> Thi^ng +term a b = This + |], + True, + typ (Type.Ref (Reference.unsafeFromText "#6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0")) + ), + ( "Test annotations for types with effects", + [here| +unique ability Foo a where + foo : a + +unique ability Bar b where + bar : b + +structural type Thing = This | That + +term : (Thing -> {Foo a, Bar b} Th^ing) -> {Foo a, Bar b} Thing +term f = f This + |], + True, + typ (Type.Ref (Reference.unsafeFromText "#6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0")) + ), + ( "Test annotations for effects themselves", + [here| +structural ability Foo a where + foo : a + +structural type Thing = This | That + +term : () -> {F^oo a} Thing +term _ = This + |], + True, + typ (Type.Ref (Reference.unsafeFromText "#h4uhcub76va4tckj1iccnsb07rh0fhgpigqapb4jh5n07s0tugec4nm2vikuv973mab7oh4ne07o6armcnnl7mbfjtb4imphgrjgimg")) + ), + ( "Test annotations for types with arrows", + [here| +structural type Thing = This | That + +term : Thing -> Thing -> 
Thi^ng +term a b = This + |], + True, + typ (Type.Ref (Reference.unsafeFromText "#6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0")) + ), + ( "Test annotations for types with effects", + [here| +unique ability Foo a where + foo : a + +unique ability Bar b where + bar : b + +structural type Thing = This | That + +term : (Thing -> {Foo a, Bar b} Th^ing) -> {Foo a, Bar b} Thing +term f = f This + |], + True, + typ (Type.Ref (Reference.unsafeFromText "#6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0")) + ), + ( "Test annotations for effects themselves", + [here| +structural ability Foo a where + foo : a + +structural type Thing = This | That + +term : () -> {F^oo a} Thing +term _ = This + |], + True, + typ (Type.Ref (Reference.unsafeFromText "#h4uhcub76va4tckj1iccnsb07rh0fhgpigqapb4jh5n07s0tugec4nm2vikuv973mab7oh4ne07o6armcnnl7mbfjtb4imphgrjgimg")) + ), + ( "Test annotations for blocks recursive binds", + [here| +term = let + f x = g true && x + g y = f fal^se && y + f true + |], + True, + trm (Term.Boolean False) ) ] @@ -82,10 +246,86 @@ extractCursor txt = in pure $ (Lexer.Pos line col, before <> after) _ -> crash "expected exactly one cursor" -makeNodeSelectionTest :: (String, Text, Bool, Either ((Term.F Symbol Ann Ann (Term Symbol Ann))) (Type.F (Type Symbol Ann))) -> Test () +makeNodeSelectionTest :: (String, Text, Bool, LSPQ.SourceNode ()) -> Test () makeNodeSelectionTest (name, testSrc, testTypechecked, expected) = scope name $ do (pos, src) <- extractCursor testSrc - (notes, mayParsedFile, mayTypecheckedFile) <- withTestCodebase \codebase -> do + (notes, mayParsedFile, mayTypecheckedFile) <- typecheckSrc name src + scope "parsed file" $ do + pf <- maybe (crash (show ("Failed to parse" :: String, notes))) pure mayParsedFile + let pfResult = + UF.terms pf + & firstJust \(_v, trm) -> + LSPQ.findSmallestEnclosingNode pos trm + expectEqual (Just expected) (void <$> pfResult) + + when testTypechecked $ + scope "typechecked file" $ do + tf <- maybe (crash "Failed to typecheck") pure mayTypecheckedFile + let tfResult = + UF.hashTermsId tf + & toList + & firstJust \(_refId, _wk, trm, _typ) -> + LSPQ.findSmallestEnclosingNode pos trm + expectEqual (Just expected) (void <$> tfResult) + +-- | Tests which assert that the annotation for each ABT node spans at least the span of +-- its children, i.e. all child annotations are contained within the annotation of their parent. +annotationNesting :: Test () +annotationNesting = + scope "nesting" . tests . fmap annotationNestingTest $ + [ ( "let blocks", + [here| +term = let + x = true + y = false + true && false +|] + ), + ( "let-rec blocks", + [here| +term = let + x a = a && y true + y b = b && x true + x true && y true +|] + ), + ( "function bindings", + [here| +term x y = x && y +|] + ) + ] + +annotationNestingTest :: (String, Text) -> Test () +annotationNestingTest (name, src) = scope name do + (_notes, _pf, maytf) <- typecheckSrc name src + tf <- maybe (crash "Failed to typecheck") pure maytf + UF.hashTermsId tf + & toList + & traverse_ \(_refId, _wk, trm, _typ) -> + assertAnnotationsAreNested trm + +-- | Asserts that for all nodes in the provided ABT, the annotations of all child nodes are +-- within the span of the parent node. +assertAnnotationsAreNested :: forall f. 
(Foldable f, Functor f, Show (f (Either String Ann))) => ABT.Term f Symbol Ann -> Test () +assertAnnotationsAreNested term = do + case ABT.cata alg term of + Right _ -> pure () + Left err -> crash err + where + alg :: Ann -> ABT.ABT f Symbol (Either String Ann) -> Either String Ann + alg ann abt = do + childSpan <- abt & foldMapM id + case ann `Ann.encompasses` childSpan of + -- one of the annotations isn't in the file, don't bother checking. + Nothing -> pure (ann <> childSpan) + Just isInFile + | isInFile -> pure ann + | otherwise -> Left $ "Containment breach: children aren't contained with the parent:" <> show (ann, abt) + +typecheckSrc :: String -> Text -> Test (Seq (Result.Note Symbol Ann), Maybe (UF.UnisonFile Symbol Ann), Maybe (UF.TypecheckedUnisonFile Symbol Ann)) +typecheckSrc name src = do + withTestCodebase \codebase -> do let generateUniqueName = Parser.uniqueBase32Namegen <$> Random.getSystemDRG let ambientAbilities = [] let parseNames = mempty @@ -97,23 +337,6 @@ makeNodeSelectionTest (name, testSrc, testTypechecked, expected) = scope name $ Just (Left uf) -> (Just uf, Nothing) Just (Right tf) -> (Just $ UF.discardTypes tf, Just tf) pure (notes, parsedFile, typecheckedFile) - scope "parsed file" $ do - pf <- maybe (crash (show ("Failed to parse" :: String, notes))) pure mayParsedFile - let pfResult = - UF.terms pf - & firstJust \(_v, trm) -> - LSPQ.findSmallestEnclosingNode pos trm - expectEqual (Just $ bimap ABT.Tm ABT.Tm expected) (bimap ABT.out ABT.out <$> pfResult) - - when testTypechecked $ - scope "typechecked file" $ do - tf <- maybe (crash "Failed to typecheck") pure mayTypecheckedFile - let tfResult = - UF.hashTermsId tf - & toList - & firstJust \(_refId, _wk, trm, _typ) -> - LSPQ.findSmallestEnclosingNode pos trm - expectEqual (Just $ bimap ABT.Tm ABT.Tm expected) (bimap ABT.out ABT.out <$> tfResult) withTestCodebase :: (Codebase IO Symbol Ann -> IO r) -> Test r diff --git a/unison-cli/tests/Unison/Test/UriParser.hs b/unison-cli/tests/Unison/Test/UriParser.hs index e0133b7ff..30dbb1c0f 100644 --- a/unison-cli/tests/Unison/Test/UriParser.hs +++ b/unison-cli/tests/Unison/Test/UriParser.hs @@ -9,13 +9,7 @@ import Data.Text (Text) import qualified Data.Text as Text import EasyTest import qualified Text.Megaparsec as P -import Unison.Codebase.Editor.RemoteRepo - ( ReadGitRepo (..), - ReadRemoteNamespace (..), - ShareCodeserver(..), - pattern ReadGitRemoteNamespace, - pattern ReadShareRemoteNamespace, - ) +import Unison.Codebase.Editor.RemoteRepo (ReadGitRepo (..), ReadRemoteNamespace (..), ShareCodeserver (..), ShareUserHandle (..), pattern ReadGitRemoteNamespace, pattern ReadShareRemoteNamespace) import qualified Unison.Codebase.Editor.UriParser as UriParser import Unison.Codebase.Path (Path (..)) import qualified Unison.Codebase.Path as Path @@ -33,7 +27,7 @@ testShare = scope "share" . 
tests $ [ parseAugmented ( "unisonweb.base._releases.M4", - ReadRemoteNamespaceShare (ReadShareRemoteNamespace DefaultCodeserver "unisonweb" (path ["base", "_releases", "M4"])) + ReadRemoteNamespaceShare (ReadShareRemoteNamespace DefaultCodeserver (ShareUserHandle "unisonweb") (path ["base", "_releases", "M4"])) ), expectParseFailure ".unisonweb.base" ] diff --git a/unison-cli/tests/Unison/Test/VersionParser.hs b/unison-cli/tests/Unison/Test/VersionParser.hs index 41eaec9dc..5e32c8009 100644 --- a/unison-cli/tests/Unison/Test/VersionParser.hs +++ b/unison-cli/tests/Unison/Test/VersionParser.hs @@ -30,7 +30,7 @@ makeTest (version, path) = ( Just ( ReadShareRemoteNamespace { server = DefaultCodeserver, - repo = "unison", + repo = ShareUserHandle "unison", path = Path.fromList ["public", "base"] <> Path.fromText path } ) diff --git a/unison-cli/unison-cli.cabal b/unison-cli/unison-cli.cabal index ae28ff085..6417c69b5 100644 --- a/unison-cli/unison-cli.cabal +++ b/unison-cli/unison-cli.cabal @@ -30,6 +30,7 @@ library Unison.Auth.HTTPClient Unison.Auth.Tokens Unison.Auth.Types + Unison.Auth.UserInfo Unison.Cli.Monad Unison.Cli.MonadUtils Unison.Cli.NamesUtils @@ -129,6 +130,7 @@ library , ListLike , aeson , aeson-pretty + , ansi-terminal , async , base , bytes @@ -186,6 +188,7 @@ library , unison-codebase-sqlite-hashing-v2 , unison-core , unison-core1 + , unison-hash , unison-parser-typechecker , unison-prelude , unison-pretty-printer @@ -252,6 +255,7 @@ executable cli-integration-tests , ListLike , aeson , aeson-pretty + , ansi-terminal , async , base , bytes @@ -312,6 +316,7 @@ executable cli-integration-tests , unison-codebase-sqlite-hashing-v2 , unison-core , unison-core1 + , unison-hash , unison-parser-typechecker , unison-prelude , unison-pretty-printer @@ -371,6 +376,7 @@ executable transcripts , ListLike , aeson , aeson-pretty + , ansi-terminal , async , base , bytes @@ -432,6 +438,7 @@ executable transcripts , unison-codebase-sqlite-hashing-v2 , unison-core , unison-core1 + , unison-hash , unison-parser-typechecker , unison-prelude , unison-pretty-printer @@ -496,6 +503,7 @@ executable unison , ListLike , aeson , aeson-pretty + , ansi-terminal , async , base , bytes @@ -559,6 +567,7 @@ executable unison , unison-codebase-sqlite-hashing-v2 , unison-core , unison-core1 + , unison-hash , unison-parser-typechecker , unison-prelude , unison-pretty-printer @@ -627,6 +636,7 @@ test-suite cli-tests , ListLike , aeson , aeson-pretty + , ansi-terminal , async , base , bytes @@ -689,6 +699,7 @@ test-suite cli-tests , unison-codebase-sqlite-hashing-v2 , unison-core , unison-core1 + , unison-hash , unison-parser-typechecker , unison-prelude , unison-pretty-printer diff --git a/unison-cli/unison/ArgParse.hs b/unison-cli/unison/ArgParse.hs index 207e5b39b..bc24034f2 100644 --- a/unison-cli/unison/ArgParse.hs +++ b/unison-cli/unison/ArgParse.hs @@ -86,7 +86,7 @@ data ShouldDownloadBase deriving (Show, Eq) data ShouldSaveCodebase - = SaveCodebase + = SaveCodebase (Maybe FilePath) | DontSaveCodebase deriving (Show, Eq) @@ -179,7 +179,9 @@ initCommand = command "init" (info initParser (progDesc initHelp)) runDesc :: String -> String -> String runDesc cmd location = - "Execute a definition from " <> location <> ", passing on the provided arguments. " + "Execute a definition from " + <> location + <> ", passing on the provided arguments. 
" <> " To pass flags to your program, use `" <> cmd <> " -- --my-flag`" @@ -369,7 +371,8 @@ runSymbolParser = runFileParser :: Parser Command runFileParser = Run - <$> ( RunFromFile <$> fileArgument "path/to/file" + <$> ( RunFromFile + <$> fileArgument "path/to/file" <*> strArgument (metavar "SYMBOL") ) <*> runArgumentParser @@ -392,10 +395,23 @@ rtsStatsOption = in optional (option OptParse.str meta) saveCodebaseFlag :: Parser ShouldSaveCodebase -saveCodebaseFlag = flag DontSaveCodebase SaveCodebase (long "save-codebase" <> help saveHelp) +saveCodebaseFlag = flag DontSaveCodebase (SaveCodebase Nothing) (long "save-codebase" <> help saveHelp) where saveHelp = "if set the resulting codebase will be saved to a new directory, otherwise it will be deleted" +saveCodebaseToFlag :: Parser ShouldSaveCodebase +saveCodebaseToFlag = do + path <- + optional . strOption $ + long "save-codebase-to" + <> short 'S' + <> help "Where the codebase should be created. Implies --save-codebase" + pure + ( case path of + Just _ -> SaveCodebase path + _ -> DontSaveCodebase + ) + downloadBaseFlag :: Parser ShouldDownloadBase downloadBaseFlag = flag @@ -457,18 +473,30 @@ fileArgument varName = transcriptParser :: Parser Command transcriptParser = do -- ApplicativeDo + shouldSaveCodebaseTo <- saveCodebaseToFlag shouldSaveCodebase <- saveCodebaseFlag mrtsStatsFp <- rtsStatsOption files <- liftA2 (NE.:|) (fileArgument "FILE") (many (fileArgument "FILES...")) - pure (Transcript DontFork shouldSaveCodebase mrtsStatsFp files) + pure + ( let saveCodebase = case shouldSaveCodebaseTo of + DontSaveCodebase -> shouldSaveCodebase + _ -> shouldSaveCodebaseTo + in Transcript DontFork saveCodebase mrtsStatsFp files + ) transcriptForkParser :: Parser Command transcriptForkParser = do -- ApplicativeDo + shouldSaveCodebaseTo <- saveCodebaseToFlag shouldSaveCodebase <- saveCodebaseFlag mrtsStatsFp <- rtsStatsOption files <- liftA2 (NE.:|) (fileArgument "FILE") (many (fileArgument "FILES...")) - pure (Transcript UseFork shouldSaveCodebase mrtsStatsFp files) + pure + ( let saveCodebase = case shouldSaveCodebaseTo of + DontSaveCodebase -> shouldSaveCodebase + _ -> shouldSaveCodebaseTo + in Transcript UseFork saveCodebase mrtsStatsFp files + ) unisonHelp :: String -> String -> P.Doc unisonHelp (P.text -> executable) (P.text -> version) = diff --git a/unison-cli/unison/Main.hs b/unison-cli/unison/Main.hs index 2bb07c19c..0701863ce 100644 --- a/unison-cli/unison/Main.hs +++ b/unison-cli/unison/Main.hs @@ -22,7 +22,7 @@ import ArgParse UsageRenderer, parseCLIArgs, ) -import Compat (defaultInterruptHandler, onWindows, withInterruptHandler) +import Compat (defaultInterruptHandler, withInterruptHandler) import Control.Concurrent (newEmptyMVar, runInUnboundThread, takeMVar) import Control.Concurrent.STM import Control.Error.Safe (rightMay) @@ -118,7 +118,7 @@ main = withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do ] ) Run (RunFromSymbol mainName) args -> do - getCodebaseOrExit mCodePathOption SC.MigrateAutomatically \(_, _, theCodebase) -> do + getCodebaseOrExit mCodePathOption (SC.MigrateAutomatically SC.Backup) \(_, _, theCodebase) -> do RTI.withRuntime False RTI.OneOff Version.gitDescribeWithDate \runtime -> do withArgs args (execute theCodebase runtime mainName) >>= \case Left err -> exitError err @@ -130,7 +130,7 @@ main = withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do case e of Left _ -> exitError "I couldn't find that file or it is for some reason unreadable." 
Right contents -> do - getCodebaseOrExit mCodePathOption SC.MigrateAutomatically \(initRes, _, theCodebase) -> do + getCodebaseOrExit mCodePathOption (SC.MigrateAutomatically SC.Backup) \(initRes, _, theCodebase) -> do withRuntimes RTI.OneOff \(rt, sbrt) -> do let fileEvent = Input.UnisonFileChanged (Text.pack file) contents let noOpRootNotifier _ = pure () @@ -156,7 +156,7 @@ main = withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do case e of Left _ -> exitError "I had trouble reading this input." Right contents -> do - getCodebaseOrExit mCodePathOption SC.MigrateAutomatically \(initRes, _, theCodebase) -> do + getCodebaseOrExit mCodePathOption (SC.MigrateAutomatically SC.Backup) \(initRes, _, theCodebase) -> do withRuntimes RTI.OneOff \(rt, sbrt) -> do let fileEvent = Input.UnisonFileChanged (Text.pack "") contents let noOpRootNotifier _ = pure () @@ -247,7 +247,7 @@ main = withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do Nothing -> action Just fp -> recordRtsStats fp action Launch isHeadless codebaseServerOpts downloadBase mayStartingPath shouldWatchFiles -> do - getCodebaseOrExit mCodePathOption SC.MigrateAfterPrompt \(initRes, _, theCodebase) -> do + getCodebaseOrExit mCodePathOption (SC.MigrateAfterPrompt SC.Backup) \(initRes, _, theCodebase) -> do withRuntimes RTI.Persistent \(runtime, sbRuntime) -> do rootVar <- newEmptyTMVarIO pathVar <- newTVarIO initialPath @@ -264,7 +264,7 @@ main = withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do -- prevent UCM from shutting down properly. Hopefully we can re-enable LSP on -- Windows when we move to GHC 9.* -- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/1224 - when (not onWindows) . void . Ki.fork scope $ LSP.spawnLsp theCodebase runtime (readTMVar rootVar) (readTVar pathVar) + void . Ki.fork scope $ LSP.spawnLsp theCodebase runtime (readTMVar rootVar) (readTVar pathVar) Server.startServer (Backend.BackendEnv {Backend.useNamesIndex = False}) codebaseServerOpts sbRuntime theCodebase $ \baseUrl -> do case exitOption of DoNotExit -> do @@ -333,14 +333,16 @@ initHTTPClient = do manager <- HTTP.newTlsManagerWith managerSettings HTTP.setGlobalManager manager -prepareTranscriptDir :: ShouldForkCodebase -> Maybe CodebasePathOption -> IO FilePath -prepareTranscriptDir shouldFork mCodePathOption = do - tmp <- Temp.getCanonicalTemporaryDirectory >>= (`Temp.createTempDirectory` "transcript") +prepareTranscriptDir :: ShouldForkCodebase -> Maybe CodebasePathOption -> ShouldSaveCodebase -> IO FilePath +prepareTranscriptDir shouldFork mCodePathOption shouldSaveCodebase = do + tmp <- case shouldSaveCodebase of + SaveCodebase (Just path) -> pure path + _ -> Temp.getCanonicalTemporaryDirectory >>= (`Temp.createTempDirectory` "transcript") let cbInit = SC.init case shouldFork of UseFork -> do -- A forked codebase does not need to Create a codebase, because it already exists - getCodebaseOrExit mCodePathOption SC.MigrateAutomatically $ const (pure ()) + getCodebaseOrExit mCodePathOption (SC.MigrateAutomatically SC.Backup) $ const (pure ()) path <- Codebase.getCodebaseDir (fmap codebasePathOptionToPath mCodePathOption) PT.putPrettyLn $ P.lines @@ -364,7 +366,7 @@ runTranscripts' progName mcodepath transcriptDir markdownFiles = do currentDir <- getCurrentDirectory configFilePath <- getConfigFilePath mcodepath -- We don't need to create a codebase through `getCodebaseOrExit` as we've already done so previously. 
- and <$> getCodebaseOrExit (Just (DontCreateCodebaseWhenMissing transcriptDir)) SC.MigrateAutomatically \(_, codebasePath, theCodebase) -> do + and <$> getCodebaseOrExit (Just (DontCreateCodebaseWhenMissing transcriptDir)) (SC.MigrateAutomatically SC.Backup) \(_, codebasePath, theCodebase) -> do TR.withTranscriptRunner Version.gitDescribeWithDate (Just configFilePath) $ \runTranscript -> do for markdownFiles $ \(MarkdownFile fileName) -> do transcriptSrc <- readUtf8 fileName @@ -430,12 +432,12 @@ runTranscripts renderUsageInfo shouldFork shouldSaveTempCodebase mCodePathOption Exit.exitWith (Exit.ExitFailure 1) Success markdownFiles -> pure markdownFiles progName <- getProgName - transcriptDir <- prepareTranscriptDir shouldFork mCodePathOption + transcriptDir <- prepareTranscriptDir shouldFork mCodePathOption shouldSaveTempCodebase completed <- runTranscripts' progName (Just transcriptDir) transcriptDir markdownFiles case shouldSaveTempCodebase of DontSaveCodebase -> removeDirectoryRecursive transcriptDir - SaveCodebase -> + SaveCodebase _ -> when completed $ do PT.putPrettyLn $ P.callout diff --git a/unison-cli/unison/System/Path.hs b/unison-cli/unison/System/Path.hs index 09049046d..4afaed558 100644 --- a/unison-cli/unison/System/Path.hs +++ b/unison-cli/unison/System/Path.hs @@ -40,7 +40,7 @@ filterUseless :: [FilePath] -> [FilePath] filterUseless = (\\ [".", ".."]) -- | Returns a list of nodes in a tree via a depth-first walk. -mtreeList :: Monad m => (a -> m [a]) -> a -> m [a] +mtreeList :: (Monad m) => (a -> m [a]) -> a -> m [a] mtreeList children root = do xs <- children root subChildren <- mapM (mtreeList children) xs diff --git a/unison-core/package.yaml b/unison-core/package.yaml index 3ccbbe99f..d4e9b395e 100644 --- a/unison-core/package.yaml +++ b/unison-core/package.yaml @@ -26,6 +26,7 @@ library: - text - transformers - unison-core + - unison-hash - unison-prelude - unison-util-base32hex - unison-util-relation diff --git a/unison-core/src/Unison/ABT.hs b/unison-core/src/Unison/ABT.hs index 0805b2978..087a98482 100644 --- a/unison-core/src/Unison/ABT.hs +++ b/unison-core/src/Unison/ABT.hs @@ -115,8 +115,8 @@ import U.Core.ABT unabs, visit, visit', - visit_, visitPure, + visit_, vmap, pattern AbsN', pattern Tm', @@ -135,14 +135,15 @@ abt_ = lens out setter -- a.k.a. baseFunctor_ :: Traversal' (Term f v a) (f _) baseFunctor_ :: - Applicative m => + (Applicative m) => (f (Term f v a) -> m (f (Term f v a))) -> Term f v a -> m (Term f v a) baseFunctor_ f t = - t & abt_ %%~ \case - Tm fx -> Tm <$> f (fx) - x -> pure x + t + & abt_ %%~ \case + Tm fx -> Tm <$> f (fx) + x -> pure x -- deriving instance (Data a, Data v, Typeable f, Data (f (Term f v a)), Ord v) => Data (Term f v a) @@ -152,7 +153,7 @@ unvar :: V v -> v unvar (Free v) = v unvar (Bound v) = v -instance Var v => Var (V v) where +instance (Var v) => Var (V v) where freshIn s v = freshIn (Set.map unvar s) <$> v wrap :: (Functor f, Foldable f, Var v) => v -> Term f (V v) a -> (V v, Term f (V v) a) @@ -182,7 +183,7 @@ annotateBound = go Set.empty Tm body -> tm' a (go bound <$> body) -- | `True` if `v` is a member of the set of free variables of `t` -isFreeIn :: Ord v => v -> Term f v a -> Bool +isFreeIn :: (Ord v) => v -> Term f v a -> Bool isFreeIn v t = Set.member v (freeVars t) -- | Replace the annotation with the given argument. @@ -206,10 +207,10 @@ amap' f t@(Term _ a out) = case out of Cycle r -> cycle' (f t a) (amap' f r) Abs v body -> abs' (f t a) v (amap' f body) -extraMap :: Functor g => (forall k. 
f k -> g k) -> Term f v a -> Term g v a +extraMap :: (Functor g) => (forall k. f k -> g k) -> Term f v a -> Term g v a extraMap p (Term fvs a sub) = Term fvs a (go p sub) where - go :: Functor g => (forall k. f k -> g k) -> ABT f v (Term f v a) -> ABT g v (Term g v a) + go :: (Functor g) => (forall k. f k -> g k) -> ABT f v (Term f v a) -> ABT g v (Term g v a) go p = \case Var v -> Var v Cycle r -> Cycle (extraMap p r) @@ -244,10 +245,10 @@ var = annotatedVar () annotatedVar :: a -> v -> Term f v a annotatedVar = U.Core.ABT.var -abs :: Ord v => v -> Term f v () -> Term f v () +abs :: (Ord v) => v -> Term f v () -> Term f v () abs = abs' () -abs' :: Ord v => a -> v -> Term f v a -> Term f v a +abs' :: (Ord v) => a -> v -> Term f v a -> Term f v a abs' = U.Core.ABT.abs absr :: (Functor f, Foldable f, Var v) => v -> Term f (V v) () -> Term f (V v) () @@ -257,10 +258,10 @@ absr = absr' () absr' :: (Functor f, Foldable f, Var v) => a -> v -> Term f (V v) a -> Term f (V v) a absr' a v body = wrap' v body $ \v body -> abs' a v body -absChain :: Ord v => [v] -> Term f v () -> Term f v () +absChain :: (Ord v) => [v] -> Term f v () -> Term f v () absChain vs t = foldr abs t vs -absChain' :: Ord v => [(a, v)] -> Term f v a -> Term f v a +absChain' :: (Ord v) => [(a, v)] -> Term f v a -> Term f v a absChain' vs t = foldr (\(a, v) t -> abs' a v t) t vs tm :: (Foldable f, Ord v) => f (Term f v ()) -> Term f v () @@ -328,10 +329,10 @@ changeVars m t = case out t of Just v -> annotatedVar (annotation t) v Tm v -> tm' (annotation t) (changeVars m <$> v) -fresh :: Var v => Term f v a -> v -> v +fresh :: (Var v) => Term f v a -> v -> v fresh t = freshIn (freeVars t) -allVars :: Foldable f => Term f v a -> [v] +allVars :: (Foldable f) => Term f v a -> [v] allVars t = case out t of Var v -> [v] Cycle body -> allVars body @@ -443,7 +444,7 @@ rewriteDown_ f t = do Tm body -> tm' (annotation t') <$> (traverse (rewriteDown_ f) body) data Subst f v a = Subst - { freshen :: forall m v'. Monad m => (v -> m v') -> m v', + { freshen :: forall m v'. (Monad m) => (v -> m v') -> m v', bind :: Term f v a -> Term f v a, bindInheritAnnotation :: forall b. Term f v b -> Term f v a, variable :: v @@ -508,21 +509,21 @@ find' :: [Term f v a] find' p = Unison.ABT.find (\t -> if p t then Found t else Continue) -components :: Var v => [(v, Term f v a)] -> [[(v, Term f v a)]] +components :: (Var v) => [(v, Term f v a)] -> [[(v, Term f v a)]] components = Components.components freeVars -- Converts to strongly connected components while preserving the -- order of definitions. Satisfies `join (orderedComponents bs) == bs`. -orderedComponents' :: Var v => [(v, Term f v a)] -> [[(v, Term f v a)]] +orderedComponents' :: (Var v) => [(v, Term f v a)] -> [[(v, Term f v a)]] orderedComponents' tms = go [] Set.empty tms where go [] _ [] = [] go [] deps (hd : rem) = go [hd] (deps <> freeVars (snd hd)) rem go cur deps rem = case findIndex isDep rem of Nothing -> - reverse cur : - let (hd, tl) = splitAt 1 rem - in go hd (depsFor hd) tl + reverse cur + : let (hd, tl) = splitAt 1 rem + in go hd (depsFor hd) tl Just i -> go (reverse newMembers ++ cur) deps' (drop (i + 1) rem) where deps' = deps <> depsFor newMembers @@ -538,10 +539,10 @@ orderedComponents' tms = go [] Set.empty tms -- Example: given `[[x],[ping,r,s,pong]]`, where `ping` and `pong` -- are mutually recursive but `r` and `s` are uninvolved, this produces: -- `[[x], [ping,pong], [r], [s]]`. 
-orderedComponents :: Var v => [(v, Term f v a)] -> [[(v, Term f v a)]] +orderedComponents :: (Var v) => [(v, Term f v a)] -> [[(v, Term f v a)]] orderedComponents bs0 = tweak =<< orderedComponents' bs0 where - tweak :: Var v => [(v, Term f v a)] -> [[(v, Term f v a)]] + tweak :: (Var v) => [(v, Term f v a)] -> [[(v, Term f v a)]] tweak bs@(_ : _ : _) = case takeWhile isCyclic (components bs) of [] -> [bs] cycles -> cycles <> orderedComponents rest diff --git a/unison-core/src/Unison/ABT/Normalized.hs b/unison-core/src/Unison/ABT/Normalized.hs index 36dd06a4e..ba2b98f4a 100644 --- a/unison-core/src/Unison/ABT/Normalized.hs +++ b/unison-core/src/Unison/ABT/Normalized.hs @@ -12,6 +12,8 @@ module Unison.ABT.Normalized ( ABT (..), Term (.., TAbs, TTm, TAbss), + Align (..), + alpha, renames, rename, transform, @@ -20,6 +22,7 @@ where import Data.Bifoldable import Data.Bifunctor +import Data.Foldable (toList) -- import Data.Bitraversable import Data.Map.Strict (Map) @@ -41,24 +44,41 @@ data Term f v = Term } instance - (forall a b. Show a => Show b => Show (f a b), Show v) => + (forall a b. (Show a) => (Show b) => Show (f a b), Show v) => Show (ABT f v) where showsPrec p a = showParen (p >= 9) $ case a of Abs v tm -> - showString "Abs " . showsPrec 10 v + showString "Abs " + . showsPrec 10 v . showString " " . showsPrec 10 tm Tm e -> showString "Tm " . showsPrec 10 e instance - (forall a b. Show a => Show b => Show (f a b), Show v) => + (forall a b. (Show a) => (Show b) => Show (f a b), Show v) => Show (Term f v) where showsPrec p (Term _ e) = showParen (p >= 9) $ showString "Term " . showsPrec 10 e -pattern TAbs :: Var v => v -> Term f v -> Term f v +instance + (forall a b. (Eq a) => (Eq b) => Eq (f a b), Bifunctor f, Bifoldable f, Var v) => + Eq (ABT f v) + where + Abs v1 e1 == Abs v2 e2 + | v1 == v2 = e1 == e2 + | otherwise = e1 == rename v2 v1 e2 + Tm e1 == Tm e2 = e1 == e2 + _ == _ = False + +instance + (forall a b. (Eq a) => (Eq b) => Eq (f a b), Bifunctor f, Bifoldable f, Var v) => + Eq (Term f v) + where + Term _ abt1 == Term _ abt2 = abt1 == abt2 + +pattern TAbs :: (Var v) => v -> Term f v -> Term f v pattern TAbs u bd <- Term _ (Abs u bd) where @@ -72,11 +92,40 @@ pattern TTm bd <- {-# COMPLETE TAbs, TTm #-} -unabss :: Var v => Term f v -> ([v], Term f v) +class (Bifoldable f, Bifunctor f) => Align f where + align :: + (Applicative g) => + (vl -> vr -> g vs) -> + (el -> er -> g es) -> + f vl el -> + f vr er -> + Maybe (g (f vs es)) + +alphaErr :: + (Align f) => (Var v) => Map v v -> Term f v -> Term f v -> Either (Term f v, Term f v) a +alphaErr un tml tmr = Left (tml, renames count un tmr) + where + count = Map.fromListWith (+) . flip zip [1, 1 ..] $ toList un + +-- Checks if two terms are equal up to a given variable renaming. The +-- renaming should map variables in the right hand term to the +-- equivalent variable in the left hand term. 
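+-- For example, two single-abstraction terms whose bodies differ only in the
+-- choice of abstracted variable are accepted under the empty renaming: the
+-- Abs case records the right-hand variable as mapping to the left-hand one,
+-- and variable occurrences in the bodies are then compared through that map.
+-- On a mismatch, the offending left subterm and the (renamed) right subterm
+-- are returned via Left.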
+alpha :: (Align f) => (Var v) => Map v v -> Term f v -> Term f v -> Either (Term f v, Term f v) () +alpha un (TAbs u tml) (TAbs v tmr) = + alpha (Map.insert v u (Map.filter (/= u) un)) tml tmr +alpha un tml@(TTm bdl) tmr@(TTm bdr) + | Just sub <- align av (alpha un) bdl bdr = () <$ sub + where + av u v + | maybe False (== u) (Map.lookup v un) = pure () + | otherwise = alphaErr un tml tmr +alpha un tml tmr = alphaErr un tml tmr + +unabss :: (Var v) => Term f v -> ([v], Term f v) unabss (TAbs v (unabss -> (vs, bd))) = (v : vs, bd) unabss bd = ([], bd) -pattern TAbss :: Var v => [v] -> Term f v -> Term f v +pattern TAbss :: (Var v) => [v] -> Term f v -> Term f v pattern TAbss vs bd <- (unabss -> (vs, bd)) where diff --git a/unison-core/src/Unison/DataDeclaration.hs b/unison-core/src/Unison/DataDeclaration.hs index 8588c40e3..01c78d5c2 100644 --- a/unison-core/src/Unison/DataDeclaration.hs +++ b/unison-core/src/Unison/DataDeclaration.hs @@ -17,8 +17,10 @@ module Unison.DataDeclaration constructorIds, declConstructorReferents, declDependencies, + labeledDeclDependencies, declFields, dependencies, + labeledDependencies, generateRecordAccessors, unhashComponent, mkDataDecl', @@ -41,6 +43,7 @@ import qualified Unison.ABT as ABT import Unison.ConstructorReference (GConstructorReference (..)) import qualified Unison.ConstructorType as CT import Unison.DataDeclaration.ConstructorId (ConstructorId) +import qualified Unison.LabeledDependency as LD import qualified Unison.Name as Name import qualified Unison.Names.ResolutionResult as Names import qualified Unison.Pattern as Pattern @@ -67,9 +70,12 @@ data DeclOrBuiltin v a asDataDecl :: Decl v a -> DataDeclaration v a asDataDecl = either toDataDecl id -declDependencies :: Ord v => Decl v a -> Set Reference +declDependencies :: (Ord v) => Decl v a -> Set Reference declDependencies = either (dependencies . toDataDecl) dependencies +labeledDeclDependencies :: (Ord v) => Decl v a -> Set LD.LabeledDependency +labeledDeclDependencies = Set.map LD.TypeReference . declDependencies + constructorType :: Decl v a -> CT.ConstructorType constructorType = \case Left {} -> CT.Effect @@ -84,7 +90,7 @@ data DataDeclaration v a = DataDeclaration bound :: [v], constructors' :: [(a, v, Type v a)] } - deriving (Eq, Show, Functor) + deriving (Eq, Ord, Show, Functor) constructors_ :: Lens' (DataDeclaration v a) [(a, v, Type v a)] constructors_ = lens getter setter @@ -95,13 +101,13 @@ constructors_ = lens getter setter newtype EffectDeclaration v a = EffectDeclaration { toDataDecl :: DataDeclaration v a } - deriving (Eq, Show, Functor) + deriving (Eq, Ord, Show, Functor) asDataDecl_ :: Iso' (EffectDeclaration v a) (DataDeclaration v a) asDataDecl_ = iso toDataDecl EffectDeclaration withEffectDeclM :: - Functor f => + (Functor f) => (DataDeclaration v a -> f (DataDeclaration v' a')) -> EffectDeclaration v a -> f (EffectDeclaration v' a') @@ -194,7 +200,7 @@ constructorTypes :: DataDeclaration v a -> [Type v a] constructorTypes = (snd <$>) . constructors -- what is declFields? —AI -declFields :: Var v => Decl v a -> Either [Int] [Int] +declFields :: (Var v) => Decl v a -> Either [Int] [Int] declFields = bimap cf cf . first toDataDecl where cf = fmap fields . 
constructorTypes @@ -211,7 +217,7 @@ constructors (DataDeclaration _ _ _ ctors) = [(v, t) | (_, v, t) <- ctors] constructorVars :: DataDeclaration v a -> [v] constructorVars dd = fst <$> constructors dd -constructorNames :: Var v => DataDeclaration v a -> [Text] +constructorNames :: (Var v) => DataDeclaration v a -> [Text] constructorNames dd = Var.name <$> constructorVars dd -- This function is unsound, since the `rid` and the `decl` have to match. @@ -228,18 +234,18 @@ constructorIds dd = [0 .. fromIntegral $ length (constructors dd) - 1] -- | All variables mentioned in the given data declaration. -- Includes both term and type variables, both free and bound. -allVars :: Ord v => DataDeclaration v a -> Set v +allVars :: (Ord v) => DataDeclaration v a -> Set v allVars (DataDeclaration _ _ bound ctors) = Set.unions $ Set.fromList bound : [Set.insert v (Set.fromList $ ABT.allVars tp) | (_, v, tp) <- ctors] -- | All variables mentioned in the given declaration. -- Includes both term and type variables, both free and bound. -allVars' :: Ord v => Decl v a -> Set v +allVars' :: (Ord v) => Decl v a -> Set v allVars' = allVars . either toDataDecl id bindReferences :: - Var v => + (Var v) => (v -> Name.Name) -> Set v -> Map Name.Name Reference -> @@ -250,10 +256,13 @@ bindReferences unsafeVarToName keepFree names (DataDeclaration m a bound constru (a,v,) <$> Type.bindReferences unsafeVarToName keepFree names ty pure $ DataDeclaration m a bound constructors -dependencies :: Ord v => DataDeclaration v a -> Set Reference +dependencies :: (Ord v) => DataDeclaration v a -> Set Reference dependencies dd = Set.unions (Type.dependencies <$> constructorTypes dd) +labeledDependencies :: (Ord v) => DataDeclaration v a -> Set LD.LabeledDependency +labeledDependencies = Set.map LD.TypeReference . dependencies + mkEffectDecl' :: Modifier -> a -> [v] -> [(a, v, Type v a)] -> EffectDeclaration v a mkEffectDecl' m a b cs = EffectDeclaration (DataDeclaration m a b cs) @@ -269,7 +278,7 @@ data F a | Modified Modifier a deriving (Functor, Foldable, Show) -updateDependencies :: Ord v => Map Reference Reference -> Decl v a -> Decl v a +updateDependencies :: (Ord v) => Map Reference Reference -> Decl v a -> Decl v a updateDependencies typeUpdates decl = back $ dataDecl @@ -288,7 +297,7 @@ updateDependencies typeUpdates decl = -- have been replaced with the corresponding output `v`s in the output `Decl`s, -- which are fresh with respect to all input Decls. unhashComponent :: - forall v a. Var v => Map Reference.Id (Decl v a) -> Map Reference.Id (v, Decl v a) + forall v a. 
(Var v) => Map Reference.Id (Decl v a) -> Map Reference.Id (v, Decl v a) unhashComponent m = let usedVars :: Set v usedVars = foldMap allVars' m diff --git a/unison-core/src/Unison/DataDeclaration/ConstructorId.hs b/unison-core/src/Unison/DataDeclaration/ConstructorId.hs index 1461f66ff..011effb5e 100644 --- a/unison-core/src/Unison/DataDeclaration/ConstructorId.hs +++ b/unison-core/src/Unison/DataDeclaration/ConstructorId.hs @@ -1,11 +1,3 @@ -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveFoldable #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ViewPatterns #-} - module Unison.DataDeclaration.ConstructorId (ConstructorId) where import Data.Word (Word64) diff --git a/unison-core/src/Unison/DataDeclaration/Names.hs b/unison-core/src/Unison/DataDeclaration/Names.hs index 2564f7948..900aa48a2 100644 --- a/unison-core/src/Unison/DataDeclaration/Names.hs +++ b/unison-core/src/Unison/DataDeclaration/Names.hs @@ -18,7 +18,7 @@ import Unison.Var (Var) import Prelude hiding (cycle) -- implementation of dataDeclToNames and effectDeclToNames -toNames :: Var v => (v -> Name.Name) -> CT.ConstructorType -> v -> Reference.Id -> DataDeclaration v a -> Names +toNames :: (Var v) => (v -> Name.Name) -> CT.ConstructorType -> v -> Reference.Id -> DataDeclaration v a -> Names toNames varToName ct typeSymbol (Reference.DerivedId -> r) dd = -- constructor names foldMap names (DD.constructorVars dd `zip` [0 ..]) @@ -28,20 +28,20 @@ toNames varToName ct typeSymbol (Reference.DerivedId -> r) dd = names (ctor, i) = Names (Rel.singleton (varToName ctor) (Referent.Con (ConstructorReference r i) ct)) mempty -dataDeclToNames :: Var v => (v -> Name.Name) -> v -> Reference.Id -> DataDeclaration v a -> Names +dataDeclToNames :: (Var v) => (v -> Name.Name) -> v -> Reference.Id -> DataDeclaration v a -> Names dataDeclToNames varToName = toNames varToName CT.Data -effectDeclToNames :: Var v => (v -> Name.Name) -> v -> Reference.Id -> EffectDeclaration v a -> Names +effectDeclToNames :: (Var v) => (v -> Name.Name) -> v -> Reference.Id -> EffectDeclaration v a -> Names effectDeclToNames varToName typeSymbol r ed = toNames varToName CT.Effect typeSymbol r $ DD.toDataDecl ed -dataDeclToNames' :: Var v => (v -> Name.Name) -> (v, (Reference.Id, DataDeclaration v a)) -> Names +dataDeclToNames' :: (Var v) => (v -> Name.Name) -> (v, (Reference.Id, DataDeclaration v a)) -> Names dataDeclToNames' varToName (v, (r, d)) = dataDeclToNames varToName v r d -effectDeclToNames' :: Var v => (v -> Name.Name) -> (v, (Reference.Id, EffectDeclaration v a)) -> Names +effectDeclToNames' :: (Var v) => (v -> Name.Name) -> (v, (Reference.Id, EffectDeclaration v a)) -> Names effectDeclToNames' varToName (v, (r, d)) = effectDeclToNames varToName v r d bindNames :: - Var v => + (Var v) => (v -> Name.Name) -> Set v -> Names -> diff --git a/unison-core/src/Unison/Hash.hs b/unison-core/src/Unison/Hash.hs deleted file mode 100644 index 04c2e65cb..000000000 --- a/unison-core/src/Unison/Hash.hs +++ /dev/null @@ -1,29 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE OverloadedStrings #-} - -module Unison.Hash - ( Hash (Hash), - HashFor (..), - base32Hex, - fromBase32Hex, - Hash.toByteString, - validBase32HexChars, - ) -where - -import qualified U.Util.Base32Hex as Base32Hex -import U.Util.Hash (Hash (Hash), HashFor (..)) -import qualified U.Util.Hash as Hash -import Unison.Prelude - --- | Return the lowercase unpadded base32Hex encoding of this 
'Hash'. --- Multibase prefix would be 'v', see https://github.com/multiformats/multibase -base32Hex :: Hash -> Text -base32Hex = Base32Hex.toText . Hash.toBase32Hex - --- | Produce a 'Hash' from a base32hex-encoded version of its binary representation -fromBase32Hex :: Text -> Maybe Hash -fromBase32Hex = fmap Hash.fromBase32Hex . Base32Hex.fromText - -validBase32HexChars :: Set Char -validBase32HexChars = Base32Hex.validChars diff --git a/unison-core/src/Unison/HashQualified'.hs b/unison-core/src/Unison/HashQualified'.hs index a6afd4bec..ac6579730 100644 --- a/unison-core/src/Unison/HashQualified'.hs +++ b/unison-core/src/Unison/HashQualified'.hs @@ -78,12 +78,12 @@ fromNamedReference n r = HashQualified n (Reference.toShortHash r) fromName :: n -> HashQualified n fromName = NameOnly -matchesNamedReferent :: Eq n => n -> Referent -> HashQualified n -> Bool +matchesNamedReferent :: (Eq n) => n -> Referent -> HashQualified n -> Bool matchesNamedReferent n r = \case NameOnly n' -> n' == n HashQualified n' sh -> n' == n && sh `SH.isPrefixOf` Referent.toShortHash r -matchesNamedReference :: Eq n => n -> Reference -> HashQualified n -> Bool +matchesNamedReference :: (Eq n) => n -> Reference -> HashQualified n -> Bool matchesNamedReference n r = \case NameOnly n' -> n' == n HashQualified n' sh -> n' == n && sh `SH.isPrefixOf` Reference.toShortHash r @@ -101,14 +101,14 @@ sortByLength = NameOnly name -> (length (Name.reverseSegments name), Nothing, Name.isAbsolute name) HashQualified name hash -> (length (Name.reverseSegments name), Just hash, Name.isAbsolute name) -instance Name.Alphabetical n => Name.Alphabetical (HashQualified n) where +instance (Name.Alphabetical n) => Name.Alphabetical (HashQualified n) where compareAlphabetical (NameOnly n) (NameOnly n2) = Name.compareAlphabetical n n2 -- NameOnly comes first compareAlphabetical NameOnly {} HashQualified {} = LT compareAlphabetical HashQualified {} NameOnly {} = GT compareAlphabetical (HashQualified n sh) (HashQualified n2 sh2) = Name.compareAlphabetical n n2 <> compare sh sh2 -instance Convert n n2 => Parse (HashQualified n) n2 where +instance (Convert n n2) => Parse (HashQualified n) n2 where parse = \case NameOnly n -> Just (Name.convert n) _ -> Nothing diff --git a/unison-core/src/Unison/HashQualified.hs b/unison-core/src/Unison/HashQualified.hs index d5d927990..88276b9fd 100644 --- a/unison-core/src/Unison/HashQualified.hs +++ b/unison-core/src/Unison/HashQualified.hs @@ -126,7 +126,7 @@ requalify hq r = case hq of HashQualified n _ -> fromNamedReferent n r HashOnly _ -> fromReferent r -instance Name.Alphabetical n => Name.Alphabetical (HashQualified n) where +instance (Name.Alphabetical n) => Name.Alphabetical (HashQualified n) where -- Ordered alphabetically, based on the name. Hashes come last. 
compareAlphabetical a b = case (toName a, toName b) of @@ -140,7 +140,7 @@ instance Name.Alphabetical n => Name.Alphabetical (HashQualified n) where (Just _, Nothing) -> GT (Just sh, Just sh2) -> compare sh sh2 -instance Convert n n2 => Convert (HashQualified n) (HashQualified n2) where +instance (Convert n n2) => Convert (HashQualified n) (HashQualified n2) where convert = fmap Name.convert instance Convert n (HashQualified n) where diff --git a/unison-core/src/Unison/Hashable.hs b/unison-core/src/Unison/Hashable.hs index 5bc24743f..a51109f79 100644 --- a/unison-core/src/Unison/Hashable.hs +++ b/unison-core/src/Unison/Hashable.hs @@ -7,9 +7,8 @@ import Data.ByteString.Builder (doubleBE, int64BE, toLazyByteString, word64BE) import qualified Data.ByteString.Lazy as BL import qualified Data.Map as Map import qualified Data.Set as Set -import U.Util.Hash (Hash) -import qualified U.Util.Hash as H -import qualified U.Util.Hash as Hash +import Unison.Hash (Hash) +import qualified Unison.Hash as Hash import Unison.Prelude import Unison.Util.Relation (Relation) import qualified Unison.Util.Relation as Relation @@ -46,9 +45,9 @@ hash = accumulate' -- useful in algorithms, the runtime, etc. -- Consider carefully which class you want in each use-case. class Hashable t where - tokens :: Accumulate h => t -> [Token h] + tokens :: (Accumulate h) => t -> [Token h] -instance Hashable a => Hashable [a] where +instance (Hashable a) => Hashable [a] where tokens = map accumulateToken instance (Hashable a, Hashable b) => Hashable (a, b) where @@ -109,8 +108,8 @@ instance Accumulate Hash where toBS (Text txt) = let tbytes = encodeUtf8 txt in [encodeLength (B.length tbytes), tbytes] - toBS (Hashed h) = [H.toByteString h] - encodeLength :: Integral n => n -> B.ByteString + toBS (Hashed h) = [Hash.toByteString h] + encodeLength :: (Integral n) => n -> B.ByteString encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral - fromBytes = H.fromByteString - toBytes = H.toByteString + fromBytes = Hash.fromByteString + toBytes = Hash.toByteString diff --git a/unison-core/src/Unison/LabeledDependency.hs b/unison-core/src/Unison/LabeledDependency.hs index 1ab378101..13ebe0369 100644 --- a/unison-core/src/Unison/LabeledDependency.hs +++ b/unison-core/src/Unison/LabeledDependency.hs @@ -62,14 +62,14 @@ dataConstructor r = ConReference r Data effectConstructor :: ConstructorReference -> LabeledDependency effectConstructor r = ConReference r Effect -referents :: Foldable f => f Referent -> Set LabeledDependency +referents :: (Foldable f) => f Referent -> Set LabeledDependency referents rs = Set.fromList (map referent $ toList rs) fold :: (Reference -> a) -> (Referent -> a) -> LabeledDependency -> a fold f _ (TypeReference r) = f r fold _ g (TermReferent r) = g r -partition :: Foldable t => t LabeledDependency -> ([Reference], [Referent]) +partition :: (Foldable t) => t LabeledDependency -> ([Reference], [Referent]) partition = foldMap \case TypeReference ref -> ([ref], []) diff --git a/unison-core/src/Unison/Name.hs b/unison-core/src/Unison/Name.hs index e90686e6a..bd93b87cc 100644 --- a/unison-core/src/Unison/Name.hs +++ b/unison-core/src/Unison/Name.hs @@ -102,7 +102,7 @@ compareSuffix (Name _ ss0) = -- /Precondition/: the name is relative -- -- /O(n)/, where /n/ is the number of segments. 
-cons :: HasCallStack => NameSegment -> Name -> Name +cons :: (HasCallStack) => NameSegment -> Name -> Name cons x name = case name of Name Absolute _ -> @@ -187,7 +187,7 @@ isPrefixOf :: Name -> Name -> Bool isPrefixOf (Name p0 ss0) (Name p1 ss1) = p0 == p1 && List.isPrefixOf (reverse (toList ss0)) (reverse (toList ss1)) -joinDot :: HasCallStack => Name -> Name -> Name +joinDot :: (HasCallStack) => Name -> Name -> Name joinDot n1@(Name p0 ss0) n2@(Name p1 ss1) = case p1 of Relative -> Name p0 (ss1 <> ss0) @@ -344,7 +344,7 @@ sortNames toText = -- @ -- -- /Precondition/: the name is relative. -splits :: HasCallStack => Name -> [([NameSegment], Name)] +splits :: (HasCallStack) => Name -> [([NameSegment], Name)] splits (Name p ss0) = ss0 & List.NonEmpty.toList @@ -356,7 +356,7 @@ splits (Name p ss0) = -- ([], a.b.c) : over (mapped . _1) (a.) (splits b.c) -- ([], a.b.c) : over (mapped . _1) (a.) (([], b.c) : over (mapped . _1) (b.) (splits c)) -- [([], a.b.c), ([a], b.c), ([a.b], c)] - splits0 :: HasCallStack => [a] -> [([a], NonEmpty a)] + splits0 :: (HasCallStack) => [a] -> [([a], NonEmpty a)] splits0 = \case [] -> [] [x] -> [([], x :| [])] @@ -417,7 +417,7 @@ suffixFrom (Name p0 ss0) (Name _ ss1) = do -- that match. -- -- align [a,b] [x,a,b,y] = Just [x,a,b] - align :: forall a. Eq a => [a] -> [a] -> Maybe [a] + align :: forall a. (Eq a) => [a] -> [a] -> Maybe [a] align xs = go id where @@ -447,7 +447,7 @@ unqualified (Name _ (s :| _)) = -- -- NB: Only works if the `Ord` instance for `Name` orders based on -- `Name.reverseSegments`. -shortestUniqueSuffix :: forall r. Ord r => Name -> r -> R.Relation Name r -> Name +shortestUniqueSuffix :: forall r. (Ord r) => Name -> r -> R.Relation Name r -> Name shortestUniqueSuffix fqn r rel = fromMaybe fqn (List.find isOk (suffixes' fqn)) where diff --git a/unison-core/src/Unison/Names.hs b/unison-core/src/Unison/Names.hs index cb4182534..ebb41b0dc 100644 --- a/unison-core/src/Unison/Names.hs +++ b/unison-core/src/Unison/Names.hs @@ -29,6 +29,7 @@ module Unison.Names prefix0, restrictReferences, refTermsNamed, + refTermsHQNamed, termReferences, termReferents, typeReferences, @@ -71,6 +72,7 @@ import qualified Unison.ShortHash as SH import Unison.Util.Relation (Relation) import qualified Unison.Util.Relation as R import qualified Unison.Util.Relation as Relation +import qualified Unison.Util.Set as Set (mapMaybe) import Prelude hiding (filter, map) import qualified Prelude @@ -249,9 +251,23 @@ numHashChars = 3 termsNamed :: Names -> Name -> Set Referent termsNamed = flip R.lookupDom . terms +-- | Get all terms with a specific name. refTermsNamed :: Names -> Name -> Set TermReference refTermsNamed names n = - Set.fromList [r | Referent.Ref r <- toList $ termsNamed names n] + Set.mapMaybe Referent.toTermReference (termsNamed names n) + +-- | Get all terms with a specific hash-qualified name. +refTermsHQNamed :: Names -> HQ.HashQualified Name -> Set TermReference +refTermsHQNamed names = \case + HQ.NameOnly name -> refTermsNamed names name + HQ.HashOnly _hash -> Set.empty + HQ.HashQualified name hash -> + let f :: Referent -> Maybe TermReference + f ref0 = do + ref <- Referent.toTermReference ref0 + guard (Reference.isPrefixOf hash ref) + Just ref + in Set.mapMaybe f (termsNamed names name) typesNamed :: Names -> Name -> Set TypeReference typesNamed = flip R.lookupDom . 
types @@ -479,7 +495,7 @@ hashQualifyTermsRelation = hashQualifyRelation HQ.fromNamedReferent hashQualifyTypesRelation :: R.Relation Name TypeReference -> R.Relation (HQ.HashQualified Name) TypeReference hashQualifyTypesRelation = hashQualifyRelation HQ.fromNamedReference -hashQualifyRelation :: Ord r => (Name -> r -> HQ.HashQualified Name) -> R.Relation Name r -> R.Relation (HQ.HashQualified Name) r +hashQualifyRelation :: (Ord r) => (Name -> r -> HQ.HashQualified Name) -> R.Relation Name r -> R.Relation (HQ.HashQualified Name) r hashQualifyRelation fromNamedRef rel = R.map go rel where go (n, r) = diff --git a/unison-core/src/Unison/NamesWithHistory.hs b/unison-core/src/Unison/NamesWithHistory.hs index 86fb1fb59..96749b028 100644 --- a/unison-core/src/Unison/NamesWithHistory.hs +++ b/unison-core/src/Unison/NamesWithHistory.hs @@ -183,7 +183,7 @@ lookupHQTerm' = -- See 'lookupHQTerm', 'lookupHQType' for monomorphic versions. lookupHQRef :: forall r. - Ord r => + (Ord r) => -- | A projection of types or terms from a Names. (Names -> Relation Name r) -> -- | isPrefixOf, for references or referents diff --git a/unison-core/src/Unison/Pattern.hs b/unison-core/src/Unison/Pattern.hs index c2b597521..d0446434e 100644 --- a/unison-core/src/Unison/Pattern.hs +++ b/unison-core/src/Unison/Pattern.hs @@ -5,7 +5,6 @@ module Unison.Pattern where -import qualified Data.Foldable as Foldable hiding (foldMap') import Data.List (intercalate) import qualified Data.Map as Map import qualified Data.Set as Set @@ -90,7 +89,21 @@ application (Constructor _ _ (_ : _)) = True application _ = False loc :: Pattern loc -> loc -loc p = head $ Foldable.toList p +loc = \case + Unbound loc -> loc + Var loc -> loc + Boolean loc _ -> loc + Int loc _ -> loc + Nat loc _ -> loc + Float loc _ -> loc + Text loc _ -> loc + Char loc _ -> loc + Constructor loc _ _ -> loc + As loc _ -> loc + EffectPure loc _ -> loc + EffectBind loc _ _ _ -> loc + SequenceLiteral loc _ -> loc + SequenceOp loc _ _ _ -> loc setLoc :: Pattern loc -> loc -> Pattern loc setLoc p loc = case p of @@ -119,7 +132,7 @@ instance Eq (Pattern loc) where SequenceOp _ ph op pt == SequenceOp _ ph2 op2 pt2 = ph == ph2 && op == op2 && pt == pt2 _ == _ = False -foldMap' :: Monoid m => (Pattern loc -> m) -> Pattern loc -> m +foldMap' :: (Monoid m) => (Pattern loc -> m) -> Pattern loc -> m foldMap' f p = case p of Unbound _ -> f p Var _ -> f p @@ -137,7 +150,7 @@ foldMap' f p = case p of SequenceOp _ p1 _ p2 -> f p <> foldMap' f p1 <> foldMap' f p2 generalizedDependencies :: - Ord r => + (Ord r) => (Reference -> r) -> (Reference -> ConstructorId -> r) -> (Reference -> r) -> diff --git a/unison-core/src/Unison/Reference.hs b/unison-core/src/Unison/Reference.hs index 06a799bb7..4a616a359 100644 --- a/unison-core/src/Unison/Reference.hs +++ b/unison-core/src/Unison/Reference.hs @@ -1,8 +1,4 @@ {-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE ViewPatterns #-} module Unison.Reference ( Reference, @@ -110,8 +106,8 @@ idToShortHash = toShortHash . 
DerivedId -- but Show Reference currently depends on SH toShortHash :: Reference -> ShortHash toShortHash (Builtin b) = SH.Builtin b -toShortHash (Derived h 0) = SH.ShortHash (H.base32Hex h) Nothing Nothing -toShortHash (Derived h i) = SH.ShortHash (H.base32Hex h) (Just $ showSuffix i) Nothing +toShortHash (Derived h 0) = SH.ShortHash (H.toBase32HexText h) Nothing Nothing +toShortHash (Derived h i) = SH.ShortHash (H.toBase32HexText h) (Just $ showSuffix i) Nothing -- toShortHash . fromJust . fromShortHash == id and -- fromJust . fromShortHash . toShortHash == id @@ -122,7 +118,7 @@ toShortHash (Derived h i) = SH.ShortHash (H.base32Hex h) (Just $ showSuffix i) N fromShortHash :: ShortHash -> Maybe Reference fromShortHash (SH.Builtin b) = Just (Builtin b) fromShortHash (SH.ShortHash prefix cycle Nothing) = do - h <- H.fromBase32Hex prefix + h <- H.fromBase32HexText prefix case cycle of Nothing -> Just (Derived h 0) Just i -> Derived h <$> readMay (Text.unpack i) @@ -164,11 +160,10 @@ componentFor h as = [(Id h i, a) | (i, a) <- zip [0 ..] as] componentFromLength :: H.Hash -> CycleSize -> Set Id componentFromLength h size = Set.fromList [Id h i | i <- [0 .. size - 1]] -derivedBase32Hex :: Text -> Pos -> Reference -derivedBase32Hex b32Hex i = DerivedId (Id (fromMaybe msg h) i) +derivedBase32Hex :: Text -> Pos -> Maybe Reference +derivedBase32Hex b32Hex i = mayH <&> \h -> DerivedId (Id h i) where - msg = error $ "Reference.derivedBase32Hex " <> show h - h = H.fromBase32Hex b32Hex + mayH = H.fromBase32HexText b32Hex unsafeFromText :: Text -> Reference unsafeFromText = either error id . fromText @@ -198,18 +193,27 @@ toHash r = idToHash <$> toId r -- Right ##Text.take -- -- derived, no cycle --- >>> fromText "#2tWjVAuc7" --- Reference.derivedBase32Hex Nothing +-- >>> fromText "#dqp2oi4iderlrgp2h11sgkff6drk92omo4c84dncfhg9o0jn21cli4lhga72vlchmrb2jk0b3bdc2gie1l06sqdli8ego4q0akm3au8" +-- Right #dqp2o -- -- derived, part of cycle --- >>> fromText "#y9ycWkiC1.12345" --- Reference.derivedBase32Hex Nothing +-- >>> fromText "#dqp2oi4iderlrgp2h11sgkff6drk92omo4c84dncfhg9o0jn21cli4lhga72vlchmrb2jk0b3bdc2gie1l06sqdli8ego4q0akm3au8.12345" +-- Right #dqp2o.12345 +-- +-- Errors with 'Left' on invalid hashes +-- >>> fromText "#invalid_hash.12345" +-- Left "Invalid hash: \"invalid_hash\"" fromText :: Text -> Either String Reference fromText t = case Text.split (== '#') t of [_, "", b] -> Right (Builtin b) [_, h] -> case Text.split (== '.') h of - [hash] -> Right (derivedBase32Hex hash 0) - [hash, suffix] -> derivedBase32Hex hash <$> readSuffix suffix + [hash] -> + case derivedBase32Hex hash 0 of + Nothing -> Left $ "Invalid hash: " <> show hash + Just r -> Right r + [hash, suffix] -> do + pos <- readSuffix suffix + maybe (Left $ "Invalid hash: " <> show hash) Right (derivedBase32Hex hash pos) _ -> bail _ -> bail where diff --git a/unison-core/src/Unison/Term.hs b/unison-core/src/Unison/Term.hs index 28a79e92a..cc359cd50 100644 --- a/unison-core/src/Unison/Term.hs +++ b/unison-core/src/Unison/Term.hs @@ -46,7 +46,7 @@ data MatchCase loc a = MatchCase matchGuard :: Maybe a, matchBody :: a } - deriving (Show, Eq, Foldable, Functor, Generic, Generic1, Traversable) + deriving (Show, Eq, Ord, Foldable, Functor, Generic, Generic1, Traversable) matchPattern_ :: Lens' (MatchCase loc a) (Pattern loc) matchPattern_ = lens matchPattern setter @@ -93,7 +93,7 @@ data F typeVar typeAnn patternAnn a Match a [MatchCase patternAnn a] | TermLink Referent | TypeLink Reference - deriving (Foldable, Functor, Generic, Generic1, 
Traversable) + deriving (Ord, Foldable, Functor, Generic, Generic1, Traversable) _Ref :: Prism' (F tv ta pa a) Reference _Ref = _Ctor @"Ref" @@ -916,7 +916,7 @@ letRec' isTop bindings body = -- => -- let rec x = 42; y = "hi" in (x,y) consLetRec :: - (Ord v) => + (Ord v, Semigroup a) => Bool -> -- isTop parameter a -> -- annotation for overall let rec (a, v, Term' vt v a) -> -- the binding @@ -927,19 +927,24 @@ consLetRec isTop a (ab, vb, b) body = case body of _ -> letRec isTop a [((ab, vb), b)] body letRec :: + forall v vt a. (Ord v) => Bool -> + -- Annotation spanning the full let rec a -> [((a, v), Term' vt v a)] -> Term' vt v a -> Term' vt v a letRec _ _ [] e = e -letRec isTop a bindings e = +letRec isTop blockAnn bindings e = ABT.cycle' - a - (foldr (uncurry ABT.abs' . fst) z bindings) + blockAnn + (foldr addAbs body bindings) where - z = ABT.tm' a (LetRec isTop (map snd bindings) e) + addAbs :: ((a, v), b) -> ABT.Term f v a -> ABT.Term f v a + addAbs ((_a, v), _b) t = ABT.abs' blockAnn v t + body :: Term' vt v a + body = ABT.tm' blockAnn (LetRec isTop (map snd bindings) e) -- | Smart constructor for let rec blocks. Each binding in the block may -- reference any other binding in the block in its body (including itself), @@ -961,14 +966,14 @@ let1_ isTop bindings e = foldr f e bindings -- | annotations are applied to each nested Let expression let1 :: - (Ord v) => + (Ord v, Semigroup a) => IsTop -> [((a, v), Term2 vt at ap v a)] -> Term2 vt at ap v a -> Term2 vt at ap v a let1 isTop bindings e = foldr f e bindings where - f ((ann, v), b) body = ABT.tm' ann (Let isTop b (ABT.abs' ann v body)) + f ((ann, v), b) body = ABT.tm' (ann <> ABT.annotation body) (Let isTop b (ABT.abs' (ABT.annotation body) v body)) let1' :: (Semigroup a, Ord v) => @@ -979,10 +984,21 @@ let1' :: let1' isTop bindings e = foldr f e bindings where ann = ABT.annotation - f (v, b) body = ABT.tm' a (Let isTop b (ABT.abs' a v body)) + f (v, b) body = ABT.tm' (a <> ABT.annotation body) (Let isTop b (ABT.abs' (ABT.annotation body) v body)) where a = ann b <> ann body +-- | Like 'let1', but for a single binding, avoiding the Semigroup constraint. 
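+-- For example, `singleLet isTop a (v, rhs) e` builds the equivalent of
+-- `let v = rhs in e`, reusing the single annotation `a` for both the binding
+-- and the abstraction rather than combining annotations with `(<>)`.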
+singleLet :: + (Ord v) => + IsTop -> + -- Annotation spanning the whole let-binding + a -> + (v, Term2 vt at ap v a) -> + Term2 vt at ap v a -> + Term2 vt at ap v a +singleLet isTop a (v, body) e = ABT.tm' a (Let isTop body (ABT.abs' a v e)) + -- let1' :: Var v => [(Text, Term0 vt v)] -> Term0 vt v -> Term0 vt v -- let1' bs e = let1 [(ABT.v' name, b) | (name,b) <- bs ] e diff --git a/unison-core/src/Unison/Type.hs b/unison-core/src/Unison/Type.hs index 18f4fe4c9..81801b723 100644 --- a/unison-core/src/Unison/Type.hs +++ b/unison-core/src/Unison/Type.hs @@ -11,6 +11,7 @@ import Data.Monoid (Any (..)) import qualified Data.Set as Set import qualified Unison.ABT as ABT import qualified Unison.Kind as K +import qualified Unison.LabeledDependency as LD import qualified Unison.Name as Name import qualified Unison.Names.ResolutionResult as Names import Unison.Prelude @@ -41,18 +42,18 @@ _Ref = _Ctor @"Ref" -- | Types are represented as ABTs over the base functor F, with variables in `v` type Type v a = ABT.Term F v a -wrapV :: Ord v => Type v a -> Type (ABT.V v) a +wrapV :: (Ord v) => Type v a -> Type (ABT.V v) a wrapV = ABT.vmap ABT.Bound freeVars :: Type v a -> Set v freeVars = ABT.freeVars bindExternal :: - ABT.Var v => [(v, Reference)] -> Type v a -> Type v a + (ABT.Var v) => [(v, Reference)] -> Type v a -> Type v a bindExternal bs = ABT.substsInheritAnnotation [(v, ref () r) | (v, r) <- bs] bindReferences :: - Var v => + (Var v) => (v -> Name.Name) -> Set v -> Map Name.Name Reference -> @@ -71,7 +72,7 @@ instance (Show v) => Show (Monotype v a) where show = show . getPolytype -- Smart constructor which checks if a `Type` has no `Forall` quantifiers. -monotype :: ABT.Var v => Type v a -> Maybe (Monotype v a) +monotype :: (ABT.Var v) => Type v a -> Maybe (Monotype v a) monotype t = Monotype <$> ABT.visit isMono t where isMono (Forall' _) = Just Nothing @@ -90,7 +91,7 @@ pattern Ref' r <- ABT.Tm' (Ref r) pattern Arrow' :: ABT.Term F v a -> ABT.Term F v a -> ABT.Term F v a pattern Arrow' i o <- ABT.Tm' (Arrow i o) -pattern Arrow'' :: Ord v => ABT.Term F v a -> [Type v a] -> Type v a -> ABT.Term F v a +pattern Arrow'' :: (Ord v) => ABT.Term F v a -> [Type v a] -> Type v a -> ABT.Term F v a pattern Arrow'' i es o <- Arrow' i (Effect'' es o) pattern Arrows' :: [Type v a] -> Type v a @@ -108,7 +109,7 @@ pattern App' f x <- ABT.Tm' (App f x) pattern Apps' :: Type v a -> [Type v a] -> Type v a pattern Apps' f args <- (unApps -> Just (f, args)) -pattern Pure' :: Ord v => Type v a -> Type v a +pattern Pure' :: (Ord v) => Type v a -> Type v a pattern Pure' t <- (unPure -> Just t) pattern Effects' :: [ABT.Term F v a] -> ABT.Term F v a @@ -118,20 +119,20 @@ pattern Effects' es <- ABT.Tm' (Effects es) pattern Effect1' :: ABT.Term F v a -> ABT.Term F v a -> ABT.Term F v a pattern Effect1' e t <- ABT.Tm' (Effect e t) -pattern Effect' :: Ord v => [Type v a] -> Type v a -> Type v a +pattern Effect' :: (Ord v) => [Type v a] -> Type v a -> Type v a pattern Effect' es t <- (unEffects1 -> Just (es, t)) -pattern Effect'' :: Ord v => [Type v a] -> Type v a -> Type v a +pattern Effect'' :: (Ord v) => [Type v a] -> Type v a -> Type v a pattern Effect'' es t <- (unEffect0 -> (es, t)) -- Effect0' may match zero effects -pattern Effect0' :: Ord v => [Type v a] -> Type v a -> Type v a +pattern Effect0' :: (Ord v) => [Type v a] -> Type v a -> Type v a pattern Effect0' es t <- (unEffect0 -> (es, t)) -pattern Forall' :: ABT.Var v => ABT.Subst F v a -> ABT.Term F v a +pattern Forall' :: (ABT.Var v) => ABT.Subst F v a -> 
ABT.Term F v a pattern Forall' subst <- ABT.Tm' (Forall (ABT.Abs' subst)) -pattern IntroOuter' :: ABT.Var v => ABT.Subst F v a -> ABT.Term F v a +pattern IntroOuter' :: (ABT.Var v) => ABT.Subst F v a -> ABT.Term F v a pattern IntroOuter' subst <- ABT.Tm' (IntroOuter (ABT.Abs' subst)) pattern IntroOuterNamed' :: v -> ABT.Term F v a -> ABT.Term F v a @@ -152,7 +153,7 @@ pattern Cycle' xs t <- ABT.Cycle' xs t pattern Abs' :: (Foldable f, Functor f, ABT.Var v) => ABT.Subst f v a -> ABT.Term f v a pattern Abs' subst <- ABT.Abs' subst -unPure :: Ord v => Type v a -> Maybe (Type v a) +unPure :: (Ord v) => Type v a -> Maybe (Type v a) unPure (Effect'' [] t) = Just t unPure (Effect'' _ _) = Nothing unPure t = Just t @@ -207,35 +208,35 @@ unForalls t = go t [] go _body [] = Nothing go body vs = Just (reverse vs, body) -unEffect0 :: Ord v => Type v a -> ([Type v a], Type v a) +unEffect0 :: (Ord v) => Type v a -> ([Type v a], Type v a) unEffect0 (Effect1' e a) = (flattenEffects e, a) unEffect0 t = ([], t) -unEffects1 :: Ord v => Type v a -> Maybe ([Type v a], Type v a) +unEffects1 :: (Ord v) => Type v a -> Maybe ([Type v a], Type v a) unEffects1 (Effect1' (Effects' es) a) = Just (es, a) unEffects1 _ = Nothing -- | True if the given type is a function, possibly quantified -isArrow :: ABT.Var v => Type v a -> Bool +isArrow :: (ABT.Var v) => Type v a -> Bool isArrow (ForallNamed' _ t) = isArrow t isArrow (Arrow' _ _) = True isArrow _ = False -- some smart constructors -ref :: Ord v => a -> Reference -> Type v a +ref :: (Ord v) => a -> Reference -> Type v a ref a = ABT.tm' a . Ref -refId :: Ord v => a -> Reference.Id -> Type v a +refId :: (Ord v) => a -> Reference.Id -> Type v a refId a = ref a . Reference.DerivedId -termLink :: Ord v => a -> Type v a +termLink :: (Ord v) => a -> Type v a termLink a = ABT.tm' a . Ref $ termLinkRef -typeLink :: Ord v => a -> Type v a +typeLink :: (Ord v) => a -> Type v a typeLink a = ABT.tm' a . Ref $ typeLinkRef -derivedBase32Hex :: Ord v => Reference -> a -> Type v a +derivedBase32Hex :: (Ord v) => Reference -> a -> Type v a derivedBase32Hex r a = ref a r intRef, natRef, floatRef, booleanRef, textRef, charRef, listRef, bytesRef, effectRef, termLinkRef, typeLinkRef :: Reference @@ -258,6 +259,9 @@ filePathRef = Reference.Builtin "FilePath" threadIdRef = Reference.Builtin "ThreadId" socketRef = Reference.Builtin "Socket" +processHandleRef :: Reference +processHandleRef = Reference.Builtin "ProcessHandle" + scopeRef, refRef :: Reference scopeRef = Reference.Builtin "Scope" refRef = Reference.Builtin "Ref" @@ -274,6 +278,12 @@ mvarRef, tvarRef :: Reference mvarRef = Reference.Builtin "MVar" tvarRef = Reference.Builtin "TVar" +ticketRef :: Reference +ticketRef = Reference.Builtin "Ref.Ticket" + +promiseRef :: Reference +promiseRef = Reference.Builtin "Promise" + tlsRef :: Reference tlsRef = Reference.Builtin "Tls" @@ -283,6 +293,9 @@ stmRef = Reference.Builtin "STM" patternRef :: Reference patternRef = Reference.Builtin "Pattern" +charClassRef :: Reference +charClassRef = Reference.Builtin "Char.Class" + tlsClientConfigRef :: Reference tlsClientConfigRef = Reference.Builtin "Tls.ClientConfig" @@ -314,73 +327,76 @@ anyRef = Reference.Builtin "Any" timeSpecRef :: Reference timeSpecRef = Reference.Builtin "TimeSpec" -any :: Ord v => a -> Type v a +any :: (Ord v) => a -> Type v a any a = ref a anyRef -builtin :: Ord v => a -> Text -> Type v a +builtin :: (Ord v) => a -> Text -> Type v a builtin a = ref a . 
Reference.Builtin -int :: Ord v => a -> Type v a +int :: (Ord v) => a -> Type v a int a = ref a intRef -nat :: Ord v => a -> Type v a +nat :: (Ord v) => a -> Type v a nat a = ref a natRef -float :: Ord v => a -> Type v a +float :: (Ord v) => a -> Type v a float a = ref a floatRef -boolean :: Ord v => a -> Type v a +boolean :: (Ord v) => a -> Type v a boolean a = ref a booleanRef -text :: Ord v => a -> Type v a +text :: (Ord v) => a -> Type v a text a = ref a textRef -char :: Ord v => a -> Type v a +char :: (Ord v) => a -> Type v a char a = ref a charRef -fileHandle :: Ord v => a -> Type v a +fileHandle :: (Ord v) => a -> Type v a fileHandle a = ref a fileHandleRef -threadId :: Ord v => a -> Type v a +processHandle :: (Ord v) => a -> Type v a +processHandle a = ref a processHandleRef + +threadId :: (Ord v) => a -> Type v a threadId a = ref a threadIdRef -builtinIO :: Ord v => a -> Type v a +builtinIO :: (Ord v) => a -> Type v a builtinIO a = ref a builtinIORef -scopeType :: Ord v => a -> Type v a +scopeType :: (Ord v) => a -> Type v a scopeType a = ref a scopeRef -refType :: Ord v => a -> Type v a +refType :: (Ord v) => a -> Type v a refType a = ref a refRef -iarrayType, marrayType, ibytearrayType, mbytearrayType :: Ord v => a -> Type v a +iarrayType, marrayType, ibytearrayType, mbytearrayType :: (Ord v) => a -> Type v a iarrayType a = ref a iarrayRef marrayType a = ref a marrayRef ibytearrayType a = ref a ibytearrayRef mbytearrayType a = ref a mbytearrayRef -socket :: Ord v => a -> Type v a +socket :: (Ord v) => a -> Type v a socket a = ref a socketRef -list :: Ord v => a -> Type v a +list :: (Ord v) => a -> Type v a list a = ref a listRef -bytes :: Ord v => a -> Type v a +bytes :: (Ord v) => a -> Type v a bytes a = ref a bytesRef -effectType :: Ord v => a -> Type v a +effectType :: (Ord v) => a -> Type v a effectType a = ref a $ effectRef -code, value :: Ord v => a -> Type v a +code, value :: (Ord v) => a -> Type v a code a = ref a codeRef value a = ref a valueRef -app :: Ord v => a -> Type v a -> Type v a -> Type v a +app :: (Ord v) => a -> Type v a -> Type v a -> Type v a app a f arg = ABT.tm' a (App f arg) -- `f x y z` means `((f x) y) z` and the annotation paired with `y` is the one -- meant for `app (f x) y` -apps :: Ord v => Type v a -> [(a, Type v a)] -> Type v a +apps :: (Ord v) => Type v a -> [(a, Type v a)] -> Type v a apps = foldl' go where go f (a, t) = app a f t app' :: (Ord v, Semigroup a) => Type v a -> Type v a -> Type v a @@ -389,87 +405,87 @@ app' f arg = app (ABT.annotation f <> ABT.annotation arg) f arg apps' :: (Semigroup a, Ord v) => Type v a -> [Type v a] -> Type v a apps' = foldl app' -arrow :: Ord v => a -> Type v a -> Type v a -> Type v a +arrow :: (Ord v) => a -> Type v a -> Type v a -> Type v a arrow a i o = ABT.tm' a (Arrow i o) arrow' :: (Semigroup a, Ord v) => Type v a -> Type v a -> Type v a arrow' i o = arrow (ABT.annotation i <> ABT.annotation o) i o -ann :: Ord v => a -> Type v a -> K.Kind -> Type v a +ann :: (Ord v) => a -> Type v a -> K.Kind -> Type v a ann a e t = ABT.tm' a (Ann e t) -forall :: Ord v => a -> v -> Type v a -> Type v a +forall :: (Ord v) => a -> v -> Type v a -> Type v a forall a v body = ABT.tm' a (Forall (ABT.abs' a v body)) -introOuter :: Ord v => a -> v -> Type v a -> Type v a +introOuter :: (Ord v) => a -> v -> Type v a -> Type v a introOuter a v body = ABT.tm' a (IntroOuter (ABT.abs' a v body)) -iff :: Var v => Type v () +iff :: (Var v) => Type v () iff = forall () aa $ arrows (f <$> [boolean (), a, a]) a where aa = Var.named "a" 
a = var () aa f x = ((), x) -iff' :: Var v => a -> Type v a +iff' :: (Var v) => a -> Type v a iff' loc = forall loc aa $ arrows (f <$> [boolean loc, a, a]) a where aa = Var.named "a" a = var loc aa f x = (loc, x) -iff2 :: Var v => a -> Type v a +iff2 :: (Var v) => a -> Type v a iff2 loc = forall loc aa $ arrows (f <$> [a, a]) a where aa = Var.named "a" a = var loc aa f x = (loc, x) -andor :: Ord v => Type v () +andor :: (Ord v) => Type v () andor = arrows (f <$> [boolean (), boolean ()]) $ boolean () where f x = ((), x) -andor' :: Ord v => a -> Type v a +andor' :: (Ord v) => a -> Type v a andor' a = arrows (f <$> [boolean a, boolean a]) $ boolean a where f x = (a, x) -var :: Ord v => a -> v -> Type v a +var :: (Ord v) => a -> v -> Type v a var = ABT.annotatedVar -v' :: Var v => Text -> Type v () +v' :: (Var v) => Text -> Type v () v' s = ABT.var (Var.named s) -- Like `v'`, but creates an annotated variable given an annotation -av' :: Var v => a -> Text -> Type v a +av' :: (Var v) => a -> Text -> Type v a av' a s = ABT.annotatedVar a (Var.named s) -forall' :: Var v => a -> [Text] -> Type v a -> Type v a +forall' :: (Var v) => a -> [Text] -> Type v a -> Type v a forall' a vs body = foldr (forall a) body (Var.named <$> vs) -foralls :: Ord v => a -> [v] -> Type v a -> Type v a +foralls :: (Ord v) => a -> [v] -> Type v a -> Type v a foralls a vs body = foldr (forall a) body vs -- Note: `a -> b -> c` parses as `a -> (b -> c)` -- the annotation associated with `b` will be the annotation for the `b -> c` -- node -arrows :: Ord v => [(a, Type v a)] -> Type v a -> Type v a +arrows :: (Ord v) => [(a, Type v a)] -> Type v a -> Type v a arrows ts result = foldr go result ts where go = uncurry arrow -- The types of effectful computations -effect :: Ord v => a -> [Type v a] -> Type v a -> Type v a +effect :: (Ord v) => a -> [Type v a] -> Type v a -> Type v a effect a es (Effect1' fs t) = let es' = (es >>= flattenEffects) ++ flattenEffects fs in ABT.tm' a (Effect (ABT.tm' a (Effects es')) t) effect a es t = ABT.tm' a (Effect (ABT.tm' a (Effects es)) t) -effects :: Ord v => a -> [Type v a] -> Type v a +effects :: (Ord v) => a -> [Type v a] -> Type v a effects a es = ABT.tm' a (Effects $ es >>= flattenEffects) -effect1 :: Ord v => a -> Type v a -> Type v a -> Type v a +effect1 :: (Ord v) => a -> Type v a -> Type v a -> Type v a effect1 a es (Effect1' fs t) = let es' = flattenEffects es ++ flattenEffects fs in ABT.tm' a (Effect (ABT.tm' a (Effects es')) t) @@ -481,28 +497,28 @@ flattenEffects es = [es] -- The types of first-class effect values -- which get deconstructed in effect handlers. -effectV :: Ord v => a -> (a, Type v a) -> (a, Type v a) -> Type v a +effectV :: (Ord v) => a -> (a, Type v a) -> (a, Type v a) -> Type v a effectV builtinA e t = apps (builtin builtinA "Effect") [e, t] -- Strips effects from a type. E.g. `{e} a` becomes `a`. 
-stripEffect :: Ord v => Type v a -> ([Type v a], Type v a) +stripEffect :: (Ord v) => Type v a -> ([Type v a], Type v a) stripEffect (Effect' e t) = case stripEffect t of (ei, t) -> (e ++ ei, t) stripEffect t = ([], t) -- The type of the flipped function application operator: -- `(a -> (a -> b) -> b)` -flipApply :: Var v => Type v () -> Type v () +flipApply :: (Var v) => Type v () -> Type v () flipApply t = forall () b $ arrow () (arrow () t (var () b)) (var () b) where b = ABT.fresh t (Var.named "b") -generalize' :: Var v => Var.Type -> Type v a -> Type v a +generalize' :: (Var v) => Var.Type -> Type v a -> Type v a generalize' k t = generalize vsk t where vsk = [v | v <- Set.toList (freeVars t), Var.typeOf v == k] -- | Bind the given variables with an outer `forall`, if they are used in `t`. -generalize :: Ord v => [v] -> Type v a -> Type v a +generalize :: (Ord v) => [v] -> Type v a -> Type v a generalize vs t = foldr f t vs where f v t = @@ -516,19 +532,22 @@ unforall' :: Type v a -> ([v], Type v a) unforall' (ForallsNamed' vs t) = (vs, t) unforall' t = ([], t) -dependencies :: Ord v => Type v a -> Set Reference +dependencies :: (Ord v) => Type v a -> Set Reference dependencies t = Set.fromList . Writer.execWriter $ ABT.visit' f t where f t@(Ref r) = Writer.tell [r] $> t f t = pure t -updateDependencies :: Ord v => Map Reference Reference -> Type v a -> Type v a +labeledDependencies :: (Ord v) => Type v a -> Set LD.LabeledDependency +labeledDependencies = Set.map LD.TypeReference . dependencies + +updateDependencies :: (Ord v) => Map Reference Reference -> Type v a -> Type v a updateDependencies typeUpdates = ABT.rebuildUp go where go (Ref r) = Ref (Map.findWithDefault r r typeUpdates) go f = f -usesEffects :: Ord v => Type v a -> Bool +usesEffects :: (Ord v) => Type v a -> Bool usesEffects t = getAny . getConst $ ABT.visit go t where go (Effect1' _ _) = Just (Const (Any True)) @@ -540,7 +559,7 @@ usesEffects t = getAny . getConst $ ABT.visit go t -- -- This function would return the set {e, e2}, but not `e3` since `e3` -- is bound by the enclosing forall. -freeEffectVars :: Ord v => Type v a -> Set v +freeEffectVars :: (Ord v) => Type v a -> Set v freeEffectVars t = Set.fromList . join . runIdentity $ ABT.foreachSubterm go (snd <$> ABT.annotateBound t) @@ -593,7 +612,7 @@ purifyArrows = ABT.visitPure go go _ = Nothing -- Remove free effect variables from the type that are in the set -removeEffectVars :: ABT.Var v => Set v -> Type v a -> Type v a +removeEffectVars :: (ABT.Var v) => Set v -> Type v a -> Type v a removeEffectVars removals t = let z = effects () [] t' = ABT.substsInheritAnnotation ((,z) <$> Set.toList removals) t @@ -612,7 +631,7 @@ removeEffectVars removals t = -- Used for type-based search, we apply this transformation to both the -- indexed type and the query type, so the user can supply `a -> b` that will -- match `a ->{e} b` (but not `a ->{IO} b`). -removeAllEffectVars :: ABT.Var v => Type v a -> Type v a +removeAllEffectVars :: (ABT.Var v) => Type v a -> Type v a removeAllEffectVars t = let allEffectVars = foldMap go (ABT.subterms t) go (Effects' vs) = Set.fromList [v | Var' v <- vs] @@ -621,7 +640,7 @@ removeAllEffectVars t = (vs, tu) = unforall' t in generalize vs (removeEffectVars allEffectVars tu) -removePureEffects :: ABT.Var v => Type v a -> Type v a +removePureEffects :: (ABT.Var v) => Type v a -> Type v a removePureEffects t | not Settings.removePureEffects = t | otherwise = @@ -651,7 +670,7 @@ removePureEffects t editFunctionResult :: forall v a. 
- Ord v => + (Ord v) => (Type v a -> Type v a) -> Type v a -> Type v a @@ -680,14 +699,14 @@ functionResult = go False -- `B -> B` becomes `B -> B` (not changed) -- `.foo -> .foo` becomes `.foo -> .foo` (not changed) -- `.foo.bar -> blarrg.woot` becomes `.foo.bar -> blarrg.woot` (unchanged) -generalizeLowercase :: Var v => Set v -> Type v a -> Type v a +generalizeLowercase :: (Var v) => Set v -> Type v a -> Type v a generalizeLowercase except t = foldr (forall (ABT.annotation t)) t vars where vars = [v | v <- Set.toList (ABT.freeVars t `Set.difference` except), Var.universallyQuantifyIfFree v] -- Convert all free variables in `allowed` to variables bound by an `introOuter`. -freeVarsToOuters :: Ord v => Set v -> Type v a -> Type v a +freeVarsToOuters :: (Ord v) => Set v -> Type v a -> Type v a freeVarsToOuters allowed t = foldr (introOuter (ABT.annotation t)) t vars where vars = Set.toList $ ABT.freeVars t `Set.intersection` allowed @@ -695,7 +714,7 @@ freeVarsToOuters allowed t = foldr (introOuter (ABT.annotation t)) t vars -- | This function removes all variable shadowing from the types and reduces -- fresh ids to the minimum possible to avoid ambiguity. Useful when showing -- two different types. -cleanupVars :: Var v => [Type v a] -> [Type v a] +cleanupVars :: (Var v) => [Type v a] -> [Type v a] cleanupVars ts | not Settings.cleanupTypes = ts cleanupVars ts = let changedVars = cleanupVarsMap ts @@ -704,7 +723,7 @@ cleanupVars ts = -- Compute a variable replacement map from a collection of types, which -- can be passed to `cleanupVars1'`. This is used to cleanup variable ids -- for multiple related types, like when reporting a type error. -cleanupVarsMap :: Var v => [Type v a] -> Map.Map v v +cleanupVarsMap :: (Var v) => [Type v a] -> Map.Map v v cleanupVarsMap ts = let varsByName = foldl' step Map.empty (ts >>= ABT.allVars) step m v = Map.insertWith (++) (Var.name $ Var.reset v) [v] m @@ -716,12 +735,12 @@ cleanupVarsMap ts = ] in changedVars -cleanupVars1' :: Var v => Map.Map v v -> Type v a -> Type v a +cleanupVars1' :: (Var v) => Map.Map v v -> Type v a -> Type v a cleanupVars1' = ABT.changeVars -- | This function removes all variable shadowing from the type and reduces -- fresh ids to the minimum possible to avoid ambiguity. -cleanupVars1 :: Var v => Type v a -> Type v a +cleanupVars1 :: (Var v) => Type v a -> Type v a cleanupVars1 t | not Settings.cleanupTypes = t cleanupVars1 t = case cleanupVars [t] of @@ -729,7 +748,7 @@ cleanupVars1 t = _ -> error "cleanupVars1: expected exactly one result" -- This removes duplicates and normalizes the order of ability lists -cleanupAbilityLists :: Var v => Type v a -> Type v a +cleanupAbilityLists :: (Var v) => Type v a -> Type v a cleanupAbilityLists = ABT.visitPure go where -- leave explicitly empty `{}` alone @@ -741,17 +760,17 @@ cleanupAbilityLists = ABT.visitPure go _ -> Just (effect (ABT.annotation t) es $ ABT.visitPure go v) go _ = Nothing -cleanups :: Var v => [Type v a] -> [Type v a] +cleanups :: (Var v) => [Type v a] -> [Type v a] cleanups ts = cleanupVars $ map cleanupAbilityLists ts -cleanup :: Var v => Type v a -> Type v a +cleanup :: (Var v) => Type v a -> Type v a cleanup t | not Settings.cleanupTypes = t cleanup t = cleanupVars1 . 
cleanupAbilityLists $ t builtinAbilities :: Set Reference builtinAbilities = Set.fromList [builtinIORef, stmRef] -instance Show a => Show (F a) where +instance (Show a) => Show (F a) where showsPrec = go where go _ (Ref r) = shows r @@ -766,7 +785,8 @@ instance Show a => Show (F a) where s "{" <> shows es <> s "}" go p (Effect e t) = showParen (p > 0) $ - showParen True $ shows e <> s " " <> showsPrec p t + showParen True $ + shows e <> s " " <> showsPrec p t go p (Forall body) = case p of 0 -> showsPrec p body _ -> showParen True $ s "∀ " <> shows body diff --git a/unison-core/src/Unison/Type/Names.hs b/unison-core/src/Unison/Type/Names.hs index cc5ef09a0..7949874b1 100644 --- a/unison-core/src/Unison/Type/Names.hs +++ b/unison-core/src/Unison/Type/Names.hs @@ -13,7 +13,7 @@ import qualified Unison.Util.List as List import Unison.Var (Var) bindNames :: - Var v => + (Var v) => (v -> Name.Name) -> Set v -> Names.Names -> diff --git a/unison-core/src/Unison/Util/Components.hs b/unison-core/src/Unison/Util/Components.hs index 88a79fb4c..927aea21d 100644 --- a/unison-core/src/Unison/Util/Components.hs +++ b/unison-core/src/Unison/Util/Components.hs @@ -31,7 +31,7 @@ import Unison.Prelude -- -- Uses Tarjan's algorithm: -- https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm -components :: Ord v => (t -> Set v) -> [(v, t)] -> [[(v, t)]] +components :: (Ord v) => (t -> Set v) -> [(v, t)] -> [[(v, t)]] components freeVars bs = let varIds = Map.fromList (map fst bs `zip` reverse [(1 :: Int) .. length bs]) diff --git a/unison-core/src/Unison/Util/List.hs b/unison-core/src/Unison/Util/List.hs index 018c2432f..acd565a20 100644 --- a/unison-core/src/Unison/Util/List.hs +++ b/unison-core/src/Unison/Util/List.hs @@ -6,7 +6,7 @@ import qualified Data.Map as Map import qualified Data.Set as Set import Unison.Prelude -multimap :: Foldable f => Ord k => f (k, v) -> Map k [v] +multimap :: (Foldable f) => (Ord k) => f (k, v) -> Map k [v] multimap kvs = -- preserve the order of the values from the original list reverse <$> foldl' step Map.empty kvs @@ -49,7 +49,7 @@ nubOrdOn = uniqueBy uniqueBy' :: (Foldable f, Ord b) => (a -> b) -> f a -> [a] uniqueBy' f = reverse . uniqueBy f . reverse . toList -safeHead :: Foldable f => f a -> Maybe a +safeHead :: (Foldable f) => f a -> Maybe a safeHead = headMay . toList validate :: (Semigroup e, Foldable f) => (a -> Either e b) -> f a -> Either e [b] @@ -72,7 +72,7 @@ intercalateMapWith sep f xs = result -- Take runs of consecutive occurrences of r within a list, -- and in each run, overwrite all but the first occurrence of r with w. -quenchRuns :: Eq a => a -> a -> [a] -> [a] +quenchRuns :: (Eq a) => a -> a -> [a] -> [a] quenchRuns r w = reverse . go False r w [] where go inRun r w acc = \case diff --git a/unison-core/src/Unison/Var.hs b/unison-core/src/Unison/Var.hs index 0ff8d3336..25cbf3757 100644 --- a/unison-core/src/Unison/Var.hs +++ b/unison-core/src/Unison/Var.hs @@ -53,10 +53,10 @@ class (Show v, ABT.Var v) => Var v where freshId :: v -> Word64 freshenId :: Word64 -> v -> v -freshIn :: ABT.Var v => Set v -> v -> v +freshIn :: (ABT.Var v) => Set v -> v -> v freshIn = ABT.freshIn -named :: Var v => Text -> v +named :: (Var v) => Text -> v named n = typed (User n) rawName :: Type -> Text @@ -82,26 +82,26 @@ rawName typ = case typ of UnnamedReference ref -> Reference.idToText ref UnnamedWatch k guid -> fromString k <> "." 
<> guid -name :: Var v => v -> Text +name :: (Var v) => v -> Text name v = rawName (typeOf v) <> showid v where showid (freshId -> 0) = "" showid (freshId -> n) = pack (show n) -- | Currently, actions in blocks are encoded as bindings --- with names of the form _123 (an underscore, followed by +-- with names of the form _123 (an underscore, followed by -- 1 or more digits). This function returns `True` if the -- input variable has this form. --- +-- -- Various places check for this (the pretty-printer, to -- determine how to print the binding), and the typechecker -- (to decide if it should ensure the binding has type `()`). -isAction :: Var v => v -> Bool +isAction :: (Var v) => v -> Bool isAction v = case Text.unpack (name v) of ('_' : rest) | Just _ <- (readMaybe rest :: Maybe Int) -> True _ -> False -uncapitalize :: Var v => v -> v +uncapitalize :: (Var v) => v -> v uncapitalize v = nameds $ go (nameStr v) where go (c : rest) = toLower c : rest @@ -119,7 +119,7 @@ missingResult, inferTypeConstructor, inferTypeConstructorArg, inferOther :: - Var v => v + (Var v) => v missingResult = typed MissingResult blank = typed Blank inferInput = typed (Inference Input) @@ -133,10 +133,10 @@ inferTypeConstructor = typed (Inference TypeConstructor) inferTypeConstructorArg = typed (Inference TypeConstructorArg) inferOther = typed (Inference Other) -unnamedRef :: Var v => Reference.Id -> v +unnamedRef :: (Var v) => Reference.Id -> v unnamedRef ref = typed (UnnamedReference ref) -unnamedTest :: Var v => Text -> v +unnamedTest :: (Var v) => Text -> v unnamedTest guid = typed (UnnamedWatch TestWatch guid) data Type @@ -183,33 +183,33 @@ data InferenceType | Other deriving (Eq, Ord, Show) -reset :: Var v => v -> v +reset :: (Var v) => v -> v reset v = typed (typeOf v) -unqualifiedName :: Var v => v -> Text +unqualifiedName :: (Var v) => v -> Text unqualifiedName = fromMaybe "" . lastMay . Name.segments' . name -unqualified :: Var v => v -> v +unqualified :: (Var v) => v -> v unqualified v = case typeOf v of User _ -> named . unqualifiedName $ v _ -> v -namespaced :: Var v => [v] -> v +namespaced :: (Var v) => [v] -> v namespaced vs = named $ intercalateMap "." name vs -nameStr :: Var v => v -> String +nameStr :: (Var v) => v -> String nameStr = Text.unpack . name -nameds :: Var v => String -> v +nameds :: (Var v) => String -> v nameds s = named (Text.pack s) -joinDot :: Var v => v -> v -> v +joinDot :: (Var v) => v -> v -> v joinDot prefix v2 = if name prefix == "." then named (name prefix `mappend` name v2) else named (name prefix `mappend` "." `mappend` name v2) -universallyQuantifyIfFree :: forall v. Var v => v -> Bool +universallyQuantifyIfFree :: forall v. 
(Var v) => v -> Bool universallyQuantifyIfFree v = ok (name $ reset v) && unqualified v == v where diff --git a/unison-core/src/Unison/WatchKind.hs b/unison-core/src/Unison/WatchKind.hs index 084cbcdce..3e52f45f1 100644 --- a/unison-core/src/Unison/WatchKind.hs +++ b/unison-core/src/Unison/WatchKind.hs @@ -1,7 +1,4 @@ -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ViewPatterns #-} module Unison.WatchKind where diff --git a/unison-core/unison-core1.cabal b/unison-core/unison-core1.cabal index 78692f0e4..48eac1036 100644 --- a/unison-core/unison-core1.cabal +++ b/unison-core/unison-core1.cabal @@ -31,7 +31,6 @@ library Unison.DataDeclaration Unison.DataDeclaration.ConstructorId Unison.DataDeclaration.Names - Unison.Hash Unison.Hashable Unison.HashQualified Unison.HashQualified' @@ -102,6 +101,7 @@ library , text , transformers , unison-core + , unison-hash , unison-prelude , unison-util-base32hex , unison-util-relation diff --git a/unison-hashing-v2/package.yaml b/unison-hashing-v2/package.yaml index 2b568bc16..de7e72bee 100644 --- a/unison-hashing-v2/package.yaml +++ b/unison-hashing-v2/package.yaml @@ -14,11 +14,15 @@ dependencies: - semialign - text - unison-core1 + - unison-hash + - unison-hashing - unison-prelude - unison-util-base32hex - unison-util-relation library: + exposed-modules: + Unison.Hashing.V2 source-dirs: src when: - condition: false @@ -38,6 +42,7 @@ default-extensions: - FlexibleContexts - FlexibleInstances - GeneralizedNewtypeDeriving + - InstanceSigs - LambdaCase - MultiParamTypeClasses - NamedFieldPuns diff --git a/unison-hashing-v2/src/Unison/Hashing/V2.hs b/unison-hashing-v2/src/Unison/Hashing/V2.hs new file mode 100644 index 000000000..e7515859b --- /dev/null +++ b/unison-hashing-v2/src/Unison/Hashing/V2.hs @@ -0,0 +1,55 @@ +-- | +-- This module exports: +-- +-- * Data types with 'ContentAddressable' instances that correspond to v2 of the Unison hash function. +-- * Miscellaneous helper functions related to hashing. 
+module Unison.Hashing.V2 + ( Branch (..), + Causal (..), + DataDeclaration (..), + Decl, + EffectDeclaration (..), + Kind (..), + MatchCase (..), + MdValues (..), + Modifier (..), + NameSegment (..), + Patch (..), + Pattern (..), + Reference (..), + pattern ReferenceDerived, + ReferenceId (..), + Referent (..), + SeqOp (..), + Term, + TermEdit (..), + TermF (..), + Type, + TypeEdit (..), + TypeF (..), + hashClosedTerm, + hashDecls, + hashTermComponents, + hashTermComponentsWithoutTypes, + typeToReference, + typeToReferenceMentions, + + -- * Re-exports + ContentAddressable (..), + ) +where + +import Unison.Hashing.ContentAddressable (ContentAddressable (..)) +import Unison.Hashing.V2.Branch (Branch (..), MdValues (..)) +import Unison.Hashing.V2.Causal (Causal (..)) +import Unison.Hashing.V2.DataDeclaration (DataDeclaration (..), Decl, EffectDeclaration (..), Modifier (..), hashDecls) +import Unison.Hashing.V2.Kind (Kind (..)) +import Unison.Hashing.V2.NameSegment (NameSegment (..)) +import Unison.Hashing.V2.Patch (Patch (..)) +import Unison.Hashing.V2.Pattern (Pattern (..), SeqOp (..)) +import Unison.Hashing.V2.Reference (Reference (..), ReferenceId (..), pattern ReferenceDerived) +import Unison.Hashing.V2.Referent (Referent (..)) +import Unison.Hashing.V2.Term (MatchCase (..), Term, TermF (..), hashClosedTerm, hashTermComponents, hashTermComponentsWithoutTypes) +import Unison.Hashing.V2.TermEdit (TermEdit (..)) +import Unison.Hashing.V2.Type (Type, TypeF (..), typeToReference, typeToReferenceMentions) +import Unison.Hashing.V2.TypeEdit (TypeEdit (..)) diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/ABT.hs b/unison-hashing-v2/src/Unison/Hashing/V2/ABT.hs index 9950b92bf..6f3bbd599 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/ABT.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/ABT.hs @@ -14,17 +14,18 @@ import qualified Data.List as List (sort) import qualified Data.Map as Map import qualified Data.Set as Set import Unison.ABT -import Unison.Hashing.V2.Tokenizable (Accumulate, Hashable1, hash1) +import Unison.Hash (Hash) +import Unison.Hashing.V2.Tokenizable (Hashable1, hash1) import qualified Unison.Hashing.V2.Tokenizable as Hashable import Unison.Prelude import Prelude hiding (abs, cycle) -- Hash a strongly connected component and sort its definitions into a canonical order. hashComponent :: - forall a f h v. - (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Ord v, Ord h, Accumulate h) => + forall a f v. + (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Ord v) => Map.Map v (Term f v a) -> - (h, [(v, Term f v a)]) + (Hash, [(v, Term f v a)]) hashComponent byName = let ts = Map.toList byName -- First, compute a canonical hash ordering of the component, as well as an environment in which we can hash @@ -32,11 +33,11 @@ hashComponent byName = (hashes, env) = doHashCycle [] ts -- Construct a list of tokens that is shared by all members of the component. They are disambiguated only by their -- name that gets tumbled into the hash. - commonTokens :: [Hashable.Token h] + commonTokens :: [Hashable.Token] commonTokens = Hashable.Tag 1 : map Hashable.Hashed hashes -- Use a helper function that hashes a single term given its name, now that we have an environment in which we can -- look the name up, as well as the common tokens. 
- hashName :: v -> h + hashName :: v -> Hash hashName v = Hashable.accumulate (commonTokens ++ [Hashable.Hashed (hash' env (var v :: Term f v ()))]) (hashes', permutedTerms) = ts @@ -53,10 +54,10 @@ hashComponent byName = -- components (using the `termFromHash` function). Requires that the -- overall component has no free variables. hashComponents :: - (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v, Ord h, Accumulate h) => - (h -> Word64 -> Term f v ()) -> + (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v) => + (Hash -> Word64 -> Term f v ()) -> Map.Map v (Term f v a) -> - [(h, [(v, Term f v a)])] + [(Hash, [(v, Term f v a)])] hashComponents termFromHash termsByName = let bound = Set.fromList (Map.keys termsByName) escapedVars = Set.unions (freeVars <$> Map.elems termsByName) `Set.difference` bound @@ -82,36 +83,37 @@ hashComponents termFromHash termsByName = -- | We ignore annotations in the `Term`, as these should never affect the -- meaning of the term. hash :: - forall f v a h. - (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulate h) => + forall f v a. + (Functor f, Hashable1 f, Eq v, Show v) => Term f v a -> - h -hash = hash' [] where + Hash +hash = hash' [] hash' :: - forall f v a h. - (Functor f, Hashable1 f, Eq v, Show v, Ord h, Accumulate h) => + forall f v a. + (Functor f, Hashable1 f, Eq v, Show v) => [Either [v] v] -> Term f v a -> - h + Hash hash' env = \case Var' v -> maybe die hashInt ind where lookup (Left cycle) = v `elem` cycle lookup (Right v') = v == v' ind = findIndex lookup env - hashInt :: Int -> h + hashInt :: Int -> Hash hashInt i = Hashable.accumulate [Hashable.Nat $ fromIntegral i] die = error $ - "unknown var in environment: " ++ show v + "unknown var in environment: " + ++ show v ++ " environment = " ++ show env Cycle' vs t -> hash1 (hashCycle vs env) undefined t Abs'' v t -> hash' (Right v : env) t Tm' t -> hash1 (\ts -> (List.sort (map (hash' env) ts), hash' env)) (hash' env) t where - hashCycle :: [v] -> [Either [v] v] -> [Term f v a] -> ([h], Term f v a -> h) + hashCycle :: [v] -> [Either [v] v] -> [Term f v a] -> ([Hash], Term f v a -> Hash) hashCycle cycle env ts = let (ts', env') = doHashCycle env (zip cycle ts) in (ts', hash' env') @@ -119,11 +121,11 @@ hash' env = \case -- | @doHashCycle env terms@ hashes cycle @terms@ in environment @env@, and returns the canonical ordering of the hashes -- of those terms, as well as an updated environment with each of the terms' bindings in the canonical ordering. doHashCycle :: - forall a f h v. - (Accumulate h, Eq v, Functor f, Hashable1 f, Ord h, Show v) => + forall a f v. + (Eq v, Functor f, Hashable1 f, Show v) => [Either [v] v] -> [(v, Term f v a)] -> - ([h], [Either [v] v]) + ([Hash], [Either [v] v]) doHashCycle env namedTerms = (map (hash' newEnv) permutedTerms, newEnv) where @@ -132,7 +134,7 @@ doHashCycle env namedTerms = permutationEnv = Left names : env (permutedNames, permutedTerms) = namedTerms - & sortOn @h (hash' permutationEnv . snd) + & sortOn (hash' permutationEnv . 
snd) & unzip -- The new environment, which includes the names of all of the terms in the cycle, now that we have computed their -- canonical ordering diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Branch.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Branch.hs index 725d2faea..df7c8bf7b 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Branch.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Branch.hs @@ -1,13 +1,12 @@ -{-# LANGUAGE DerivingVia #-} -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE TemplateHaskell #-} -{-# LANGUAGE ViewPatterns #-} - -module Unison.Hashing.V2.Branch (NameSegment (..), Raw (..), MdValues (..), hashBranch) where +module Unison.Hashing.V2.Branch + ( Branch (..), + MdValues (..), + ) +where import Unison.Hash (Hash) +import Unison.Hashing.ContentAddressable (ContentAddressable (..)) +import Unison.Hashing.V2.NameSegment (NameSegment) import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) import Unison.Hashing.V2.Tokenizable (Tokenizable) @@ -20,25 +19,20 @@ newtype MdValues = MdValues (Set MetadataValue) deriving (Eq, Ord, Show) deriving (Tokenizable) via Set MetadataValue -newtype NameSegment = NameSegment Text deriving (Eq, Ord, Show) - -hashBranch :: Raw -> Hash -hashBranch = H.hashTokenizable - -data Raw = Raw +data Branch = Branch { terms :: Map NameSegment (Map Referent MdValues), types :: Map NameSegment (Map Reference MdValues), patches :: Map NameSegment Hash, children :: Map NameSegment Hash -- the Causal Hash } -instance Tokenizable Raw where +instance ContentAddressable Branch where + contentHash = H.hashTokenizable + +instance Tokenizable Branch where tokens b = [ H.accumulateToken (terms b), H.accumulateToken (types b), H.accumulateToken (children b), H.accumulateToken (patches b) ] - -instance H.Tokenizable NameSegment where - tokens (NameSegment t) = [H.Text t] diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Causal.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Causal.hs index 8c74fdd46..5f0b300c5 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Causal.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Causal.hs @@ -1,23 +1,18 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} - module Unison.Hashing.V2.Causal ( Causal (..), - hashCausal, ) where -import Data.Set (Set) import qualified Data.Set as Set import Unison.Hash (Hash) +import Unison.Hashing.ContentAddressable (ContentAddressable (..)) import qualified Unison.Hashing.V2.Tokenizable as H -import qualified Unison.Hashing.V2.Tokenizable as Tokenizable - -hashCausal :: Causal -> Hash -hashCausal = Tokenizable.hashTokenizable +import Unison.Prelude data Causal = Causal {branchHash :: Hash, parents :: Set Hash} +instance ContentAddressable Causal where + contentHash = H.hashTokenizable + instance H.Tokenizable Causal where tokens c = H.tokens $ branchHash c : Set.toList (parents c) diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/ConstructorId.hs b/unison-hashing-v2/src/Unison/Hashing/V2/ConstructorId.hs new file mode 100644 index 000000000..f64b67534 --- /dev/null +++ b/unison-hashing-v2/src/Unison/Hashing/V2/ConstructorId.hs @@ -0,0 +1,8 @@ +module Unison.Hashing.V2.ConstructorId + ( ConstructorId, + ) +where + +import Data.Word (Word64) + +type ConstructorId = Word64 diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/DataDeclaration.hs b/unison-hashing-v2/src/Unison/Hashing/V2/DataDeclaration.hs index 46d4b67d5..c8d08ec79 100644 
--- a/unison-hashing-v2/src/Unison/Hashing/V2/DataDeclaration.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/DataDeclaration.hs @@ -1,5 +1,3 @@ -{-# LANGUAGE RecordWildCards #-} - module Unison.Hashing.V2.DataDeclaration ( DataDeclaration (..), EffectDeclaration (..), @@ -15,12 +13,11 @@ import qualified Data.Map as Map import qualified Unison.ABT as ABT import Unison.Hash (Hash) import qualified Unison.Hashing.V2.ABT as ABT -import Unison.Hashing.V2.Reference (Reference) -import qualified Unison.Hashing.V2.Reference as Reference +import Unison.Hashing.V2.Reference (Reference (..), ReferenceId) import qualified Unison.Hashing.V2.Reference.Util as Reference.Util import Unison.Hashing.V2.Tokenizable (Hashable1) import qualified Unison.Hashing.V2.Tokenizable as Hashable -import Unison.Hashing.V2.Type (Type) +import Unison.Hashing.V2.Type (Type, TypeF) import qualified Unison.Hashing.V2.Type as Type import qualified Unison.Name as Name import qualified Unison.Names.ResolutionResult as Names @@ -52,16 +49,16 @@ constructorTypes = (snd <$>) . constructors constructors :: DataDeclaration v a -> [(v, Type v a)] constructors (DataDeclaration _ _ _ ctors) = [(v, t) | (_, v, t) <- ctors] -toABT :: ABT.Var v => DataDeclaration v () -> ABT.Term F v () +toABT :: (ABT.Var v) => DataDeclaration v () -> ABT.Term F v () toABT dd = ABT.tm $ Modified (modifier dd) dd' where dd' = ABT.absChain (bound dd) (ABT.tm (Constructors (ABT.transform Type <$> constructorTypes dd))) -- Implementation detail of `hashDecls`, works with unannotated data decls -hashDecls0 :: (Eq v, ABT.Var v, Show v) => Map v (DataDeclaration v ()) -> [(v, Reference.Id)] +hashDecls0 :: (Eq v, ABT.Var v, Show v) => Map v (DataDeclaration v ()) -> [(v, ReferenceId)] hashDecls0 decls = let abts = toABT <$> decls - ref r = ABT.tm (Type (Type.Ref (Reference.DerivedId r))) + ref r = ABT.tm (Type (Type.TypeRef (ReferenceDerivedId r))) cs = Reference.Util.hashComponents ref abts in [(v, r) | (v, (r, _)) <- Map.toList cs] @@ -80,11 +77,11 @@ hashDecls :: (Eq v, Var v, Show v) => (v -> Name.Name) -> Map v (DataDeclaration v a) -> - Names.ResolutionResult v a [(v, Reference.Id, DataDeclaration v a)] + Names.ResolutionResult v a [(v, ReferenceId, DataDeclaration v a)] hashDecls unsafeVarToName decls = do -- todo: make sure all other external references are resolved before calling this let varToRef = hashDecls0 (void <$> decls) - varToRef' = second Reference.DerivedId <$> varToRef + varToRef' = second ReferenceDerivedId <$> varToRef decls' = bindTypes <$> decls bindTypes dd = dd {constructors' = over _3 (Type.bindExternal varToRef') <$> constructors' dd} typeReferences = Map.fromList (first unsafeVarToName <$> varToRef') @@ -95,7 +92,7 @@ hashDecls unsafeVarToName decls = do pure [(v, r, dd) | (v, r) <- varToRef, Just dd <- [Map.lookup v decls']] bindReferences :: - Var v => + (Var v) => (v -> Name.Name) -> Set v -> Map Name.Name Reference -> @@ -107,7 +104,7 @@ bindReferences unsafeVarToName keepFree names (DataDeclaration m a bound constru pure $ DataDeclaration m a bound constructors data F a - = Type (Type.F a) + = Type (TypeF a) | LetRec [a] a | Constructors [a] | Modified Modifier a diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Hashable.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Hashable.hs deleted file mode 100644 index f6ed2ece7..000000000 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Hashable.hs +++ /dev/null @@ -1,30 +0,0 @@ -module Unison.Hashing.V2.Hashable - ( Hashable (..), - hashFor, - HashFor (..), - ) -where - -import Data.Int 
(Int64) -import Data.Set (Set) -import Unison.Hash (Hash (..), HashFor (..)) -import qualified Unison.Hashing.V2.Tokenizable as Tokenizable - --- | This typeclass provides a mechanism for obtaining a content-based hash for Unison types & --- terms. --- Be wary that Unison requires that these hashes be deterministic, any change to a Hashable --- instance requires a full codebase migration and should not be taken lightly. -class Hashable t where - hash :: t -> Hash - -instance Tokenizable.Tokenizable a => Hashable [a] where - hash = Tokenizable.hashTokenizable - -instance Tokenizable.Tokenizable a => Hashable (Set a) where - hash = Tokenizable.hashTokenizable - -instance Hashable Int64 where - hash = Tokenizable.hashTokenizable - -hashFor :: Hashable t => t -> HashFor t -hashFor = HashFor . hash diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Kind.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Kind.hs index 98d6791a6..9cc130b0d 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Kind.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Kind.hs @@ -1,14 +1,18 @@ -{-# LANGUAGE DeriveGeneric #-} - -module Unison.Hashing.V2.Kind where +module Unison.Hashing.V2.Kind + ( Kind (..), + ) +where import Unison.Hashing.V2.Tokenizable (Tokenizable) import qualified Unison.Hashing.V2.Tokenizable as Hashable import Unison.Prelude -data Kind = Star | Arrow Kind Kind deriving (Eq, Ord, Read, Show, Generic) +data Kind + = KindStar + | KindArrow Kind Kind + deriving (Eq, Ord, Read, Show, Generic) instance Tokenizable Kind where tokens k = case k of - Star -> [Hashable.Tag 0] - Arrow k1 k2 -> (Hashable.Tag 1 : Hashable.tokens k1) ++ Hashable.tokens k2 + KindStar -> [Hashable.Tag 0] + KindArrow k1 k2 -> (Hashable.Tag 1 : Hashable.tokens k1) ++ Hashable.tokens k2 diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/NameSegment.hs b/unison-hashing-v2/src/Unison/Hashing/V2/NameSegment.hs new file mode 100644 index 000000000..22bb34cf2 --- /dev/null +++ b/unison-hashing-v2/src/Unison/Hashing/V2/NameSegment.hs @@ -0,0 +1,15 @@ +module Unison.Hashing.V2.NameSegment + ( NameSegment (..), + ) +where + +import qualified Unison.Hashing.V2.Tokenizable as H +import Unison.Prelude + +-- | A name segment. 
+newtype NameSegment + = NameSegment Text + deriving stock (Eq, Ord, Show) + +instance H.Tokenizable NameSegment where + tokens (NameSegment t) = [H.Text t] diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Patch.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Patch.hs index 017f68d54..b9554a05b 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Patch.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Patch.hs @@ -1,27 +1,25 @@ -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE TemplateHaskell #-} +module Unison.Hashing.V2.Patch + ( Patch (..), + ) +where -module Unison.Hashing.V2.Patch (Patch (..), hashPatch) where - -import Data.Map (Map) -import Data.Set (Set) -import Unison.Hash (Hash) +import Unison.Hashing.ContentAddressable (ContentAddressable (contentHash)) import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Referent (Referent) import Unison.Hashing.V2.TermEdit (TermEdit) import Unison.Hashing.V2.Tokenizable (Tokenizable) import qualified Unison.Hashing.V2.Tokenizable as H import Unison.Hashing.V2.TypeEdit (TypeEdit) - -hashPatch :: Patch -> Hash -hashPatch = H.hashTokenizable +import Unison.Prelude data Patch = Patch { termEdits :: Map Referent (Set TermEdit), typeEdits :: Map Reference (Set TypeEdit) } +instance ContentAddressable Patch where + contentHash = H.hashTokenizable + instance Tokenizable Patch where tokens p = [ H.accumulateToken (termEdits p), diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Pattern.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Pattern.hs index 1cfe22542..433370b29 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Pattern.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Pattern.hs @@ -1,35 +1,30 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternSynonyms #-} +module Unison.Hashing.V2.Pattern + ( Pattern (..), + SeqOp (..), + ) +where -module Unison.Hashing.V2.Pattern where - -import Data.Foldable as Foldable hiding (foldMap') -import Data.List (intercalate) -import qualified Data.Set as Set import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.Hashing.V2.Reference (Reference) import qualified Unison.Hashing.V2.Tokenizable as H -import qualified Unison.Hashing.V2.Type as Type import Unison.Prelude data Pattern loc - = Unbound loc - | Var loc - | Boolean loc !Bool - | Int loc !Int64 - | Nat loc !Word64 - | Float loc !Double - | Text loc !Text - | Char loc !Char - | Constructor loc !Reference !ConstructorId [Pattern loc] - | As loc (Pattern loc) - | EffectPure loc (Pattern loc) - | EffectBind loc !Reference !ConstructorId [Pattern loc] (Pattern loc) - | SequenceLiteral loc [Pattern loc] - | SequenceOp loc (Pattern loc) !SeqOp (Pattern loc) - deriving (Ord, Generic, Functor, Foldable, Traversable) + = PatternUnbound loc + | PatternVar loc + | PatternBoolean loc !Bool + | PatternInt loc !Int64 + | PatternNat loc !Word64 + | PatternFloat loc !Double + | PatternText loc !Text + | PatternChar loc !Char + | PatternConstructor loc !Reference !ConstructorId [Pattern loc] + | PatternAs loc (Pattern loc) + | PatternEffectPure loc (Pattern loc) + | PatternEffectBind loc !Reference !ConstructorId [Pattern loc] (Pattern loc) + | PatternSequenceLiteral loc [Pattern loc] + | PatternSequenceOp loc (Pattern loc) !SeqOp (Pattern loc) + deriving stock (Foldable, Functor, Generic, Ord, Show, Traversable) data SeqOp = Cons @@ -42,118 +37,36 @@ instance H.Tokenizable SeqOp where tokens Snoc = [H.Tag 1] tokens Concat = [H.Tag 
2] -instance Show (Pattern loc) where - show (Unbound _) = "Unbound" - show (Var _) = "Var" - show (Boolean _ x) = "Boolean " <> show x - show (Int _ x) = "Int " <> show x - show (Nat _ x) = "Nat " <> show x - show (Float _ x) = "Float " <> show x - show (Text _ t) = "Text " <> show t - show (Char _ c) = "Char " <> show c - show (Constructor _ r i ps) = - "Constructor " <> unwords [show r, show i, show ps] - show (As _ p) = "As " <> show p - show (EffectPure _ k) = "EffectPure " <> show k - show (EffectBind _ r i ps k) = - "EffectBind " <> unwords [show r, show i, show ps, show k] - show (SequenceLiteral _ ps) = "Sequence " <> intercalate ", " (fmap show ps) - show (SequenceOp _ ph op pt) = "Sequence " <> show ph <> " " <> show op <> " " <> show pt - -application :: Pattern loc -> Bool -application (Constructor _ _ _ (_ : _)) = True -application _ = False - -loc :: Pattern loc -> loc -loc p = head $ Foldable.toList p - -setLoc :: Pattern loc -> loc -> Pattern loc -setLoc p loc = case p of - EffectBind _ a b c d -> EffectBind loc a b c d - EffectPure _ a -> EffectPure loc a - As _ a -> As loc a - Constructor _ a b c -> Constructor loc a b c - SequenceLiteral _ ps -> SequenceLiteral loc ps - SequenceOp _ ph op pt -> SequenceOp loc ph op pt - x -> fmap (const loc) x - instance H.Tokenizable (Pattern p) where - tokens (Unbound _) = [H.Tag 0] - tokens (Var _) = [H.Tag 1] - tokens (Boolean _ b) = H.Tag 2 : [H.Tag $ if b then 1 else 0] - tokens (Int _ n) = H.Tag 3 : [H.Int n] - tokens (Nat _ n) = H.Tag 4 : [H.Nat n] - tokens (Float _ f) = H.Tag 5 : H.tokens f - tokens (Constructor _ r n args) = + tokens (PatternUnbound _) = [H.Tag 0] + tokens (PatternVar _) = [H.Tag 1] + tokens (PatternBoolean _ b) = H.Tag 2 : [H.Tag $ if b then 1 else 0] + tokens (PatternInt _ n) = H.Tag 3 : [H.Int n] + tokens (PatternNat _ n) = H.Tag 4 : [H.Nat n] + tokens (PatternFloat _ f) = H.Tag 5 : H.tokens f + tokens (PatternConstructor _ r n args) = [H.Tag 6, H.accumulateToken r, H.Nat $ fromIntegral n, H.accumulateToken args] - tokens (EffectPure _ p) = H.Tag 7 : H.tokens p - tokens (EffectBind _ r n args k) = + tokens (PatternEffectPure _ p) = H.Tag 7 : H.tokens p + tokens (PatternEffectBind _ r n args k) = [H.Tag 8, H.accumulateToken r, H.Nat $ fromIntegral n, H.accumulateToken args, H.accumulateToken k] - tokens (As _ p) = H.Tag 9 : H.tokens p - tokens (Text _ t) = H.Tag 10 : H.tokens t - tokens (SequenceLiteral _ ps) = H.Tag 11 : concatMap H.tokens ps - tokens (SequenceOp _ l op r) = H.Tag 12 : H.tokens op ++ H.tokens l ++ H.tokens r - tokens (Char _ c) = H.Tag 13 : H.tokens c + tokens (PatternAs _ p) = H.Tag 9 : H.tokens p + tokens (PatternText _ t) = H.Tag 10 : H.tokens t + tokens (PatternSequenceLiteral _ ps) = H.Tag 11 : concatMap H.tokens ps + tokens (PatternSequenceOp _ l op r) = H.Tag 12 : H.tokens op ++ H.tokens l ++ H.tokens r + tokens (PatternChar _ c) = H.Tag 13 : H.tokens c instance Eq (Pattern loc) where - Unbound _ == Unbound _ = True - Var _ == Var _ = True - Boolean _ b == Boolean _ b2 = b == b2 - Int _ n == Int _ m = n == m - Nat _ n == Nat _ m = n == m - Float _ f == Float _ g = f == g - Constructor _ r n args == Constructor _ s m brgs = r == s && n == m && args == brgs - EffectPure _ p == EffectPure _ q = p == q - EffectBind _ r ctor ps k == EffectBind _ r2 ctor2 ps2 k2 = r == r2 && ctor == ctor2 && ps == ps2 && k == k2 - As _ p == As _ q = p == q - Text _ t == Text _ t2 = t == t2 - SequenceLiteral _ ps == SequenceLiteral _ ps2 = ps == ps2 - SequenceOp _ ph op pt == SequenceOp _ ph2 op2 pt2 = ph 
== ph2 && op == op2 && pt == pt2 + PatternUnbound _ == PatternUnbound _ = True + PatternVar _ == PatternVar _ = True + PatternBoolean _ b == PatternBoolean _ b2 = b == b2 + PatternInt _ n == PatternInt _ m = n == m + PatternNat _ n == PatternNat _ m = n == m + PatternFloat _ f == PatternFloat _ g = f == g + PatternConstructor _ r n args == PatternConstructor _ s m brgs = r == s && n == m && args == brgs + PatternEffectPure _ p == PatternEffectPure _ q = p == q + PatternEffectBind _ r ctor ps k == PatternEffectBind _ r2 ctor2 ps2 k2 = r == r2 && ctor == ctor2 && ps == ps2 && k == k2 + PatternAs _ p == PatternAs _ q = p == q + PatternText _ t == PatternText _ t2 = t == t2 + PatternSequenceLiteral _ ps == PatternSequenceLiteral _ ps2 = ps == ps2 + PatternSequenceOp _ ph op pt == PatternSequenceOp _ ph2 op2 pt2 = ph == ph2 && op == op2 && pt == pt2 _ == _ = False - -foldMap' :: Monoid m => (Pattern loc -> m) -> Pattern loc -> m -foldMap' f p = case p of - Unbound _ -> f p - Var _ -> f p - Boolean _ _ -> f p - Int _ _ -> f p - Nat _ _ -> f p - Float _ _ -> f p - Text _ _ -> f p - Char _ _ -> f p - Constructor _ _ _ ps -> f p <> foldMap (foldMap' f) ps - As _ p' -> f p <> foldMap' f p' - EffectPure _ p' -> f p <> foldMap' f p' - EffectBind _ _ _ ps p' -> f p <> foldMap (foldMap' f) ps <> foldMap' f p' - SequenceLiteral _ ps -> f p <> foldMap (foldMap' f) ps - SequenceOp _ p1 _ p2 -> f p <> foldMap' f p1 <> foldMap' f p2 - -generalizedDependencies :: - Ord r => - (Reference -> r) -> - (Reference -> ConstructorId -> r) -> - (Reference -> r) -> - (Reference -> ConstructorId -> r) -> - (Reference -> r) -> - Pattern loc -> - Set r -generalizedDependencies literalType dataConstructor dataType effectConstructor effectType = - Set.fromList - . foldMap' - ( \case - Unbound _ -> mempty - Var _ -> mempty - As _ _ -> mempty - Constructor _ r cid _ -> [dataType r, dataConstructor r cid] - EffectPure _ _ -> [effectType Type.effectRef] - EffectBind _ r cid _ _ -> - [effectType Type.effectRef, effectType r, effectConstructor r cid] - SequenceLiteral _ _ -> [literalType Type.listRef] - SequenceOp {} -> [literalType Type.listRef] - Boolean _ _ -> [literalType Type.booleanRef] - Int _ _ -> [literalType Type.intRef] - Nat _ _ -> [literalType Type.natRef] - Float _ _ -> [literalType Type.floatRef] - Text _ _ -> [literalType Type.textRef] - Char _ _ -> [literalType Type.charRef] - ) diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Reference.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Reference.hs index 8dff969e5..7b95c0326 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Reference.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Reference.hs @@ -1,25 +1,17 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE ViewPatterns #-} - module Unison.Hashing.V2.Reference - ( Reference, - pattern Builtin, - pattern Derived, - pattern DerivedId, - Id (..), + ( Reference (..), + pattern ReferenceDerived, + ReferenceId (..), components, ) where import qualified Data.Text as Text -import qualified Unison.Hash as H +import Unison.Hash (Hash) +import qualified Unison.Hash as Hash import Unison.Hashing.V2.Tokenizable (Tokenizable) import qualified Unison.Hashing.V2.Tokenizable as Hashable import Unison.Prelude -import Unison.ShortHash (ShortHash) -import qualified Unison.ShortHash as SH -- | Either a builtin or a user defined (hashed) top-level declaration. 
-- @@ -27,45 +19,33 @@ import qualified Unison.ShortHash as SH -- -- Other used defined things like local variables don't get @Reference@s. data Reference - = Builtin Text.Text + = ReferenceBuiltin Text.Text | -- `Derived` can be part of a strongly connected component. -- The `Pos` refers to a particular element of the component -- and the `Size` is the number of elements in the component. -- Using an ugly name so no one tempted to use this - DerivedId Id - deriving (Eq, Ord) + ReferenceDerivedId ReferenceId + deriving stock (Eq, Ord, Show) type Pos = Word64 -pattern Derived :: H.Hash -> Pos -> Reference -pattern Derived h i = DerivedId (Id h i) +pattern ReferenceDerived :: Hash -> Pos -> Reference +pattern ReferenceDerived h i = ReferenceDerivedId (ReferenceId h i) -{-# COMPLETE Builtin, Derived #-} +{-# COMPLETE ReferenceBuiltin, ReferenceDerived #-} -- | @Pos@ is a position into a cycle of size @Size@, as cycles are hashed together. -data Id = Id H.Hash Pos deriving (Eq, Ord) +data ReferenceId + = ReferenceId Hash Pos + deriving stock (Eq, Ord, Show) --- todo: delete these, but `instance Show Reference` currently depends on SH -toShortHash :: Reference -> ShortHash -toShortHash (Builtin b) = SH.Builtin b -toShortHash (Derived h 0) = SH.ShortHash (H.base32Hex h) Nothing Nothing -toShortHash (Derived h i) = SH.ShortHash (H.base32Hex h) (Just $ showSuffix i) Nothing - -showSuffix :: Pos -> Text -showSuffix = Text.pack . show - -component :: H.Hash -> [k] -> [(k, Id)] +component :: Hash -> [k] -> [(k, ReferenceId)] component h ks = - let - in [(k, (Id h i)) | (k, i) <- ks `zip` [0 ..]] + [(k, (ReferenceId h i)) | (k, i) <- ks `zip` [0 ..]] -components :: [(H.Hash, [k])] -> [(k, Id)] +components :: [(Hash, [k])] -> [(k, ReferenceId)] components sccs = uncurry component =<< sccs -instance Show Id where show = SH.toString . SH.take 5 . toShortHash . DerivedId - -instance Show Reference where show = SH.toString . SH.take 5 . 
toShortHash - instance Tokenizable Reference where - tokens (Builtin txt) = [Hashable.Tag 0, Hashable.Text txt] - tokens (DerivedId (Id h i)) = [Hashable.Tag 1, Hashable.Bytes (H.toByteString h), Hashable.Nat i] + tokens (ReferenceBuiltin txt) = [Hashable.Tag 0, Hashable.Text txt] + tokens (ReferenceDerivedId (ReferenceId h i)) = [Hashable.Tag 1, Hashable.Bytes (Hash.toByteString h), Hashable.Nat i] diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Reference/Util.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Reference/Util.hs index e1db55628..1534f4aef 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Reference/Util.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Reference/Util.hs @@ -1,19 +1,23 @@ -module Unison.Hashing.V2.Reference.Util where +module Unison.Hashing.V2.Reference.Util + ( hashComponents, + ) +where import qualified Data.Map as Map import Unison.ABT (Var) import qualified Unison.Hashing.V2.ABT as ABT +import Unison.Hashing.V2.Reference (ReferenceId (..)) import qualified Unison.Hashing.V2.Reference as Reference import Unison.Hashing.V2.Tokenizable (Hashable1) import Unison.Prelude hashComponents :: (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v) => - (Reference.Id -> ABT.Term f v ()) -> + (ReferenceId -> ABT.Term f v ()) -> Map v (ABT.Term f v a) -> - Map v (Reference.Id, ABT.Term f v a) + Map v (ReferenceId, ABT.Term f v a) hashComponents embedRef tms = Map.fromList [(v, (r, e)) | ((v, e), r) <- cs] where cs = Reference.components $ ABT.hashComponents ref tms - ref h i = embedRef (Reference.Id h i) + ref h i = embedRef (ReferenceId h i) diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Referent.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Referent.hs index 6c8806631..61b81f01c 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Referent.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Referent.hs @@ -1,22 +1,18 @@ -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternSynonyms #-} - module Unison.Hashing.V2.Referent - ( Referent, - pattern Ref, - pattern Con, - ConstructorId, + ( Referent (..), ) where -import Unison.DataDeclaration.ConstructorId (ConstructorId) +import Unison.Hashing.V2.ConstructorId (ConstructorId) import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Tokenizable (Tokenizable) import qualified Unison.Hashing.V2.Tokenizable as H -data Referent = Ref Reference | Con Reference ConstructorId - deriving (Show, Ord, Eq) +data Referent + = ReferentRef Reference + | ReferentCon Reference ConstructorId + deriving stock (Show, Ord, Eq) instance Tokenizable Referent where - tokens (Ref r) = [H.Tag 0] ++ H.tokens r - tokens (Con r i) = [H.Tag 2] ++ H.tokens r ++ H.tokens i + tokens (ReferentRef r) = [H.Tag 0] ++ H.tokens r + tokens (ReferentCon r i) = [H.Tag 2] ++ H.tokens r ++ H.tokens i diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Term.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Term.hs index 336ae02c7..b7ac255b8 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Term.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Term.hs @@ -1,21 +1,10 @@ -{-# LANGUAGE DeriveFoldable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE InstanceSigs #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PartialTypeSignatures #-} -{-# LANGUAGE PatternSynonyms #-} -{-# LANGUAGE Rank2Types #-} -{-# LANGUAGE UnicodeSyntax #-} -{-# LANGUAGE ViewPatterns #-} - module Unison.Hashing.V2.Term ( Term, - F (..), + TermF (..), MatchCase (..), hashClosedTerm, - hashComponents, - hashComponentsWithoutTypes, + 
hashTermComponents, + hashTermComponentsWithoutTypes, ) where @@ -25,11 +14,11 @@ import qualified Data.Zip as Zip import qualified Unison.ABT as ABT import qualified Unison.Blank as B import Unison.DataDeclaration.ConstructorId (ConstructorId) +import Unison.Hash (Hash) import qualified Unison.Hash as Hash import qualified Unison.Hashing.V2.ABT as ABT import Unison.Hashing.V2.Pattern (Pattern) -import Unison.Hashing.V2.Reference (Reference) -import qualified Unison.Hashing.V2.Reference as Reference +import Unison.Hashing.V2.Reference (Reference (..), ReferenceId (..), pattern ReferenceDerived) import qualified Unison.Hashing.V2.Reference.Util as ReferenceUtil import Unison.Hashing.V2.Referent (Referent) import Unison.Hashing.V2.Tokenizable (Hashable1, accumulateToken) @@ -40,37 +29,37 @@ import Unison.Var (Var) import Prelude hiding (and, or) data MatchCase loc a = MatchCase (Pattern loc) (Maybe a) a - deriving (Show, Eq, Foldable, Functor, Generic, Generic1, Traversable) + deriving stock (Show, Eq, Foldable, Functor, Generic, Generic1, Traversable) -- | Base functor for terms in the Unison language -- We need `typeVar` because the term and type variables may differ. -data F typeVar typeAnn patternAnn a - = Int Int64 - | Nat Word64 - | Float Double - | Boolean Bool - | Text Text - | Char Char - | Blank (B.Blank typeAnn) - | Ref Reference +data TermF typeVar typeAnn patternAnn a + = TermInt Int64 + | TermNat Word64 + | TermFloat Double + | TermBoolean Bool + | TermText Text + | TermChar Char + | TermBlank (B.Blank typeAnn) + | TermRef Reference | -- First argument identifies the data type, -- second argument identifies the constructor - Constructor Reference ConstructorId - | Request Reference ConstructorId - | Handle a a - | App a a - | Ann a (Type typeVar typeAnn) - | List (Seq a) - | If a a a - | And a a - | Or a a - | Lam a + TermConstructor Reference ConstructorId + | TermRequest Reference ConstructorId + | TermHandle a a + | TermApp a a + | TermAnn a (Type typeVar typeAnn) + | TermList (Seq a) + | TermIf a a a + | TermAnd a a + | TermOr a a + | TermLam a | -- Note: let rec blocks have an outer ABT.Cycle which introduces as many -- variables as there are bindings - LetRec [a] a + TermLetRec [a] a | -- Note: first parameter is the binding, second is the expression which may refer -- to this let bound variable. Constructed as `Let b (abs v e)` - Let a a + TermLet a a | -- Pattern matching / eliminating data types, example: -- case x of -- Just n -> rhs1 @@ -81,9 +70,9 @@ data F typeVar typeAnn patternAnn a -- Match x -- [ (Constructor 0 [Var], ABT.abs n rhs1) -- , (Constructor 1 [], rhs2) ] - Match a [MatchCase patternAnn a] - | TermLink Referent - | TypeLink Reference + TermMatch a [MatchCase patternAnn a] + | TermTermLink Referent + | TermTypeLink Reference deriving (Foldable, Functor, Generic, Generic1, Traversable) -- | Like `Term v`, but with an annotation of type `a` at every level in the tree @@ -91,32 +80,32 @@ type Term v a = Term2 v a a v a -- | Allow type variables, term variables, type annotations and term annotations -- to all differ -type Term2 vt at ap v a = ABT.Term (F vt at ap) v a +type Term2 vt at ap v a = ABT.Term (TermF vt at ap) v a -- some smart constructors -ref :: Ord v => a -> Reference -> Term2 vt at ap v a -ref a r = ABT.tm' a (Ref r) +ref :: (Ord v) => a -> Reference -> Term2 vt at ap v a +ref a r = ABT.tm' a (TermRef r) -refId :: Ord v => a -> Reference.Id -> Term2 vt at ap v a -refId a = ref a . 
Reference.DerivedId +refId :: (Ord v) => a -> ReferenceId -> Term2 vt at ap v a +refId a = ref a . ReferenceDerivedId -hashComponents :: +hashTermComponents :: forall v a extra. - Var v => + (Var v) => Map v (Term v a, Type v a, extra) -> - Map v (Reference.Id, Term v a, Type v a, extra) -hashComponents terms = + Map v (ReferenceId, Term v a, Type v a, extra) +hashTermComponents terms = Zip.zipWith keepExtra terms (ReferenceUtil.hashComponents (refId ()) terms') where terms' :: Map v (Term v a) terms' = incorporateType <$> terms - keepExtra :: ((Term v a, Type v a, extra) -> (Reference.Id, Term v a) -> (Reference.Id, Term v a, Type v a, extra)) + keepExtra :: ((Term v a, Type v a, extra) -> (ReferenceId, Term v a) -> (ReferenceId, Term v a, Type v a, extra)) keepExtra (_oldTrm, typ, extra) (refId, trm) = (refId, trm, typ, extra) incorporateType :: (Term v a, Type v a, extra) -> Term v a - incorporateType (a@(ABT.out -> ABT.Tm (Ann e _tp)), typ, _extra) = ABT.tm' (ABT.annotation a) (Ann e typ) - incorporateType (e, typ, _extra) = ABT.tm' (ABT.annotation e) (Ann e typ) + incorporateType (a@(ABT.out -> ABT.Tm (TermAnn e _tp)), typ, _extra) = ABT.tm' (ABT.annotation a) (TermAnn e typ) + incorporateType (e, typ, _extra) = ABT.tm' (ABT.annotation e) (TermAnn e typ) -- keep these until we decide if we want to add the appropriate smart constructors back into this module -- incorporateType (Term.Ann' e _) typ = Term.ann () e typ @@ -127,16 +116,16 @@ hashComponents terms = -- What if there's a top-level Annotation but it doesn't match -- the type that was provided? -hashComponentsWithoutTypes :: Var v => Map v (Term v a) -> Map v (Reference.Id, Term v a) -hashComponentsWithoutTypes = ReferenceUtil.hashComponents $ refId () +hashTermComponentsWithoutTypes :: (Var v) => Map v (Term v a) -> Map v (ReferenceId, Term v a) +hashTermComponentsWithoutTypes = ReferenceUtil.hashComponents $ refId () -hashClosedTerm :: Var v => Term v a -> Reference.Id -hashClosedTerm tm = Reference.Id (ABT.hash tm) 0 +hashClosedTerm :: (Var v) => Term v a -> ReferenceId +hashClosedTerm tm = ReferenceId (ABT.hash tm) 0 -instance Var v => Hashable1 (F v a p) where - hash1 :: forall h x. (Ord h, Hashable.Accumulate h) => ([x] -> ([h], x -> h)) -> (x -> h) -> (F v a p) x -> h +instance (Var v) => Hashable1 (TermF v a p) where + hash1 :: forall x. ([x] -> ([Hash], x -> Hash)) -> (x -> Hash) -> (TermF v a p) x -> Hash hash1 hashCycle hash e = - let varint :: Integral i => i -> Hashable.Token h + let varint :: (Integral i) => i -> Hashable.Token varint = Hashable.Nat . fromIntegral tag = Hashable.Tag hashed = Hashable.Hashed @@ -146,11 +135,11 @@ instance Var v => Hashable1 (F v a p) where -- are 'transparent' wrt hash and hashing is unaffected by whether -- expressions are linked. So for example `x = 1 + 1` and `y = x` hash -- the same. 
- Ref (Reference.Derived h 0) -> Hashable.fromBytes (Hash.toByteString h) - Ref (Reference.Derived h i) -> + TermRef (ReferenceDerived h 0) -> Hash.fromByteString (Hash.toByteString h) + TermRef (ReferenceDerived h i) -> Hashable.accumulate [ tag 1, - hashed $ Hashable.fromBytes (Hash.toByteString h), + hashed $ Hash.fromByteString (Hash.toByteString h), Hashable.Nat i ] -- Note: start each layer with leading `1` byte, to avoid collisions @@ -160,42 +149,42 @@ instance Var v => Hashable1 (F v a p) where Hashable.accumulate $ tag 1 : case e of - Nat i -> [tag 64, accumulateToken i] - Int i -> [tag 65, accumulateToken i] - Float n -> [tag 66, Hashable.Double n] - Boolean b -> [tag 67, accumulateToken b] - Text t -> [tag 68, accumulateToken t] - Char c -> [tag 69, accumulateToken c] - Blank b -> + TermNat i -> [tag 64, accumulateToken i] + TermInt i -> [tag 65, accumulateToken i] + TermFloat n -> [tag 66, Hashable.Double n] + TermBoolean b -> [tag 67, accumulateToken b] + TermText t -> [tag 68, accumulateToken t] + TermChar c -> [tag 69, accumulateToken c] + TermBlank b -> tag 1 : case b of B.Blank -> [tag 0] B.Recorded (B.Placeholder _ s) -> [tag 1, Hashable.Text (Text.pack s)] B.Recorded (B.Resolve _ s) -> [tag 2, Hashable.Text (Text.pack s)] - Ref (Reference.Builtin name) -> [tag 2, accumulateToken name] - Ref Reference.Derived {} -> + TermRef (ReferenceBuiltin name) -> [tag 2, accumulateToken name] + TermRef ReferenceDerived {} -> error "handled above, but GHC can't figure this out" - App a a2 -> [tag 3, hashed (hash a), hashed (hash a2)] - Ann a t -> [tag 4, hashed (hash a), hashed (ABT.hash t)] - List as -> + TermApp a a2 -> [tag 3, hashed (hash a), hashed (hash a2)] + TermAnn a t -> [tag 4, hashed (hash a), hashed (ABT.hash t)] + TermList as -> tag 5 : varint (Sequence.length as) : map (hashed . hash) (toList as) - Lam a -> [tag 6, hashed (hash a)] + TermLam a -> [tag 6, hashed (hash a)] -- note: we use `hashCycle` to ensure result is independent of -- let binding order - LetRec as a -> case hashCycle as of + TermLetRec as a -> case hashCycle as of (hs, hash) -> tag 7 : hashed (hash a) : map hashed hs -- here, order is significant, so don't use hashCycle - Let b a -> [tag 8, hashed $ hash b, hashed $ hash a] - If b t f -> + TermLet b a -> [tag 8, hashed $ hash b, hashed $ hash a] + TermIf b t f -> [tag 9, hashed $ hash b, hashed $ hash t, hashed $ hash f] - Request r n -> [tag 10, accumulateToken r, varint n] - Constructor r n -> [tag 12, accumulateToken r, varint n] - Match e branches -> + TermRequest r n -> [tag 10, accumulateToken r, varint n] + TermConstructor r n -> [tag 12, accumulateToken r, varint n] + TermMatch e branches -> tag 13 : hashed (hash e) : concatMap h branches where h (MatchCase pat guard branch) = @@ -204,8 +193,8 @@ instance Var v => Hashable1 (F v a p) where toList (hashed . 
hash <$> guard), [hashed (hash branch)] ] - Handle h b -> [tag 15, hashed $ hash h, hashed $ hash b] - And x y -> [tag 16, hashed $ hash x, hashed $ hash y] - Or x y -> [tag 17, hashed $ hash x, hashed $ hash y] - TermLink r -> [tag 18, accumulateToken r] - TypeLink r -> [tag 19, accumulateToken r] + TermHandle h b -> [tag 15, hashed $ hash h, hashed $ hash b] + TermAnd x y -> [tag 16, hashed $ hash x, hashed $ hash y] + TermOr x y -> [tag 17, hashed $ hash x, hashed $ hash y] + TermTermLink r -> [tag 18, accumulateToken r] + TermTypeLink r -> [tag 19, accumulateToken r] diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/TermEdit.hs b/unison-hashing-v2/src/Unison/Hashing/V2/TermEdit.hs index e642df595..91bb5072e 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/TermEdit.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/TermEdit.hs @@ -4,9 +4,11 @@ import Unison.Hashing.V2.Referent (Referent) import Unison.Hashing.V2.Tokenizable (Tokenizable) import qualified Unison.Hashing.V2.Tokenizable as H -data TermEdit = Replace Referent | Deprecate +data TermEdit + = TermEditReplace Referent + | TermEditDeprecate deriving (Eq, Ord, Show) instance Tokenizable TermEdit where - tokens (Replace r) = [H.Tag 0] ++ H.tokens r - tokens Deprecate = [H.Tag 1] + tokens (TermEditReplace r) = [H.Tag 0] ++ H.tokens r + tokens TermEditDeprecate = [H.Tag 1] diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Tokenizable.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Tokenizable.hs index 54339a51f..83d74e308 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Tokenizable.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Tokenizable.hs @@ -1,9 +1,9 @@ module Unison.Hashing.V2.Tokenizable ( Tokenizable (..), - Accumulate (..), Hashable1 (..), Token (..), hashTokenizable, + accumulate, accumulateToken, ) where @@ -15,8 +15,8 @@ import Data.ByteString.Builder (doubleBE, int64BE, toLazyByteString, word64BE) import qualified Data.ByteString.Lazy as BL import qualified Data.Map as Map import qualified Data.Set as Set -import U.Util.Hash (Hash) -import qualified U.Util.Hash as Hash +import Unison.Hash (Hash) +import qualified Unison.Hash as Hash import Unison.Prelude import Unison.Util.Relation (Relation) import qualified Unison.Util.Relation as Relation @@ -34,28 +34,23 @@ import qualified Unison.Util.Relation4 as Relation4 -- simple types (like an Int for example) to keep the same hashes, which would lead to -- collisions in the `hash` table, since each hash has a different hash version but the same -- base32 representation. -hashingVersion :: Token h +hashingVersion :: Token hashingVersion = Tag 2 -data Token h +data Token = Tag !Word8 | Bytes !ByteString | Int !Int64 | Text !Text | Double !Double - | Hashed !h + | Hashed !Hash | Nat !Word64 -class Accumulate h where - accumulate :: [Token h] -> h - fromBytes :: ByteString -> h - toBytes :: h -> ByteString - -accumulateToken :: (Accumulate h, Tokenizable t) => t -> Token h +accumulateToken :: (Tokenizable t) => t -> Token accumulateToken = Hashed . hashTokenizable -- | Tokenize then accumulate a type into a Hash. -hashTokenizable :: (Tokenizable t, Accumulate h) => t -> h +hashTokenizable :: (Tokenizable t) => t -> Hash hashTokenizable = accumulate . tokens -- | Tokenizable converts a value into a set of hashing tokens which will later be accumulated @@ -74,9 +69,9 @@ hashTokenizable = accumulate . 
tokens -- hash (TaggedBranch _ b) = hash b -- @@ class Tokenizable t where - tokens :: Accumulate h => t -> [Token h] + tokens :: t -> [Token] -instance Tokenizable a => Tokenizable [a] where +instance (Tokenizable a) => Tokenizable [a] where tokens = map accumulateToken instance (Tokenizable a, Tokenizable b) => Tokenizable (a, b) where @@ -124,27 +119,22 @@ instance Tokenizable Bool where instance Tokenizable Hash where tokens h = [Bytes (Hash.toByteString h)] --- | A class for all types which can accumulate tokens into a hash. --- If you want to provide an instance for hashing a Unison value, see 'Tokenizable' --- and 'Hashable' instead. -instance Accumulate Hash where - accumulate = fromBytes . BA.convert . CH.hashFinalize . go CH.hashInit - where - go :: CH.Context CH.SHA3_512 -> [Token Hash] -> CH.Context CH.SHA3_512 - go acc tokens = CH.hashUpdates acc (hashingVersion : tokens >>= toBS) - toBS (Tag b) = [B.singleton b] - toBS (Bytes bs) = [encodeLength $ B.length bs, bs] - toBS (Int i) = [BL.toStrict . toLazyByteString . int64BE $ i] - toBS (Nat i) = [BL.toStrict . toLazyByteString . word64BE $ i] - toBS (Double d) = [BL.toStrict . toLazyByteString . doubleBE $ d] - toBS (Text txt) = - let tbytes = encodeUtf8 txt - in [encodeLength (B.length tbytes), tbytes] - toBS (Hashed h) = [Hash.toByteString h] - encodeLength :: Integral n => n -> B.ByteString - encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral - fromBytes = Hash.fromByteString - toBytes = Hash.toByteString +accumulate :: [Token] -> Hash +accumulate = Hash.fromByteString . BA.convert . CH.hashFinalize . go CH.hashInit + where + go :: CH.Context CH.SHA3_512 -> [Token] -> CH.Context CH.SHA3_512 + go acc tokens = CH.hashUpdates acc (hashingVersion : tokens >>= toBS) + toBS (Tag b) = [B.singleton b] + toBS (Bytes bs) = [encodeLength $ B.length bs, bs] + toBS (Int i) = [BL.toStrict . toLazyByteString . int64BE $ i] + toBS (Nat i) = [BL.toStrict . toLazyByteString . word64BE $ i] + toBS (Double d) = [BL.toStrict . toLazyByteString . doubleBE $ d] + toBS (Text txt) = + let tbytes = encodeUtf8 txt + in [encodeLength (B.length tbytes), tbytes] + toBS (Hashed h) = [Hash.toByteString h] + encodeLength :: (Integral n) => n -> B.ByteString + encodeLength = BL.toStrict . toLazyByteString . word64BE . fromIntegral class Hashable1 f where -- | Produce a hash for an `f a`, given a hashing function for `a`. 
@@ -173,4 +163,4 @@ class Hashable1 f where -- hash1 hashUnordered _ (U unordered uno dos) = -- let (hs, hash) = hashUnordered unordered -- in accumulate $ map Hashed hs ++ [Hashed (hash uno), Hashed (hash dos)] - hash1 :: (Ord h, Accumulate h) => ([a] -> ([h], a -> h)) -> (a -> h) -> f a -> h + hash1 :: ([a] -> ([Hash], a -> Hash)) -> (a -> Hash) -> f a -> Hash diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/Type.hs b/unison-hashing-v2/src/Unison/Hashing/V2/Type.hs index c587a2858..ee5ea8757 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/Type.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/Type.hs @@ -1,12 +1,12 @@ module Unison.Hashing.V2.Type ( Type, - F (..), + TypeF (..), bindExternal, bindReferences, -- * find by type index stuff - toReference, - toReferenceMentions, + typeToReference, + typeToReferenceMentions, -- * builtin term references booleanRef, @@ -25,8 +25,7 @@ import qualified Data.Set as Set import qualified Unison.ABT as ABT import qualified Unison.Hashing.V2.ABT as ABT import qualified Unison.Hashing.V2.Kind as K -import Unison.Hashing.V2.Reference (Reference) -import qualified Unison.Hashing.V2.Reference as Reference +import Unison.Hashing.V2.Reference (Reference (..), pattern ReferenceDerived) import Unison.Hashing.V2.Tokenizable (Hashable1) import qualified Unison.Hashing.V2.Tokenizable as Hashable import qualified Unison.Name as Name @@ -36,31 +35,31 @@ import qualified Unison.Util.List as List import Unison.Var (Var) -- | Base functor for types in the Unison language -data F a - = Ref Reference - | Arrow a a - | Ann a K.Kind - | App a a - | Effect a a - | Effects [a] - | Forall a - | IntroOuter a -- binder like ∀, used to introduce variables that are +data TypeF a + = TypeRef Reference + | TypeArrow a a + | TypeAnn a K.Kind + | TypeApp a a + | TypeEffect a a + | TypeEffects [a] + | TypeForall a + | TypeIntroOuter a -- binder like ∀, used to introduce variables that are -- bound by outer type signatures, to support scoped type -- variables deriving (Foldable, Functor, Traversable) -- | Types are represented as ABTs over the base functor F, with variables in `v` -type Type v a = ABT.Term F v a +type Type v a = ABT.Term TypeF v a freeVars :: Type v a -> Set v freeVars = ABT.freeVars bindExternal :: - ABT.Var v => [(v, Reference)] -> Type v a -> Type v a + (ABT.Var v) => [(v, Reference)] -> Type v a -> Type v a bindExternal bs = ABT.substsInheritAnnotation [(v, ref () r) | (v, r) <- bs] bindReferences :: - Var v => + (Var v) => (v -> Name.Name) -> Set v -> Map Name.Name Reference -> @@ -74,14 +73,14 @@ bindReferences unsafeVarToName keepFree ns t = in List.validate ok rs <&> \es -> bindExternal es t -- some smart patterns -pattern Ref' :: Reference -> ABT.Term F v a -pattern Ref' r <- ABT.Tm' (Ref r) +pattern TypeRef' :: Reference -> ABT.Term TypeF v a +pattern TypeRef' r <- ABT.Tm' (TypeRef r) pattern ForallsNamed' :: [v] -> Type v a -> Type v a pattern ForallsNamed' vs body <- (unForalls -> Just (vs, body)) -pattern ForallNamed' :: v -> ABT.Term F v a -> ABT.Term F v a -pattern ForallNamed' v body <- ABT.Tm' (Forall (ABT.out -> ABT.Abs v body)) +pattern ForallNamed' :: v -> ABT.Term TypeF v a -> ABT.Term TypeF v a +pattern ForallNamed' v body <- ABT.Tm' (TypeForall (ABT.out -> ABT.Abs v body)) unForalls :: Type v a -> Maybe ([v], Type v a) unForalls t = go t [] @@ -91,24 +90,24 @@ unForalls t = go t [] go body vs = Just (reverse vs, body) -- some smart constructors -ref :: Ord v => a -> Reference -> Type v a -ref a = ABT.tm' a . 
Ref +ref :: (Ord v) => a -> Reference -> Type v a +ref a = ABT.tm' a . TypeRef intRef, natRef, floatRef, booleanRef, textRef, charRef, listRef, effectRef :: Reference -intRef = Reference.Builtin "Int" -natRef = Reference.Builtin "Nat" -floatRef = Reference.Builtin "Float" -booleanRef = Reference.Builtin "Boolean" -textRef = Reference.Builtin "Text" -charRef = Reference.Builtin "Char" -listRef = Reference.Builtin "Sequence" -effectRef = Reference.Builtin "Effect" +intRef = ReferenceBuiltin "Int" +natRef = ReferenceBuiltin "Nat" +floatRef = ReferenceBuiltin "Float" +booleanRef = ReferenceBuiltin "Boolean" +textRef = ReferenceBuiltin "Text" +charRef = ReferenceBuiltin "Char" +listRef = ReferenceBuiltin "Sequence" +effectRef = ReferenceBuiltin "Effect" -forall :: Ord v => a -> v -> Type v a -> Type v a -forall a v body = ABT.tm' a (Forall (ABT.abs' a v body)) +forall :: (Ord v) => a -> v -> Type v a -> Type v a +forall a v body = ABT.tm' a (TypeForall (ABT.abs' a v body)) -- | Bind the given variables with an outer `forall`, if they are used in `t`. -generalize :: Ord v => [v] -> Type v a -> Type v a +generalize :: (Ord v) => [v] -> Type v a -> Type v a generalize vs t = foldr f t vs where f v t = @@ -118,36 +117,36 @@ unforall' :: Type v a -> ([v], Type v a) unforall' (ForallsNamed' vs t) = (vs, t) unforall' t = ([], t) -toReference :: (Ord v, Show v) => Type v a -> Reference -toReference (Ref' r) = r +typeToReference :: (Ord v, Show v) => Type v a -> Reference +typeToReference (TypeRef' r) = r -- a bit of normalization - any unused type parameters aren't part of the hash -toReference (ForallNamed' v body) | not (Set.member v (ABT.freeVars body)) = toReference body -toReference t = Reference.Derived (ABT.hash t) 0 +typeToReference (ForallNamed' v body) | not (Set.member v (ABT.freeVars body)) = typeToReference body +typeToReference t = ReferenceDerived (ABT.hash t) 0 -toReferenceMentions :: (Ord v, Show v) => Type v a -> Set Reference -toReferenceMentions ty = +typeToReferenceMentions :: (Ord v, Show v) => Type v a -> Set Reference +typeToReferenceMentions ty = let (vs, _) = unforall' ty gen ty = generalize (Set.toList (freeVars ty)) $ generalize vs ty - in Set.fromList $ toReference . gen <$> ABT.subterms ty + in Set.fromList $ typeToReference . gen <$> ABT.subterms ty -instance Hashable1 F where +instance Hashable1 TypeF where hash1 hashCycle hash e = let (tag, hashed) = (Hashable.Tag, Hashable.Hashed) in -- Note: start each layer with leading `0` byte, to avoid collisions with -- terms, which start each layer with leading `1`. 
See `Hashable1 Term.F` Hashable.accumulate $ tag 0 : case e of - Ref r -> [tag 0, Hashable.accumulateToken r] - Arrow a b -> [tag 1, hashed (hash a), hashed (hash b)] - App a b -> [tag 2, hashed (hash a), hashed (hash b)] - Ann a k -> [tag 3, hashed (hash a), Hashable.accumulateToken k] + TypeRef r -> [tag 0, Hashable.accumulateToken r] + TypeArrow a b -> [tag 1, hashed (hash a), hashed (hash b)] + TypeApp a b -> [tag 2, hashed (hash a), hashed (hash b)] + TypeAnn a k -> [tag 3, hashed (hash a), Hashable.accumulateToken k] -- Example: -- a) {Remote, Abort} (() -> {Remote} ()) should hash the same as -- b) {Abort, Remote} (() -> {Remote} ()) but should hash differently from -- c) {Remote, Abort} (() -> {Abort} ()) - Effects es -> + TypeEffects es -> let (hs, _) = hashCycle es in tag 4 : map hashed hs - Effect e t -> [tag 5, hashed (hash e), hashed (hash t)] - Forall a -> [tag 6, hashed (hash a)] - IntroOuter a -> [tag 7, hashed (hash a)] + TypeEffect e t -> [tag 5, hashed (hash e), hashed (hash t)] + TypeForall a -> [tag 6, hashed (hash a)] + TypeIntroOuter a -> [tag 7, hashed (hash a)] diff --git a/unison-hashing-v2/src/Unison/Hashing/V2/TypeEdit.hs b/unison-hashing-v2/src/Unison/Hashing/V2/TypeEdit.hs index cf5833fb4..fc05e7bc3 100644 --- a/unison-hashing-v2/src/Unison/Hashing/V2/TypeEdit.hs +++ b/unison-hashing-v2/src/Unison/Hashing/V2/TypeEdit.hs @@ -4,9 +4,11 @@ import Unison.Hashing.V2.Reference (Reference) import Unison.Hashing.V2.Tokenizable (Tokenizable) import qualified Unison.Hashing.V2.Tokenizable as H -data TypeEdit = Replace Reference | Deprecate +data TypeEdit + = TypeEditReplace Reference + | TypeEditDeprecate deriving (Eq, Ord, Show) instance Tokenizable TypeEdit where - tokens (Replace r) = H.Tag 0 : H.tokens r - tokens Deprecate = [H.Tag 1] + tokens (TypeEditReplace r) = H.Tag 0 : H.tokens r + tokens TypeEditDeprecate = [H.Tag 1] diff --git a/unison-hashing-v2/unison-hashing-v2.cabal b/unison-hashing-v2/unison-hashing-v2.cabal index b971aede4..91abf7bbd 100644 --- a/unison-hashing-v2/unison-hashing-v2.cabal +++ b/unison-hashing-v2/unison-hashing-v2.cabal @@ -17,12 +17,15 @@ source-repository head library exposed-modules: + Unison.Hashing.V2 + other-modules: Unison.Hashing.V2.ABT Unison.Hashing.V2.Branch Unison.Hashing.V2.Causal + Unison.Hashing.V2.ConstructorId Unison.Hashing.V2.DataDeclaration - Unison.Hashing.V2.Hashable Unison.Hashing.V2.Kind + Unison.Hashing.V2.NameSegment Unison.Hashing.V2.Patch Unison.Hashing.V2.Pattern Unison.Hashing.V2.Reference @@ -49,6 +52,7 @@ library FlexibleContexts FlexibleInstances GeneralizedNewtypeDeriving + InstanceSigs LambdaCase MultiParamTypeClasses NamedFieldPuns @@ -70,6 +74,8 @@ library , semialign , text , unison-core1 + , unison-hash + , unison-hashing , unison-prelude , unison-util-base32hex , unison-util-relation diff --git a/unison-share-api/package.yaml b/unison-share-api/package.yaml index 2d92ecdf4..fbbd504ab 100644 --- a/unison-share-api/package.yaml +++ b/unison-share-api/package.yaml @@ -44,11 +44,12 @@ dependencies: - unison-codebase-sqlite - unison-core - unison-core1 + - unison-hash + - unison-hash-orphans-aeson - unison-parser-typechecker - unison-prelude - unison-pretty-printer - unison-util-base32hex - - unison-util-base32hex-orphans-aeson - unison-util-relation - unison-sqlite - unison-syntax @@ -69,6 +70,7 @@ default-extensions: - ConstraintKinds - DeriveAnyClass - DeriveFunctor + - DeriveGeneric - DerivingStrategies - DerivingVia - DoAndIfThenElse @@ -86,6 +88,7 @@ default-extensions: - PatternSynonyms - 
RankNTypes - ScopedTypeVariables + - StandaloneDeriving - TupleSections - TypeApplications - TypeOperators diff --git a/unison-share-api/src/Unison/Server/Backend.hs b/unison-share-api/src/Unison/Server/Backend.hs index f02159f48..dffb01b91 100644 --- a/unison-share-api/src/Unison/Server/Backend.hs +++ b/unison-share-api/src/Unison/Server/Backend.hs @@ -213,10 +213,12 @@ parseNamesForBranch root = namesForBranch root <&> \(n, _, _) -> n prettyNamesForBranch :: Branch m -> NameScoping -> Names prettyNamesForBranch root = namesForBranch root <&> \(_, n, _) -> n -shallowPPE :: MonadIO m => Codebase m v a -> V2Branch.Branch m -> m PPE.PrettyPrintEnv +shallowPPE :: (MonadIO m) => Codebase m v a -> V2Branch.Branch m -> m PPE.PrettyPrintEnv shallowPPE codebase b = do - hashLength <- Codebase.runTransaction codebase Codebase.hashLength - names <- shallowNames codebase b + (hashLength, names) <- Codebase.runTransaction codebase do + hl <- Codebase.hashLength + names <- shallowNames codebase b + pure (hl, names) pure $ PPED.suffixifiedPPE . PPED.fromNamesDecl hashLength $ NamesWithHistory names mempty -- | A 'Names' which only includes mappings for things _directly_ accessible from the branch. @@ -224,7 +226,7 @@ shallowPPE codebase b = do -- I.e. names in nested children are omitted. -- This should probably live elsewhere, but the package dependency graph makes it hard to find -- a good place. -shallowNames :: forall m v a. Monad m => Codebase m v a -> V2Branch.Branch m -> m Names +shallowNames :: forall m v a. (Monad m) => Codebase m v a -> V2Branch.Branch m -> Sqlite.Transaction Names shallowNames codebase b = do newTerms <- V2Branch.terms b @@ -336,7 +338,7 @@ fuzzyFind printNames query = -- List the immediate children of a namespace lsAtPath :: - MonadIO m => + (MonadIO m) => Codebase m Symbol Ann -> -- The root to follow the path from. 
Maybe (V2Branch.Branch Sqlite.Transaction) -> @@ -406,14 +408,16 @@ resultListType :: (Ord v, Monoid a) => Type v a resultListType = Type.app mempty (Type.list mempty) (Type.ref mempty Decls.testResultRef) termListEntry :: - MonadIO m => + (MonadIO m) => Codebase m Symbol Ann -> V2Branch.Branch n -> ExactName NameSegment V2Referent.Referent -> m (TermEntry Symbol Ann) termListEntry codebase branch (ExactName nameSegment ref) = do - v1Referent <- Cv.referent2to1 (Codebase.getDeclType codebase) ref - ot <- Codebase.runTransaction codebase (loadReferentType codebase v1Referent) + ot <- Codebase.runTransaction codebase $ do + v1Referent <- Cv.referent2to1 (Codebase.getDeclType codebase) ref + ot <- loadReferentType codebase v1Referent + pure (ot) tag <- getTermTag codebase ref ot pure $ TermEntry @@ -433,7 +437,7 @@ termListEntry codebase branch (ExactName nameSegment ref) = do & (> 1) getTermTag :: - (Monad m, Var v) => + (Var v, MonadIO m) => Codebase m v a -> V2Referent.Referent -> Maybe (Type v Ann) -> @@ -452,7 +456,7 @@ getTermTag codebase r sig = do Nothing -> False constructorType <- case r of V2Referent.Ref {} -> pure Nothing - V2Referent.Con ref _ -> Just <$> Codebase.getDeclType codebase ref + V2Referent.Con ref _ -> Just <$> Codebase.runTransaction codebase (Codebase.getDeclType codebase ref) pure $ if | isDoc -> Doc @@ -462,7 +466,7 @@ getTermTag codebase r sig = do | otherwise -> Plain getTypeTag :: - Var v => + (Var v) => Codebase m v Ann -> Reference -> Sqlite.Transaction TypeTag @@ -476,7 +480,7 @@ getTypeTag codebase r = do _ -> pure (if Set.member r Type.builtinAbilities then Ability else Data) typeListEntry :: - Var v => + (Var v) => Codebase m v Ann -> V2Branch.Branch n -> ExactName NameSegment Reference -> @@ -502,7 +506,7 @@ typeListEntry codebase b (ExactName nameSegment ref) = do typeDeclHeader :: forall v m. 
- Var v => + (Var v) => Codebase m v Ann -> PPE.PrettyPrintEnv -> Reference -> @@ -531,7 +535,7 @@ formatTypeName' ppe r = $ PPE.typeName ppe r termEntryToNamedTerm :: - Var v => PPE.PrettyPrintEnv -> Maybe Width -> TermEntry v a -> NamedTerm + (Var v) => PPE.PrettyPrintEnv -> Maybe Width -> TermEntry v a -> NamedTerm termEntryToNamedTerm ppe typeWidth te@TermEntry {termEntryType = mayType, termEntryTag = tag, termEntryHash} = NamedTerm { termName = termEntryHQName te, @@ -550,7 +554,7 @@ typeEntryToNamedType te@TypeEntry {typeEntryTag, typeEntryHash} = -- | Find all definitions and children reachable from the given 'V2Branch.Branch', lsBranch :: - MonadIO m => + (MonadIO m) => Codebase m Symbol Ann -> V2Branch.Branch n -> m [ShallowListEntry Symbol Ann] @@ -778,9 +782,9 @@ hqNameQuery codebase NameSearch {typeSearch, termSearch} hqs = do } -- TODO: Move this to its own module -data DefinitionResults v = DefinitionResults - { termResults :: Map Reference (DisplayObject (Type v Ann) (Term v Ann)), - typeResults :: Map Reference (DisplayObject () (DD.Decl v Ann)), +data DefinitionResults = DefinitionResults + { termResults :: Map Reference (DisplayObject (Type Symbol Ann) (Term Symbol Ann)), + typeResults :: Map Reference (DisplayObject () (DD.Decl Symbol Ann)), noResults :: [HQ.HashQualified Name] } @@ -805,15 +809,15 @@ getShallowCausalAtPathFromRootHash mayRootHash path = do Just h -> Codebase.expectCausalBranchByCausalHash h Codebase.getShallowCausalAtPath path (Just shallowRoot) -formatType' :: Var v => PPE.PrettyPrintEnv -> Width -> Type v a -> SyntaxText +formatType' :: (Var v) => PPE.PrettyPrintEnv -> Width -> Type v a -> SyntaxText formatType' ppe w = Pretty.render w . TypePrinter.prettySyntax ppe -formatType :: Var v => PPE.PrettyPrintEnv -> Width -> Type v a -> Syntax.SyntaxText +formatType :: (Var v) => PPE.PrettyPrintEnv -> Width -> Type v a -> Syntax.SyntaxText formatType ppe w = mungeSyntaxText . formatType' ppe w formatSuffixedType :: - Var v => + (Var v) => PPED.PrettyPrintEnvDecl -> Width -> Type v Ann -> @@ -821,7 +825,7 @@ formatSuffixedType :: formatSuffixedType ppe = formatType (PPED.suffixifiedPPE ppe) mungeSyntaxText :: - Functor g => g (UST.Element Reference) -> g Syntax.Element + (Functor g) => g (UST.Element Reference) -> g Syntax.Element mungeSyntaxText = fmap Syntax.convertElement -- | Renders a definition for the given name or hash alongside its documentation. @@ -974,7 +978,7 @@ renderDoc ppe width rt codebase r = do let hash = Reference.toText r (name,hash,) <$> let tm = Term.ref () r - in Doc.renderDoc + in Doc.evalAndRenderDoc ppe terms typeOf @@ -1089,7 +1093,7 @@ docsInBranchToHtmlFiles runtime codebase root currentPath directory = do writeFile fullPath (Text.unpack fileContents) bestNameForTerm :: - forall v. Var v => PPE.PrettyPrintEnv -> Width -> Referent -> Text + forall v. (Var v) => PPE.PrettyPrintEnv -> Width -> Referent -> Text bestNameForTerm ppe width = Text.pack . Pretty.render width @@ -1099,7 +1103,7 @@ bestNameForTerm ppe width = . Term.fromReferent mempty bestNameForType :: - forall v. Var v => PPE.PrettyPrintEnv -> Width -> Reference -> Text + forall v. (Var v) => PPE.PrettyPrintEnv -> Width -> Reference -> Text bestNameForType ppe width = Text.pack . Pretty.render width @@ -1116,7 +1120,7 @@ bestNameForType ppe width = -- The 'suffixified' component of this ppe will search for the shortest unambiguous suffix within the scope in which the name is found (local, falling back to global) scopedNamesForBranchHash :: forall m n v a. 
- MonadIO m => + (MonadIO m) => Codebase m v a -> Maybe (V2Branch.CausalBranch n) -> Path -> @@ -1159,7 +1163,7 @@ scopedNamesForBranchHash codebase mbh path = do pure (ScopedNames.parseNames scopedNames, ScopedNames.namesAtPath scopedNames) resolveCausalHash :: - Monad m => Maybe CausalHash -> Codebase m v a -> Backend m (Branch m) + (Monad m) => Maybe CausalHash -> Codebase m v a -> Backend m (Branch m) resolveCausalHash h codebase = case h of Nothing -> lift (Codebase.getRootBranch codebase) Just bhash -> do @@ -1172,7 +1176,7 @@ resolveCausalHashV2 h = case h of Just ch -> Codebase.expectCausalBranchByCausalHash ch resolveRootBranchHash :: - MonadIO m => Maybe ShortCausalHash -> Codebase m v a -> Backend m (Branch m) + (MonadIO m) => Maybe ShortCausalHash -> Codebase m v a -> Backend m (Branch m) resolveRootBranchHash mayRoot codebase = case mayRoot of Nothing -> lift (Codebase.getRootBranch codebase) @@ -1204,7 +1208,7 @@ definitionsBySuffixes :: NameSearch -> IncludeCycles -> [HQ.HashQualified Name] -> - Sqlite.Transaction (DefinitionResults Symbol) + Sqlite.Transaction DefinitionResults definitionsBySuffixes codebase nameSearch includeCycles query = do QueryResult misses results <- hqNameQuery codebase nameSearch query -- todo: remember to replace this with getting components directly, @@ -1257,8 +1261,8 @@ displayType codebase = \case pure (UserObject decl) termsToSyntax :: - Var v => - Ord a => + (Var v) => + (Ord a) => Suffixify -> Width -> PPED.PrettyPrintEnvDecl -> @@ -1287,8 +1291,8 @@ termsToSyntax suff width ppe0 terms = $ TermPrinter.prettyBinding (ppeBody r) n tm typesToSyntax :: - Var v => - Ord a => + (Var v) => + (Ord a) => Suffixify -> Width -> PPED.PrettyPrintEnvDecl -> diff --git a/unison-share-api/src/Unison/Server/CodebaseServer.hs b/unison-share-api/src/Unison/Server/CodebaseServer.hs index 4344903e6..9892ad2e8 100644 --- a/unison-share-api/src/Unison/Server/CodebaseServer.hs +++ b/unison-share-api/src/Unison/Server/CodebaseServer.hs @@ -277,7 +277,7 @@ startServer env opts rt codebase onStart = do token <- case token opts of Just t -> return $ C8.pack t _ -> genToken - let baseUrl = BaseUrl "http://127.0.0.1" token + let baseUrl = BaseUrl (fromMaybe "http://127.0.0.1" (host opts)) token let settings = defaultSettings & maybe id setPort (port opts) @@ -347,7 +347,7 @@ serveDocs _ respond = respond $ responseLBS ok200 [plain] docsBS serveOpenAPI :: Handler OpenApi serveOpenAPI = pure openAPI -hoistWithAuth :: forall api. HasServer api '[] => Proxy api -> ByteString -> ServerT api Handler -> ServerT (Authed api) Handler +hoistWithAuth :: forall api. 
(HasServer api '[]) => Proxy api -> ByteString -> ServerT api Handler -> ServerT (Authed api) Handler hoistWithAuth api expectedToken server token = hoistServer @api @Handler @Handler api (\h -> handleAuth expectedToken token *> h) server serveUnison :: diff --git a/unison-share-api/src/Unison/Server/Doc.hs b/unison-share-api/src/Unison/Server/Doc.hs index ea0c9eb44..d0b2db216 100644 --- a/unison-share-api/src/Unison/Server/Doc.hs +++ b/unison-share-api/src/Unison/Server/Doc.hs @@ -3,25 +3,20 @@ {-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE ViewPatterns #-} module Unison.Server.Doc where import Control.Lens (view, (^.)) import Control.Monad -import Control.Monad.Trans.Maybe (MaybeT (..), runMaybeT) import Data.Aeson (ToJSON) import Data.Foldable import Data.Functor -import Data.Map (Map) import qualified Data.Map as Map -import Data.Maybe (fromMaybe) import Data.OpenApi (ToSchema) import qualified Data.Set as Set -import Data.Text (Text) -import qualified Data.Text as Text import Data.Word -import GHC.Generics (Generic) import qualified Unison.ABT as ABT import qualified Unison.Builtin.Decls as DD import qualified Unison.Builtin.Decls as Decls @@ -29,6 +24,8 @@ import Unison.Codebase.Editor.DisplayObject (DisplayObject) import qualified Unison.Codebase.Editor.DisplayObject as DO import qualified Unison.ConstructorReference as ConstructorReference import qualified Unison.DataDeclaration as DD +import qualified Unison.LabeledDependency as LD +import Unison.Prelude import qualified Unison.PrettyPrintEnv as PPE import qualified Unison.PrettyPrintEnvDecl as PPE import Unison.Reference (Reference) @@ -57,37 +54,48 @@ type Nat = Word64 type SSyntaxText = S.SyntaxText' Reference -data Doc +-- | A doc rendered down to SyntaxText. +type Doc = DocG RenderedSpecialForm + +-- | A doc which has been evaluated and includes all information necessary to be rendered. +type EvaluatedDoc v = DocG (EvaluatedSpecialForm v) + +type SrcRefs = Ref (UnisonHash, DisplayObject SyntaxText Src) + +-- | A doc parameterized by its special forms. 
+data DocG specialForm = Word Text - | Code Doc - | CodeBlock Text Doc - | Bold Doc - | Italic Doc - | Strikethrough Doc - | Style Text Doc - | Anchor Text Doc - | Blockquote Doc + | Code (DocG specialForm) + | CodeBlock Text (DocG specialForm) + | Bold (DocG specialForm) + | Italic (DocG specialForm) + | Strikethrough (DocG specialForm) + | Style Text (DocG specialForm) + | Anchor Text (DocG specialForm) + | Blockquote (DocG specialForm) | Blankline | Linebreak | SectionBreak - | Tooltip Doc Doc - | Aside Doc - | Callout (Maybe Doc) Doc - | Table [[Doc]] - | Folded Bool Doc Doc - | Paragraph [Doc] - | BulletedList [Doc] - | NumberedList Nat [Doc] - | Section Doc [Doc] - | NamedLink Doc Doc - | Image Doc Doc (Maybe Doc) - | Special SpecialForm - | Join [Doc] - | UntitledSection [Doc] - | Column [Doc] - | Group Doc - deriving stock (Eq, Show, Generic) - deriving anyclass (ToJSON, ToSchema) + | Tooltip (DocG specialForm) (DocG specialForm) + | Aside (DocG specialForm) + | Callout (Maybe (DocG specialForm)) (DocG specialForm) + | Table [[(DocG specialForm)]] + | Folded Bool (DocG specialForm) (DocG specialForm) + | Paragraph [(DocG specialForm)] + | BulletedList [(DocG specialForm)] + | NumberedList Nat [(DocG specialForm)] + | Section (DocG specialForm) [(DocG specialForm)] + | NamedLink (DocG specialForm) (DocG specialForm) + | Image (DocG specialForm) (DocG specialForm) (Maybe (DocG specialForm)) + | Special specialForm + | Join [(DocG specialForm)] + | UntitledSection [(DocG specialForm)] + | Column [(DocG specialForm)] + | Group (DocG specialForm) + deriving stock (Eq, Show, Generic, Functor, Foldable, Traversable) + deriving anyclass (ToJSON) + +deriving instance (ToSchema specialForm) => ToSchema (DocG specialForm) type UnisonHash = Text @@ -95,15 +103,15 @@ data Ref a = Term a | Type a deriving stock (Eq, Show, Generic, Functor, Foldable, Traversable) deriving anyclass (ToJSON) -instance ToSchema a => ToSchema (Ref a) +instance (ToSchema a) => ToSchema (Ref a) data MediaSource = MediaSource {mediaSourceUrl :: Text, mediaSourceMimeType :: Maybe Text} deriving stock (Eq, Show, Generic) deriving anyclass (ToJSON, ToSchema) -data SpecialForm - = Source [Ref (UnisonHash, DisplayObject SyntaxText Src)] - | FoldedSource [Ref (UnisonHash, DisplayObject SyntaxText Src)] +data RenderedSpecialForm + = Source [SrcRefs] + | FoldedSource [SrcRefs] | Example SyntaxText | ExampleBlock SyntaxText | Link SyntaxText @@ -115,15 +123,40 @@ data SpecialForm | EmbedInline SyntaxText | Video [MediaSource] (Map Text Text) | FrontMatter (Map Text [Text]) + | LaTeXInline Text + | Svg Text + | RenderError (RenderError SyntaxText) deriving stock (Eq, Show, Generic) deriving anyclass (ToJSON, ToSchema) +data EvaluatedSpecialForm v + = ESource [(EvaluatedSrc v)] + | EFoldedSource [(EvaluatedSrc v)] + | EExample (Term v ()) + | EExampleBlock (Term v ()) + | ELink (Either (Term v ()) LD.LabeledDependency) + | ESignature [(Referent, Type v ())] + | ESignatureInline (Referent, Type v ()) + | -- Result is Nothing if there was an Eval failure + EEval (Term v ()) (Maybe (Term v ())) + | -- Result is Nothing if there was an Eval failure + EEvalInline (Term v ()) (Maybe (Term v ())) + | EEmbed (Term v ()) + | EEmbedInline (Term v ()) + | EVideo [MediaSource] (Map Text Text) + | EFrontMatter (Map Text [Text]) + | ELaTeXInline Text + | ESvg Text + | ERenderError (RenderError (Term v ())) + deriving stock (Eq, Show, Generic) + -- `Src folded unfolded` data Src = Src SyntaxText SyntaxText deriving stock (Eq, Show, Generic) 
deriving anyclass (ToJSON, ToSchema) -renderDoc :: +-- | Evaluate the doc, then render it. +evalAndRenderDoc :: forall v m. (Var v, Monad m) => PPE.PrettyPrintEnvDecl -> @@ -133,11 +166,121 @@ renderDoc :: (Reference -> m (Maybe (DD.Decl v ()))) -> Term v () -> m Doc -renderDoc pped terms typeOf eval types tm = +evalAndRenderDoc pped terms typeOf eval types tm = + renderDoc pped <$> evalDoc terms typeOf eval types tm + +-- | Renders the given doc, which must have been evaluated using 'evalDoc' +renderDoc :: + forall v. + (Var v) => + PPE.PrettyPrintEnvDecl -> + EvaluatedDoc v -> + Doc +renderDoc pped doc = renderSpecial <$> doc + where + suffixifiedPPE = PPE.suffixifiedPPE pped + formatPretty = fmap Syntax.convertElement . P.render (P.Width 70) + + formatPrettyType :: PPE.PrettyPrintEnv -> Type v a -> SyntaxText + formatPrettyType ppe typ = formatPretty (TypePrinter.prettySyntax ppe typ) + + source :: Term v () -> SyntaxText + source tm = formatPretty $ TermPrinter.prettyBlock' True (PPE.suffixifiedPPE pped) tm + + goSignatures :: [(Referent, Type v ())] -> [P.Pretty SSyntaxText] + goSignatures types = + fmap P.group $ + TypePrinter.prettySignaturesST + (PPE.suffixifiedPPE pped) + [(r, PPE.termName (PPE.suffixifiedPPE pped) r, ty) | (r, ty) <- types] + + renderSpecial :: EvaluatedSpecialForm v -> RenderedSpecialForm + renderSpecial = \case + ESource srcs -> Source (renderSrc srcs) + EFoldedSource srcs -> FoldedSource (renderSrc srcs) + EExample trm -> Example (source trm) + EExampleBlock trm -> ExampleBlock (source trm) + ELink ref -> + let ppe = PPE.suffixifiedPPE pped + tm :: Referent -> P.Pretty SSyntaxText + tm r = (NP.styleHashQualified'' (NP.fmt (S.TermReference r)) . PPE.termName ppe) r + ty :: Reference -> P.Pretty SSyntaxText + ty r = (NP.styleHashQualified'' (NP.fmt (S.TypeReference r)) . PPE.typeName ppe) r + in Link $ case ref of + Left trm -> source trm + Right ld -> case ld of + LD.TermReferent r -> (formatPretty . tm) r + LD.TypeReference r -> (formatPretty . ty) r + ESignature rs -> Signature (map formatPretty $ goSignatures rs) + ESignatureInline r -> SignatureInline (formatPretty (P.lines $ goSignatures [r])) + EEval trm result -> + let renderedTrm = source trm + in case result of + Nothing -> Eval renderedTrm evalErrMsg + Just renderedResult -> Eval renderedTrm (source renderedResult) + EEvalInline trm result -> + let renderedTrm = source trm + in case result of + Nothing -> EvalInline renderedTrm evalErrMsg + Just renderedResult -> EvalInline renderedTrm (source renderedResult) + EEmbed any -> Embed ("{{ embed {{" <> source any <> "}} }}") + EEmbedInline any -> EmbedInline ("{{ embed {{" <> source any <> "}} }}") + EVideo sources config -> Video sources config + EFrontMatter frontMatter -> FrontMatter frontMatter + ELaTeXInline latex -> LaTeXInline latex + ESvg svg -> Svg svg + ERenderError (InvalidTerm tm) -> Embed ("🆘 unable to render " <> source tm) + + evalErrMsg :: SyntaxText + evalErrMsg = "🆘 An error occured during evaluation" + + renderSrc :: [EvaluatedSrc v] -> [Ref (UnisonHash, DisplayObject SyntaxText Src)] + renderSrc srcs = + srcs & foldMap \case + EvaluatedSrcDecl srcDecl -> case srcDecl of + MissingDecl r -> [(Type (Reference.toText r, DO.MissingObject (SH.unsafeFromText $ Reference.toText r)))] + BuiltinDecl r -> + let name = + formatPretty + . NP.styleHashQualified (NP.fmt (S.TypeReference r)) + . 
PPE.typeName suffixifiedPPE + $ r + in [Type (Reference.toText r, DO.BuiltinObject name)] + FoundDecl r decl -> [Type (Reference.toText r, DO.UserObject (Src folded full))] + where + full = formatPretty (DeclPrinter.prettyDecl pped r (PPE.typeName suffixifiedPPE r) decl) + folded = formatPretty (DeclPrinter.prettyDeclHeader (PPE.typeName suffixifiedPPE r) decl) + EvaluatedSrcTerm srcTerm -> case srcTerm of + MissingBuiltinTypeSig r -> [(Type (Reference.toText r, DO.BuiltinObject "🆘 missing type signature"))] + BuiltinTypeSig r typ -> [Type (Reference.toText r, DO.BuiltinObject (formatPrettyType suffixifiedPPE typ))] + MissingTerm r -> [Term (Reference.toText r, DO.MissingObject (SH.unsafeFromText $ Reference.toText r))] + FoundTerm ref typ tm -> + let name = PPE.termName suffixifiedPPE (Referent.Ref ref) + folded = + formatPretty . P.lines $ + TypePrinter.prettySignaturesST suffixifiedPPE [(Referent.Ref ref, name, typ)] + full tm@(Term.Ann' _ _) _ = + formatPretty (TermPrinter.prettyBinding suffixifiedPPE name tm) + full tm typ = + formatPretty (TermPrinter.prettyBinding suffixifiedPPE name (Term.ann () tm typ)) + in [Term (Reference.toText ref, DO.UserObject (Src folded (full tm typ)))] + +-- | Evaluates the given doc, expanding transclusions, expressions, etc. +evalDoc :: + forall v m. + (Var v, Monad m) => + (Reference -> m (Maybe (Term v ()))) -> + (Referent -> m (Maybe (Type v ()))) -> + (Term v () -> m (Maybe (Term v ()))) -> + (Reference -> m (Maybe (DD.Decl v ()))) -> + Term v () -> + m (EvaluatedDoc v) +evalDoc terms typeOf eval types tm = eval tm >>= \case Nothing -> pure $ Word "🆘 doc rendering failed during evaluation" Just tm -> go tm where + go :: Term v () -> m (EvaluatedDoc v) go = \case DD.Doc2Word txt -> pure $ Word txt DD.Doc2Code d -> Code <$> go d @@ -172,78 +315,62 @@ renderDoc pped terms typeOf eval types tm = DD.Doc2UntitledSection ds -> UntitledSection <$> traverse go ds DD.Doc2Column ds -> Column <$> traverse go ds DD.Doc2Group d -> Group <$> go d - wat -> - pure . Word . Text.pack . P.toPlain (P.Width 80) . P.indent "🆘 " - . TermPrinter.pretty (PPE.suffixifiedPPE pped) - $ wat + wat -> pure $ Special $ ERenderError (InvalidTerm wat) - formatPretty = fmap Syntax.convertElement . P.render (P.Width 70) - formatPrettyType ppe typ = formatPretty (TypePrinter.prettySyntax ppe typ) - - source :: Term v () -> m SyntaxText - source tm = pure . formatPretty $ TermPrinter.prettyBlock' True (PPE.suffixifiedPPE pped) tm - - goSignatures :: [Referent] -> m [P.Pretty SSyntaxText] + goSignatures :: [Referent] -> m [(Referent, Type v ())] goSignatures rs = runMaybeT (traverse (MaybeT . typeOf) rs) >>= \case - Nothing -> pure ["🆘 codebase is missing type signature for these definitions"] - Just types -> - pure . 
fmap P.group $ - TypePrinter.prettySignaturesST - (PPE.suffixifiedPPE pped) - [(r, PPE.termName (PPE.suffixifiedPPE pped) r, ty) | (r, ty) <- zip rs types] + Nothing -> error "🆘 codebase is missing type signature for these definitions" + Just types -> pure (zip rs types) - goSpecial :: Term v () -> m SpecialForm + goSpecial :: Term v () -> m (EvaluatedSpecialForm v) goSpecial = \case - DD.Doc2SpecialFormFoldedSource (Term.List' es) -> FoldedSource <$> goSrc (toList es) + DD.Doc2SpecialFormFoldedSource (Term.List' es) -> EFoldedSource <$> goSrc (toList es) -- Source [Either Link.Type Doc2.Term] - DD.Doc2SpecialFormSource (Term.List' es) -> Source <$> goSrc (toList es) + DD.Doc2SpecialFormSource (Term.List' es) -> ESource <$> goSrc (toList es) -- Example Nat Doc2.Term -- Examples like `foo x y` are encoded as `Example 2 (_ x y -> foo)`, where -- 2 is the number of variables that should be dropped from the rendering. -- So this will render as `foo x y`. DD.Doc2SpecialFormExample n (DD.Doc2Example vs body) -> - Example <$> source ex + pure $ EExample ex where ex = Term.lam' (ABT.annotation body) (drop (fromIntegral n) vs) body DD.Doc2SpecialFormExampleBlock n (DD.Doc2Example vs body) -> - ExampleBlock <$> source ex + pure $ EExampleBlock ex where ex = Term.lam' (ABT.annotation body) (drop (fromIntegral n) vs) body -- Link (Either Link.Type Doc2.Term) DD.Doc2SpecialFormLink e -> - let ppe = PPE.suffixifiedPPE pped - tm :: Referent -> P.Pretty SSyntaxText - tm r = (NP.styleHashQualified'' (NP.fmt (S.TermReference r)) . PPE.termName ppe) r - ty :: Reference -> P.Pretty SSyntaxText - ty r = (NP.styleHashQualified'' (NP.fmt (S.TypeReference r)) . PPE.typeName ppe) r - in Link <$> case e of - DD.EitherLeft' (Term.TypeLink' r) -> (pure . formatPretty . ty) r + let tm :: Referent -> (Either a LD.LabeledDependency) + tm r = Right $ LD.TermReferent r + ty :: Reference -> (Either a LD.LabeledDependency) + ty r = Right $ LD.TypeReference r + in ELink <$> case e of + DD.EitherLeft' (Term.TypeLink' r) -> pure $ ty r DD.EitherRight' (DD.Doc2Term t) -> case Term.etaNormalForm t of - Term.Referent' r -> (pure . formatPretty . 
tm) r - x -> source x - _ -> source e + Term.Referent' r -> pure $ tm r + x -> pure $ Left x + _ -> pure $ Left e DD.Doc2SpecialFormSignature (Term.List' tms) -> let rs = [r | DD.Doc2Term (Term.Referent' r) <- toList tms] - in goSignatures rs <&> \s -> Signature (map formatPretty s) + in goSignatures rs <&> \s -> ESignature s -- SignatureInline Doc2.Term DD.Doc2SpecialFormSignatureInline (DD.Doc2Term (Term.Referent' r)) -> - goSignatures [r] <&> \s -> SignatureInline (formatPretty (P.lines s)) + goSignatures [r] <&> \[s] -> ESignatureInline s -- Eval Doc2.Term - DD.Doc2SpecialFormEval (DD.Doc2Term tm) -> - eval tm >>= \case - Nothing -> Eval <$> source tm <*> pure evalErrMsg - Just result -> Eval <$> source tm <*> source result + DD.Doc2SpecialFormEval (DD.Doc2Term tm) -> do + result <- eval tm + pure $ EEval tm result -- EvalInline Doc2.Term - DD.Doc2SpecialFormEvalInline (DD.Doc2Term tm) -> - eval tm >>= \case - Nothing -> EvalInline <$> source tm <*> pure evalErrMsg - Just result -> EvalInline <$> source tm <*> source result + DD.Doc2SpecialFormEvalInline (DD.Doc2Term tm) -> do + result <- eval tm + pure $ EEvalInline tm result -- Embed Video DD.Doc2SpecialFormEmbedVideo sources config -> - pure $ Video sources' config' + pure $ EVideo sources' config' where sources' = [MediaSource url mimeType | DD.Doc2MediaSource (Term.Text' url) (maybeText -> mimeType) <- sources] config' = Map.fromList [(k, v) | Decls.TupleTerm' [Term.Text' k, Term.Text' v] <- config] @@ -252,49 +379,43 @@ renderDoc pped terms typeOf eval types tm = -- Embed FrontMatter DD.Doc2SpecialFormEmbedFrontMatter frontMatter -> - pure $ FrontMatter frontMatter' + pure $ EFrontMatter frontMatter' where frontMatter' = List.multimap [(k, v) | Decls.TupleTerm' [Term.Text' k, Term.Text' v] <- frontMatter] + -- Embed LaTeXInline + DD.Doc2SpecialFormEmbedLaTeXInline latex -> + pure $ ELaTeXInline latex + -- Embed Svg + DD.Doc2SpecialFormEmbedSvg svg -> + pure $ ESvg svg -- Embed Any DD.Doc2SpecialFormEmbed (Term.App' _ any) -> - source any <&> \p -> Embed ("{{ embed {{" <> p <> "}} }}") + pure $ EEmbed any -- EmbedInline Any DD.Doc2SpecialFormEmbedInline any -> - source any <&> \p -> EmbedInline ("{{ embed {{" <> p <> "}} }}") - tm -> source tm <&> \p -> Embed ("🆘 unable to render " <> p) + pure $ EEmbedInline any + tm -> pure $ ERenderError (InvalidTerm tm) - evalErrMsg = "🆘 An error occured during evaluation" - - goSrc :: [Term v ()] -> m [Ref (UnisonHash, DisplayObject SyntaxText Src)] + goSrc :: [Term v ()] -> m [EvaluatedSrc v] goSrc es = do let toRef (Term.Ref' r) = Set.singleton r toRef (Term.RequestOrCtor' r) = Set.singleton (r ^. ConstructorReference.reference_) toRef _ = mempty - ppe = PPE.suffixifiedPPE pped - goType :: Reference -> m (Ref (UnisonHash, DisplayObject SyntaxText Src)) - goType r@(Reference.Builtin _) = - pure (Type (Reference.toText r, DO.BuiltinObject name)) - where - name = - formatPretty . NP.styleHashQualified (NP.fmt (S.TypeReference r)) - . PPE.typeName ppe - $ r - goType r = - Type . 
(Reference.toText r,) <$> do - d <- types r - case d of - Nothing -> pure (DO.MissingObject (SH.unsafeFromText $ Reference.toText r)) - Just decl -> - pure $ DO.UserObject (Src folded full) - where - full = formatPretty (DeclPrinter.prettyDecl pped r (PPE.typeName ppe r) decl) - folded = formatPretty (DeclPrinter.prettyDeclHeader (PPE.typeName ppe r) decl) + goType :: Reference -> m (EvaluatedSrc v) + goType r@(Reference.Builtin _builtin) = + pure (EvaluatedSrcDecl (BuiltinDecl r)) + goType r = do + d <- types r + case d of + Nothing -> pure (EvaluatedSrcDecl $ MissingDecl r) + Just decl -> + pure $ EvaluatedSrcDecl (FoundDecl r decl) go :: - (Set.Set Reference, [Ref (UnisonHash, DisplayObject SyntaxText Src)]) -> + (Set.Set Reference, [EvaluatedSrc v]) -> Term v () -> - m (Set.Set Reference, [Ref (UnisonHash, DisplayObject SyntaxText Src)]) + m (Set.Set Reference, [EvaluatedSrc v]) go s1@(!seen, !acc) = \case -- we ignore the annotations; but this could be extended later DD.TupleTerm' [DD.EitherRight' (DD.Doc2Term tm), _anns] -> @@ -303,29 +424,87 @@ renderDoc pped terms typeOf eval types tm = acc' = case tm of Term.Ref' r | Set.notMember r seen -> - (: acc) . Term . (Reference.toText r,) <$> case r of - Reference.Builtin _ -> - typeOf (Referent.Ref r) <&> \case - Nothing -> DO.BuiltinObject "🆘 missing type signature" - Just ty -> DO.BuiltinObject (formatPrettyType ppe ty) - ref -> - terms ref >>= \case - Nothing -> pure $ DO.MissingObject (SH.unsafeFromText $ Reference.toText ref) - Just tm -> do - typ <- fromMaybe (Type.builtin () "unknown") <$> typeOf (Referent.Ref ref) - let name = PPE.termName ppe (Referent.Ref ref) - let folded = - formatPretty . P.lines $ - TypePrinter.prettySignaturesST ppe [(Referent.Ref ref, name, typ)] - let full tm@(Term.Ann' _ _) _ = - formatPretty (TermPrinter.prettyBinding ppe name tm) - full tm typ = - formatPretty (TermPrinter.prettyBinding ppe name (Term.ann () tm typ)) - pure (DO.UserObject (Src folded (full tm typ))) + (: acc) <$> case r of + Reference.Builtin _ -> + typeOf (Referent.Ref r) <&> \case + Nothing -> EvaluatedSrcTerm (MissingBuiltinTypeSig r) + Just ty -> EvaluatedSrcTerm (BuiltinTypeSig r ty) + ref -> + terms ref >>= \case + Nothing -> pure . EvaluatedSrcTerm . MissingTerm $ ref + Just tm -> do + typ <- fromMaybe (Type.builtin () "unknown") <$> typeOf (Referent.Ref ref) + pure $ EvaluatedSrcTerm (FoundTerm ref typ tm) Term.RequestOrCtor' (view ConstructorReference.reference_ -> r) | Set.notMember r seen -> (: acc) <$> goType r _ -> pure acc DD.TupleTerm' [DD.EitherLeft' (Term.TypeLink' ref), _anns] | Set.notMember ref seen -> - (Set.insert ref seen,) . (: acc) <$> goType ref + (Set.insert ref seen,) . (: acc) <$> goType ref _ -> pure s1 reverse . snd <$> foldM go mempty es + +data RenderError trm + = InvalidTerm trm + deriving stock (Eq, Show, Generic) + deriving anyclass (ToJSON) + +deriving anyclass instance (ToSchema trm) => ToSchema (RenderError trm) + +data EvaluatedSrc v + = EvaluatedSrcDecl (EvaluatedDecl v) + | EvaluatedSrcTerm (EvaluatedTerm v) + deriving stock (Show, Eq, Generic) + +data EvaluatedDecl v + = MissingDecl Reference + | BuiltinDecl Reference + | FoundDecl Reference (DD.Decl v ()) + deriving stock (Show, Eq, Generic) + +data EvaluatedTerm v + = MissingTerm Reference + | BuiltinTypeSig Reference (Type v ()) + | MissingBuiltinTypeSig Reference + | FoundTerm Reference (Type v ()) (Term v ()) + deriving stock (Show, Eq, Generic) + +-- Determines all dependencies which will be required to render a doc. 
+dependencies :: (Ord v) => EvaluatedDoc v -> Set LD.LabeledDependency +dependencies = foldMap dependenciesSpecial + +-- | Determines all dependencies of a special form +dependenciesSpecial :: forall v. (Ord v) => EvaluatedSpecialForm v -> Set LD.LabeledDependency +dependenciesSpecial = \case + ESource srcs -> srcDeps srcs + EFoldedSource srcs -> srcDeps srcs + EExample trm -> Term.labeledDependencies trm + EExampleBlock trm -> Term.labeledDependencies trm + ELink ref -> either Term.labeledDependencies Set.singleton ref + ESignature sigtyps -> sigtypDeps sigtyps + ESignatureInline sig -> sigtypDeps [sig] + EEval trm mayTrm -> Term.labeledDependencies trm <> foldMap Term.labeledDependencies mayTrm + EEvalInline trm mayTrm -> Term.labeledDependencies trm <> foldMap Term.labeledDependencies mayTrm + EEmbed trm -> Term.labeledDependencies trm + EEmbedInline trm -> Term.labeledDependencies trm + EVideo {} -> mempty + EFrontMatter {} -> mempty + ELaTeXInline {} -> mempty + ESvg {} -> mempty + ERenderError (InvalidTerm trm) -> Term.labeledDependencies trm + where + sigtypDeps :: [(Referent, Type v a)] -> Set LD.LabeledDependency + sigtypDeps sigtyps = + sigtyps & foldMap \(ref, typ) -> + Set.singleton (LD.TermReferent ref) <> Type.labeledDependencies typ + srcDeps :: [EvaluatedSrc v] -> Set LD.LabeledDependency + srcDeps srcs = + srcs & foldMap \case + EvaluatedSrcDecl srcDecl -> case srcDecl of + MissingDecl ref -> Set.singleton (LD.TypeReference ref) + BuiltinDecl ref -> Set.singleton (LD.TypeReference ref) + FoundDecl ref decl -> Set.singleton (LD.TypeReference ref) <> DD.labeledDeclDependencies decl + EvaluatedSrcTerm srcTerm -> case srcTerm of + MissingTerm ref -> Set.singleton (LD.TermReference ref) + BuiltinTypeSig ref _ -> Set.singleton (LD.TermReference ref) + MissingBuiltinTypeSig ref -> Set.singleton (LD.TermReference ref) + FoundTerm ref typ trm -> Set.singleton (LD.TermReference ref) <> Type.labeledDependencies typ <> Term.labeledDependencies trm diff --git a/unison-share-api/src/Unison/Server/Doc/AsHtml.hs b/unison-share-api/src/Unison/Server/Doc/AsHtml.hs index db2e7115c..bbebbbcff 100644 --- a/unison-share-api/src/Unison/Server/Doc/AsHtml.hs +++ b/unison-share-api/src/Unison/Server/Doc/AsHtml.hs @@ -11,7 +11,6 @@ import qualified Data.Char as Char import Data.Foldable import Data.Map (Map) import qualified Data.Map as Map -import qualified Unison.Syntax.Name as Name (toText) import Data.Maybe import Data.Sequence (Seq) import Data.Text (Text) @@ -27,6 +26,7 @@ import Unison.Server.Doc import qualified Unison.Server.Doc as Doc import Unison.Server.Syntax (SyntaxText) import qualified Unison.Server.Syntax as Syntax +import qualified Unison.Syntax.Name as Name (toText) data NamedLinkHref = Href Text @@ -474,10 +474,15 @@ toHtml docNamesByRef document = Doc.FrontMatter fm -> do Writer.tell (pure $ FrontMatterContent fm) pure mempty + LaTeXInline latex -> + pure $ div_ [class_ "source rich embed latex-inline"] $ codeBlock [] (L.toHtml latex) + Svg svg -> + pure $ iframe_ [class_ "embed svg", sandbox_ "true", srcdoc_ svg] $ sequence_ [] Embed syntax -> pure $ div_ [class_ "source rich embed"] $ codeBlock [] (Syntax.toHtml syntax) EmbedInline syntax -> pure $ span_ [class_ "source rich embed-inline"] $ inlineCode [] (Syntax.toHtml syntax) + RenderError (InvalidTerm err) -> pure $ Syntax.toHtml err Join docs -> span_ [class_ "join"] <$> renderSequence currentSectionLevelToHtml (mergeWords " " docs) UntitledSection docs -> diff --git 
a/unison-share-api/src/Unison/Server/Endpoints/DefinitionSummary.hs b/unison-share-api/src/Unison/Server/Endpoints/DefinitionSummary.hs index 1aa7f60bc..b6523703a 100644 --- a/unison-share-api/src/Unison/Server/Endpoints/DefinitionSummary.hs +++ b/unison-share-api/src/Unison/Server/Endpoints/DefinitionSummary.hs @@ -50,7 +50,11 @@ import Unison.Symbol (Symbol) import Unison.Util.Pretty (Width) type TermSummaryAPI = - "definitions" :> "terms" :> "by-hash" :> Capture "hash" Referent :> "summary" + "definitions" + :> "terms" + :> "by-hash" + :> Capture "hash" Referent + :> "summary" -- Optional name to include in summary. -- It's propagated through to the response as-is. -- If missing, the short hash will be used instead. @@ -113,7 +117,11 @@ serveTermSummary codebase referent mayName mayRoot relativeTo mayWidth = do else UserObject termSig type TypeSummaryAPI = - "definitions" :> "types" :> "by-hash" :> Capture "hash" Reference :> "summary" + "definitions" + :> "types" + :> "by-hash" + :> Capture "hash" Reference + :> "summary" -- Optional name to include in summary. -- It's propagated through to the response as-is. -- If missing, the short hash will be used instead. diff --git a/unison-share-api/src/Unison/Server/Endpoints/FuzzyFind.hs b/unison-share-api/src/Unison/Server/Endpoints/FuzzyFind.hs index a041bd3fd..e8c03a28c 100644 --- a/unison-share-api/src/Unison/Server/Endpoints/FuzzyFind.hs +++ b/unison-share-api/src/Unison/Server/Endpoints/FuzzyFind.hs @@ -52,7 +52,8 @@ import Unison.Symbol (Symbol) import Unison.Util.Pretty (Width) type FuzzyFindAPI = - "find" :> QueryParam "rootBranch" SCH.ShortCausalHash + "find" + :> QueryParam "rootBranch" SCH.ShortCausalHash :> QueryParam "relativeTo" Path.Path :> QueryParam "limit" Int :> QueryParam "renderWidth" Width @@ -128,7 +129,7 @@ instance ToSample FoundResult where serveFuzzyFind :: forall m. 
- MonadIO m => + (MonadIO m) => Codebase m Symbol Ann -> Maybe SCH.ShortCausalHash -> Maybe Path.Path -> diff --git a/unison-share-api/src/Unison/Server/Endpoints/GetDefinitions.hs b/unison-share-api/src/Unison/Server/Endpoints/GetDefinitions.hs index 243dc1949..dee7f8375 100644 --- a/unison-share-api/src/Unison/Server/Endpoints/GetDefinitions.hs +++ b/unison-share-api/src/Unison/Server/Endpoints/GetDefinitions.hs @@ -41,7 +41,8 @@ import Unison.Util.Monoid (foldMapM) import Unison.Util.Pretty (Width) type DefinitionsAPI = - "getDefinition" :> QueryParam "rootBranch" ShortCausalHash + "getDefinition" + :> QueryParam "rootBranch" ShortCausalHash :> QueryParam "relativeTo" Path.Path :> QueryParams "names" (HQ.HashQualified Name) :> QueryParam "renderWidth" Width diff --git a/unison-share-api/src/Unison/Server/Endpoints/NamespaceDetails.hs b/unison-share-api/src/Unison/Server/Endpoints/NamespaceDetails.hs index 5d27be939..b5ec7b189 100644 --- a/unison-share-api/src/Unison/Server/Endpoints/NamespaceDetails.hs +++ b/unison-share-api/src/Unison/Server/Endpoints/NamespaceDetails.hs @@ -36,7 +36,8 @@ import Unison.Symbol (Symbol) import Unison.Util.Pretty (Width) type NamespaceDetailsAPI = - "namespaces" :> Capture "namespace" Path.Path + "namespaces" + :> Capture "namespace" Path.Path :> QueryParam "rootBranch" ShortCausalHash :> QueryParam "renderWidth" Width :> APIGet NamespaceDetails @@ -80,7 +81,7 @@ namespaceDetails :: namespaceDetails runtime codebase namespacePath maySCH mayWidth = let width = mayDefaultWidth mayWidth in do - (rootCausal, namespaceCausal, shallowBranch) <- + (rootCausal, namespaceCausal, shallowBranch) <- Backend.hoistBackend (Codebase.runTransaction codebase) do rootCausal <- Backend.resolveRootBranchHashV2 maySCH namespaceCausal <- lift $ Codebase.getShallowCausalAtPath namespacePath (Just rootCausal) diff --git a/unison-share-api/src/Unison/Server/Endpoints/NamespaceListing.hs b/unison-share-api/src/Unison/Server/Endpoints/NamespaceListing.hs index 7edb0045f..19d07cedb 100644 --- a/unison-share-api/src/Unison/Server/Endpoints/NamespaceListing.hs +++ b/unison-share-api/src/Unison/Server/Endpoints/NamespaceListing.hs @@ -1,10 +1,4 @@ {-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE KindSignatures #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE StandaloneDeriving #-} -{-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -Wno-orphans #-} module Unison.Server.Endpoints.NamespaceListing (serve, NamespaceListingAPI, NamespaceListing (..), NamespaceObject (..), NamedNamespace (..), NamedPatch (..), KindExpression (..)) where @@ -28,11 +22,11 @@ import U.Codebase.Branch (NamespaceStats (..)) import qualified U.Codebase.Causal as V2Causal import U.Codebase.HashTags (CausalHash (..)) import qualified U.Codebase.Sqlite.Operations as Operations -import qualified U.Util.Hash as Hash import Unison.Codebase (Codebase) import qualified Unison.Codebase as Codebase import qualified Unison.Codebase.Path as Path import Unison.Codebase.ShortCausalHash (ShortCausalHash) +import qualified Unison.Hash as Hash import qualified Unison.NameSegment as NameSegment import Unison.Parser.Ann (Ann) import Unison.Prelude @@ -53,7 +47,8 @@ import Unison.Util.Pretty (Width) import Unison.Var (Var) type NamespaceListingAPI = - "list" :> QueryParam "rootBranch" ShortCausalHash + "list" + :> QueryParam "rootBranch" ShortCausalHash :> QueryParam "relativeTo" Path.Path :> QueryParam "namespace" Path.Path :> APIGet NamespaceListing @@ -82,7 +77,7 @@ data 
NamespaceListing = NamespaceListing namespaceListingHash :: UnisonHash, namespaceListingChildren :: [NamespaceObject] } - deriving (Generic, Show) + deriving stock (Generic, Show) instance ToJSON NamespaceListing where toEncoding = genericToEncoding defaultOptions @@ -140,7 +135,7 @@ instance ToJSON KindExpression where toEncoding = genericToEncoding defaultOptions backendListEntryToNamespaceObject :: - Var v => + (Var v) => PPE.PrettyPrintEnv -> Maybe Width -> Backend.ShallowListEntry v a -> diff --git a/unison-share-api/src/Unison/Server/Endpoints/Projects.hs b/unison-share-api/src/Unison/Server/Endpoints/Projects.hs index 7fd928acc..1755e54a7 100644 --- a/unison-share-api/src/Unison/Server/Endpoints/Projects.hs +++ b/unison-share-api/src/Unison/Server/Endpoints/Projects.hs @@ -1,10 +1,4 @@ {-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE KindSignatures #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE StandaloneDeriving #-} -{-# LANGUAGE TypeOperators #-} module Unison.Server.Endpoints.Projects where @@ -27,12 +21,12 @@ import Servant.Docs import qualified U.Codebase.Branch as V2Branch import qualified U.Codebase.Causal as V2Causal import U.Codebase.HashTags (CausalHash (..)) -import qualified U.Util.Hash as Hash import Unison.Codebase (Codebase) import qualified Unison.Codebase as Codebase import qualified Unison.Codebase.Path as Path import qualified Unison.Codebase.Path.Parse as Path import Unison.Codebase.ShortCausalHash (ShortCausalHash) +import qualified Unison.Hash as Hash import qualified Unison.NameSegment as NameSegment import Unison.Parser.Ann (Ann) import Unison.Prelude @@ -44,7 +38,8 @@ import Unison.Symbol (Symbol) import Unison.Util.Monoid (foldMapM) type ProjectsAPI = - "projects" :> QueryParam "rootBranch" ShortCausalHash + "projects" + :> QueryParam "rootBranch" ShortCausalHash :> QueryParam "owner" ProjectOwner :> APIGet [ProjectListing] @@ -124,7 +119,7 @@ entryToOwner = \case serve :: forall m. 
- MonadIO m => + (MonadIO m) => Codebase m Symbol Ann -> Maybe ShortCausalHash -> Maybe ProjectOwner -> diff --git a/unison-share-api/src/Unison/Server/Orphans.hs b/unison-share-api/src/Unison/Server/Orphans.hs index e64f8752b..d731007ab 100644 --- a/unison-share-api/src/Unison/Server/Orphans.hs +++ b/unison-share-api/src/Unison/Server/Orphans.hs @@ -1,5 +1,4 @@ {-# LANGUAGE DataKinds #-} -{-# LANGUAGE StandaloneDeriving #-} {-# OPTIONS_GHC -Wno-orphans #-} module Unison.Server.Orphans where @@ -16,8 +15,6 @@ import qualified Data.Text as Text import Servant import Servant.Docs (DocCapture (DocCapture), DocQueryParam (..), ParamKind (..), ToCapture (..), ToParam (..)) import U.Codebase.HashTags -import U.Util.Hash (Hash (..)) -import qualified U.Util.Hash as Hash import Unison.Codebase.Editor.DisplayObject import qualified Unison.Codebase.Path as Path import qualified Unison.Codebase.Path.Parse as Path @@ -26,6 +23,8 @@ import Unison.Codebase.ShortCausalHash ) import qualified Unison.Codebase.ShortCausalHash as SCH import Unison.ConstructorType (ConstructorType) +import Unison.Hash (Hash (..)) +import qualified Unison.Hash as Hash import qualified Unison.HashQualified as HQ import qualified Unison.HashQualified' as HQ' import Unison.Name (Name) @@ -354,6 +353,6 @@ instance ToHttpApiData Name where deriving newtype instance ToSchema NameSegment -deriving anyclass instance ToSchema n => ToSchema (HQ.HashQualified n) +deriving anyclass instance (ToSchema n) => ToSchema (HQ.HashQualified n) -deriving anyclass instance ToSchema n => ToSchema (HQ'.HashQualified n) +deriving anyclass instance (ToSchema n) => ToSchema (HQ'.HashQualified n) diff --git a/unison-share-api/src/Unison/Server/SearchResult'.hs b/unison-share-api/src/Unison/Server/SearchResult'.hs index 58b845288..886435be4 100644 --- a/unison-share-api/src/Unison/Server/SearchResult'.hs +++ b/unison-share-api/src/Unison/Server/SearchResult'.hs @@ -39,18 +39,20 @@ data TypeResult' v a (Set (HQ'.HashQualified Name)) deriving (Eq, Show) -pattern Tm :: HQ.HashQualified Name - -> Maybe (Type v a) - -> Referent - -> Set (HQ'.HashQualified Name) - -> SearchResult' v a +pattern Tm :: + HQ.HashQualified Name -> + Maybe (Type v a) -> + Referent -> + Set (HQ'.HashQualified Name) -> + SearchResult' v a pattern Tm n t r as = Tm' (TermResult' n t r as) -pattern Tp :: HQ.HashQualified Name - -> DisplayObject () (Decl v a) - -> Reference - -> Set (HQ'.HashQualified Name) - -> SearchResult' v a +pattern Tp :: + HQ.HashQualified Name -> + DisplayObject () (Decl v a) -> + Reference -> + Set (HQ'.HashQualified Name) -> + SearchResult' v a pattern Tp n t r as = Tp' (TypeResult' n t r as) tmReferent :: SearchResult' v a -> Maybe Referent @@ -67,7 +69,7 @@ foldResult' f g = \case -- todo: comment me out, is this actually useful, given what we saw in ShowDefinitionI? -- namely, that it doesn't include the Term's deps, just the Decl's and the -- result Term/Type names. -labeledDependencies :: Ord v => SearchResult' v a -> Set LabeledDependency +labeledDependencies :: (Ord v) => SearchResult' v a -> Set LabeledDependency labeledDependencies = \case Tm' (TermResult' _ t r _) -> Set.insert (LD.referent r) $ maybe mempty (Set.map LD.typeRef . 
Type.dependencies) t diff --git a/unison-share-api/src/Unison/Server/Syntax.hs b/unison-share-api/src/Unison/Server/Syntax.hs index 5fb62d9eb..705a4c904 100644 --- a/unison-share-api/src/Unison/Server/Syntax.hs +++ b/unison-share-api/src/Unison/Server/Syntax.hs @@ -47,11 +47,11 @@ instance FromJSON Element deriving instance ToSchema Element -instance ToJSON a => ToJSON (Segment a) +instance (ToJSON a) => ToJSON (Segment a) -instance FromJSON a => FromJSON (Segment a) +instance (FromJSON a) => FromJSON (Segment a) -deriving instance ToSchema a => ToSchema (Segment a) +deriving instance (ToSchema a) => ToSchema (Segment a) instance ToJSON SeqOp @@ -65,7 +65,7 @@ instance FromJSON SyntaxText deriving anyclass instance ToSchema SyntaxText -instance ToSchema r => ToSchema (Seq r) where +instance (ToSchema r) => ToSchema (Seq r) where declareNamedSchema _ = declareNamedSchema (Proxy @[r]) convertElement :: SyntaxText.Element Reference -> Element diff --git a/unison-share-api/src/Unison/Server/Types.hs b/unison-share-api/src/Unison/Server/Types.hs index 9b511e468..80af376e6 100644 --- a/unison-share-api/src/Unison/Server/Types.hs +++ b/unison-share-api/src/Unison/Server/Types.hs @@ -1,10 +1,5 @@ {-# LANGUAGE DataKinds #-} -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DerivingVia #-} -{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE StandaloneDeriving #-} module Unison.Server.Types where @@ -295,7 +290,7 @@ deriving instance ToSchema TypeTag munge :: Text -> LZ.ByteString munge = Text.encodeUtf8 . Text.Lazy.fromStrict -mungeShow :: Show s => s -> LZ.ByteString +mungeShow :: (Show s) => s -> LZ.ByteString mungeShow = mungeString . show mungeString :: String -> LZ.ByteString @@ -304,7 +299,7 @@ mungeString = Text.encodeUtf8 . Text.Lazy.pack defaultWidth :: Width defaultWidth = 80 -discard :: Applicative m => a -> m () +discard :: (Applicative m) => a -> m () discard = const $ pure () mayDefaultWidth :: Maybe Width -> Width @@ -315,8 +310,8 @@ setCacheControl = addHeader @"Cache-Control" "public" branchToUnisonHash :: Branch.Branch m -> UnisonHash branchToUnisonHash b = - ("#" <>) . Hash.base32Hex . unCausalHash $ Branch.headHash b + ("#" <>) . Hash.toBase32HexText . unCausalHash $ Branch.headHash b v2CausalBranchToUnisonHash :: V2Branch.CausalBranch m -> UnisonHash v2CausalBranchToUnisonHash b = - ("#" <>) . Hash.base32Hex . unCausalHash $ V2Causal.causalHash b + ("#" <>) . Hash.toBase32HexText . 
unCausalHash $ V2Causal.causalHash b diff --git a/unison-share-api/src/Unison/Sync/Common.hs b/unison-share-api/src/Unison/Sync/Common.hs index 8bad96a97..bdc2ae341 100644 --- a/unison-share-api/src/Unison/Sync/Common.hs +++ b/unison-share-api/src/Unison/Sync/Common.hs @@ -25,8 +25,8 @@ import U.Codebase.Sqlite.TempEntity (TempEntity) import qualified U.Codebase.Sqlite.TempEntity as Sqlite import qualified U.Codebase.Sqlite.TempEntity as TempEntity import qualified U.Codebase.Sqlite.Term.Format as TermFormat -import U.Util.Hash32 (Hash32) -import qualified U.Util.Hash32 as Hash32 +import Unison.Hash32 (Hash32) +import qualified Unison.Hash32 as Hash32 import Unison.Prelude import qualified Unison.Sqlite as Sqlite import qualified Unison.Sync.Types as Share diff --git a/unison-share-api/src/Unison/Sync/Types.hs b/unison-share-api/src/Unison/Sync/Types.hs index 8fa5ae1e9..77598c835 100644 --- a/unison-share-api/src/Unison/Sync/Types.hs +++ b/unison-share-api/src/Unison/Sync/Types.hs @@ -88,8 +88,8 @@ import Data.Set.NonEmpty (NESet) import qualified Data.Text as Text import qualified Data.Text.Encoding as Text import Servant.Auth.JWT -import U.Util.Hash32 (Hash32) -import U.Util.Hash32.Orphans.Aeson () +import Unison.Hash32 (Hash32) +import Unison.Hash32.Orphans.Aeson () import Unison.Prelude import qualified Unison.Util.Set as Set import qualified Web.JWT as JWT @@ -251,7 +251,7 @@ instance (ToJSON text, ToJSON noSyncHash, ToJSON hash) => ToJSON (Entity text no ND ns -> go NamespaceDiffType ns C causal -> go CausalType causal where - go :: ToJSON a => EntityType -> a -> Aeson.Value + go :: (ToJSON a) => EntityType -> a -> Aeson.Value go typ obj = object ["type" .= typ, "object" .= obj] instance (FromJSON text, FromJSON noSyncHash, FromJSON hash, Ord hash) => FromJSON (Entity text noSyncHash hash) where @@ -278,7 +278,7 @@ entityHashes_ f = \case -- | Get the direct dependencies of an entity (which are actually sync'd). -- -- FIXME use generic-lens here? 
(typed @hash) -entityDependencies :: Ord hash => Entity text noSyncHash hash -> Set hash +entityDependencies :: (Ord hash) => Entity text noSyncHash hash -> Set hash entityDependencies = \case TC (TermComponent terms) -> flip foldMap terms \(LocalIds {hashes}, _term) -> Set.fromList hashes DC (DeclComponent decls) -> flip foldMap decls \(LocalIds {hashes}, _decl) -> Set.fromList hashes @@ -325,7 +325,7 @@ instance (FromJSON text, FromJSON hash) => FromJSON (TermComponent text hash) wh pure (TermComponent terms) bitraverseComponents :: - Applicative f => + (Applicative f) => (a -> f a') -> (b -> f b') -> [(LocalIds a b, ByteString)] -> @@ -427,7 +427,7 @@ instance (FromJSON text, FromJSON oldHash, FromJSON newHash) => FromJSON (Patch Base64Bytes bytes <- obj .: "bytes" pure Patch {..} -patchHashes_ :: Applicative m => (hash -> m hash') -> Patch text noSyncHash hash -> m (Patch text noSyncHash hash') +patchHashes_ :: (Applicative m) => (hash -> m hash') -> Patch text noSyncHash hash -> m (Patch text noSyncHash hash') patchHashes_ f (Patch {..}) = do newHashLookup <- traverse f newHashLookup pure (Patch {..}) @@ -460,7 +460,7 @@ instance (FromJSON text, FromJSON oldHash, FromJSON hash) => FromJSON (PatchDiff Base64Bytes bytes <- obj .: "bytes" pure PatchDiff {..} -patchDiffHashes_ :: Applicative m => (hash -> m hash') -> PatchDiff text noSyncHash hash -> m (PatchDiff text noSyncHash hash') +patchDiffHashes_ :: (Applicative m) => (hash -> m hash') -> PatchDiff text noSyncHash hash -> m (PatchDiff text noSyncHash hash') patchDiffHashes_ f (PatchDiff {..}) = do parent <- f parent newHashLookup <- traverse f newHashLookup @@ -540,7 +540,7 @@ instance (FromJSON text, FromJSON hash) => FromJSON (NamespaceDiff text hash) wh Base64Bytes bytes <- obj .: "bytes" pure NamespaceDiff {..} -namespaceDiffHashes_ :: Applicative m => (hash -> m hash') -> NamespaceDiff text hash -> m (NamespaceDiff text hash') +namespaceDiffHashes_ :: (Applicative m) => (hash -> m hash') -> NamespaceDiff text hash -> m (NamespaceDiff text hash') namespaceDiffHashes_ f (NamespaceDiff {..}) = do parent <- f parent defnLookup <- traverse f defnLookup @@ -929,7 +929,7 @@ data NeedDependencies hash = NeedDependencies } deriving stock (Show, Eq, Ord) -instance ToJSON hash => ToJSON (NeedDependencies hash) where +instance (ToJSON hash) => ToJSON (NeedDependencies hash) where toJSON (NeedDependencies missingDependencies) = object ["missing_dependencies" .= missingDependencies] @@ -941,10 +941,10 @@ instance (FromJSON hash, Ord hash) => FromJSON (NeedDependencies hash) where ------------------------------------------------------------------------------------------------------------------------ -- Misc. helpers -failText :: MonadFail m => Text -> m a +failText :: (MonadFail m) => Text -> m a failText = fail . 
Text.unpack -jsonUnion :: ToJSON a => Text -> a -> Value +jsonUnion :: (ToJSON a) => Text -> a -> Value jsonUnion typeName val = Aeson.object [ "type" .= String typeName, diff --git a/unison-share-api/src/Unison/Util/Find.hs b/unison-share-api/src/Unison/Util/Find.hs index 4e3395a60..f2fb30a7e 100644 --- a/unison-share-api/src/Unison/Util/Find.hs +++ b/unison-share-api/src/Unison/Util/Find.hs @@ -149,7 +149,7 @@ prefixFindInBranch b hq = -- only search before the # before the # and after the # after the # fuzzyFindInBranch :: - HasCallStack => + (HasCallStack) => Names -> HQ'.HashQualified Name -> [(SearchResult, P.Pretty P.ColorText)] diff --git a/unison-share-api/unison-share-api.cabal b/unison-share-api/unison-share-api.cabal index 0f0e0c6a1..7df4859d8 100644 --- a/unison-share-api/unison-share-api.cabal +++ b/unison-share-api/unison-share-api.cabal @@ -45,6 +45,7 @@ library ConstraintKinds DeriveAnyClass DeriveFunctor + DeriveGeneric DerivingStrategies DerivingVia DoAndIfThenElse @@ -62,6 +63,7 @@ library PatternSynonyms RankNTypes ScopedTypeVariables + StandaloneDeriving TupleSections TypeApplications TypeOperators @@ -103,13 +105,14 @@ library , unison-codebase-sqlite , unison-core , unison-core1 + , unison-hash + , unison-hash-orphans-aeson , unison-parser-typechecker , unison-prelude , unison-pretty-printer , unison-sqlite , unison-syntax , unison-util-base32hex - , unison-util-base32hex-orphans-aeson , unison-util-relation , unliftio , unordered-containers diff --git a/unison-src/builtin-tests/Readme.md b/unison-src/builtin-tests/Readme.md new file mode 100644 index 000000000..a3aba90bc --- /dev/null +++ b/unison-src/builtin-tests/Readme.md @@ -0,0 +1,13 @@ +# Test suite for builtins + +Edit `tests.u` in this directory to add to the test suite. The same test suite can be run using the JIT or the interpreter, using either of the two scripts: + +```bash +$ ./unison-src/builtin-tests/jit-tests.sh +``` + +```bash +$ ./unison-src/builtin-tests/interpreter-tests.sh +``` + +The scripts will fetch a copy of base and the scheme codegen library and cache it for subsequent runs. \ No newline at end of file diff --git a/unison-src/builtin-tests/base.md b/unison-src/builtin-tests/base.md new file mode 100644 index 000000000..e5302ad07 --- /dev/null +++ b/unison-src/builtin-tests/base.md @@ -0,0 +1,5 @@ + +```ucm +.> pull unison.public.base.latest base +.> compile.native.fetch +``` \ No newline at end of file diff --git a/unison-src/builtin-tests/base.output.md b/unison-src/builtin-tests/base.output.md new file mode 100644 index 000000000..e51894c11 --- /dev/null +++ b/unison-src/builtin-tests/base.output.md @@ -0,0 +1,16 @@ + +```ucm +.> pull unison.public.base.latest base + + ✅ + + Successfully pulled into newly created namespace base. + +.> compile.native.fetch + + ✅ + + Successfully updated .unison.internal from + unison.public.internal.trunk. 
+ +``` diff --git a/unison-src/builtin-tests/concurrency-tests.u b/unison-src/builtin-tests/concurrency-tests.u new file mode 100644 index 000000000..50b197fc1 --- /dev/null +++ b/unison-src/builtin-tests/concurrency-tests.u @@ -0,0 +1,148 @@ +concurrency.tests = do + !simpleRefTest + !simpleRefTestScope + !ticketTest + !casTest + !promiseSequentialTest + !promiseConcurrentTest + !forkKillTest + !tryEvalForkTest + !tryEvalKillTest + !fullTest + +simpleRefTest = do + r = IO.ref 0 + Ref.write r 1 + i = Ref.read r + Ref.write r 2 + j = Ref.read r + Ref.write r 5 + checkEqual "Ref read-write" (i, j, Ref.read r) (1, 2, 5) + +simpleRefTestScope = do + Scope.run do + r = Scope.ref 0 + Ref.write r 1 + i = Ref.read r + Ref.write r 2 + j = Ref.read r + Ref.write r 5 + checkEqual "Ref read-write" (i, j, Ref.read r) (1, 2, 5) + +ticketTest = do + r = IO.ref 3 + t = Ref.readForCas r + v = Ticket.read t + checkEqual "Ticket contains the Ref value" v 3 + +casTest = do + ref = IO.ref 0 + ticket = Ref.readForCas ref + v1 = Ref.cas ref ticket 5 + check "CAS is successful is there were no conflicting writes" 'v1 + Ref.write ref 10 + v2 = Ref.cas ref ticket 15 + check "CAS fails when there was an intervening write" '(not v2) + +promiseSequentialTest = do + use Nat eq + use Promise read write + p = !Promise.new + v0 = Promise.tryRead p + checkEqual "Promise should be empty when created" v0 None + Promise.write_ p 0 + v1 = read p + checkEqual "Promise should read a value that's been written" v1 0 + Promise.write_ p 1 + v2 = read p + checkEqual "Promise can only be written to once" v2 v1 + v3 = Promise.tryRead p + checkEqual "Once the Promise is full, tryRead is the same as read" v3 (Some v2) + +millis = 1000 +sleep_ n = unsafeRun! do sleep n + +promiseConcurrentTest = do + use Nat eq + use concurrent fork + p = !Promise.new + _ = fork do + sleep_ (200 * millis) + Promise.write p 5 + v = Promise.read p + checkEqual "Reads awaits for completion of the Promise" v 5 + +kill_ t = unsafeRun! do concurrent.kill t + +forkKillTest = do + ref = IO.ref "initial" + thread = fork do + sleep_ (400 * millis) + Ref.write ref "done" + sleep_ (200 * millis) + kill_ thread + sleep_ (300 * millis) + v = Ref.read ref + checkEqual "Thread was killed" v "initial" + +tryEvalForkTest = do + ref = IO.ref "initial" + t = fork do + match catchAll do sleep_ (400 * millis) with + Left _ -> () + Right _ -> unsafeRun! do Ref.write ref "finished" + sleep_ (500 * millis) + v = Ref.read ref + checkEqual "tryEval is a no-op on success" v "finished" + +tryEvalKillTest = do + ref = IO.ref "initial" + t = fork do + match catchAll do sleep_ (400 * millis) with + Left (Failure typ msg a) -> unsafeRun! do Ref.write ref msg + Right _ -> unsafeRun! 
do Ref.write ref "finished" + sleep_ (200 * millis) + kill_ t + sleep_ (300 * millis) + v = Ref.read ref + checkEqual "Thread was killed, with finalisers" v "thread killed" + +atomicUpdate : Ref {IO} a -> (a -> a) ->{IO} () +atomicUpdate ref f = + ticket = Ref.readForCas ref + value = f (Ticket.read ticket) + if Ref.cas ref ticket value then () else atomicUpdate ref f + +spawnN : Nat -> '{IO} a ->{IO} [a] +spawnN n fa = + use Nat eq - + use concurrent fork + + go i acc = + if eq i 0 + then acc + else + value = !Promise.new + _ = fork do Promise.write value !fa + go (i - 1) (acc :+ value) + + map Promise.read (go n []) + +fullTest = do + use Nat * + eq - + + numThreads = 100 + iterations = 100 + expected = numThreads * iterations + + state = IO.ref 0 + thread n = + if eq n 0 + then () + else + atomicUpdate state (v -> v + 1) + thread (n - 1) + ignore (spawnN numThreads '(thread iterations)) + result = Ref.read state + checkEqual "The state of the counter is consistent " result expected + diff --git a/unison-src/builtin-tests/interpreter-tests.md b/unison-src/builtin-tests/interpreter-tests.md new file mode 100644 index 000000000..ffa081717 --- /dev/null +++ b/unison-src/builtin-tests/interpreter-tests.md @@ -0,0 +1,36 @@ + +Note: This should be forked off of the codebase created by base.md + +```ucm:hide +.> load unison-src/builtin-tests/testlib.u +.> add +``` + +If you want to define more complex tests somewhere other than `tests.u`, just `load my-tests.u` then `add`, +then reference those tests (which should be of type `'{IO,Exception,Tests} ()`, written using calls +to `Tests.check` and `Tests.checkEqual`). + +```ucm:hide +.> load unison-src/builtin-tests/concurrency-tests.u +.> add +``` + +```ucm:hide +.> load unison-src/builtin-tests/tests.u +.> add +``` + +```ucm +.> run tests +``` + + +```ucm:hide +.> builtins.merge +.> load unison-src/builtin-tests/thread-killed-typeLink-test.u +.> add +``` + +```ucm +.> run threadKilledTypeLinkTest +``` diff --git a/unison-src/builtin-tests/interpreter-tests.output.md b/unison-src/builtin-tests/interpreter-tests.output.md new file mode 100644 index 000000000..751dbdb67 --- /dev/null +++ b/unison-src/builtin-tests/interpreter-tests.output.md @@ -0,0 +1,19 @@ + +Note: This should be forked off of the codebase created by base.md + +If you want to define more complex tests somewhere other than `tests.u`, just `load my-tests.u` then `add`, +then reference those tests (which should be of type `'{IO,Exception,Tests} ()`, written using calls +to `Tests.check` and `Tests.checkEqual`). + +```ucm +.> run tests + + () + +``` +```ucm +.> run threadKilledTypeLinkTest + + () + +``` diff --git a/unison-src/builtin-tests/interpreter-tests.sh b/unison-src/builtin-tests/interpreter-tests.sh new file mode 100755 index 000000000..d8b7d3ef3 --- /dev/null +++ b/unison-src/builtin-tests/interpreter-tests.sh @@ -0,0 +1,19 @@ +#!/bin/bash +#set -ex + +ucm=$(stack exec -- which unison) + +base_codebase=${XDG_CACHE_HOME:-"$HOME/.cache"}/unisonlanguage/base.unison + +if [ ! 
-d $base_codebase ]; then + $ucm transcript -S $base_codebase unison-src/builtin-tests/base.md +fi + +dir=${XDG_DATA_HOME:-"$HOME/.local/share"}/unisonlanguage/scheme-libs +echo $dir + +mkdir -p $dir +cp -r scheme-libs/* $dir/ + +time $ucm transcript.fork -c $base_codebase unison-src/builtin-tests/interpreter-tests.md + diff --git a/unison-src/builtin-tests/jit-tests.md b/unison-src/builtin-tests/jit-tests.md new file mode 100644 index 000000000..3cb95d893 --- /dev/null +++ b/unison-src/builtin-tests/jit-tests.md @@ -0,0 +1,27 @@ + +Note: This should be forked off of the codebase created by base.md + +```ucm:hide +.> compile.native.fetch +.> compile.native.genlibs +.> load unison-src/builtin-tests/testlib.u +.> add +``` + +If you want to define more complex tests somewhere other than `tests.u`, just `load my-tests.u` then `add`, +then reference those tests (which should be of type `'{IO,Exception,Tests} ()`, written using calls +to `Tests.check` and `Tests.checkEqual`). + +```ucm:hide +.> load unison-src/builtin-tests/concurrency-tests.u +.> add +``` + +```ucm:hide +.> load unison-src/builtin-tests/tests.u +.> add +``` + +```ucm +.> run.native tests +``` diff --git a/unison-src/builtin-tests/jit-tests.output.md b/unison-src/builtin-tests/jit-tests.output.md new file mode 100644 index 000000000..016ea7e65 --- /dev/null +++ b/unison-src/builtin-tests/jit-tests.output.md @@ -0,0 +1,11 @@ + +Note: This should be forked off of the codebase created by base.md + +If you want to define more complex tests somewhere other than `tests.u`, just `load my-tests.u` then `add`, +then reference those tests (which should be of type `'{IO,Exception,Tests} ()`, written using calls +to `Tests.check` and `Tests.checkEqual`). + +```ucm +.> run.native tests + +``` diff --git a/unison-src/builtin-tests/jit-tests.sh b/unison-src/builtin-tests/jit-tests.sh new file mode 100755 index 000000000..64f7f52ac --- /dev/null +++ b/unison-src/builtin-tests/jit-tests.sh @@ -0,0 +1,19 @@ +#!/bin/bash +#set -ex + +ucm=$(stack exec -- which unison) + +base_codebase=${XDG_CACHE_HOME:-"$HOME/.cache"}/unisonlanguage/base.unison + +if [ ! 
-d $base_codebase ]; then + $ucm transcript -S $base_codebase unison-src/builtin-tests/base.md +fi + +dir=${XDG_DATA_HOME:-"$HOME/.local/share"}/unisonlanguage/scheme-libs +echo $dir + +mkdir -p $dir +cp -r scheme-libs/* $dir/ + +time $ucm transcript.fork -c $base_codebase unison-src/builtin-tests/jit-tests.md + diff --git a/unison-src/builtin-tests/testlib.u b/unison-src/builtin-tests/testlib.u new file mode 100644 index 000000000..c0ed2dd35 --- /dev/null +++ b/unison-src/builtin-tests/testlib.u @@ -0,0 +1,56 @@ + +unique ability Tests where + pass : Text -> () + fail : Text -> Text -> () + exception : Text -> Failure -> () + +Tests.check : Text -> '{g, Exception} Boolean ->{g, Tests} () +Tests.check msg b = + match catch b with + Left e -> exception msg e + Right true -> pass msg + Right false -> fail msg "" + +Tests.checkEqual : Text -> a -> a ->{Tests} () +Tests.checkEqual msg a1 a2 = + match catch '(a1 === a2) with + Left e -> exception msg e + Right true -> pass msg + Right false -> fail msg "not equal" + +Tests.main : '{IO,Exception,Tests} () -> '{IO,Exception} () +Tests.main suite = do + if Tests.run suite then () + else bug "test suite failed" + +Tests.run : '{IO,Exception,Tests} () ->{IO,Exception} Boolean +Tests.run suite = + h passed failed = cases + { _ } -> (passed, failed) + { pass msg -> k } -> + printLine (" ✅ " ++ msg) + handle !k with h (passed + 1) failed + { fail msg reason -> k } -> + printLine (" 🆘 " ++ msg ++ " " ++ reason) + handle !k with h passed (failed + 1) + { exception msg failure@(Failure _ cause payload) -> k} -> + printLine (" 💥 " ++ msg ++ " " ++ cause) + handle !k with h passed (failed + 1) + + printLine "" + printLine "*** Test suite ***" + printLine "" + + (passed, failed) = handle !suite with h 0 0 + + printLine "" + printLine "" + printLine "Summary of results:" + printLine "" + + if failed == 0 then + printLine (" ✅✅✅ " ++ Nat.toText passed ++ " PASSED") + else + printLine (" 🆘🆘🆘 " ++ Nat.toText failed ++ " FAILED, " + ++ Nat.toText passed ++ " passed") + failed == 0 \ No newline at end of file diff --git a/unison-src/builtin-tests/tests.u b/unison-src/builtin-tests/tests.u new file mode 100644 index 000000000..9a2c5a493 --- /dev/null +++ b/unison-src/builtin-tests/tests.u @@ -0,0 +1,83 @@ + +shouldFail fn = isLeft <| catchAll fn + +tests : '{IO,Exception} () +tests = Tests.main do + !crypto.hash.tests + !hmac.tests + !concurrency.tests + !tcp.tests + check "bug is caught" do shouldFail do bug () + +tcp.tests = do + check "connects to example.com" do + socket = Socket.client (HostName "example.com") (Port "80") + Socket.send socket (toUtf8 "GET /index.html HTTP/1.0\r\n\r\n") + response = Socket.receive socket + Socket.close socket + contains "HTTP/1.0 200 OK" (base.Text.fromUtf8 response) + check "rejects invalid port" do shouldFail do Socket.client (HostName "example.com") (Port "what") + check "no send after close" do shouldFail do + socket = Socket.client (HostName "example.com") (Port "80") + Socket.close socket + Socket.send socket (toUtf8 "GET /index.html HTTP/1.0\r\n\r\n") + check "no send on listener" do shouldFail do + match Socket.server None (Port "0") with + BoundServerSocket socket -> Socket.send socket (toUtf8 "what") + + + setup = catchAll do + socket = Socket.listen (server None (Port "0")) + port = match socket with + ListeningServerSocket sock -> Socket.port sock + (socket, port) + + match setup with + Left exn -> + Debug.trace "Setup failed" exn + Tests.fail "Unable to bind and listen on a socket" "" + Right (socket, port) -> + 
serve = do + sock = Socket.accept socket + data = Socket.receive sock + Socket.send sock (toUtf8 "from server") + base.Text.fromUtf8 data + + serveResult = !Promise.new + _ = fork do Promise.write serveResult (catchAll serve) + + data = catchAll do + clientSocket = Socket.client (HostName "localhost") (Port (Nat.toText port)) + Socket.send clientSocket (toUtf8 "from client") + base.Text.fromUtf8 (Socket.receive clientSocket) + + checkEqual "Server received data" (Promise.read serveResult) (Right "from client") + checkEqual "Client received data" data (Right "from server") + +crypto.hash.tests = do + hash alg = hashBytes alg (toUtf8 "") + tag name = name ++ " hashBytes" + [ + ("Sha1", Sha1, 0xsda39a3ee5e6b4b0d3255bfef95601890afd80709), + ("Sha2_256", Sha2_256, 0xse3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855), + ("Sha2_512", Sha2_512, 0xscf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e), + ("Sha3_256", Sha3_256, 0xsa7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a), + ("Sha3_512", Sha3_512, 0xsa69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26), + ("Blake2s_256", Blake2s_256, 0xs69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9), + ("Blake2b_256", Blake2b_256, 0xs0e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a8), + ("Blake2b_512", Blake2b_512, 0xs786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce) + ] |> List.foreach_ cases (name, alg, res) -> checkEqual (tag name) (hash alg) res + +hmac.tests = do + hmac alg = hmacBytes alg (toUtf8 "key") (toUtf8 "") + tag name = name ++ " hmacBytes" + [ + ("Sha1", Sha1, 0xsf42bb0eeb018ebbd4597ae7213711ec60760843f), + ("Sha2_256", Sha2_256, 0xs5d5d139563c95b5967b9bd9a8c9b233a9dedb45072794cd232dc1b74832607d0), + ("Sha2_512", Sha2_512, 0xs84fa5aa0279bbc473267d05a53ea03310a987cecc4c1535ff29b6d76b8f1444a728df3aadb89d4a9a6709e1998f373566e8f824a8ca93b1821f0b69bc2a2f65e), + ("Sha3_256", Sha3_256, 0xs74f3c030ecc36a1835d04a333ebb7fce2688c0c78fb0bcf9592213331c884c75), + ("Sha3_512", Sha3_512, 0xs7539119b6367aa902bdc6f558d20c906d6acbd4aba3fd344eb08b0200144a1fa453ff6e7919962358be53f6db2a320d1852c52a3dea3e907070775f7a91f1282), + ("Blake2s_256", Blake2s_256, 0xs67148074efc0f6741b474ef81c4d98d266e880d372fe723d2569b1d414d234be), + ("Blake2b_256", Blake2b_256, 0xs4224e1297e51239a642e21f756bde2785716f872298178180d7f3d1d36a5e4e4), + ("Blake2b_512", Blake2b_512, 0xs019fe04bf010b8d72772e6b46897ecf74b4878c394ff2c4d5cfa0b7cc9bbefcb28c36de23cef03089db9c3d900468c89804f135e9fdef7ec9b3c7abe50ed33d3) + ] |> List.foreach_ cases (name, alg, res) -> checkEqual (tag name) (hmac alg) res diff --git a/unison-src/builtin-tests/thread-killed-typeLink-test.u b/unison-src/builtin-tests/thread-killed-typeLink-test.u new file mode 100644 index 000000000..2af89952d --- /dev/null +++ b/unison-src/builtin-tests/thread-killed-typeLink-test.u @@ -0,0 +1,15 @@ +-- TODO Move this to concurrency-tests once the JIT supports typeLinks +threadKilledTypeLinkTest = Tests.main do + ref = IO.ref None + t = fork do + match catchAll do sleep_ (400 * millis) with + Left (Failure f _ _) -> unsafeRun! 
do Ref.write ref (Some f) + _ -> () + sleep_ (200 * millis) + kill_ t + sleep_ (300 * millis) + v = Ref.read ref + expected = Some (typeLink ThreadKilledFailure) + checkEqual "Thread killed, finalisers with typeLink" v expected + + diff --git a/unison-src/parser-tests/GenerateErrors.hs b/unison-src/parser-tests/GenerateErrors.hs index 16426017c..b4e257288 100644 --- a/unison-src/parser-tests/GenerateErrors.hs +++ b/unison-src/parser-tests/GenerateErrors.hs @@ -25,12 +25,12 @@ unisonFilesInCurrDir = getCurrentDirectory >>= unisonFilesInDir errorFileName :: String -> String errorFileName n = dropExtension n ++ ".message.txt" -emitAsPlainTextTo :: Var v => String -> Err v -> FilePath -> IO () +emitAsPlainTextTo :: (Var v) => String -> Err v -> FilePath -> IO () emitAsPlainTextTo src e f = writeUtf8 f plainErr where plainErr = Color.toPlain $ prettyParseError src e -printError :: Var v => String -> Err v -> IO () +printError :: (Var v) => String -> Err v -> IO () printError src e = putStrLn $ B.showParseError src e processFile :: FilePath -> IO () diff --git a/unison-src/tests/caseguard.u b/unison-src/tests/caseguard.u index 16e949af5..94eb6ce1a 100644 --- a/unison-src/tests/caseguard.u +++ b/unison-src/tests/caseguard.u @@ -10,6 +10,6 @@ use Universal == f = cases x | x == "woot" -> false - y | y == "foo" -> true + y | otherwise -> true -- > f "woot" diff --git a/unison-src/tests/imports.u b/unison-src/tests/imports.u index 0748ae284..2647c9f57 100644 --- a/unison-src/tests/imports.u +++ b/unison-src/tests/imports.u @@ -19,4 +19,4 @@ use Nat drop > match Some (100 + 200 / 3 * 2) with Optional.None -> 19 - Some 200 -> 20 + Some _ -> 20 diff --git a/unison-src/tests/inner-lambda1.u b/unison-src/tests/inner-lambda1.u index 708a59e78..43c196c8d 100644 --- a/unison-src/tests/inner-lambda1.u +++ b/unison-src/tests/inner-lambda1.u @@ -12,4 +12,5 @@ search hit bot top = +0 -> Some mid -1 -> go bot (drop mid 1) +1 -> go (mid + 1) top + _ -> bug "unexpected" go bot top diff --git a/unison-src/tests/inner-lambda2.u b/unison-src/tests/inner-lambda2.u index 6900e8fd3..f356cbcfe 100644 --- a/unison-src/tests/inner-lambda2.u +++ b/unison-src/tests/inner-lambda2.u @@ -13,4 +13,5 @@ search hit bot top = +0 -> Some mid -1 -> go bot (drop mid 1) +1 -> go (mid + 1) top + _ -> bug "unexpected" go bot top diff --git a/unison-src/tests/methodical/pattern-matching.u b/unison-src/tests/methodical/pattern-matching.u index e1883d120..9740b29b2 100644 --- a/unison-src/tests/methodical/pattern-matching.u +++ b/unison-src/tests/methodical/pattern-matching.u @@ -17,7 +17,7 @@ pat6 x y = cases (p1, _) -> (x + y : Nat, p1) pat7 x y = cases (p1, _) | p1 == 9 -> (x + y : Nat, p1) - (p1, _) | true -> (0, p1) + (p1, _) | otherwise -> (0, p1) bpat = cases false -> 0 diff --git a/unison-src/tests/parenthesized-blocks.u b/unison-src/tests/parenthesized-blocks.u index 5824dbbec..db7bff5db 100644 --- a/unison-src/tests/parenthesized-blocks.u +++ b/unison-src/tests/parenthesized-blocks.u @@ -1,5 +1,5 @@ x = (if true then 1 else 0) + 1 -y = (match 1 with 1 -> 1) + 1 +y = (match 1 with _ -> 1) + 1 > (x, y) diff --git a/unison-src/tests/pattern-match-seq.u b/unison-src/tests/pattern-match-seq.u index 58c0a0484..c1485544a 100644 --- a/unison-src/tests/pattern-match-seq.u +++ b/unison-src/tests/pattern-match-seq.u @@ -11,12 +11,12 @@ lenLit = cases [_] -> 1 [_, _] -> 2 [_, _, _] -> 3 + _ -> bug "unexpected" lenCons : [a] -> Nat lenCons = cases [] -> 0 _ +: t -> 1 + lenCons t - _ +: (_ +: t) -> 2 + lenCons t lenSnoc : [a] -> Nat lenSnoc = 
cases diff --git a/unison-src/tests/pattern-matching.u b/unison-src/tests/pattern-matching.u index b1e2b3c7e..8be597d2f 100644 --- a/unison-src/tests/pattern-matching.u +++ b/unison-src/tests/pattern-matching.u @@ -16,8 +16,8 @@ y = match Foo1 1 with Foo1 _ -> 10 z = match Foo2 1 "hi" with - Foo2 x _ -> x Foo2 1 _ -> 1 + Foo2 x _ -> x w = match Foo3.Foo3 1 2 "bye" with Foo3.Foo3 1 2 x -> x Text.++ "bye" @@ -26,7 +26,6 @@ w = match Foo3.Foo3 1 2 "bye" with w2 = cases Foo3.Foo3 1 4 x -> x Text.++ "bye" Foo3.Foo3 x y z -> z Text.++ z - _ -> "hi" len : List a -> Nat len = cases diff --git a/unison-src/tests/pattern-matching2.u b/unison-src/tests/pattern-matching2.u index 4f6dd8c40..e17364946 100644 --- a/unison-src/tests/pattern-matching2.u +++ b/unison-src/tests/pattern-matching2.u @@ -15,7 +15,8 @@ y = match Foo1 1 with Foo1 _ -> 10 z = match Foo2 1 "hi" with - Foo2 x "bye" -> x Foo2 1 "hi" -> 1 + Foo2 x "bye" -> x + _ -> bug "unexpected" > z diff --git a/unison-src/tests/r2.u b/unison-src/tests/r2.u index 8218decb7..b7eaf7d71 100644 --- a/unison-src/tests/r2.u +++ b/unison-src/tests/r2.u @@ -3,4 +3,5 @@ r2 : Nat r2 = match Optional.Some true with Optional.Some true -> 1 Optional.Some false -> 0 + Optional.None -> bug "unexpected" diff --git a/unison-src/tests/r3.u b/unison-src/tests/r3.u index 74b76105f..93e13894c 100644 --- a/unison-src/tests/r3.u +++ b/unison-src/tests/r3.u @@ -2,5 +2,6 @@ r3 : Nat r3 = match Optional.Some true with Optional.Some true -> 1 Optional.Some false -> 0 + Optional.None -> bug "unexpected" diff --git a/unison-src/tests/r4x.u b/unison-src/tests/r4x.u index 1e7123f6e..dda1d0227 100644 --- a/unison-src/tests/r4x.u +++ b/unison-src/tests/r4x.u @@ -1,3 +1,4 @@ r4 : Int -> Int r4 = cases +1 -> +1 + x -> x diff --git a/unison-src/tests/sequence-at-0.u b/unison-src/tests/sequence-at-0.u index 37f642935..ea80e80d0 100644 --- a/unison-src/tests/sequence-at-0.u +++ b/unison-src/tests/sequence-at-0.u @@ -1,2 +1,3 @@ > match at 0 [100] with Optional.Some _ -> "Hooray!" 
+ Optional.None -> bug "unexpected" diff --git a/unison-src/tests/sequence-literal-argument-parsing.u b/unison-src/tests/sequence-literal-argument-parsing.u index d6d495bca..f5901bb1e 100644 --- a/unison-src/tests/sequence-literal-argument-parsing.u +++ b/unison-src/tests/sequence-literal-argument-parsing.u @@ -3,3 +3,4 @@ structural type X a = X [a] f : X a -> a f = cases X.X [b] -> b + X.X _ -> bug "unexpected" diff --git a/unison-src/tests/tictactoe2.u b/unison-src/tests/tictactoe2.u index 9ebaf3b30..d5965c244 100644 --- a/unison-src/tests/tictactoe2.u +++ b/unison-src/tests/tictactoe2.u @@ -7,6 +7,18 @@ use Board Board use P O X E use Optional Some None +foldLeft : (b -> a -> b) -> b -> [a] -> b +foldLeft f = + go z xs = match xs with + [] -> z + a +: as -> go (f z a) as + go + +orElse a b = + match a with + None -> b + a -> a + isWin : Board -> Optional P isWin board = same : P -> P -> P -> Optional P @@ -18,36 +30,19 @@ isWin board = -- horizontal left/center/right -- diagonal rising/falling Board a b c - _ _ _ - _ _ _ -> same a b c - - Board _ _ _ - a b c - _ _ _ -> same a b c - - Board _ _ _ - _ _ _ - a b c -> same a b c - - Board a _ _ - b _ _ - c _ _ -> same a b c - - Board _ a _ - _ b _ - _ c _ -> same a b c - - Board _ _ a - _ _ b - _ _ c -> same a b c - - Board a _ _ - _ b _ - _ _ c -> same a b c - - Board _ _ a - _ b _ - c _ _ -> same a b c + d e f + g h i + -> + foldLeft orElse None + [ same a b c + , same d e f + , same g h i + , same a d g + , same b e h + , same c f i + , same a e i + , same g e c + ] x = isWin (Board X O X O X X diff --git a/unison-src/transcripts-round-trip/main.md b/unison-src/transcripts-round-trip/main.md index 26f5ab918..4ba440c97 100644 --- a/unison-src/transcripts-round-trip/main.md +++ b/unison-src/transcripts-round-trip/main.md @@ -84,17 +84,20 @@ f x = let Regression test for https://github.com/unisonweb/unison/issues/2224 ```unison:hide -f : [a] -> a +f : [()] -> () f xs = match xs with x +: (x' +: rest) -> x + _ -> () -g : [a] -> a +g : [()] -> () g xs = match xs with - (rest :+ x') :+ x -> x + (rest :+ x') :+ x -> () + _ -> () -h : [[a]] -> a +h : [[()]] -> () h xs = match xs with (rest :+ (rest' :+ x)) -> x + _ -> () ``` ```ucm @@ -301,6 +304,7 @@ broken tvar = ```unison:hide broken = cases Some loooooooooooooooooooooooooooooooooooooooooooooooooooooooong | loooooooooooooooooooooooooooooooooooooooooooooooooooooooong == 1 -> () + _ -> () ``` ``` ucm @@ -325,6 +329,7 @@ foo = let lijaefliejalfijelfj == aefilaeifhlei -> 0 SomethingUnusuallyLong lijaefliejalfijelfj aefilaeifhlei liaehjffeafijij | lijaefliejalfijelfj == liaehjffeafijij -> 1 + _ -> 2 go (SomethingUnusuallyLong "one" "two" "three") ``` @@ -506,4 +511,82 @@ foo = cases ```ucm .> load scratch.u -``` \ No newline at end of file +``` + +# Multi-line lambda let + +Regression test for #3110 and #3801 + +```unison:hide +foreach x f = + _ = List.map f x + () + +ignore x = () + +test1 : () +test1 = + foreach [1, 2, 3] let x -> let + y = Nat.increment x + () + +test2 = foreach [1, 2, 3] let x -> ignore (Nat.increment x) + +test3 = foreach [1, 2, 3] do x -> do + y = Nat.increment x + () +``` + +```ucm +.> add +.> edit test1 test2 test3 foreach ignore +.> undo +``` + +```ucm +.> load scratch.u +``` + +# Destructuring bind in delay or lambda + +Regression test for https://github.com/unisonweb/unison/issues/3710 + +```unison:hide +d1 = do + (a,b) = (1,2) + (c,d) = (3,4) + (e,f) = (5,6) + (a,b,c,d,e,f) + +d2 = let + (a,b) = (1,2) + (c,d) = (3,4) + (e,f) = (5,6) + (a,b,c,d,e,f) + +d3 x = let + 
(a,b) = (1,x) + (c,d) = (3,4) + (e,f) = (5,6) + (a,b,c,d,e,f) + +d4 x = do + (a,b) = (1,x) + (c,d) = (3,4) + (e,f) = (5,6) + (a,b,c,d,e,f) + +d5 x = match x with + Some x -> x + None -> bug "oops" +``` + +```ucm +.> add +.> edit d1 d2 d3 d4 d5 +.> undo +``` + +```ucm +.> load scratch.u +``` diff --git a/unison-src/transcripts-round-trip/main.output.md b/unison-src/transcripts-round-trip/main.output.md index 9f8099478..24c4115bc 100644 --- a/unison-src/transcripts-round-trip/main.output.md +++ b/unison-src/transcripts-round-trip/main.output.md @@ -34,16 +34,16 @@ x = 1 + 1 most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #l7cnk7raag .old` to make an old namespace + `fork #c5i2vql0hi .old` to make an old namespace accessible again, - `reset-root #l7cnk7raag` to reset the root namespace and + `reset-root #c5i2vql0hi` to reset the root namespace and its history to that of the specified namespace. When Root Hash Action - 1. now #pdrl1ktsa0 add - 2. 1 secs ago #l7cnk7raag builtins.mergeio + 1. now #88srvru2o0 add + 2. 1 secs ago #c5i2vql0hi builtins.mergeio 3. #sg60bvjo91 history starts here Tip: Use `diff.namespace 1 7` to compare namespaces between @@ -120,18 +120,18 @@ Without the above stanza, the `edit` will send the definition to the most recent most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #l7cnk7raag .old` to make an old namespace + `fork #c5i2vql0hi .old` to make an old namespace accessible again, - `reset-root #l7cnk7raag` to reset the root namespace and + `reset-root #c5i2vql0hi` to reset the root namespace and its history to that of the specified namespace. When Root Hash Action - 1. now #7a6vnmv5c9 add - 2. now #l7cnk7raag reset-root #l7cnk7raag - 3. now #pdrl1ktsa0 add - 4. 1 secs ago #l7cnk7raag builtins.mergeio + 1. now #a16i2glj04 add + 2. now #c5i2vql0hi reset-root #c5i2vql0hi + 3. now #88srvru2o0 add + 4. 1 secs ago #c5i2vql0hi builtins.mergeio 5. #sg60bvjo91 history starts here Tip: Use `diff.namespace 1 7` to compare namespaces between @@ -199,20 +199,20 @@ f x = let most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #l7cnk7raag .old` to make an old namespace + `fork #c5i2vql0hi .old` to make an old namespace accessible again, - `reset-root #l7cnk7raag` to reset the root namespace and + `reset-root #c5i2vql0hi` to reset the root namespace and its history to that of the specified namespace. When Root Hash Action - 1. now #obak7jnhcv add - 2. now #l7cnk7raag reset-root #l7cnk7raag - 3. now #7a6vnmv5c9 add - 4. now #l7cnk7raag reset-root #l7cnk7raag - 5. now #pdrl1ktsa0 add - 6. 1 secs ago #l7cnk7raag builtins.mergeio + 1. now #8pc9a0uci4 add + 2. now #c5i2vql0hi reset-root #c5i2vql0hi + 3. now #a16i2glj04 add + 4. now #c5i2vql0hi reset-root #c5i2vql0hi + 5. now #88srvru2o0 add + 6. 1 secs ago #c5i2vql0hi builtins.mergeio 7. 
#sg60bvjo91 history starts here Tip: Use `diff.namespace 1 7` to compare namespaces between @@ -241,17 +241,20 @@ f x = let Regression test for https://github.com/unisonweb/unison/issues/2224 ```unison -f : [a] -> a +f : [()] -> () f xs = match xs with x +: (x' +: rest) -> x + _ -> () -g : [a] -> a +g : [()] -> () g xs = match xs with - (rest :+ x') :+ x -> x + (rest :+ x') :+ x -> () + _ -> () -h : [[a]] -> a +h : [[()]] -> () h xs = match xs with (rest :+ (rest' :+ x)) -> x + _ -> () ``` ```ucm @@ -259,9 +262,9 @@ h xs = match xs with ⍟ I've added these definitions: - f : [a] -> a - g : [a] -> a - h : [[a]] -> a + f : [()] -> () + g : [()] -> () + h : [[()]] -> () .> edit f g @@ -270,11 +273,15 @@ h xs = match xs with I added these definitions to the top of /Users/runar/work/unison/scratch.u - f : [a] -> a - f = cases x +: (x' +: rest) -> x + f : [()] -> () + f = cases + x +: (x' +: rest) -> x + _ -> () - g : [a] -> a - g = cases rest :+ x' :+ x -> x + g : [()] -> () + g = cases + rest :+ x' :+ x -> () + _ -> () You can edit them there, then do `update` to replace the definitions currently in this namespace. @@ -285,22 +292,22 @@ h xs = match xs with most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #l7cnk7raag .old` to make an old namespace + `fork #c5i2vql0hi .old` to make an old namespace accessible again, - `reset-root #l7cnk7raag` to reset the root namespace and + `reset-root #c5i2vql0hi` to reset the root namespace and its history to that of the specified namespace. When Root Hash Action - 1. now #4skv4f38cf add - 2. now #l7cnk7raag reset-root #l7cnk7raag - 3. now #obak7jnhcv add - 4. now #l7cnk7raag reset-root #l7cnk7raag - 5. now #7a6vnmv5c9 add - 6. now #l7cnk7raag reset-root #l7cnk7raag - 7. now #pdrl1ktsa0 add - 8. 1 secs ago #l7cnk7raag builtins.mergeio + 1. now #psi40d6du2 add + 2. now #c5i2vql0hi reset-root #c5i2vql0hi + 3. now #8pc9a0uci4 add + 4. now #c5i2vql0hi reset-root #c5i2vql0hi + 5. now #a16i2glj04 add + 6. now #c5i2vql0hi reset-root #c5i2vql0hi + 7. now #88srvru2o0 add + 8. 1 secs ago #c5i2vql0hi builtins.mergeio 9. #sg60bvjo91 history starts here Tip: Use `diff.namespace 1 7` to compare namespaces between @@ -320,8 +327,8 @@ h xs = match xs with ⍟ These new definitions are ok to `add`: - f : [a] -> a - g : [a] -> a + f : [()] -> () + g : [()] -> () ``` ## Type application inserts necessary parens @@ -369,24 +376,24 @@ foo n _ = n most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #l7cnk7raag .old` to make an old namespace + `fork #c5i2vql0hi .old` to make an old namespace accessible again, - `reset-root #l7cnk7raag` to reset the root namespace and + `reset-root #c5i2vql0hi` to reset the root namespace and its history to that of the specified namespace. When Root Hash Action - 1. now #fdnrhfkoot add - 2. now #l7cnk7raag reset-root #l7cnk7raag - 3. now #4skv4f38cf add - 4. now #l7cnk7raag reset-root #l7cnk7raag - 5. now #obak7jnhcv add - 6. now #l7cnk7raag reset-root #l7cnk7raag - 7. now #7a6vnmv5c9 add - 8. now #l7cnk7raag reset-root #l7cnk7raag - 9. now #pdrl1ktsa0 add - 10. 1 secs ago #l7cnk7raag builtins.mergeio + 1. now #9i8g6b1m8k add + 2. now #c5i2vql0hi reset-root #c5i2vql0hi + 3. now #psi40d6du2 add + 4. now #c5i2vql0hi reset-root #c5i2vql0hi + 5. now #8pc9a0uci4 add + 6. now #c5i2vql0hi reset-root #c5i2vql0hi + 7. now #a16i2glj04 add + 8. now #c5i2vql0hi reset-root #c5i2vql0hi + 9. now #88srvru2o0 add + 10. 1 secs ago #c5i2vql0hi builtins.mergeio 11. 
#sg60bvjo91 history starts here Tip: Use `diff.namespace 1 7` to compare namespaces between @@ -452,26 +459,26 @@ foo = most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #l7cnk7raag .old` to make an old namespace + `fork #c5i2vql0hi .old` to make an old namespace accessible again, - `reset-root #l7cnk7raag` to reset the root namespace and + `reset-root #c5i2vql0hi` to reset the root namespace and its history to that of the specified namespace. When Root Hash Action - 1. now #dblf9f7ggq add - 2. now #l7cnk7raag reset-root #l7cnk7raag - 3. now #fdnrhfkoot add - 4. now #l7cnk7raag reset-root #l7cnk7raag - 5. now #4skv4f38cf add - 6. now #l7cnk7raag reset-root #l7cnk7raag - 7. now #obak7jnhcv add - 8. now #l7cnk7raag reset-root #l7cnk7raag - 9. now #7a6vnmv5c9 add - 10. now #l7cnk7raag reset-root #l7cnk7raag - 11. now #pdrl1ktsa0 add - 12. 1 secs ago #l7cnk7raag builtins.mergeio + 1. now #mqg8tqk7i6 add + 2. now #c5i2vql0hi reset-root #c5i2vql0hi + 3. now #9i8g6b1m8k add + 4. now #c5i2vql0hi reset-root #c5i2vql0hi + 5. now #psi40d6du2 add + 6. now #c5i2vql0hi reset-root #c5i2vql0hi + 7. now #8pc9a0uci4 add + 8. now #c5i2vql0hi reset-root #c5i2vql0hi + 9. now #a16i2glj04 add + 10. now #c5i2vql0hi reset-root #c5i2vql0hi + 11. now #88srvru2o0 add + 12. 1 secs ago #c5i2vql0hi builtins.mergeio 13. #sg60bvjo91 history starts here Tip: Use `diff.namespace 1 7` to compare namespaces between @@ -875,7 +882,7 @@ broken tvar = (cases Some _ -> "oh boy isn't this a very very very very very very very long string?" - None -> "")) + None -> "")) tvarmodify : tvar -> fun -> () tvarmodify tvar fun = () @@ -909,6 +916,7 @@ broken tvar = ```unison broken = cases Some loooooooooooooooooooooooooooooooooooooooooooooooooooooooong | loooooooooooooooooooooooooooooooooooooooooooooooooooooooong == 1 -> () + _ -> () ``` ```ucm @@ -928,9 +936,10 @@ broken = cases broken : Optional Nat -> () broken = cases Some - loooooooooooooooooooooooooooooooooooooooooooooooooooooooong | loooooooooooooooooooooooooooooooooooooooooooooooooooooooong - == 1 -> + loooooooooooooooooooooooooooooooooooooooooooooooooooooooong| loooooooooooooooooooooooooooooooooooooooooooooooooooooooong + == 1 -> () + _ -> () You can edit them there, then do `update` to replace the definitions currently in this namespace. 
@@ -968,6 +977,7 @@ foo = let lijaefliejalfijelfj == aefilaeifhlei -> 0 SomethingUnusuallyLong lijaefliejalfijelfj aefilaeifhlei liaehjffeafijij | lijaefliejalfijelfj == liaehjffeafijij -> 1 + _ -> 2 go (SomethingUnusuallyLong "one" "two" "three") ``` @@ -992,11 +1002,13 @@ foo = let foo : 'Nat foo = go x = - '(match (a -> a) x with + do + match (a -> a) x with SomethingUnusuallyLong - lijaefliejalfijelfj aefilaeifhlei liaehjffeafijij - | lijaefliejalfijelfj == aefilaeifhlei -> 0 - | lijaefliejalfijelfj == liaehjffeafijij -> 1) + lijaefliejalfijelfj aefilaeifhlei liaehjffeafijij + | lijaefliejalfijelfj == aefilaeifhlei -> 0 + | lijaefliejalfijelfj == liaehjffeafijij -> 1 + _ -> 2 go (SomethingUnusuallyLong "one" "two" "three") You can edit them there, then do `update` to replace the @@ -1375,7 +1387,7 @@ afun x f = f x roundtripLastLam = afun "foo" (n -> let - 1 + 1 + _ = 1 + 1 3 ) ``` @@ -1402,7 +1414,7 @@ roundtripLastLam = roundtripLastLam : Nat roundtripLastLam = afun "foo" do - 1 + 1 + _ = 1 + 1 3 You can edit them there, then do `update` to replace the @@ -1529,3 +1541,219 @@ foo = cases foo : Nat -> Nat -> Nat ``` +# Multi-line lambda let + +Regression test for #3110 and #3801 + +```unison +foreach x f = + _ = List.map f x + () + +ignore x = () + +test1 : () +test1 = + foreach [1, 2, 3] let x -> let + y = Nat.increment x + () + +test2 = foreach [1, 2, 3] let x -> ignore (Nat.increment x) + +test3 = foreach [1, 2, 3] do x -> do + y = Nat.increment x + () +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + foreach : [a] -> (a ->{e} t) ->{e} () + ignore : x -> () + test1 : () + test2 : () + test3 : () + +.> edit test1 test2 test3 foreach ignore + + ☝️ + + I added these definitions to the top of + /Users/runar/work/unison/scratch.u + + foreach : [a] -> (a ->{e} t) ->{e} () + foreach x f = + _ = List.map f x + () + + ignore : x -> () + ignore x = () + + test1 : () + test1 = + foreach + [1, 2, 3] (x -> let + y = Nat.increment x + ()) + + test2 : () + test2 = foreach [1, 2, 3] (x -> ignore (Nat.increment x)) + + test3 : () + test3 = + foreach + [1, 2, 3] '(x -> do + y = Nat.increment x + ()) + + You can edit them there, then do `update` to replace the + definitions currently in this namespace. + +.> undo + + Here are the changes I undid + + Added definitions: + + 1. foreach : [a] -> (a ->{e} t) ->{e} () + 2. ignore : x -> () + 3. test1 : () + 4. test2 : () + 5. test3 : () + +``` +```ucm +.> load scratch.u + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + foreach : [a] -> (a ->{e} t) ->{e} () + ignore : x -> () + test1 : () + test2 : () + test3 : () + +``` +# Destructuring bind in delay or lambda + +Regression test for https://github.com/unisonweb/unison/issues/3710 + +```unison +d1 = do + (a,b) = (1,2) + (c,d) = (3,4) + (e,f) = (5,6) + (a,b,c,d,e,f) + +d2 = let + (a,b) = (1,2) + (c,d) = (3,4) + (e,f) = (5,6) + (a,b,c,d,e,f) + +d3 x = let + (a,b) = (1,x) + (c,d) = (3,4) + (e,f) = (5,6) + (a,b,c,d,e,f) + +d4 x = do + (a,b) = (1,x) + (c,d) = (3,4) + (e,f) = (5,6) + (a,b,c,d,e,f) + +d5 x = match x with + Some x -> x + None -> bug "oops" +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + d1 : '(Nat, Nat, Nat, Nat, Nat, Nat) + d2 : (Nat, Nat, Nat, Nat, Nat, Nat) + d3 : x -> (Nat, x, Nat, Nat, Nat, Nat) + d4 : x -> () -> (Nat, x, Nat, Nat, Nat, Nat) + d5 : Optional a -> a + +.> edit d1 d2 d3 d4 d5 + + ☝️ + + I added these definitions to the top of + /Users/runar/work/unison/scratch.u + + d1 : '(Nat, Nat, Nat, Nat, Nat, Nat) + d1 = do + (a, b) = (1, 2) + (c, d) = (3, 4) + (e, f) = (5, 6) + (a, b, c, d, e, f) + + d2 : (Nat, Nat, Nat, Nat, Nat, Nat) + d2 = + (a, b) = (1, 2) + (c, d) = (3, 4) + (e, f) = (5, 6) + (a, b, c, d, e, f) + + d3 : x -> (Nat, x, Nat, Nat, Nat, Nat) + d3 x = + (a, b) = (1, x) + (c, d) = (3, 4) + (e, f) = (5, 6) + (a, b, c, d, e, f) + + d4 : x -> () -> (Nat, x, Nat, Nat, Nat, Nat) + d4 x = do + (a, b) = (1, x) + (c, d) = (3, 4) + (e, f) = (5, 6) + (a, b, c, d, e, f) + + d5 : Optional a -> a + d5 = cases + Some x -> x + None -> bug "oops" + + You can edit them there, then do `update` to replace the + definitions currently in this namespace. + +.> undo + + Here are the changes I undid + + Added definitions: + + 1. d1 : '(Nat, Nat, Nat, Nat, Nat, Nat) + 2. d2 : (Nat, Nat, Nat, Nat, Nat, Nat) + 3. d3 : x -> (Nat, x, Nat, Nat, Nat, Nat) + 4. d4 : x -> () -> (Nat, x, Nat, Nat, Nat, Nat) + 5. d5 : Optional a -> a + +``` +```ucm +.> load scratch.u + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + d1 : '(Nat, Nat, Nat, Nat, Nat, Nat) + d2 : (Nat, Nat, Nat, Nat, Nat, Nat) + d3 : x -> (Nat, x, Nat, Nat, Nat, Nat) + d4 : x -> '(Nat, x, Nat, Nat, Nat, Nat) + d5 : Optional a -> a + +``` diff --git a/unison-src/transcripts-using-base/all-base-hashes.md b/unison-src/transcripts-using-base/all-base-hashes.md new file mode 100644 index 000000000..d7050cb77 --- /dev/null +++ b/unison-src/transcripts-using-base/all-base-hashes.md @@ -0,0 +1,5 @@ +This transcript is intended to make visible accidental changes to the hashing algorithm. + +```ucm +.> find.verbose +``` diff --git a/unison-src/transcripts-using-base/all-base-hashes.output.md b/unison-src/transcripts-using-base/all-base-hashes.output.md new file mode 100644 index 000000000..5761891ba --- /dev/null +++ b/unison-src/transcripts-using-base/all-base-hashes.output.md @@ -0,0 +1,2821 @@ +This transcript is intended to make visible accidental changes to the hashing algorithm. + +```ucm +.> find.verbose + + 1. -- #0moj6a30dak6suns8qefi93ljsl8j59ha9bpcu8qbmh49rbobik4bn9m4rre8sm7pprsistr9o5m4a667i82gs089eur75ua8geldj8 + ascii : Text -> Bytes + + 2. -- #8gr403lgh2aq58nj3tfgc6bek1hqlv3he0hge0nlperojc99tdjg2cfdbu0vko5a4mp7jqt08npk4fa8grbbdu9fn47s0qmp1oa0aqo + autoCleaned : '{IO, TempDirs} r ->{IO, Exception} r + + 3. 
-- #lf7arjlkufc078rdqh1653j7ft4tt59b1deugc513p21kep0kj1cp7q7v43o6u8csoi172pt27i2mtu2unsv1au7g0ue52s5pmamjk0 + autoCleaned.handler : '{IO} (Request {TempDirs} r + ->{IO, Exception} r) + + 4. -- #p1lh7ba73bdka6e18v9hmkrj811f2qsrogjumssr36drdjfgrfd6e6qotaqksb3o4lk44274fbcaqnj8e41rh0pjn56cbvsb3gv0880 + bContains : [(a, b)] -> a -> Boolean + + 5. -- #ck9knej1qu6jt87i9qun1btc67kvthamubpl658se75po13oe2vto97jck965ibs6uspmopt3097lqd1pmqnegmba8e2bbfgtre9ovo + bInsert : [(a, b)] -> a -> b -> [(a, b)] + + 6. -- #k1ge36n1ouvbaqjamm69pu7uuul0qc842tgqufm0ncaeb5h7im3r3vh9b7vikdu896p3f9ep2tf0dec3ifnrbr197k9lucl733rjpc0 + bSearch : [(a, b)] -> a -> Optional b + + 7. -- #0nmd69uoat0mr9tl1917mdb16cpvd2q8oev5uj9s5d6virfcolc4t7js3l9do0c6c26tj7mvd82fpcumjr513bfudelnunvbok317fo + bSort : [(a, b)] -> [(a, b)] + + 8. -- #re1saul4fhhdmg0b91ssrp3ok604nitpsler5dijra4eutcugsinmqvh9om3ah7qmcj1da57h3git1fn032lj8qs9kpqj3ujfs2ueig + bSplit : [(a, b)] -> a -> ([(a, b)], [(a, b)]) + + 9. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0 + unique type builtin.ANSI.Color + + 10. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#0 + builtin.ANSI.Color.Black : Color + + 11. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#4 + builtin.ANSI.Color.Blue : Color + + 12. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#8 + builtin.ANSI.Color.BrightBlack : Color + + 13. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#12 + builtin.ANSI.Color.BrightBlue : Color + + 14. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#14 + builtin.ANSI.Color.BrightCyan : Color + + 15. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#10 + builtin.ANSI.Color.BrightGreen : Color + + 16. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#13 + builtin.ANSI.Color.BrightMagenta : Color + + 17. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#9 + builtin.ANSI.Color.BrightRed : Color + + 18. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#15 + builtin.ANSI.Color.BrightWhite : Color + + 19. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#11 + builtin.ANSI.Color.BrightYellow : Color + + 20. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#6 + builtin.ANSI.Color.Cyan : Color + + 21. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#2 + builtin.ANSI.Color.Green : Color + + 22. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#5 + builtin.ANSI.Color.Magenta : Color + + 23. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#1 + builtin.ANSI.Color.Red : Color + + 24. -- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#7 + builtin.ANSI.Color.White : Color + + 25. 
-- #1j3e8vsn97qrprjr69ls6llab601sdh577uuvtu8pafmngf59suakbjr7asheadidcj3red140fnmdagsv9ihhdar1mc05ig28jtfr0#3 + builtin.ANSI.Color.Yellow : Color + + 26. -- ##Any + builtin type builtin.Any + + 27. -- ##Any.Any + builtin.Any.Any : a -> Any + + 28. -- ##Any.unsafeExtract + builtin.Any.unsafeExtract : Any -> a + + 29. -- #345f3nptqq1c1ped6gq8578kb2bhp1jejnqborsn6fq59rpe1rren3ogia9o9u8oc339vll953inma8pocc686ooknaitud8i5m27vg + unique type builtin.Author + + 30. -- #345f3nptqq1c1ped6gq8578kb2bhp1jejnqborsn6fq59rpe1rren3ogia9o9u8oc339vll953inma8pocc686ooknaitud8i5m27vg#0 + builtin.Author.Author : GUID -> Text -> Author + + 31. -- #09po4pftofof2dl6r5ddr5ucjmbiktvg139pjueica5ncrmbq6irin52tm83mu02r59dbktv7d550ik53bbgvue1qvdbvses746s0f0 + builtin.Author.guid : Author -> GUID + + 32. -- #adci09ncqjm7nle4v7irv1skla5q05glhsf2ld2b6cbre33ej7hbvf64deng8o5edv16vm0cat18ehj384gk4u9il7g6v4e7spgisuo + builtin.Author.guid.modify : (GUID ->{g} GUID) + -> Author + ->{g} Author + + 33. -- #5grcsob7l7dh440he3dibln7m628t4lregtv718vsb33mehvs72muugusuuajc8m804659h0e989dnks2adqr1vb8fnd912854t6nv0 + builtin.Author.guid.set : GUID -> Author -> Author + + 34. -- #vm19g9c23c3g186nkmreodqckso1belvfeb3bhbnfe4kfpjnq4bbo91le6bndfq5761eovt7rind30sp74fs1eqo44ukqmr96ggk1jg + builtin.Author.name : Author -> Text + + 35. -- #9udtng9ee5kq8bkq0fte4a4oknjl5h7tg8i6olebmgvat864n9q5k8cf7kpmtbhpdi9js0egpihprgt22v949bff4523h39noopeepo + builtin.Author.name.modify : (Text ->{g} Text) + -> Author + ->{g} Author + + 36. -- #v7l3vi93crov1681dom6fv17825dpf8rd1q4lpjdr6bn9ltsfliiertoju3rftvdubhn3n8lpf7vtfrmo3p9v47n5in98dq1aosnmq0 + builtin.Author.name.set : Text -> Author -> Author + + 37. -- ##Boolean + builtin type builtin.Boolean + + 38. -- ##Boolean.not + builtin.Boolean.not : Boolean -> Boolean + + 39. -- ##bug + builtin.bug : a -> b + + 40. -- ##Bytes + builtin type builtin.Bytes + + 41. -- ##Bytes.++ + builtin.Bytes.++ : Bytes -> Bytes -> Bytes + + 42. -- ##Bytes.at + builtin.Bytes.at : Nat -> Bytes -> Optional Nat + + 43. -- ##Bytes.decodeNat16be + builtin.Bytes.decodeNat16be : Bytes + -> Optional (Nat, Bytes) + + 44. -- ##Bytes.decodeNat16le + builtin.Bytes.decodeNat16le : Bytes + -> Optional (Nat, Bytes) + + 45. -- ##Bytes.decodeNat32be + builtin.Bytes.decodeNat32be : Bytes + -> Optional (Nat, Bytes) + + 46. -- ##Bytes.decodeNat32le + builtin.Bytes.decodeNat32le : Bytes + -> Optional (Nat, Bytes) + + 47. -- ##Bytes.decodeNat64be + builtin.Bytes.decodeNat64be : Bytes + -> Optional (Nat, Bytes) + + 48. -- ##Bytes.decodeNat64le + builtin.Bytes.decodeNat64le : Bytes + -> Optional (Nat, Bytes) + + 49. -- ##Bytes.drop + builtin.Bytes.drop : Nat -> Bytes -> Bytes + + 50. -- ##Bytes.empty + builtin.Bytes.empty : Bytes + + 51. -- ##Bytes.encodeNat16be + builtin.Bytes.encodeNat16be : Nat -> Bytes + + 52. -- ##Bytes.encodeNat16le + builtin.Bytes.encodeNat16le : Nat -> Bytes + + 53. -- ##Bytes.encodeNat32be + builtin.Bytes.encodeNat32be : Nat -> Bytes + + 54. -- ##Bytes.encodeNat32le + builtin.Bytes.encodeNat32le : Nat -> Bytes + + 55. -- ##Bytes.encodeNat64be + builtin.Bytes.encodeNat64be : Nat -> Bytes + + 56. -- ##Bytes.encodeNat64le + builtin.Bytes.encodeNat64le : Nat -> Bytes + + 57. -- ##Bytes.flatten + builtin.Bytes.flatten : Bytes -> Bytes + + 58. -- ##Bytes.fromBase16 + builtin.Bytes.fromBase16 : Bytes -> Either Text Bytes + + 59. -- ##Bytes.fromBase32 + builtin.Bytes.fromBase32 : Bytes -> Either Text Bytes + + 60. -- ##Bytes.fromBase64 + builtin.Bytes.fromBase64 : Bytes -> Either Text Bytes + + 61. 
-- ##Bytes.fromBase64UrlUnpadded + builtin.Bytes.fromBase64UrlUnpadded : Bytes + -> Either Text Bytes + + 62. -- ##Bytes.fromList + builtin.Bytes.fromList : [Nat] -> Bytes + + 63. -- ##Bytes.gzip.compress + builtin.Bytes.gzip.compress : Bytes -> Bytes + + 64. -- ##Bytes.gzip.decompress + builtin.Bytes.gzip.decompress : Bytes + -> Either Text Bytes + + 65. -- ##Bytes.size + builtin.Bytes.size : Bytes -> Nat + + 66. -- ##Bytes.take + builtin.Bytes.take : Nat -> Bytes -> Bytes + + 67. -- ##Bytes.toBase16 + builtin.Bytes.toBase16 : Bytes -> Bytes + + 68. -- ##Bytes.toBase32 + builtin.Bytes.toBase32 : Bytes -> Bytes + + 69. -- ##Bytes.toBase64 + builtin.Bytes.toBase64 : Bytes -> Bytes + + 70. -- ##Bytes.toBase64UrlUnpadded + builtin.Bytes.toBase64UrlUnpadded : Bytes -> Bytes + + 71. -- ##Bytes.toList + builtin.Bytes.toList : Bytes -> [Nat] + + 72. -- ##Bytes.zlib.compress + builtin.Bytes.zlib.compress : Bytes -> Bytes + + 73. -- ##Bytes.zlib.decompress + builtin.Bytes.zlib.decompress : Bytes + -> Either Text Bytes + + 74. -- ##Char + builtin type builtin.Char + + 75. -- ##Char.Class + builtin type builtin.Char.Class + + 76. -- ##Char.Class.alphanumeric + builtin.Char.Class.alphanumeric : Class + + 77. -- ##Char.Class.and + builtin.Char.Class.and : Class -> Class -> Class + + 78. -- ##Char.Class.any + builtin.Char.Class.any : Class + + 79. -- ##Char.Class.anyOf + builtin.Char.Class.anyOf : [Char] -> Class + + 80. -- ##Char.Class.control + builtin.Char.Class.control : Class + + 81. -- ##Char.Class.is + builtin.Char.Class.is : Class -> Char -> Boolean + + 82. -- ##Char.Class.letter + builtin.Char.Class.letter : Class + + 83. -- ##Char.Class.lower + builtin.Char.Class.lower : Class + + 84. -- ##Char.Class.mark + builtin.Char.Class.mark : Class + + 85. -- ##Char.Class.not + builtin.Char.Class.not : Class -> Class + + 86. -- ##Char.Class.number + builtin.Char.Class.number : Class + + 87. -- ##Char.Class.or + builtin.Char.Class.or : Class -> Class -> Class + + 88. -- ##Char.Class.printable + builtin.Char.Class.printable : Class + + 89. -- ##Char.Class.punctuation + builtin.Char.Class.punctuation : Class + + 90. -- ##Char.Class.range + builtin.Char.Class.range : Char -> Char -> Class + + 91. -- ##Char.Class.separator + builtin.Char.Class.separator : Class + + 92. -- ##Char.Class.symbol + builtin.Char.Class.symbol : Class + + 93. -- ##Char.Class.upper + builtin.Char.Class.upper : Class + + 94. -- ##Char.Class.whitespace + builtin.Char.Class.whitespace : Class + + 95. -- ##Char.fromNat + builtin.Char.fromNat : Nat -> Char + + 96. -- ##Char.toNat + builtin.Char.toNat : Char -> Nat + + 97. -- ##Char.toText + builtin.Char.toText : Char -> Text + + 98. -- ##Code + builtin type builtin.Code + + 99. -- ##Code.cache_ + builtin.Code.cache_ : [(Link.Term, Code)] + ->{IO} [Link.Term] + + 100. -- ##Code.dependencies + builtin.Code.dependencies : Code -> [Link.Term] + + 101. -- ##Code.deserialize + builtin.Code.deserialize : Bytes -> Either Text Code + + 102. -- ##Code.display + builtin.Code.display : Text -> Code -> Text + + 103. -- ##Code.isMissing + builtin.Code.isMissing : Link.Term ->{IO} Boolean + + 104. -- ##Code.lookup + builtin.Code.lookup : Link.Term ->{IO} Optional Code + + 105. -- ##Code.serialize + builtin.Code.serialize : Code -> Bytes + + 106. -- ##Code.validate + builtin.Code.validate : [(Link.Term, Code)] + ->{IO} Optional Failure + + 107. -- #ldqsht5qvddaabskcad3idka4nqkv6lfncrp0s0o4rqbbnk1qvq269bueu7qobhvaf7gpluqtpn9bgp9u69jsntv0u6o4qtbktnfrs0 + unique type builtin.ConsoleText + + 108. 
-- #ldqsht5qvddaabskcad3idka4nqkv6lfncrp0s0o4rqbbnk1qvq269bueu7qobhvaf7gpluqtpn9bgp9u69jsntv0u6o4qtbktnfrs0#5 + builtin.ConsoleText.Background : Color + -> ConsoleText + -> ConsoleText + + 109. -- #ldqsht5qvddaabskcad3idka4nqkv6lfncrp0s0o4rqbbnk1qvq269bueu7qobhvaf7gpluqtpn9bgp9u69jsntv0u6o4qtbktnfrs0#0 + builtin.ConsoleText.Bold : ConsoleText -> ConsoleText + + 110. -- #ldqsht5qvddaabskcad3idka4nqkv6lfncrp0s0o4rqbbnk1qvq269bueu7qobhvaf7gpluqtpn9bgp9u69jsntv0u6o4qtbktnfrs0#4 + builtin.ConsoleText.Foreground : Color + -> ConsoleText + -> ConsoleText + + 111. -- #ldqsht5qvddaabskcad3idka4nqkv6lfncrp0s0o4rqbbnk1qvq269bueu7qobhvaf7gpluqtpn9bgp9u69jsntv0u6o4qtbktnfrs0#2 + builtin.ConsoleText.Invert : ConsoleText -> ConsoleText + + 112. -- #ldqsht5qvddaabskcad3idka4nqkv6lfncrp0s0o4rqbbnk1qvq269bueu7qobhvaf7gpluqtpn9bgp9u69jsntv0u6o4qtbktnfrs0#3 + builtin.ConsoleText.Plain : Text -> ConsoleText + + 113. -- #ldqsht5qvddaabskcad3idka4nqkv6lfncrp0s0o4rqbbnk1qvq269bueu7qobhvaf7gpluqtpn9bgp9u69jsntv0u6o4qtbktnfrs0#1 + builtin.ConsoleText.Underline : ConsoleText + -> ConsoleText + + 114. -- #pgornst1pqaea8qmf8ckbtvrm7f6hn49djhffgebajmo12faf4jku63ftc9fp0r4k58e0qcdi77g08f34b2ihvsu97s48du6mfn7vko + unique type builtin.CopyrightHolder + + 115. -- #pgornst1pqaea8qmf8ckbtvrm7f6hn49djhffgebajmo12faf4jku63ftc9fp0r4k58e0qcdi77g08f34b2ihvsu97s48du6mfn7vko#0 + builtin.CopyrightHolder.CopyrightHolder : GUID + -> Text + -> CopyrightHolder + + 116. -- #9jpkv5bb0d680ffs4f2j4lntj54m1iq9kaei8foqv5973i04jq9fugbn9msmpeiorjh4umhdeak625u53hejkvkm3buruj33msd1p6g + builtin.CopyrightHolder.guid : CopyrightHolder -> GUID + + 117. -- #6fhjsi02lnhvotndl6ufqnnsv20f3b9b4eg45n0rgo96m8f21dpqe5erb2dtn9nhdlp028vkock07r0foqune3jojhcrnmti9srsmdg + builtin.CopyrightHolder.guid.modify : (GUID ->{g} GUID) + -> CopyrightHolder + ->{g} CopyrightHolder + + 118. -- #1lk04okan4prc9kkh7julshv5l2q331pa5tf5f0ghm7ob5vkep3t6dnqejc8aju4i2vob6b5seliccer3a1kmtq4481i36alivhgdr0 + builtin.CopyrightHolder.guid.set : GUID + -> CopyrightHolder + -> CopyrightHolder + + 119. -- #u1k741o71gg743tr5o7fc3joeqdm14qkd58cf2h2tmkpejr2uj3qhclvugqsgoighd7o4ijlrp17i6iadgsuhhhb56vi4j22i6c2lbo + builtin.CopyrightHolder.name : CopyrightHolder -> Text + + 120. -- #3845ei99ci6p7dh3bcsctodd0otjtsntik5n0q7fpafo3s7v45a8nl7mk6ae7qot87jr9p4q3857tm4jtvmkb4s3rtn77t7goaphmf8 + builtin.CopyrightHolder.name.modify : (Text ->{g} Text) + -> CopyrightHolder + ->{g} CopyrightHolder + + 121. -- #2ehufgpsgnd2jq0i1topsir6dvv2m132dp2phs2bncnm6n9qrf7oaod6pbmvs9muihlq9dckpnughb3pajrmit7chdr67qco6tsd8j0 + builtin.CopyrightHolder.name.set : Text + -> CopyrightHolder + -> CopyrightHolder + + 122. -- ##crypto.hash + builtin.crypto.hash : HashAlgorithm -> a -> Bytes + + 123. -- ##crypto.HashAlgorithm + builtin type builtin.crypto.HashAlgorithm + + 124. -- ##crypto.HashAlgorithm.Blake2b_256 + builtin.crypto.HashAlgorithm.Blake2b_256 : HashAlgorithm + + 125. -- ##crypto.HashAlgorithm.Blake2b_512 + builtin.crypto.HashAlgorithm.Blake2b_512 : HashAlgorithm + + 126. -- ##crypto.HashAlgorithm.Blake2s_256 + builtin.crypto.HashAlgorithm.Blake2s_256 : HashAlgorithm + + 127. -- ##crypto.HashAlgorithm.Md5 + builtin.crypto.HashAlgorithm.Md5 : HashAlgorithm + + 128. -- ##crypto.HashAlgorithm.Sha1 + builtin.crypto.HashAlgorithm.Sha1 : HashAlgorithm + + 129. -- ##crypto.HashAlgorithm.Sha2_256 + builtin.crypto.HashAlgorithm.Sha2_256 : HashAlgorithm + + 130. -- ##crypto.HashAlgorithm.Sha2_512 + builtin.crypto.HashAlgorithm.Sha2_512 : HashAlgorithm + + 131. 
-- ##crypto.HashAlgorithm.Sha3_256 + builtin.crypto.HashAlgorithm.Sha3_256 : HashAlgorithm + + 132. -- ##crypto.HashAlgorithm.Sha3_512 + builtin.crypto.HashAlgorithm.Sha3_512 : HashAlgorithm + + 133. -- ##crypto.hashBytes + builtin.crypto.hashBytes : HashAlgorithm + -> Bytes + -> Bytes + + 134. -- ##crypto.hmac + builtin.crypto.hmac : HashAlgorithm + -> Bytes + -> a + -> Bytes + + 135. -- ##crypto.hmacBytes + builtin.crypto.hmacBytes : HashAlgorithm + -> Bytes + -> Bytes + -> Bytes + + 136. -- ##Debug.toText + builtin.Debug.toText : a -> Optional (Either Text Text) + + 137. -- ##Debug.trace + builtin.Debug.trace : Text -> a -> () + + 138. -- ##Debug.watch + builtin.Debug.watch : Text -> a -> a + + 139. -- #p65rcethk26an850aaaceojremfu054hqllhoip1mt9s22584j9r62o08qo9t0pri7ssgu9m7f0rfp4nujhulgbmo41tkgl182quhd8 + unique type builtin.Doc + + 140. -- #baiqeiovdrs4ju0grn5q5akq64k4kuhgifqno52smkkttqg31jkgm3qa9o3ohe54fgpiigd1tj0an7rfveopfg622sjj9v9g44n27go + builtin.Doc.++ : Doc2 -> Doc2 -> Doc2 + + 141. -- #p65rcethk26an850aaaceojremfu054hqllhoip1mt9s22584j9r62o08qo9t0pri7ssgu9m7f0rfp4nujhulgbmo41tkgl182quhd8#0 + builtin.Doc.Blob : Text -> Doc + + 142. -- #p65rcethk26an850aaaceojremfu054hqllhoip1mt9s22584j9r62o08qo9t0pri7ssgu9m7f0rfp4nujhulgbmo41tkgl182quhd8#4 + builtin.Doc.Evaluate : Link.Term -> Doc + + 143. -- #p65rcethk26an850aaaceojremfu054hqllhoip1mt9s22584j9r62o08qo9t0pri7ssgu9m7f0rfp4nujhulgbmo41tkgl182quhd8#5 + builtin.Doc.Join : [Doc] -> Doc + + 144. -- #p65rcethk26an850aaaceojremfu054hqllhoip1mt9s22584j9r62o08qo9t0pri7ssgu9m7f0rfp4nujhulgbmo41tkgl182quhd8#1 + builtin.Doc.Link : Link -> Doc + + 145. -- #p65rcethk26an850aaaceojremfu054hqllhoip1mt9s22584j9r62o08qo9t0pri7ssgu9m7f0rfp4nujhulgbmo41tkgl182quhd8#3 + builtin.Doc.Signature : Link.Term -> Doc + + 146. -- #p65rcethk26an850aaaceojremfu054hqllhoip1mt9s22584j9r62o08qo9t0pri7ssgu9m7f0rfp4nujhulgbmo41tkgl182quhd8#2 + builtin.Doc.Source : Link -> Doc + + 147. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0 + unique type builtin.Doc2 + + 148. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#27 + builtin.Doc2.Anchor : Text -> Doc2 -> Doc2 + + 149. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#11 + builtin.Doc2.Aside : Doc2 -> Doc2 + + 150. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#15 + builtin.Doc2.Blankline : Doc2 + + 151. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#10 + builtin.Doc2.Blockquote : Doc2 -> Doc2 + + 152. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#7 + builtin.Doc2.Bold : Doc2 -> Doc2 + + 153. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#21 + builtin.Doc2.BulletedList : [Doc2] -> Doc2 + + 154. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#3 + builtin.Doc2.Callout : Optional Doc2 -> Doc2 -> Doc2 + + 155. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#6 + builtin.Doc2.Code : Doc2 -> Doc2 + + 156. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#25 + builtin.Doc2.CodeBlock : Text -> Doc2 -> Doc2 + + 157. 
-- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#24 + builtin.Doc2.Column : [Doc2] -> Doc2 + + 158. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#0 + builtin.Doc2.Folded : Boolean -> Doc2 -> Doc2 -> Doc2 + + 159. -- #h3gajooii4tsdseghcbcsq4qq7c33mtb71u5npg35b06mgv7v654g0n55gpq212umfmq7nvi11o28m1v13r5fto5g8ium3ee4qk1i68 + unique type builtin.Doc2.FrontMatter + + 160. -- #h3gajooii4tsdseghcbcsq4qq7c33mtb71u5npg35b06mgv7v654g0n55gpq212umfmq7nvi11o28m1v13r5fto5g8ium3ee4qk1i68#0 + builtin.Doc2.FrontMatter.FrontMatter : [(Text, Text)] + -> FrontMatter + + 161. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#12 + builtin.Doc2.Group : Doc2 -> Doc2 + + 162. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#14 + builtin.Doc2.Image : Doc2 + -> Doc2 + -> Optional Doc2 + -> Doc2 + + 163. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#8 + builtin.Doc2.Italic : Doc2 -> Doc2 + + 164. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#22 + builtin.Doc2.Join : [Doc2] -> Doc2 + + 165. -- #lpf7g5c2ct61mci2okedmug8o0i2j0rhpealc05r2musapmn15cina6dsqdvis234evvb2bo09l2p8v5qhh0me7gi1j37nqqp47qvto + unique type builtin.Doc2.LaTeXInline + + 166. -- #lpf7g5c2ct61mci2okedmug8o0i2j0rhpealc05r2musapmn15cina6dsqdvis234evvb2bo09l2p8v5qhh0me7gi1j37nqqp47qvto#0 + builtin.Doc2.LaTeXInline.LaTeXInline : Text + -> LaTeXInline + + 167. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#16 + builtin.Doc2.Linebreak : Doc2 + + 168. -- #ut0tds116gr0soc9p6nroaalqlq423u1mao3p4jjultjmok3vbck69la7rs26duptji5v5hscijpek4hotu4krbfah8np3sntr87gb0 + unique type builtin.Doc2.MediaSource + + 169. -- #ut0tds116gr0soc9p6nroaalqlq423u1mao3p4jjultjmok3vbck69la7rs26duptji5v5hscijpek4hotu4krbfah8np3sntr87gb0#0 + builtin.Doc2.MediaSource.MediaSource : Text + -> Optional Text + -> MediaSource + + 170. -- #f7s1m2rs7ldj4idrcirtdqohsmc6n719e6cdqtgrhdkcrbm7971uvug6mvkrcc32qhdpo1og4oqin4rbmb2346m47ni24k5m3bpp3so + builtin.Doc2.MediaSource.mimeType : MediaSource + -> Optional Text + + 171. -- #rncdj545f93f7nfrneabp6jlrjag766vr2n18al8u2a78ju5v746agg62r4ob8u6ue8eeac6nbg8apeii6qfasgfv2q2ap3h4sk1tdg + builtin.Doc2.MediaSource.mimeType.modify : (Optional Text + ->{g} Optional Text) + -> MediaSource + ->{g} MediaSource + + 172. -- #54dl203thl9540r2jec546pishtg1b1ecb8vl6rqlbgf4h2rk04mrkdkqo4be82m8d3t2d0ef3gidjsn2r9u8ko7c9kvtavbqflim88 + builtin.Doc2.MediaSource.mimeType.set : Optional Text + -> MediaSource + -> MediaSource + + 173. -- #77l9vc6k6miu7pobamoasrpdm455ddgprgvfpg2di6liigijg70f4t3ppmpbs3j12kp93eep7u0e5r1bdq0niou0v85lo4aa5kek8mg + builtin.Doc2.MediaSource.sourceUrl : MediaSource -> Text + + 174. -- #laoh1nhllsb9vf0reilmbmjutdei2b0vs0vse1s8j148imfi1m9uu4l17iqdt9r5575dap8jnlq6r48kdn6ob70iroso75erqfc74e0 + builtin.Doc2.MediaSource.sourceUrl.modify : (Text + ->{g} Text) + -> MediaSource + ->{g} MediaSource + + 175. -- #eb0dl30fc5k80vb0fna187vmag5ta1rgik40s1shlkng8stvvkt2gglecit8ajjd8vmfrtg8ki8ft3ife8rrqlcoit5161ekg6vhcfo + builtin.Doc2.MediaSource.sourceUrl.set : Text + -> MediaSource + -> MediaSource + + 176. 
-- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#2 + builtin.Doc2.NamedLink : Doc2 -> Doc2 -> Doc2 + + 177. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#4 + builtin.Doc2.NumberedList : Nat -> [Doc2] -> Doc2 + + 178. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#20 + builtin.Doc2.Paragraph : [Doc2] -> Doc2 + + 179. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#13 + builtin.Doc2.Section : Doc2 -> [Doc2] -> Doc2 + + 180. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#17 + builtin.Doc2.SectionBreak : Doc2 + + 181. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#5 + builtin.Doc2.Special : SpecialForm -> Doc2 + + 182. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0 + unique type builtin.Doc2.SpecialForm + + 183. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#4 + builtin.Doc2.SpecialForm.Embed : Any -> SpecialForm + + 184. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#5 + builtin.Doc2.SpecialForm.EmbedInline : Any -> SpecialForm + + 185. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#9 + builtin.Doc2.SpecialForm.Eval : Doc2.Term -> SpecialForm + + 186. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#10 + builtin.Doc2.SpecialForm.EvalInline : Doc2.Term + -> SpecialForm + + 187. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#0 + builtin.Doc2.SpecialForm.Example : Nat + -> Doc2.Term + -> SpecialForm + + 188. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#1 + builtin.Doc2.SpecialForm.ExampleBlock : Nat + -> Doc2.Term + -> SpecialForm + + 189. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#7 + builtin.Doc2.SpecialForm.FoldedSource : [( Either + Type Doc2.Term, + [Doc2.Term])] + -> SpecialForm + + 190. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#3 + builtin.Doc2.SpecialForm.Link : Either Type Doc2.Term + -> SpecialForm + + 191. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#2 + builtin.Doc2.SpecialForm.Signature : [Doc2.Term] + -> SpecialForm + + 192. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#8 + builtin.Doc2.SpecialForm.SignatureInline : Doc2.Term + -> SpecialForm + + 193. -- #e46kdnv67raqhc4m3jnitkh3o9seq3q5mtlqnvobjlqnnd2tk7nui54b6grui7eql62fne4fo3ndetmeb23oj5es85habha5f6saoi0#6 + builtin.Doc2.SpecialForm.Source : [( Either + Type Doc2.Term, + [Doc2.Term])] + -> SpecialForm + + 194. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#9 + builtin.Doc2.Strikethrough : Doc2 -> Doc2 + + 195. 
-- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#26 + builtin.Doc2.Style : Text -> Doc2 -> Doc2 + + 196. -- #sv2cta4p4th10h7tpurvr0t6s3cbahlevvmpadk01v32e39kse8aicdvfsm2dbk6ltc68ht788jvkfhk6ol2mch7eubngtug019e8fg + unique type builtin.Doc2.Svg + + 197. -- #sv2cta4p4th10h7tpurvr0t6s3cbahlevvmpadk01v32e39kse8aicdvfsm2dbk6ltc68ht788jvkfhk6ol2mch7eubngtug019e8fg#0 + builtin.Doc2.Svg.Svg : Text -> Svg + + 198. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#18 + builtin.Doc2.Table : [[Doc2]] -> Doc2 + + 199. -- #s0an21vospbdlsbddiskuvt3ngbf00n78sip2o1mnp4jgp16i7sursbm14bf8ap7osphqbis2lduep3i29b7diu8sf03f8tlqd7rgcg + unique type builtin.Doc2.Term + + 200. -- #tu2du1k0lrp6iddor1aotdhdgn1j2b86r22tes3o3hka0bv4b4otlbimj88ttrdnbuacokk768k4e54795of8gnosopjirl4jm42g28 + builtin.Doc2.term : '{g} a -> Doc2.Term + + 201. -- #s0an21vospbdlsbddiskuvt3ngbf00n78sip2o1mnp4jgp16i7sursbm14bf8ap7osphqbis2lduep3i29b7diu8sf03f8tlqd7rgcg#0 + builtin.Doc2.Term.Term : Any -> Doc2.Term + + 202. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#1 + builtin.Doc2.Tooltip : Doc2 -> Doc2 -> Doc2 + + 203. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#23 + builtin.Doc2.UntitledSection : [Doc2] -> Doc2 + + 204. -- #794fndq1941e2khqv5uh7fmk9es2g4fkp8pr48objgs6blc1pqsdt2ab4o79noril2l7s70iu2eimn1smpd8t40j4g18btian8a2pt0 + unique type builtin.Doc2.Video + + 205. -- #46er7fsgre91rer0mpk6vhaa2vie19i0piubvtnfmt3vq7odcjfr6tlf0mc57q4jnij9rkolpekjd6dpqdotn41guk9lp9qioa88m58 + builtin.Doc2.Video.config : Video -> [(Text, Text)] + + 206. -- #vld47vp37855gceko81jj00j5t0mf5p137ub57094585aq3jfevq0ob03fot9d73p97r2pj0alel9e6a7lqcc7mue0ogefshg991e6g + builtin.Doc2.Video.config.modify : ([(Text, Text)] + ->{g} [(Text, Text)]) + -> Video + ->{g} Video + + 207. -- #ll9hiqi1s63ragrv9ul3ouu2rvpjkok4gdmgqs6cl8j4fgdmqlgikc5lseoe94e9fvrughjfetlcsn7gc5ed8prtnljfo5j6r1vveq8 + builtin.Doc2.Video.config.set : [(Text, Text)] + -> Video + -> Video + + 208. -- #a454aldsi00l8kh10bhi6d4phtdr9ht0es6apr05jert6oo4vstm5cdr4ee2k0srted1urqgvkrcoihjvmus6tph92v628f3lr9b92o + builtin.Doc2.Video.sources : Video -> [MediaSource] + + 209. -- #nm77894uq9g3kv5mo7ubuptpimt53jml7jt825lr83gu41tqcfpg2krcesn7p5aaea107su7brg2gm8vn1l0mabpfnpbcdi4onlatvo + builtin.Doc2.Video.sources.modify : ([MediaSource] + ->{g} [MediaSource]) + -> Video + ->{g} Video + + 210. -- #5r0bgv3t666s4lh274mvtk13jqu1doc26ki2k8t2rpophrq2hjran1qodeobf3trlnniarjehr1rgl6scn6mhqpmcokdafja3b54jt0 + builtin.Doc2.Video.sources.set : [MediaSource] + -> Video + -> Video + + 211. -- #794fndq1941e2khqv5uh7fmk9es2g4fkp8pr48objgs6blc1pqsdt2ab4o79noril2l7s70iu2eimn1smpd8t40j4g18btian8a2pt0#0 + builtin.Doc2.Video.Video : [MediaSource] + -> [(Text, Text)] + -> Video + + 212. -- #ej86si0ur1lsjade71dojr25phk9bbom9rdks6dltolos5tjivakujcriqe02npba53n9gd7tkh8bmv08ttjb9t35lq2ch5heshqcs0#19 + builtin.Doc2.Word : Text -> Doc2 + + 213. -- #0o7mf021foma9acqdaibmlh1jidlijq08uf7f5se9tssttqs546pfunjpk6s31mqoq8s2o1natede8hkk6he45l95fibglidikt44v8 + structural type builtin.Either a b + + 214. -- #0o7mf021foma9acqdaibmlh1jidlijq08uf7f5se9tssttqs546pfunjpk6s31mqoq8s2o1natede8hkk6he45l95fibglidikt44v8#1 + builtin.Either.Left : a -> Either a b + + 215. 
-- #u3cen22u7p8dfj0nc45j0pg4lskqjjisflm3jq0957756d23lq53tf27vg37g6jnddh8o70grvotcvrfc1fnpog0rlfsvfvjrk1s94g + builtin.Either.mapRight : (a ->{g} b) + -> Either e a + ->{g} Either e b + + 216. -- #0o7mf021foma9acqdaibmlh1jidlijq08uf7f5se9tssttqs546pfunjpk6s31mqoq8s2o1natede8hkk6he45l95fibglidikt44v8#0 + builtin.Either.Right : b -> Either a b + + 217. -- #4n0fgs00hpsj3paqnm9bfm4nbt9cbrin3hl88i992m9tjiq1ik7eq72asu4hcg885uti36tbnj5rudt56eahhnut1nobofg86pk1bng + structural ability builtin.Exception + structural ability Exception + + 218. -- #4n0fgs00hpsj3paqnm9bfm4nbt9cbrin3hl88i992m9tjiq1ik7eq72asu4hcg885uti36tbnj5rudt56eahhnut1nobofg86pk1bng#0 + builtin.Exception.raise, + Exception.raise : Failure + ->{Exception} x + + 219. -- ##Float + builtin type builtin.Float + + 220. -- ##Float.* + builtin.Float.* : Float -> Float -> Float + + 221. -- ##Float.+ + builtin.Float.+ : Float -> Float -> Float + + 222. -- ##Float.- + builtin.Float.- : Float -> Float -> Float + + 223. -- ##Float./ + builtin.Float./ : Float -> Float -> Float + + 224. -- ##Float.abs + builtin.Float.abs : Float -> Float + + 225. -- ##Float.acos + builtin.Float.acos : Float -> Float + + 226. -- ##Float.acosh + builtin.Float.acosh : Float -> Float + + 227. -- ##Float.asin + builtin.Float.asin : Float -> Float + + 228. -- ##Float.asinh + builtin.Float.asinh : Float -> Float + + 229. -- ##Float.atan + builtin.Float.atan : Float -> Float + + 230. -- ##Float.atan2 + builtin.Float.atan2 : Float -> Float -> Float + + 231. -- ##Float.atanh + builtin.Float.atanh : Float -> Float + + 232. -- ##Float.ceiling + builtin.Float.ceiling : Float -> Int + + 233. -- ##Float.cos + builtin.Float.cos : Float -> Float + + 234. -- ##Float.cosh + builtin.Float.cosh : Float -> Float + + 235. -- ##Float.== + builtin.Float.eq : Float -> Float -> Boolean + + 236. -- ##Float.exp + builtin.Float.exp : Float -> Float + + 237. -- ##Float.floor + builtin.Float.floor : Float -> Int + + 238. -- ##Float.fromRepresentation + builtin.Float.fromRepresentation : Nat -> Float + + 239. -- ##Float.fromText + builtin.Float.fromText : Text -> Optional Float + + 240. -- ##Float.> + builtin.Float.gt : Float -> Float -> Boolean + + 241. -- ##Float.>= + builtin.Float.gteq : Float -> Float -> Boolean + + 242. -- ##Float.log + builtin.Float.log : Float -> Float + + 243. -- ##Float.logBase + builtin.Float.logBase : Float -> Float -> Float + + 244. -- ##Float.< + builtin.Float.lt : Float -> Float -> Boolean + + 245. -- ##Float.<= + builtin.Float.lteq : Float -> Float -> Boolean + + 246. -- ##Float.max + builtin.Float.max : Float -> Float -> Float + + 247. -- ##Float.min + builtin.Float.min : Float -> Float -> Float + + 248. -- ##Float.pow + builtin.Float.pow : Float -> Float -> Float + + 249. -- ##Float.round + builtin.Float.round : Float -> Int + + 250. -- ##Float.sin + builtin.Float.sin : Float -> Float + + 251. -- ##Float.sinh + builtin.Float.sinh : Float -> Float + + 252. -- ##Float.sqrt + builtin.Float.sqrt : Float -> Float + + 253. -- ##Float.tan + builtin.Float.tan : Float -> Float + + 254. -- ##Float.tanh + builtin.Float.tanh : Float -> Float + + 255. -- ##Float.toRepresentation + builtin.Float.toRepresentation : Float -> Nat + + 256. -- ##Float.toText + builtin.Float.toText : Float -> Text + + 257. -- ##Float.truncate + builtin.Float.truncate : Float -> Int + + 258. -- #hqectlr3gt02r6r984b3627eg5bq3d82lab5q18e3ql09u1ka8dblf5k50ae0q0d8gk87udqd7b6767q86gogdt8ghpdiq77gk6blr8 + unique type builtin.GUID + + 259. 
-- #hqectlr3gt02r6r984b3627eg5bq3d82lab5q18e3ql09u1ka8dblf5k50ae0q0d8gk87udqd7b6767q86gogdt8ghpdiq77gk6blr8#0 + builtin.GUID.GUID : Bytes -> GUID + + 260. -- ##Handle.toText + builtin.Handle.toText : Handle -> Text + + 261. -- ##ImmutableArray + builtin type builtin.ImmutableArray + + 262. -- ##ImmutableArray.copyTo! + builtin.ImmutableArray.copyTo! : MutableArray g a + -> Nat + -> ImmutableArray a + -> Nat + -> Nat + ->{g, Exception} () + + 263. -- ##ImmutableArray.read + builtin.ImmutableArray.read : ImmutableArray a + -> Nat + ->{Exception} a + + 264. -- ##ImmutableArray.size + builtin.ImmutableArray.size : ImmutableArray a -> Nat + + 265. -- ##ImmutableByteArray + builtin type builtin.ImmutableByteArray + + 266. -- ##ImmutableByteArray.copyTo! + builtin.ImmutableByteArray.copyTo! : MutableByteArray g + -> Nat + -> ImmutableByteArray + -> Nat + -> Nat + ->{g, Exception} () + + 267. -- ##ImmutableByteArray.read16be + builtin.ImmutableByteArray.read16be : ImmutableByteArray + -> Nat + ->{Exception} Nat + + 268. -- ##ImmutableByteArray.read24be + builtin.ImmutableByteArray.read24be : ImmutableByteArray + -> Nat + ->{Exception} Nat + + 269. -- ##ImmutableByteArray.read32be + builtin.ImmutableByteArray.read32be : ImmutableByteArray + -> Nat + ->{Exception} Nat + + 270. -- ##ImmutableByteArray.read40be + builtin.ImmutableByteArray.read40be : ImmutableByteArray + -> Nat + ->{Exception} Nat + + 271. -- ##ImmutableByteArray.read64be + builtin.ImmutableByteArray.read64be : ImmutableByteArray + -> Nat + ->{Exception} Nat + + 272. -- ##ImmutableByteArray.read8 + builtin.ImmutableByteArray.read8 : ImmutableByteArray + -> Nat + ->{Exception} Nat + + 273. -- ##ImmutableByteArray.size + builtin.ImmutableByteArray.size : ImmutableByteArray + -> Nat + + 274. -- ##Int + builtin type builtin.Int + + 275. -- ##Int.* + builtin.Int.* : Int -> Int -> Int + + 276. -- ##Int.+ + builtin.Int.+ : Int -> Int -> Int + + 277. -- ##Int.- + builtin.Int.- : Int -> Int -> Int + + 278. -- ##Int./ + builtin.Int./ : Int -> Int -> Int + + 279. -- ##Int.and + builtin.Int.and : Int -> Int -> Int + + 280. -- ##Int.complement + builtin.Int.complement : Int -> Int + + 281. -- ##Int.== + builtin.Int.eq : Int -> Int -> Boolean + + 282. -- ##Int.fromRepresentation + builtin.Int.fromRepresentation : Nat -> Int + + 283. -- ##Int.fromText + builtin.Int.fromText : Text -> Optional Int + + 284. -- ##Int.> + builtin.Int.gt : Int -> Int -> Boolean + + 285. -- ##Int.>= + builtin.Int.gteq : Int -> Int -> Boolean + + 286. -- ##Int.increment + builtin.Int.increment : Int -> Int + + 287. -- ##Int.isEven + builtin.Int.isEven : Int -> Boolean + + 288. -- ##Int.isOdd + builtin.Int.isOdd : Int -> Boolean + + 289. -- ##Int.leadingZeros + builtin.Int.leadingZeros : Int -> Nat + + 290. -- ##Int.< + builtin.Int.lt : Int -> Int -> Boolean + + 291. -- ##Int.<= + builtin.Int.lteq : Int -> Int -> Boolean + + 292. -- ##Int.mod + builtin.Int.mod : Int -> Int -> Int + + 293. -- ##Int.negate + builtin.Int.negate : Int -> Int + + 294. -- ##Int.or + builtin.Int.or : Int -> Int -> Int + + 295. -- ##Int.popCount + builtin.Int.popCount : Int -> Nat + + 296. -- ##Int.pow + builtin.Int.pow : Int -> Nat -> Int + + 297. -- ##Int.shiftLeft + builtin.Int.shiftLeft : Int -> Nat -> Int + + 298. -- ##Int.shiftRight + builtin.Int.shiftRight : Int -> Nat -> Int + + 299. -- ##Int.signum + builtin.Int.signum : Int -> Int + + 300. -- ##Int.toFloat + builtin.Int.toFloat : Int -> Float + + 301. -- ##Int.toRepresentation + builtin.Int.toRepresentation : Int -> Nat + + 302. 
-- ##Int.toText + builtin.Int.toText : Int -> Text + + 303. -- ##Int.trailingZeros + builtin.Int.trailingZeros : Int -> Nat + + 304. -- ##Int.truncate0 + builtin.Int.truncate0 : Int -> Nat + + 305. -- ##Int.xor + builtin.Int.xor : Int -> Int -> Int + + 306. -- #s6ijmhqkkaus51chjgahogc7sdrqj9t66i599le2k7ts6fkl216f997hbses3mqk6a21vaj3cm1mertbldn0g503jt522vfo4rfv720 + unique type builtin.io2.ArithmeticFailure + + 307. -- #6dtvam7msqc64dimm8p0d8ehdf0330o4qbd2fdafb11jj1c2rg4ke3jdcmbgo6s4pf2jgm0vb76jeavv4ba6ht71t74p963a1miekag + unique type builtin.io2.ArrayFailure + + 308. -- #dc6n5ebu839ik3b6ohmnqm6p0cifn7o94em1g41mjp4ae0gmv3b4rupba499lbasfrp4bqce9u4hd6518unlbg8vk993c0q6rigos98 + unique type builtin.io2.BufferMode + + 309. -- #dc6n5ebu839ik3b6ohmnqm6p0cifn7o94em1g41mjp4ae0gmv3b4rupba499lbasfrp4bqce9u4hd6518unlbg8vk993c0q6rigos98#2 + builtin.io2.BufferMode.BlockBuffering : BufferMode + + 310. -- #dc6n5ebu839ik3b6ohmnqm6p0cifn7o94em1g41mjp4ae0gmv3b4rupba499lbasfrp4bqce9u4hd6518unlbg8vk993c0q6rigos98#1 + builtin.io2.BufferMode.LineBuffering : BufferMode + + 311. -- #dc6n5ebu839ik3b6ohmnqm6p0cifn7o94em1g41mjp4ae0gmv3b4rupba499lbasfrp4bqce9u4hd6518unlbg8vk993c0q6rigos98#0 + builtin.io2.BufferMode.NoBuffering : BufferMode + + 312. -- #dc6n5ebu839ik3b6ohmnqm6p0cifn7o94em1g41mjp4ae0gmv3b4rupba499lbasfrp4bqce9u4hd6518unlbg8vk993c0q6rigos98#3 + builtin.io2.BufferMode.SizedBlockBuffering : Nat + -> BufferMode + + 313. -- ##Clock.internals.monotonic.v1 + builtin.io2.Clock.internals.monotonic : '{IO} Either + Failure TimeSpec + + 314. -- ##Clock.internals.nsec.v1 + builtin.io2.Clock.internals.nsec : TimeSpec -> Nat + + 315. -- ##Clock.internals.processCPUTime.v1 + builtin.io2.Clock.internals.processCPUTime : '{IO} Either + Failure TimeSpec + + 316. -- ##Clock.internals.realtime.v1 + builtin.io2.Clock.internals.realtime : '{IO} Either + Failure TimeSpec + + 317. -- ##Clock.internals.sec.v1 + builtin.io2.Clock.internals.sec : TimeSpec -> Int + + 318. -- ##Clock.internals.threadCPUTime.v1 + builtin.io2.Clock.internals.threadCPUTime : '{IO} Either + Failure TimeSpec + + 319. -- ##TimeSpec + builtin type builtin.io2.Clock.internals.TimeSpec + + 320. -- #r29dja8j9dmjjp45trccchaata8eo1h6d6haar1eai74pq1jt4m7u3ldhlq79f7phfo57eq4bau39vqotl2h63k7ff1m5sj5o9ajuf8 + unique type builtin.io2.Failure + + 321. -- #r29dja8j9dmjjp45trccchaata8eo1h6d6haar1eai74pq1jt4m7u3ldhlq79f7phfo57eq4bau39vqotl2h63k7ff1m5sj5o9ajuf8#0 + builtin.io2.Failure.Failure : Type + -> Text + -> Any + -> Failure + + 322. -- #jhnlob35huv3rr7jg6aa4gtd8okhprla7gvlq8io429qita8vj7k696n9jvp4b8ct9i2pc1jodb8ap2bipqtgp138epdgfcth7vqvt8 + unique type builtin.io2.FileMode + + 323. -- #jhnlob35huv3rr7jg6aa4gtd8okhprla7gvlq8io429qita8vj7k696n9jvp4b8ct9i2pc1jodb8ap2bipqtgp138epdgfcth7vqvt8#2 + builtin.io2.FileMode.Append : FileMode + + 324. -- #jhnlob35huv3rr7jg6aa4gtd8okhprla7gvlq8io429qita8vj7k696n9jvp4b8ct9i2pc1jodb8ap2bipqtgp138epdgfcth7vqvt8#0 + builtin.io2.FileMode.Read : FileMode + + 325. -- #jhnlob35huv3rr7jg6aa4gtd8okhprla7gvlq8io429qita8vj7k696n9jvp4b8ct9i2pc1jodb8ap2bipqtgp138epdgfcth7vqvt8#3 + builtin.io2.FileMode.ReadWrite : FileMode + + 326. -- #jhnlob35huv3rr7jg6aa4gtd8okhprla7gvlq8io429qita8vj7k696n9jvp4b8ct9i2pc1jodb8ap2bipqtgp138epdgfcth7vqvt8#1 + builtin.io2.FileMode.Write : FileMode + + 327. -- ##Handle + builtin type builtin.io2.Handle + + 328. -- ##IO + builtin type builtin.io2.IO + + 329. -- ##IO.array + builtin.io2.IO.array : Nat ->{IO} MutableArray {IO} a + + 330. 
-- ##IO.arrayOf + builtin.io2.IO.arrayOf : a + -> Nat + ->{IO} MutableArray {IO} a + + 331. -- ##IO.bytearray + builtin.io2.IO.bytearray : Nat + ->{IO} MutableByteArray {IO} + + 332. -- ##IO.bytearrayOf + builtin.io2.IO.bytearrayOf : Nat + -> Nat + ->{IO} MutableByteArray {IO} + + 333. -- ##IO.clientSocket.impl.v3 + builtin.io2.IO.clientSocket.impl : Text + -> Text + ->{IO} Either Failure Socket + + 334. -- ##IO.closeFile.impl.v3 + builtin.io2.IO.closeFile.impl : Handle + ->{IO} Either Failure () + + 335. -- ##IO.closeSocket.impl.v3 + builtin.io2.IO.closeSocket.impl : Socket + ->{IO} Either Failure () + + 336. -- ##IO.createDirectory.impl.v3 + builtin.io2.IO.createDirectory.impl : Text + ->{IO} Either Failure () + + 337. -- ##IO.createTempDirectory.impl.v3 + builtin.io2.IO.createTempDirectory.impl : Text + ->{IO} Either Failure Text + + 338. -- ##IO.delay.impl.v3 + builtin.io2.IO.delay.impl : Nat ->{IO} Either Failure () + + 339. -- ##IO.directoryContents.impl.v3 + builtin.io2.IO.directoryContents.impl : Text + ->{IO} Either Failure [Text] + + 340. -- ##IO.fileExists.impl.v3 + builtin.io2.IO.fileExists.impl : Text + ->{IO} Either Failure Boolean + + 341. -- ##IO.forkComp.v2 + builtin.io2.IO.forkComp : '{IO} a ->{IO} ThreadId + + 342. -- ##IO.getArgs.impl.v1 + builtin.io2.IO.getArgs.impl : '{IO} Either Failure [Text] + + 343. -- ##IO.getBuffering.impl.v3 + builtin.io2.IO.getBuffering.impl : Handle + ->{IO} Either Failure BufferMode + + 344. -- ##IO.getBytes.impl.v3 + builtin.io2.IO.getBytes.impl : Handle + -> Nat + ->{IO} Either Failure Bytes + + 345. -- ##IO.getChar.impl.v1 + builtin.io2.IO.getChar.impl : Handle + ->{IO} Either Failure Char + + 346. -- ##IO.getCurrentDirectory.impl.v3 + builtin.io2.IO.getCurrentDirectory.impl : '{IO} Either + Failure Text + + 347. -- ##IO.getEcho.impl.v1 + builtin.io2.IO.getEcho.impl : Handle + ->{IO} Either Failure Boolean + + 348. -- ##IO.getEnv.impl.v1 + builtin.io2.IO.getEnv.impl : Text + ->{IO} Either Failure Text + + 349. -- ##IO.getFileSize.impl.v3 + builtin.io2.IO.getFileSize.impl : Text + ->{IO} Either Failure Nat + + 350. -- ##IO.getFileTimestamp.impl.v3 + builtin.io2.IO.getFileTimestamp.impl : Text + ->{IO} Either Failure Nat + + 351. -- ##IO.getLine.impl.v1 + builtin.io2.IO.getLine.impl : Handle + ->{IO} Either Failure Text + + 352. -- ##IO.getSomeBytes.impl.v1 + builtin.io2.IO.getSomeBytes.impl : Handle + -> Nat + ->{IO} Either Failure Bytes + + 353. -- ##IO.getTempDirectory.impl.v3 + builtin.io2.IO.getTempDirectory.impl : '{IO} Either + Failure Text + + 354. -- ##IO.handlePosition.impl.v3 + builtin.io2.IO.handlePosition.impl : Handle + ->{IO} Either Failure Nat + + 355. -- ##IO.isDirectory.impl.v3 + builtin.io2.IO.isDirectory.impl : Text + ->{IO} Either Failure Boolean + + 356. -- ##IO.isFileEOF.impl.v3 + builtin.io2.IO.isFileEOF.impl : Handle + ->{IO} Either Failure Boolean + + 357. -- ##IO.isFileOpen.impl.v3 + builtin.io2.IO.isFileOpen.impl : Handle + ->{IO} Either Failure Boolean + + 358. -- ##IO.isSeekable.impl.v3 + builtin.io2.IO.isSeekable.impl : Handle + ->{IO} Either Failure Boolean + + 359. -- ##IO.kill.impl.v3 + builtin.io2.IO.kill.impl : ThreadId + ->{IO} Either Failure () + + 360. -- ##IO.listen.impl.v3 + builtin.io2.IO.listen.impl : Socket + ->{IO} Either Failure () + + 361. -- ##IO.openFile.impl.v3 + builtin.io2.IO.openFile.impl : Text + -> FileMode + ->{IO} Either Failure Handle + + 362. -- ##IO.process.call + builtin.io2.IO.process.call : Text -> [Text] ->{IO} Nat + + 363. 
-- ##IO.process.exitCode + builtin.io2.IO.process.exitCode : ProcessHandle + ->{IO} Optional Nat + + 364. -- ##IO.process.kill + builtin.io2.IO.process.kill : ProcessHandle ->{IO} () + + 365. -- ##IO.process.start + builtin.io2.IO.process.start : Text + -> [Text] + ->{IO} (Handle, Handle, Handle, ProcessHandle) + + 366. -- ##IO.process.wait + builtin.io2.IO.process.wait : ProcessHandle ->{IO} Nat + + 367. -- ##IO.putBytes.impl.v3 + builtin.io2.IO.putBytes.impl : Handle + -> Bytes + ->{IO} Either Failure () + + 368. -- ##IO.ready.impl.v1 + builtin.io2.IO.ready.impl : Handle + ->{IO} Either Failure Boolean + + 369. -- ##IO.ref + builtin.io2.IO.ref : a ->{IO} Ref {IO} a + + 370. -- ##IO.removeDirectory.impl.v3 + builtin.io2.IO.removeDirectory.impl : Text + ->{IO} Either Failure () + + 371. -- ##IO.removeFile.impl.v3 + builtin.io2.IO.removeFile.impl : Text + ->{IO} Either Failure () + + 372. -- ##IO.renameDirectory.impl.v3 + builtin.io2.IO.renameDirectory.impl : Text + -> Text + ->{IO} Either Failure () + + 373. -- ##IO.renameFile.impl.v3 + builtin.io2.IO.renameFile.impl : Text + -> Text + ->{IO} Either Failure () + + 374. -- ##IO.seekHandle.impl.v3 + builtin.io2.IO.seekHandle.impl : Handle + -> SeekMode + -> Int + ->{IO} Either Failure () + + 375. -- ##IO.serverSocket.impl.v3 + builtin.io2.IO.serverSocket.impl : Optional Text + -> Text + ->{IO} Either Failure Socket + + 376. -- ##IO.setBuffering.impl.v3 + builtin.io2.IO.setBuffering.impl : Handle + -> BufferMode + ->{IO} Either Failure () + + 377. -- ##IO.setCurrentDirectory.impl.v3 + builtin.io2.IO.setCurrentDirectory.impl : Text + ->{IO} Either Failure () + + 378. -- ##IO.setEcho.impl.v1 + builtin.io2.IO.setEcho.impl : Handle + -> Boolean + ->{IO} Either Failure () + + 379. -- ##IO.socketAccept.impl.v3 + builtin.io2.IO.socketAccept.impl : Socket + ->{IO} Either Failure Socket + + 380. -- ##IO.socketPort.impl.v3 + builtin.io2.IO.socketPort.impl : Socket + ->{IO} Either Failure Nat + + 381. -- ##IO.socketReceive.impl.v3 + builtin.io2.IO.socketReceive.impl : Socket + -> Nat + ->{IO} Either Failure Bytes + + 382. -- ##IO.socketSend.impl.v3 + builtin.io2.IO.socketSend.impl : Socket + -> Bytes + ->{IO} Either Failure () + + 383. -- ##IO.stdHandle + builtin.io2.IO.stdHandle : StdHandle -> Handle + + 384. -- ##IO.systemTime.impl.v3 + builtin.io2.IO.systemTime.impl : '{IO} Either Failure Nat + + 385. -- ##IO.systemTimeMicroseconds.v1 + builtin.io2.IO.systemTimeMicroseconds : '{IO} Int + + 386. -- ##IO.tryEval + builtin.io2.IO.tryEval : '{IO} a ->{IO, Exception} a + + 387. -- #h4smnou0l3fg4dn92g2r88j0imfvufjerkgbuscvvmaprv12l22nk6sff3c12edlikb2vfg3vfdj4b23a09q4lvtk75ckbe4lsmtuc0 + unique type builtin.io2.IOError + + 388. -- #h4smnou0l3fg4dn92g2r88j0imfvufjerkgbuscvvmaprv12l22nk6sff3c12edlikb2vfg3vfdj4b23a09q4lvtk75ckbe4lsmtuc0#0 + builtin.io2.IOError.AlreadyExists : IOError + + 389. -- #h4smnou0l3fg4dn92g2r88j0imfvufjerkgbuscvvmaprv12l22nk6sff3c12edlikb2vfg3vfdj4b23a09q4lvtk75ckbe4lsmtuc0#4 + builtin.io2.IOError.EOF : IOError + + 390. -- #h4smnou0l3fg4dn92g2r88j0imfvufjerkgbuscvvmaprv12l22nk6sff3c12edlikb2vfg3vfdj4b23a09q4lvtk75ckbe4lsmtuc0#5 + builtin.io2.IOError.IllegalOperation : IOError + + 391. -- #h4smnou0l3fg4dn92g2r88j0imfvufjerkgbuscvvmaprv12l22nk6sff3c12edlikb2vfg3vfdj4b23a09q4lvtk75ckbe4lsmtuc0#1 + builtin.io2.IOError.NoSuchThing : IOError + + 392. -- #h4smnou0l3fg4dn92g2r88j0imfvufjerkgbuscvvmaprv12l22nk6sff3c12edlikb2vfg3vfdj4b23a09q4lvtk75ckbe4lsmtuc0#6 + builtin.io2.IOError.PermissionDenied : IOError + + 393. 
-- #h4smnou0l3fg4dn92g2r88j0imfvufjerkgbuscvvmaprv12l22nk6sff3c12edlikb2vfg3vfdj4b23a09q4lvtk75ckbe4lsmtuc0#2 + builtin.io2.IOError.ResourceBusy : IOError + + 394. -- #h4smnou0l3fg4dn92g2r88j0imfvufjerkgbuscvvmaprv12l22nk6sff3c12edlikb2vfg3vfdj4b23a09q4lvtk75ckbe4lsmtuc0#3 + builtin.io2.IOError.ResourceExhausted : IOError + + 395. -- #h4smnou0l3fg4dn92g2r88j0imfvufjerkgbuscvvmaprv12l22nk6sff3c12edlikb2vfg3vfdj4b23a09q4lvtk75ckbe4lsmtuc0#7 + builtin.io2.IOError.UserError : IOError + + 396. -- #6ivk1e38hh0l9gcl8fn4mhf8bmak3qaji36vevg5e1n16ju5i4cl9u5gmqi7u16b907rd98gd60pouma892efbqt2ri58tmu99hp77g + unique type builtin.io2.IOFailure + + 397. -- #574pvphqahl981k517dtrqtq812m05h3hj6t2bt9sn3pknenfik1krscfdb6r66nf1sm7g3r1r56k0c6ob7vg4opfq4gihi8njbnhsg + unique type builtin.io2.MiscFailure + + 398. -- ##MVar + builtin type builtin.io2.MVar + + 399. -- ##MVar.isEmpty + builtin.io2.MVar.isEmpty : MVar a ->{IO} Boolean + + 400. -- ##MVar.new + builtin.io2.MVar.new : a ->{IO} MVar a + + 401. -- ##MVar.newEmpty.v2 + builtin.io2.MVar.newEmpty : '{IO} MVar a + + 402. -- ##MVar.put.impl.v3 + builtin.io2.MVar.put.impl : MVar a + -> a + ->{IO} Either Failure () + + 403. -- ##MVar.read.impl.v3 + builtin.io2.MVar.read.impl : MVar a + ->{IO} Either Failure a + + 404. -- ##MVar.swap.impl.v3 + builtin.io2.MVar.swap.impl : MVar a + -> a + ->{IO} Either Failure a + + 405. -- ##MVar.take.impl.v3 + builtin.io2.MVar.take.impl : MVar a + ->{IO} Either Failure a + + 406. -- ##MVar.tryPut.impl.v3 + builtin.io2.MVar.tryPut.impl : MVar a + -> a + ->{IO} Either Failure Boolean + + 407. -- ##MVar.tryRead.impl.v3 + builtin.io2.MVar.tryRead.impl : MVar a + ->{IO} Either Failure (Optional a) + + 408. -- ##MVar.tryTake + builtin.io2.MVar.tryTake : MVar a ->{IO} Optional a + + 409. -- ##ProcessHandle + builtin type builtin.io2.ProcessHandle + + 410. -- ##Promise + builtin type builtin.io2.Promise + + 411. -- ##Promise.new + builtin.io2.Promise.new : '{IO} Promise a + + 412. -- ##Promise.read + builtin.io2.Promise.read : Promise a ->{IO} a + + 413. -- ##Promise.tryRead + builtin.io2.Promise.tryRead : Promise a ->{IO} Optional a + + 414. -- ##Promise.write + builtin.io2.Promise.write : Promise a -> a ->{IO} Boolean + + 415. -- ##Ref.cas + builtin.io2.Ref.cas : Ref {IO} a + -> Ticket a + -> a + ->{IO} Boolean + + 416. -- ##Ref.readForCas + builtin.io2.Ref.readForCas : Ref {IO} a ->{IO} Ticket a + + 417. -- ##Ref.Ticket + builtin type builtin.io2.Ref.Ticket + + 418. -- ##Ref.Ticket.read + builtin.io2.Ref.Ticket.read : Ticket a -> a + + 419. -- #vph2eas3lf2gi259f3khlrspml3id2l8u0ru07kb5fd833h238jk4iauju0b6decth9i3nao5jkf5eej1e1kovgmu5tghhh8jq3i7p8 + unique type builtin.io2.RuntimeFailure + + 420. -- #1bca3hq98sfgr6a4onuon1tsda69cdjggq8pkmlsfola6492dbrih5up6dv18ptfbqeocm9q6parf64pj773p7p19qe76238o4trc40 + unique type builtin.io2.SeekMode + + 421. -- #1bca3hq98sfgr6a4onuon1tsda69cdjggq8pkmlsfola6492dbrih5up6dv18ptfbqeocm9q6parf64pj773p7p19qe76238o4trc40#0 + builtin.io2.SeekMode.AbsoluteSeek : SeekMode + + 422. -- #1bca3hq98sfgr6a4onuon1tsda69cdjggq8pkmlsfola6492dbrih5up6dv18ptfbqeocm9q6parf64pj773p7p19qe76238o4trc40#1 + builtin.io2.SeekMode.RelativeSeek : SeekMode + + 423. -- #1bca3hq98sfgr6a4onuon1tsda69cdjggq8pkmlsfola6492dbrih5up6dv18ptfbqeocm9q6parf64pj773p7p19qe76238o4trc40#2 + builtin.io2.SeekMode.SeekFromEnd : SeekMode + + 424. -- ##Socket + builtin type builtin.io2.Socket + + 425. 
-- #121tku5rfh21t247v1cakhd6ir44fakkqsm799rrfp5qcjdls4nvdu4r3nco80stdd86tdo2hhh0ulcpoaofnrnkjun04kqnfmjqio8 + unique type builtin.io2.StdHandle + + 426. -- #121tku5rfh21t247v1cakhd6ir44fakkqsm799rrfp5qcjdls4nvdu4r3nco80stdd86tdo2hhh0ulcpoaofnrnkjun04kqnfmjqio8#2 + builtin.io2.StdHandle.StdErr : StdHandle + + 427. -- #121tku5rfh21t247v1cakhd6ir44fakkqsm799rrfp5qcjdls4nvdu4r3nco80stdd86tdo2hhh0ulcpoaofnrnkjun04kqnfmjqio8#0 + builtin.io2.StdHandle.StdIn : StdHandle + + 428. -- #121tku5rfh21t247v1cakhd6ir44fakkqsm799rrfp5qcjdls4nvdu4r3nco80stdd86tdo2hhh0ulcpoaofnrnkjun04kqnfmjqio8#1 + builtin.io2.StdHandle.StdOut : StdHandle + + 429. -- ##STM + builtin type builtin.io2.STM + + 430. -- ##STM.atomically + builtin.io2.STM.atomically : '{STM} a ->{IO} a + + 431. -- ##STM.retry + builtin.io2.STM.retry : '{STM} a + + 432. -- #cggbdfff21ac5uedf4qvn4to83clinvhsovrila35u7f7e73g4l6hoj8pjmjnk713a8luhnn4bi1j9ai1nl0can1un66hvg230eog9g + unique type builtin.io2.STMFailure + + 433. -- ##ThreadId + builtin type builtin.io2.ThreadId + + 434. -- #ggh649864d9bfnk90n7kgtj7dflddc4kn8osu7u7mub8p7l8biid8dgtungj4u005h7karbgupfpum9jp94spks3ma1sgh39bhirv38 + unique type builtin.io2.ThreadKilledFailure + + 435. -- ##Tls + builtin type builtin.io2.Tls + + 436. -- ##Tls.Cipher + builtin type builtin.io2.Tls.Cipher + + 437. -- ##Tls.ClientConfig + builtin type builtin.io2.Tls.ClientConfig + + 438. -- ##Tls.ClientConfig.certificates.set + builtin.io2.Tls.ClientConfig.certificates.set : [SignedCert] + -> ClientConfig + -> ClientConfig + + 439. -- ##TLS.ClientConfig.ciphers.set + builtin.io2.TLS.ClientConfig.ciphers.set : [Cipher] + -> ClientConfig + -> ClientConfig + + 440. -- ##Tls.ClientConfig.default + builtin.io2.Tls.ClientConfig.default : Text + -> Bytes + -> ClientConfig + + 441. -- ##Tls.ClientConfig.versions.set + builtin.io2.Tls.ClientConfig.versions.set : [Version] + -> ClientConfig + -> ClientConfig + + 442. -- ##Tls.decodeCert.impl.v3 + builtin.io2.Tls.decodeCert.impl : Bytes + -> Either Failure SignedCert + + 443. -- ##Tls.decodePrivateKey + builtin.io2.Tls.decodePrivateKey : Bytes -> [PrivateKey] + + 444. -- ##Tls.encodeCert + builtin.io2.Tls.encodeCert : SignedCert -> Bytes + + 445. -- ##Tls.encodePrivateKey + builtin.io2.Tls.encodePrivateKey : PrivateKey -> Bytes + + 446. -- ##Tls.handshake.impl.v3 + builtin.io2.Tls.handshake.impl : Tls + ->{IO} Either Failure () + + 447. -- ##Tls.newClient.impl.v3 + builtin.io2.Tls.newClient.impl : ClientConfig + -> Socket + ->{IO} Either Failure Tls + + 448. -- ##Tls.newServer.impl.v3 + builtin.io2.Tls.newServer.impl : ServerConfig + -> Socket + ->{IO} Either Failure Tls + + 449. -- ##Tls.PrivateKey + builtin type builtin.io2.Tls.PrivateKey + + 450. -- ##Tls.receive.impl.v3 + builtin.io2.Tls.receive.impl : Tls + ->{IO} Either Failure Bytes + + 451. -- ##Tls.send.impl.v3 + builtin.io2.Tls.send.impl : Tls + -> Bytes + ->{IO} Either Failure () + + 452. -- ##Tls.ServerConfig + builtin type builtin.io2.Tls.ServerConfig + + 453. -- ##Tls.ServerConfig.certificates.set + builtin.io2.Tls.ServerConfig.certificates.set : [SignedCert] + -> ServerConfig + -> ServerConfig + + 454. -- ##Tls.ServerConfig.ciphers.set + builtin.io2.Tls.ServerConfig.ciphers.set : [Cipher] + -> ServerConfig + -> ServerConfig + + 455. -- ##Tls.ServerConfig.default + builtin.io2.Tls.ServerConfig.default : [SignedCert] + -> PrivateKey + -> ServerConfig + + 456. -- ##Tls.ServerConfig.versions.set + builtin.io2.Tls.ServerConfig.versions.set : [Version] + -> ServerConfig + -> ServerConfig + + 457. 
-- ##Tls.SignedCert + builtin type builtin.io2.Tls.SignedCert + + 458. -- ##Tls.terminate.impl.v3 + builtin.io2.Tls.terminate.impl : Tls + ->{IO} Either Failure () + + 459. -- ##Tls.Version + builtin type builtin.io2.Tls.Version + + 460. -- #r3gag1btclr8iclbdt68irgt8n1d1vf7agv5umke3dgdbl11acj6easav6gtihanrjnct18om07638rne9ej06u2bkv2v4l36knm2l0 + unique type builtin.io2.TlsFailure + + 461. -- ##TVar + builtin type builtin.io2.TVar + + 462. -- ##TVar.new + builtin.io2.TVar.new : a ->{STM} TVar a + + 463. -- ##TVar.newIO + builtin.io2.TVar.newIO : a ->{IO} TVar a + + 464. -- ##TVar.read + builtin.io2.TVar.read : TVar a ->{STM} a + + 465. -- ##TVar.readIO + builtin.io2.TVar.readIO : TVar a ->{IO} a + + 466. -- ##TVar.swap + builtin.io2.TVar.swap : TVar a -> a ->{STM} a + + 467. -- ##TVar.write + builtin.io2.TVar.write : TVar a -> a ->{STM} () + + 468. -- ##validateSandboxed + builtin.io2.validateSandboxed : [Link.Term] + -> a + -> Boolean + + 469. -- #c23jofurcegj93796o0karmkcm6baifupiuu1rtkniu74avn6a4r1n66ga5rml5di7easkgn4iak800u3tnb6kfisbrv6tcfgkb13a8 + unique type builtin.IsPropagated + + 470. -- #c23jofurcegj93796o0karmkcm6baifupiuu1rtkniu74avn6a4r1n66ga5rml5di7easkgn4iak800u3tnb6kfisbrv6tcfgkb13a8#0 + builtin.IsPropagated.IsPropagated : IsPropagated + + 471. -- #q6snodsh7i7u6k7gtqj73tt7nv6htjofs5f37vg2v3dsfk6hau71fs5mcv0hq3lqg111fsvoi92mngm08850aftfgh65uka9mhqvft0 + unique type builtin.IsTest + + 472. -- #q6snodsh7i7u6k7gtqj73tt7nv6htjofs5f37vg2v3dsfk6hau71fs5mcv0hq3lqg111fsvoi92mngm08850aftfgh65uka9mhqvft0#0 + builtin.IsTest.IsTest : IsTest + + 473. -- #68haromionghg6cvojngjrgc7t0ob658nkk8b20fpho6k6ltjtf6rfmr4ia1omige97hk34lu21qsj933vl1dkpbna7evbjfkh71r9g + unique type builtin.License + + 474. -- #knhl4mlkqf0mt877flahlbas2ufb7bub8f11vi9ihh9uf7r6jqaglk7rm6912q1vml50866ddl0qfa4o6d7o0gomchaoae24m0u2nk8 + builtin.License.copyrightHolders : License + -> [CopyrightHolder] + + 475. -- #ucpi54l843bf1osaejl1cnn0jt3o89fak5c0120k8256in3m80ik836hnite0osl12m91utnpnt5n7pgm3oe1rv4r1hk8ai4033agvo + builtin.License.copyrightHolders.modify : ([CopyrightHolder] + ->{g} [CopyrightHolder]) + -> License + ->{g} License + + 476. -- #9hbbfn61d2odn8jvtj5da9n1e9decsrheg6chg73uf94oituv3750b9hd6vp3ljhi54dkp5uqfg57j66i39bstfd8ivgav4p3si39ro + builtin.License.copyrightHolders.set : [CopyrightHolder] + -> License + -> License + + 477. -- #68haromionghg6cvojngjrgc7t0ob658nkk8b20fpho6k6ltjtf6rfmr4ia1omige97hk34lu21qsj933vl1dkpbna7evbjfkh71r9g#0 + builtin.License.License : [CopyrightHolder] + -> [Year] + -> LicenseType + -> License + + 478. -- #aqi4h1bfq2rjnrrfanf4nut8jd1elkkc00u1tn0rmt9ocsrds8i8pha7q9cihvbiq7edpg21iqnfornimae2gad0ab8ih0bksjnoi4g + builtin.License.licenseType : License -> LicenseType + + 479. -- #1rm8kpbv278t9tqj4jfssl8q3cn4hgu1mti7bp8lhcr5h7qmojujmt9de4c31p42to8mtav61u98oad3oen8q9im20sacs69psjpugo + builtin.License.licenseType.modify : (LicenseType + ->{g} LicenseType) + -> License + ->{g} License + + 480. -- #dv9jsg0ksrlp3g0uftvkutpa8matt039o7dhat9airnkto2b703mgoi5t412hdi95pdhp9g01luga13ihmp52nk6bgh788gts6elv2o + builtin.License.licenseType.set : LicenseType + -> License + -> License + + 481. -- #fh5qbeba2hg5c5k9uppi71rfghj8df37p4cg3hk23b9pv0hpm67ok807f05t368rn6v99v7kvf7cp984v8ipkjr1j1h095g6nd9jtig + builtin.License.years : License -> [Year] + + 482. -- #2samr066hti71pf0fkvb4niemm7j3amvaap3sk1dqpihqp9g8f8lknhhmjq9atai6j5kcs4huvfokvpm15ebefmfggr4hd2cetf7co0 + builtin.License.years.modify : ([Year] ->{g} [Year]) + -> License + ->{g} License + + 483. 
-- #g3ap8lg6974au4meb2hl49k1k6f048det9uckmics3bkt9s571921ksqfdsch63k2pk3fij8pn697svniakkrueddh8nkflnmjk9ffo + builtin.License.years.set : [Year] -> License -> License + + 484. -- #uj652rrb45urfnojgt1ssqoji7iiibu27uhrc1sfl68lm54hbr7r1dpgppsv0pvf0oile2uk2h2gn1h4vgng30fga66idihhen14qc0 + unique type builtin.LicenseType + + 485. -- #uj652rrb45urfnojgt1ssqoji7iiibu27uhrc1sfl68lm54hbr7r1dpgppsv0pvf0oile2uk2h2gn1h4vgng30fga66idihhen14qc0#0 + builtin.LicenseType.LicenseType : Doc -> LicenseType + + 486. -- #f4b37niu61dc517c32h3os36ig34fgnt7inaaoqdbecmscchthi14gdo0vj3eee1ru746ibvl9vnmm1pglrv3125qnhsbc0i1tqtic0 + unique type builtin.Link + + 487. -- ##Link.Term + builtin type builtin.Link.Term + + 488. -- #f4b37niu61dc517c32h3os36ig34fgnt7inaaoqdbecmscchthi14gdo0vj3eee1ru746ibvl9vnmm1pglrv3125qnhsbc0i1tqtic0#0 + builtin.Link.Term : Link.Term -> Link + + 489. -- ##Link.Term.toText + builtin.Link.Term.toText : Link.Term -> Text + + 490. -- ##Link.Type + builtin type builtin.Link.Type + + 491. -- #f4b37niu61dc517c32h3os36ig34fgnt7inaaoqdbecmscchthi14gdo0vj3eee1ru746ibvl9vnmm1pglrv3125qnhsbc0i1tqtic0#1 + builtin.Link.Type : Type -> Link + + 492. -- ##Sequence + builtin type builtin.List + + 493. -- ##List.++ + builtin.List.++ : [a] -> [a] -> [a] + + 494. -- ##List.cons + builtin.List.+:, builtin.List.cons : a -> [a] -> [a] + + 495. -- ##List.snoc + builtin.List.:+, builtin.List.snoc : [a] -> a -> [a] + + 496. -- ##List.at + builtin.List.at : Nat -> [a] -> Optional a + + 497. -- ##List.cons + builtin.List.cons, builtin.List.+: : a -> [a] -> [a] + + 498. -- ##List.drop + builtin.List.drop : Nat -> [a] -> [a] + + 499. -- ##List.empty + builtin.List.empty : [a] + + 500. -- #a8ia0nqfghkpj4dt0t5gsk96tsfv6kg1k2cf7d7sb83tkqosebfiib2bkhjq48tc2v8ld94gf9o3hvc42pf6j49q75k0br395qavli0 + builtin.List.map : (a ->{e} b) -> [a] ->{e} [b] + + 501. -- ##List.size + builtin.List.size : [a] -> Nat + + 502. -- ##List.snoc + builtin.List.snoc, builtin.List.:+ : [a] -> a -> [a] + + 503. -- ##List.take + builtin.List.take : Nat -> [a] -> [a] + + 504. -- #cb9e3iosob3e4q0v96ifmserg27samv1lvi4dh0l0l19phvct4vbbvv19abngneb77b02h8cefr1o3ad8gnm3cn6mjgsub97gjlte8g + builtin.metadata.isPropagated : IsPropagated + + 505. -- #lkpne3jg56pmqegv4jba6b5nnjg86qtfllnlmtvijql5lsf89rfu6tgb1s9ic0gsqs5si0v9agmj90lk0bhihbovd5o5ve023g4ocko + builtin.metadata.isTest : IsTest + + 506. -- ##MutableArray + builtin type builtin.MutableArray + + 507. -- ##MutableArray.copyTo! + builtin.MutableArray.copyTo! : MutableArray g a + -> Nat + -> MutableArray g a + -> Nat + -> Nat + ->{g, Exception} () + + 508. -- ##MutableArray.freeze + builtin.MutableArray.freeze : MutableArray g a + -> Nat + -> Nat + ->{g} ImmutableArray a + + 509. -- ##MutableArray.freeze! + builtin.MutableArray.freeze! : MutableArray g a + ->{g} ImmutableArray a + + 510. -- ##MutableArray.read + builtin.MutableArray.read : MutableArray g a + -> Nat + ->{g, Exception} a + + 511. -- ##MutableArray.size + builtin.MutableArray.size : MutableArray g a -> Nat + + 512. -- ##MutableArray.write + builtin.MutableArray.write : MutableArray g a + -> Nat + -> a + ->{g, Exception} () + + 513. -- ##MutableByteArray + builtin type builtin.MutableByteArray + + 514. -- ##MutableByteArray.copyTo! + builtin.MutableByteArray.copyTo! : MutableByteArray g + -> Nat + -> MutableByteArray g + -> Nat + -> Nat + ->{g, Exception} () + + 515. -- ##MutableByteArray.freeze + builtin.MutableByteArray.freeze : MutableByteArray g + -> Nat + -> Nat + ->{g} ImmutableByteArray + + 516. -- ##MutableByteArray.freeze! 
+ builtin.MutableByteArray.freeze! : MutableByteArray g + ->{g} ImmutableByteArray + + 517. -- ##MutableByteArray.read16be + builtin.MutableByteArray.read16be : MutableByteArray g + -> Nat + ->{g, Exception} Nat + + 518. -- ##MutableByteArray.read24be + builtin.MutableByteArray.read24be : MutableByteArray g + -> Nat + ->{g, Exception} Nat + + 519. -- ##MutableByteArray.read32be + builtin.MutableByteArray.read32be : MutableByteArray g + -> Nat + ->{g, Exception} Nat + + 520. -- ##MutableByteArray.read40be + builtin.MutableByteArray.read40be : MutableByteArray g + -> Nat + ->{g, Exception} Nat + + 521. -- ##MutableByteArray.read64be + builtin.MutableByteArray.read64be : MutableByteArray g + -> Nat + ->{g, Exception} Nat + + 522. -- ##MutableByteArray.read8 + builtin.MutableByteArray.read8 : MutableByteArray g + -> Nat + ->{g, Exception} Nat + + 523. -- ##MutableByteArray.size + builtin.MutableByteArray.size : MutableByteArray g -> Nat + + 524. -- ##MutableByteArray.write16be + builtin.MutableByteArray.write16be : MutableByteArray g + -> Nat + -> Nat + ->{g, Exception} () + + 525. -- ##MutableByteArray.write32be + builtin.MutableByteArray.write32be : MutableByteArray g + -> Nat + -> Nat + ->{g, Exception} () + + 526. -- ##MutableByteArray.write64be + builtin.MutableByteArray.write64be : MutableByteArray g + -> Nat + -> Nat + ->{g, Exception} () + + 527. -- ##MutableByteArray.write8 + builtin.MutableByteArray.write8 : MutableByteArray g + -> Nat + -> Nat + ->{g, Exception} () + + 528. -- ##Nat + builtin type builtin.Nat + + 529. -- ##Nat.* + builtin.Nat.* : Nat -> Nat -> Nat + + 530. -- ##Nat.+ + builtin.Nat.+ : Nat -> Nat -> Nat + + 531. -- ##Nat./ + builtin.Nat./ : Nat -> Nat -> Nat + + 532. -- ##Nat.and + builtin.Nat.and : Nat -> Nat -> Nat + + 533. -- ##Nat.complement + builtin.Nat.complement : Nat -> Nat + + 534. -- ##Nat.drop + builtin.Nat.drop : Nat -> Nat -> Nat + + 535. -- ##Nat.== + builtin.Nat.eq : Nat -> Nat -> Boolean + + 536. -- ##Nat.fromText + builtin.Nat.fromText : Text -> Optional Nat + + 537. -- ##Nat.> + builtin.Nat.gt : Nat -> Nat -> Boolean + + 538. -- ##Nat.>= + builtin.Nat.gteq : Nat -> Nat -> Boolean + + 539. -- ##Nat.increment + builtin.Nat.increment : Nat -> Nat + + 540. -- ##Nat.isEven + builtin.Nat.isEven : Nat -> Boolean + + 541. -- ##Nat.isOdd + builtin.Nat.isOdd : Nat -> Boolean + + 542. -- ##Nat.leadingZeros + builtin.Nat.leadingZeros : Nat -> Nat + + 543. -- ##Nat.< + builtin.Nat.lt : Nat -> Nat -> Boolean + + 544. -- ##Nat.<= + builtin.Nat.lteq : Nat -> Nat -> Boolean + + 545. -- ##Nat.mod + builtin.Nat.mod : Nat -> Nat -> Nat + + 546. -- ##Nat.or + builtin.Nat.or : Nat -> Nat -> Nat + + 547. -- ##Nat.popCount + builtin.Nat.popCount : Nat -> Nat + + 548. -- ##Nat.pow + builtin.Nat.pow : Nat -> Nat -> Nat + + 549. -- ##Nat.shiftLeft + builtin.Nat.shiftLeft : Nat -> Nat -> Nat + + 550. -- ##Nat.shiftRight + builtin.Nat.shiftRight : Nat -> Nat -> Nat + + 551. -- ##Nat.sub + builtin.Nat.sub : Nat -> Nat -> Int + + 552. -- ##Nat.toFloat + builtin.Nat.toFloat : Nat -> Float + + 553. -- ##Nat.toInt + builtin.Nat.toInt : Nat -> Int + + 554. -- ##Nat.toText + builtin.Nat.toText : Nat -> Text + + 555. -- ##Nat.trailingZeros + builtin.Nat.trailingZeros : Nat -> Nat + + 556. -- ##Nat.xor + builtin.Nat.xor : Nat -> Nat -> Nat + + 557. -- #nirp5os0q69o4e1u9p3t6mmq6l6otluefi3ksm7dhm0diidjvkkgl8o9bvnflbj0sanuvdusf34f1qrins3ktcaglpcqv9oums2slsg + structural type builtin.Optional a + + 558. 
-- #nirp5os0q69o4e1u9p3t6mmq6l6otluefi3ksm7dhm0diidjvkkgl8o9bvnflbj0sanuvdusf34f1qrins3ktcaglpcqv9oums2slsg#1 + builtin.Optional.None : Optional a + + 559. -- #nirp5os0q69o4e1u9p3t6mmq6l6otluefi3ksm7dhm0diidjvkkgl8o9bvnflbj0sanuvdusf34f1qrins3ktcaglpcqv9oums2slsg#0 + builtin.Optional.Some : a -> Optional a + + 560. -- ##Pattern + builtin type builtin.Pattern + + 561. -- ##Pattern.capture + builtin.Pattern.capture : Pattern a -> Pattern a + + 562. -- ##Pattern.isMatch + builtin.Pattern.isMatch : Pattern a -> a -> Boolean + + 563. -- ##Pattern.join + builtin.Pattern.join : [Pattern a] -> Pattern a + + 564. -- ##Pattern.many + builtin.Pattern.many : Pattern a -> Pattern a + + 565. -- ##Pattern.or + builtin.Pattern.or : Pattern a -> Pattern a -> Pattern a + + 566. -- ##Pattern.replicate + builtin.Pattern.replicate : Nat + -> Nat + -> Pattern a + -> Pattern a + + 567. -- ##Pattern.run + builtin.Pattern.run : Pattern a -> a -> Optional ([a], a) + + 568. -- #cbo8de57n17pgc5iic1741jeiunhvhfcfd7gt79vd6516u64aplasdodqoouejbgovhge2le5jb6rje923fcrllhtu01t29cdrssgbg + structural type builtin.Pretty txt + + 569. -- #fqfaur9v9v4fks5d0c74ouitpjp121c3fbu2l9t05km8otjcj43gk453vu668pg54rte6qmh4v3uao6vbfpntrtaq057jgni1jk8fj8 + unique type builtin.Pretty.Annotated w txt + + 570. -- #fqfaur9v9v4fks5d0c74ouitpjp121c3fbu2l9t05km8otjcj43gk453vu668pg54rte6qmh4v3uao6vbfpntrtaq057jgni1jk8fj8#1 + builtin.Pretty.Annotated.Append : w + -> [Annotated w txt] + -> Annotated w txt + + 571. -- #fqfaur9v9v4fks5d0c74ouitpjp121c3fbu2l9t05km8otjcj43gk453vu668pg54rte6qmh4v3uao6vbfpntrtaq057jgni1jk8fj8#6 + builtin.Pretty.Annotated.Empty : Annotated w txt + + 572. -- #fqfaur9v9v4fks5d0c74ouitpjp121c3fbu2l9t05km8otjcj43gk453vu668pg54rte6qmh4v3uao6vbfpntrtaq057jgni1jk8fj8#4 + builtin.Pretty.Annotated.Group : w + -> Annotated w txt + -> Annotated w txt + + 573. -- #fqfaur9v9v4fks5d0c74ouitpjp121c3fbu2l9t05km8otjcj43gk453vu668pg54rte6qmh4v3uao6vbfpntrtaq057jgni1jk8fj8#3 + builtin.Pretty.Annotated.Indent : w + -> Annotated w txt + -> Annotated w txt + -> Annotated w txt + -> Annotated w txt + + 574. -- #fqfaur9v9v4fks5d0c74ouitpjp121c3fbu2l9t05km8otjcj43gk453vu668pg54rte6qmh4v3uao6vbfpntrtaq057jgni1jk8fj8#7 + builtin.Pretty.Annotated.Lit : w + -> txt + -> Annotated w txt + + 575. -- #fqfaur9v9v4fks5d0c74ouitpjp121c3fbu2l9t05km8otjcj43gk453vu668pg54rte6qmh4v3uao6vbfpntrtaq057jgni1jk8fj8#2 + builtin.Pretty.Annotated.OrElse : w + -> Annotated w txt + -> Annotated w txt + -> Annotated w txt + + 576. -- #fqfaur9v9v4fks5d0c74ouitpjp121c3fbu2l9t05km8otjcj43gk453vu668pg54rte6qmh4v3uao6vbfpntrtaq057jgni1jk8fj8#0 + builtin.Pretty.Annotated.Table : w + -> [[Annotated w txt]] + -> Annotated w txt + + 577. -- #fqfaur9v9v4fks5d0c74ouitpjp121c3fbu2l9t05km8otjcj43gk453vu668pg54rte6qmh4v3uao6vbfpntrtaq057jgni1jk8fj8#5 + builtin.Pretty.Annotated.Wrap : w + -> Annotated w txt + -> Annotated w txt + + 578. -- #svdhl4ogs0m1pe7ihtq5q9td72mg41tmndqif4kktbtv4p8e1ciapaj8kvflfbm876llbh60tlkefpi0v0bra8hl7mfgnpscimeqtdg + builtin.Pretty.append : Pretty txt + -> Pretty txt + -> Pretty txt + + 579. -- #sonptakf85a3uklev4rq0pub00k56jdpaop4tcd9bmk0gmjjij5t16sf1knspku2hbp0uikiflbo0dtjv1i6r3t2rpjh86vo1rlaer8 + builtin.Pretty.empty : Pretty txt + + 580. -- #mlpplm1bhqkcif5j09204uuvfll7qte95msb0skjfd30nmei005kiich1ao39gm2j8687s14qvf5llu6i1a6fvt4vdmbp99jlfundfo + builtin.Pretty.get : Pretty txt -> Annotated () txt + + 581. 
-- #d9m2k9igi4b50cp7v5tlp3o7dot6r41rbbbsc2a4iqae3hc2a7fceh83l1n3nuotfnn7nrgt40s1kfbcnl89qcqieih125gsafk2d00 + builtin.Pretty.group : Pretty txt -> Pretty txt + + 582. -- #p6rkh0u8gfko2fpqdje6h8cain3qakom06a28rh4ccsjsnbagmmv6gadccg4t380c4nnetq9si7bkkvbh44it4lrfvfvcn4usps1uno + builtin.Pretty.indent : Pretty txt + -> Pretty txt + -> Pretty txt + + 583. -- #f59sgojafl5so8ei4vgdpqflqcpsgovpcea73509k5qm1jb8vkeojsfsavhn64gmfpd52uo631ejqu0oj2a6t6k8jcu282lbqjou7ug + builtin.Pretty.indent' : Pretty txt + -> Pretty txt + -> Pretty txt + -> Pretty txt + + 584. -- #hpntja4i04u36vijdesobh75pubru68jf1fhgi49jl3nf6kall1so8hfc0bq0pm8r9kopgskiigdl04hqelklsdrdjndq5on9hsjgmo + builtin.Pretty.join : [Pretty txt] -> Pretty txt + + 585. -- #jtn2i6bg3gargdp2rbk08jfd327htap62brih8phdfm2m4d6ib9cu0o2k5vrh7f4jik99eufu4hi0114akgd1oiivi8p1pa9m2fvjv0 + builtin.Pretty.lit : txt -> Pretty txt + + 586. -- #kfgfekabh7tiprb6ljjkf4qa5puqp6bbpe1oiqv9r39taljs8ahtb518mpcmec3plesvpssn3bpgvq3e7d71giot6lb2j7mbk23dtqo + builtin.Pretty.map : (txt ->{g} txt2) + -> Pretty txt + ->{g} Pretty txt2 + + 587. -- #5rfcm6mlv2njfa8l9slkjp1q2q5r6m1vkp084run6pd632cf02mcpoh2bo3kuqf3uqbb5nh2drf37u51lpf16m5u415tcuk18djnr60 + builtin.Pretty.orElse : Pretty txt + -> Pretty txt + -> Pretty txt + + 588. -- #cbo8de57n17pgc5iic1741jeiunhvhfcfd7gt79vd6516u64aplasdodqoouejbgovhge2le5jb6rje923fcrllhtu01t29cdrssgbg#0 + builtin.Pretty.Pretty : Annotated () txt -> Pretty txt + + 589. -- #qg050nfl4eeeiarp5mvun3j15h3qpgo31a01o03mql8rrrpht3o6h6htov9ghm7cikkbjejgu4vd9v3h1idp0hanol93pqpqiq8rg3g + builtin.Pretty.sepBy : Pretty txt + -> [Pretty txt] + -> Pretty txt + + 590. -- #ev99k0kpivu29vfl7k8pf5n55fnnelq78ul7jqjrk946i1ckvrs5lmrji3l2avhd02mljspdbfspcn26phaqkug6p7rocbbf94uhcro + builtin.Pretty.table : [[Pretty txt]] -> Pretty txt + + 591. -- #7c4jq9udglq9n7pfemqmc7qrks18r80t9dgjefpi78aerb1vo8cakc3fv843dg3h60ihbo75u0jrmbhqk0och8be2am98v3mu5f6v10 + builtin.Pretty.wrap : Pretty txt -> Pretty txt + + 592. -- ##Ref + builtin type builtin.Ref + + 593. -- ##Ref.read + builtin.Ref.read : Ref g a ->{g} a + + 594. -- ##Ref.write + builtin.Ref.write : Ref g a -> a ->{g} () + + 595. -- ##Effect + builtin type builtin.Request + + 596. -- ##Scope + builtin type builtin.Scope + + 597. -- ##Scope.array + builtin.Scope.array : Nat + ->{Scope s} MutableArray (Scope s) a + + 598. -- ##Scope.arrayOf + builtin.Scope.arrayOf : a + -> Nat + ->{Scope s} MutableArray (Scope s) a + + 599. -- ##Scope.bytearray + builtin.Scope.bytearray : Nat + ->{Scope s} MutableByteArray (Scope s) + + 600. -- ##Scope.bytearrayOf + builtin.Scope.bytearrayOf : Nat + -> Nat + ->{Scope s} MutableByteArray (Scope s) + + 601. -- ##Scope.ref + builtin.Scope.ref : a ->{Scope s} Ref {Scope s} a + + 602. -- ##Scope.run + builtin.Scope.run : (∀ s. '{g, Scope s} r) ->{g} r + + 603. -- #6uigas14aqgd889s036hq9ssrlo22pju41009m0rktetcrbm97qniljjc1rv1u661r4f63oq6pupoevghs8a2hupvlbi6qi4ntn9320 + structural type builtin.SeqView a b + + 604. -- #6uigas14aqgd889s036hq9ssrlo22pju41009m0rktetcrbm97qniljjc1rv1u661r4f63oq6pupoevghs8a2hupvlbi6qi4ntn9320#0 + builtin.SeqView.VElem : a -> b -> SeqView a b + + 605. -- #6uigas14aqgd889s036hq9ssrlo22pju41009m0rktetcrbm97qniljjc1rv1u661r4f63oq6pupoevghs8a2hupvlbi6qi4ntn9320#1 + builtin.SeqView.VEmpty : SeqView a b + + 606. -- ##Socket.toText + builtin.Socket.toText : Socket -> Text + + 607. -- #pfp0ajb4v2mb9tspp29v53dkacb76aa1t5kbk1dl0q354cjcg4egdpmvtr5d6t818ucon9eubf6r1vdvh926fgk8otvbkvbpn90levo + builtin.syntax.docAside : Doc2 -> Doc2 + + 608. 
-- #mvov9qf78ctohefjbmrgs8ussspo5juhf75pee4ikkg8asuos72unn4pjn3fdel8471soj2vaskd5ls103pb6nb8qf75sjn4igs7v48 + builtin.syntax.docBlockquote : Doc2 -> Doc2 + + 609. -- #cg64hg7dag89u80104kit2p40rhmo1k6h1j8obfhjolpogs705bt6hc92ct6rfj8h74m3ioug14u9pm1s7qqpmjda2srjojhi01nvf0 + builtin.syntax.docBold : Doc2 -> Doc2 + + 610. -- #3qd5kt9gjiggrb871al82n11jccedl3kb5p8ffemr703frn38tqajkett30fg7hef5orh7vl0obp3lap9qq2po3ufcnu4k3bik81rlg + builtin.syntax.docBulletedList : [Doc2] -> Doc2 + + 611. -- #el0rph43k5qg25qg20o5jdjukuful041r87v92tcb2339om0hp9u6vqtrcrfkvgj78hrpo2o1l39bbg1oier87pvgkli0lkgalgpo90 + builtin.syntax.docCallout : Optional Doc2 -> Doc2 -> Doc2 + + 612. -- #7jij106qpusbsbpqhmtgrk59qo8ss9e77rtrc1h9hbpnbab8sq717fe6hppmhhds9smqbv3k2q0irjgoe4mogatlp9e4k25kopt6rgo + builtin.syntax.docCode : Doc2 -> Doc2 + + 613. -- #3paq4qqrk028tati33723c4aqi7ebgnjln12avbnf7eu8h8sflg0frlehb4lni4ru0pcfg9ftsurq3pb2q11cfebeki51vom697l7h0 + builtin.syntax.docCodeBlock : Text -> Text -> Doc2 + + 614. -- #1of955s8tqa74vu0ve863p8dn2mncc2anmms54aj084pkbdcpml6ckvs0qb4defi0df3b1e8inp29p60ac93hf2u7to0je4op9fum40 + builtin.syntax.docColumn : [Doc2] -> Doc2 + + 615. -- #ukv56cjchfao07qb08l7iimd2mmv09s5glmtljo5b71leaijtja04obd0u1hsr38itjnv85f7jvd37nr654bl4lfn4msr1one0hi4s0 + builtin.syntax.docEmbedAnnotation : tm -> Doc2.Term + + 616. -- #uccvv8mn62ne8iqppsnpgbquqmhk4hk3n4tg7p6kttr20gov4698tu18jmmvdcs7ab455q7kklhb4uv1mtei4vbvq4qmbtbu1dbagmg + builtin.syntax.docEmbedAnnotations : tms -> tms + + 617. -- #h53vvsbp1eflh5n41fepa5dana1ucfjbk8qc95kf4ht12svn304hc4fv431hiejspdr84oul4gmd3s65neil759q0hmjjrr8ottc6v0 + builtin.syntax.docEmbedSignatureLink : '{g} t + -> Doc2.Term + + 618. -- #dvjs6ebt2ej6funsr6rv351aqe5eqt8pcbte5hpqossikbnqrblhhnve55pdg896s4e6dvd6m3us0151ejegfg1fi8kbfd7soa31dao + builtin.syntax.docEmbedTermLink : '{g} t + -> Either a Doc2.Term + + 619. -- #7t98ois54isfkh31uefvdg4bg302s5q3sun4hfh0mqnosk4ded353jp0p2ij6b22vnvlcbipcv2jb91suh6qc33i7uqlfuto9f0r4n8 + builtin.syntax.docEmbedTypeLink : typ -> Either typ b + + 620. -- #r26nnrb8inld7nstp0rj4sbh7ldbibo3s6ld4hmii114i8fglrvij0a1jgj70u51it80s5vgj5dvu9oi5gqmr2n7j341tg8285mpesg + builtin.syntax.docEval : 'a -> Doc2 + + 621. -- #ojecdd8rnla7dqqop5a43u8kl12149l24452thb0ljkb99ivh6n2evg3g43dj6unlbsmbuvj5g9js5hvsi9f13lt22uqkueioe1vi9g + builtin.syntax.docEvalInline : 'a -> Doc2 + + 622. -- #lecedgertb8tj69o0f2bqeso83hl454am6cjp708epen78s5gtr0ljcc6agopns65lnm3du36dr4m4qu9rp8rtjvtcpg359bpbnfcm0 + builtin.syntax.docExample : Nat -> '{g} t -> Doc2 + + 623. -- #m4ini2v12rc468iflsee87m1qrm52b257e3blah4pcblqo2np3k6ad50bt5gkjob3qrct3jbihjd6i02t7la9oh3cft1d0483lf1pq0 + builtin.syntax.docExampleBlock : Nat -> '{g} t -> Doc2 + + 624. -- #pomj7lft70jnnuk5job0pstih2mosva1oee4tediqbkhnm54tjqnfe6qs1mqt8os1ehg9ksgenb6veub2ngdpb1qat400vn0bj3fju0 + builtin.syntax.docFoldedSource : [( Either Type Doc2.Term, + [Doc2.Term])] + -> Doc2 + + 625. -- #inrar1e9lnt58n0ru88v05a9d9d0la94m7ok5l6i7pr3pg4lapc9vegr542ffog1kl7pfqhmltr53of3qkci8nnrt8gig93qsnggisg + builtin.syntax.docFormatConsole : Doc2 + -> Pretty (Either SpecialForm ConsoleText) + + 626. -- #99qvifgs3u7nof50jbp5lhrf8cab0qiujr1tque2b7hfj56r39o8ot2fafhafuphoraddl1j142k994e22g5v2rhq98flc0954t5918 + builtin.syntax.docGroup : Doc2 -> Doc2 + + 627. -- #gsratvk7mo273bqhivdv06f9rog2cj48u7ci0jp6ubt5oidf8cq0rjilimkas5801inbbsjcedh61jl40i3en1qu6r9vfe684ad6r08 + builtin.syntax.docItalic : Doc2 -> Doc2 + + 628. 
-- #piohhscvm6lgpk6vfg91u2ndmlfv81nnkspihom77ucr4dev6s22rk2n9hp38nifh5p8vt7jfvep85vudpvlg2tt99e9s2qfjv5oau8 + builtin.syntax.docJoin : [Doc2] -> Doc2 + + 629. -- #hjdqcolihf4obmnfoakl2t5hs1e39hpmpo9ijvc37fqgejog1ii7fpd4q2fe2rkm62tf81unmqlbud8uh63vaa9feaekg5a7uo3nq00 + builtin.syntax.docLink : Either Type Doc2.Term -> Doc2 + + 630. -- #iv6urr76b0ohvr22qa6d05e7e01cd0re77g8c98cm0bqo0im345fotsevqnhk1igtutkrrqm562gtltofvku5mh0i87ru8tdf0i53bo + builtin.syntax.docNamedLink : Doc2 -> Doc2 -> Doc2 + + 631. -- #b5dvn0bqj3rc1rkmlep5f6cd6n3vp247hqku8lqndena5ocgcoae18iuq3985finagr919re4fvji011ved0g21i6o0je2jn8f7k1p0 + builtin.syntax.docNumberedList : Nat -> [Doc2] -> Doc2 + + 632. -- #fs8mho20fqj31ch5kpn8flm4geomotov7fb5ct8mtnh52ladorgp22vder3jgt1mr0u710e6s9gn4u36c9sp19vitvq1r0adtm3t1c0 + builtin.syntax.docParagraph : [Doc2] -> Doc2 + + 633. -- #6dvkai3hc122e2h2h8c3jnijink5m20e27i640qvnt6smefpp2vna1rq4gbmulhb46tdabmkb5hsjeiuo4adtsutg4iu1vfmqhlueso + builtin.syntax.docSection : Doc2 -> [Doc2] -> Doc2 + + 634. -- #n0idf1bdrq5vgpk4pj9db5demk1es4jsnpodfoajftehvqjelsi0h5j2domdllq2peltdek4ptaqfpl4o8l6jpmqhcom9vq107ivdu0 + builtin.syntax.docSignature : [Doc2.Term] -> Doc2 + + 635. -- #git1povkck9jrptdmmpqrv1g17ptbq9hr17l52l8477ijk4cia24tr7cj36v1o22mvtk00qoo5jt4bs4e79sl3eh6is8ubh8aoc1pu0 + builtin.syntax.docSignatureInline : Doc2.Term -> Doc2 + + 636. -- #47agivvofl1jegbqpdg0eeed72mdj29d623e4kdei0l10mhgckif7q2pd968ggribregcknra9u43mhehr1q86n0t4vbe4eestnu9l8 + builtin.syntax.docSource : [( Either Type Doc2.Term, + [Doc2.Term])] + -> Doc2 + + 637. -- #n6uk5tc4d8ipbga8boelh51ro24paveca9fijm1nkn3tlfddqludmlppb2ps8807v2kuou1a262sa59764mdhug2va69q4sls5jli10 + builtin.syntax.docSourceElement : link + -> annotations + -> (link, annotations) + + 638. -- #nurq288b5rfp1f5keccleh51ojgcpd2rp7cane6ftquf7gidtamffb8tr1r5h6luk1nsrqomn1k4as4kcpaskjjv35rnvoous457sag + builtin.syntax.docStrikethrough : Doc2 -> Doc2 + + 639. -- #4ns2amu2njhvb5mtdvh3v7oljjb5ammnb41us4ekpbhb337b6mo2a4q0790cmrusko7omphtfdsaust2fn49hr5acl40ef8fkb9556g + builtin.syntax.docTable : [[Doc2]] -> Doc2 + + 640. -- #i77kddfr68gbjt3767a091dtnqff9beltojh93md8peo28t59c6modeccsfd2tnrtmd75fa7dn0ie21kcv4me098q91h4ftg9eau5fo + builtin.syntax.docTooltip : Doc2 -> Doc2 -> Doc2 + + 641. -- #r0hdacbk2orcb2ate3uhd7ht05hmfa8643slm3u63nb3jaaim533up04lgt0pq97is43v2spkqble7mtu8f63hgcc0k2tb2jhpr2b68 + builtin.syntax.docTransclude : d -> d + + 642. -- #0nptdh40ngakd2rh92bl573a7vbdjcj2kc8rai39v8bb9dfpbj90i7nob381usjsott41c3cpo2m2q095fm0k0r68e8mrda135qa1k0 + builtin.syntax.docUntitledSection : [Doc2] -> Doc2 + + 643. -- #krjm78blt08v52c52l4ubsnfidcrs0h6010j2v2h9ud38mgm6jj4vuqn4okp4g75039o7u78sbg6ghforucbfdf94f8am9kvt6875jo + builtin.syntax.docVerbatim : Doc2 -> Doc2 + + 644. -- #c14vgd4g1tkumf4jjd9vcoos1olb3f4gbc3hketf5l8h3i0efk8igbinh6gn018tr5075uo5nv1elva6tki6ofo3pdafidrkv9m0ot0 + builtin.syntax.docWord : Text -> Doc2 + + 645. -- #aql7qk3iud6vs4cvu43aimopoosgk0fnipibdkc3so13adencmibgfn0u5c01r0adei55nkl3ttsjhl8gbj7tr4gnpj63g64ftbq6s0 + unique type builtin.Test.Result + + 646. -- #aql7qk3iud6vs4cvu43aimopoosgk0fnipibdkc3so13adencmibgfn0u5c01r0adei55nkl3ttsjhl8gbj7tr4gnpj63g64ftbq6s0#0 + builtin.Test.Result.Fail : Text -> Result + + 647. -- #aql7qk3iud6vs4cvu43aimopoosgk0fnipibdkc3so13adencmibgfn0u5c01r0adei55nkl3ttsjhl8gbj7tr4gnpj63g64ftbq6s0#1 + builtin.Test.Result.Ok : Text -> Result + + 648. -- ##Text + builtin type builtin.Text + + 649. -- ##Text.!= + builtin.Text.!= : Text -> Text -> Boolean + + 650. -- ##Text.++ + builtin.Text.++ : Text -> Text -> Text + + 651. 
-- #nv11qo7s2lqirk3qb44jkm3q3fb6i3mn72ji2c52eubh3kufrdumanblh2bnql1o24efdhmue0v21gd7d1p5ec9j6iqrmekas0183do + builtin.Text.alignLeftWith : Nat -> Char -> Text -> Text + + 652. -- #ebeq250fdoigvu89fneb4c24f8f18eotc8kocdmosn4ri9shoeeg7ofkejts6clm5c6bifce66qtr0vpfkrhuup2en3khous41hp8rg + builtin.Text.alignRightWith : Nat -> Char -> Text -> Text + + 653. -- ##Text.drop + builtin.Text.drop : Nat -> Text -> Text + + 654. -- ##Text.empty + builtin.Text.empty : Text + + 655. -- ##Text.== + builtin.Text.eq : Text -> Text -> Boolean + + 656. -- ##Text.fromCharList + builtin.Text.fromCharList : [Char] -> Text + + 657. -- ##Text.fromUtf8.impl.v3 + builtin.Text.fromUtf8.impl : Bytes -> Either Failure Text + + 658. -- ##Text.> + builtin.Text.gt : Text -> Text -> Boolean + + 659. -- ##Text.>= + builtin.Text.gteq : Text -> Text -> Boolean + + 660. -- ##Text.< + builtin.Text.lt : Text -> Text -> Boolean + + 661. -- ##Text.<= + builtin.Text.lteq : Text -> Text -> Boolean + + 662. -- ##Text.patterns.anyChar + builtin.Text.patterns.anyChar : Pattern Text + + 663. -- ##Text.patterns.char + builtin.Text.patterns.char : Class -> Pattern Text + + 664. -- ##Text.patterns.charIn + builtin.Text.patterns.charIn : [Char] -> Pattern Text + + 665. -- ##Text.patterns.charRange + builtin.Text.patterns.charRange : Char + -> Char + -> Pattern Text + + 666. -- ##Text.patterns.digit + builtin.Text.patterns.digit : Pattern Text + + 667. -- ##Text.patterns.eof + builtin.Text.patterns.eof : Pattern Text + + 668. -- ##Text.patterns.letter + builtin.Text.patterns.letter : Pattern Text + + 669. -- ##Text.patterns.literal + builtin.Text.patterns.literal : Text -> Pattern Text + + 670. -- ##Text.patterns.notCharIn + builtin.Text.patterns.notCharIn : [Char] -> Pattern Text + + 671. -- ##Text.patterns.notCharRange + builtin.Text.patterns.notCharRange : Char + -> Char + -> Pattern Text + + 672. -- ##Text.patterns.punctuation + builtin.Text.patterns.punctuation : Pattern Text + + 673. -- ##Text.patterns.space + builtin.Text.patterns.space : Pattern Text + + 674. -- ##Text.repeat + builtin.Text.repeat : Nat -> Text -> Text + + 675. -- ##Text.reverse + builtin.Text.reverse : Text -> Text + + 676. -- ##Text.size + builtin.Text.size : Text -> Nat + + 677. -- ##Text.take + builtin.Text.take : Nat -> Text -> Text + + 678. -- ##Text.toCharList + builtin.Text.toCharList : Text -> [Char] + + 679. -- ##Text.toLowercase + builtin.Text.toLowercase : Text -> Text + + 680. -- ##Text.toUppercase + builtin.Text.toUppercase : Text -> Text + + 681. -- ##Text.toUtf8 + builtin.Text.toUtf8 : Text -> Bytes + + 682. -- ##Text.uncons + builtin.Text.uncons : Text -> Optional (Char, Text) + + 683. -- ##Text.unsnoc + builtin.Text.unsnoc : Text -> Optional (Text, Char) + + 684. -- ##ThreadId.toText + builtin.ThreadId.toText : ThreadId -> Text + + 685. -- ##todo + builtin.todo : a -> b + + 686. -- #2lg4ah6ir6t129m33d7gssnigacral39qdamo20mn6r2vefliubpeqnjhejai9ekjckv0qnu9mlu3k9nbpfhl2schec4dohn7rjhjt8 + structural type builtin.Tuple a b + + 687. -- #2lg4ah6ir6t129m33d7gssnigacral39qdamo20mn6r2vefliubpeqnjhejai9ekjckv0qnu9mlu3k9nbpfhl2schec4dohn7rjhjt8#0 + builtin.Tuple.Cons : a -> b -> Tuple a b + + 688. -- #00nv2kob8fp11qdkr750rlppf81cda95m3q0niohj1pvljnjl4r3hqrhvp1un2p40ptgkhhsne7hocod90r3qdlus9guivh7j3qcq0g + structural type builtin.Unit + + 689. -- #00nv2kob8fp11qdkr750rlppf81cda95m3q0niohj1pvljnjl4r3hqrhvp1un2p40ptgkhhsne7hocod90r3qdlus9guivh7j3qcq0g#0 + builtin.Unit.Unit : () + + 690. -- ##Universal.< + builtin.Universal.< : a -> a -> Boolean + + 691. 
-- ##Universal.<= + builtin.Universal.<= : a -> a -> Boolean + + 692. -- ##Universal.== + builtin.Universal.== : a -> a -> Boolean + + 693. -- ##Universal.> + builtin.Universal.> : a -> a -> Boolean + + 694. -- ##Universal.>= + builtin.Universal.>= : a -> a -> Boolean + + 695. -- ##Universal.compare + builtin.Universal.compare : a -> a -> Int + + 696. -- ##Universal.murmurHash + builtin.Universal.murmurHash : a -> Nat + + 697. -- ##unsafe.coerceAbilities + builtin.unsafe.coerceAbilities : (a ->{e1} b) + -> a + ->{e2} b + + 698. -- ##Value + builtin type builtin.Value + + 699. -- ##Value.dependencies + builtin.Value.dependencies : Value -> [Link.Term] + + 700. -- ##Value.deserialize + builtin.Value.deserialize : Bytes -> Either Text Value + + 701. -- ##Value.load + builtin.Value.load : Value ->{IO} Either [Link.Term] a + + 702. -- ##Value.serialize + builtin.Value.serialize : Value -> Bytes + + 703. -- ##Value.value + builtin.Value.value : a -> Value + + 704. -- #dem6aglnj8cppfrnq9qipl7geo5pim3auo9cmv1rhh5la9edalj19sspbpm1pd4vh0plokdh6qfo48gs034dqlg0s7j9fhr9p9ndtpo + unique type builtin.Year + + 705. -- #dem6aglnj8cppfrnq9qipl7geo5pim3auo9cmv1rhh5la9edalj19sspbpm1pd4vh0plokdh6qfo48gs034dqlg0s7j9fhr9p9ndtpo#0 + builtin.Year.Year : Nat -> Year + + 706. -- #k0rcrut9836hr3sevkivq4n2o3t540hllesila69b16gr5fcqe0i6aepqhv2qmso6h22lbipbp3fto0oc8o73l1lvf6vpifi01gmhg8 + cache : [(Link.Term, Code)] ->{IO, Exception} () + + 707. -- #okolgrio28p1mbl1bfjfs9qtsr1m9upblcm3ul872gcir6epkcbq619vk5bdq1fnr371nelsof6jsp8469g4j6f0gg3007p79o4kf18 + check : Text -> Boolean ->{Stream Result} () + + 708. -- #je42vk6rsefjlup01e1fmmdssf5i3ba9l6aka3bipggetfm8o4i8d1q5d7hddggu5jure1bu5ot8aq5in31to4788ctrtpb44ri83r8 + checks : [Boolean] -> [Result] + + 709. -- #barg6v1n15ea1qhp80i77gjjq3vu1noc67q2jkv9n6n5v0c9djup70ltauujgpfe0kuo8ckd20gc9kutngdpb8d22rubtb5rjldrb3o + clientSocket : Text -> Text ->{IO, Exception} Socket + + 710. -- #lg7i12ido0jr43ovdbhhv2enpk5ar869leouri5qhrivinde93nl86s2rgshubtfhlogbe310k3rluotscmus9moo1tvpn0nmp1efv8 + closeFile : Handle ->{IO, Exception} () + + 711. -- #4e6qn65v05l32n380lpf536u4llnp6f6tvvt13hvo0bhqeh3f3i8bquekc120c8h59gld1mf02ok0sje7037ipg1fsu97fqrm01oi00 + closeSocket : Socket ->{IO, Exception} () + + 712. -- #2cl9ivrimnadurkum2blduls21kcihu89oasj2efmi03s1vfm433pi6c4k1d2a3obpmf2orm3c9lfgffnlhuc6ktaa98a1ccdhfueqo + Code.transitiveDeps : Link.Term + ->{IO} [(Link.Term, Code)] + + 713. -- #sfud7h76up0cofgk61b7tf8rhdlugfmg44lksnpglfes1b8po26si7betka39r9j8dpgueorjdrb1i7v4g62m5bci1e971eqi8dblmo + compose : ∀ o g1 i1 g i. + (i1 ->{g1} o) -> (i ->{g} i1) -> i ->{g1, g} o + + 714. -- #b0tsob9a3fegn5dkb57jh15smd7ho2qo78st6qngpa7a8hc88mccl7vhido41o4otokv5l8hjdj3nabtkmpni5ikeatd44agmqbhano + compose2 : ∀ o g2 i2 g1 g i i1. + (i2 ->{g2} o) + -> (i1 ->{g1} i ->{g} i2) + -> i1 + -> i + ->{g2, g1, g} o + + 715. -- #m632ocgh2rougfejkddsso3vfpf4dmg1f8bhf0k6sha4g4aqfmbeuct3eo0je6dv9utterfvotjdu32p0kojuo9fj4qkp2g1bt464eg + compose3 : ∀ o g3 i3 g2 g1 g i i1 i2. + (i3 ->{g3} o) + -> (i2 ->{g2} i1 ->{g1} i ->{g} i3) + -> i2 + -> i1 + -> i + ->{g3, g2, g1, g} o + + 716. -- #ilkeid6l866bmq90d2v1ilqp9dsjo6ucmf8udgrokq3nr3mo9skl2vao2mo7ish136as52rsf19u9v3jkmd85bl08gnmamo4e5v2fqo + contains : Text -> Text -> Boolean + + 717. -- #pen6v1vcqdsg5ar8ajio0baiujthquamelbqd00p66amfjftk2o3stod4n81snc3hb9sc4fmnitf6ada0n5sfqfroi8sv1nbn7rnq48 + crawl : [(Link.Term, Code)] + -> [Link.Term] + ->{IO} [(Link.Term, Code)] + + 718. 
-- #o0qn048fk7tjb8e7d54vq5mg9egr5kophb9pcm0to4aj0kf39mv76c6olsm27vj309d7nhjh4nps7098fpvqe8j5cfg01ghf3bnju90 + createTempDirectory : Text ->{IO, Exception} Text + + 719. -- #4858f4krb9l4ot1hml21j48lp3bcvbo8b9unlk33b9a3ovu1jrbr1k56pnfhffkiu1bht2ovh0i82nn5jnoc5s5ru85qvua0m2ol43g + decodeCert : Bytes ->{Exception} SignedCert + + 720. -- #ihbmfc4r7o3391jocjm6v4mojpp3hvt84ivqigrmp34vb5l3d7mmdlvh3hkrtebi812npso7rqo203a59pbs7r2g78ig6jvsv0nva38 + delay : Nat ->{IO, Exception} () + + 721. -- #dsen29k7605pkfquesnaphhmlm3pjkfgm7m2oc90m53gqvob4l39p4g3id3pirl8emg5tcdmr81ctl3lk1enm52mldlfmlh1i85rjbg + directoryContents : Text ->{IO, Exception} [Text] + + 722. -- #b22tpqhkq6kvt27dcsddnbfci2bcqutvhmumdven9c5psiilboq2mb8v9ekihtkl6mkartd5ml5u75u84v850n29l91de63lkg3ud38 + Either.isLeft : Either a b -> Boolean + + 723. -- #i1ec3csomb1pegm9r7ppabunabb7cq1t6bb6cvqtt72nd01jot7gde2mak288cbml910abbtho0smsbq17b2r33j599b0vuv7je04j8 + Either.mapLeft : (i ->{g} o) + -> Either i b + ->{g} Either o b + + 724. -- #f765l0pa2tb9ieciivum76s7bp8rdjr8j7i635jjenj9tacgba9eeomur4vv3uuh4kem1pggpmrn61a1e3im9g90okcm13r192f7alg + Either.raiseMessage : v -> Either Text b ->{Exception} b + + 725. -- #9hifem8o2e1g7tdh4om9kfo98ifr60gfmdp8ci58djn17epm1b4m6idli8b373bsrg487n87n4l50ksq76avlrbh9q2jpobkk18ucvg + evalTest : '{IO, TempDirs, Exception, Stream Result} a + ->{IO, Exception} ([Result], a) + + 726. -- #4n0fgs00hpsj3paqnm9bfm4nbt9cbrin3hl88i992m9tjiq1ik7eq72asu4hcg885uti36tbnj5rudt56eahhnut1nobofg86pk1bng + structural ability Exception + structural ability builtin.Exception + + 727. -- #t20uuuiil07o22les8gv4sji7ju5esevloamnja3bjkrh2f250lgitv6595l6hlc2q64c1om0hhjqgter28dtnibb0dkr2j7e3ss530 + Exception.catch : '{g, Exception} a + ->{g} Either Failure a + + 728. -- #hbhvk2e00l6o7qhn8e7p6dc36bjl7ljm0gn2df5clidlrdoufsig1gt5pjhg72kl67folgg2b892kh9jc1oh0l79h4p8dqhcf1tkde0 + Exception.failure : Text -> a -> Failure + + 729. -- #4n0fgs00hpsj3paqnm9bfm4nbt9cbrin3hl88i992m9tjiq1ik7eq72asu4hcg885uti36tbnj5rudt56eahhnut1nobofg86pk1bng#0 + Exception.raise, + builtin.Exception.raise : Failure + ->{Exception} x + + 730. -- #5mqjoauctm02dlqdc10cc66relu40997d6o1u8fj7vv7g0i2mtacjc83afqhuekll1gkqr9vv4lq7aenanq4kf53kcce4l1srr6ip08 + Exception.reraise : Either Failure a ->{Exception} a + + 731. -- #1f774ia7im9i0cfp7l5a1g9tkvnd4m2940ga3buaf4ekd43dr1289vknghjjvi4qtevh7s61p5s573gpli51qh7e0i5pj9ggmeb69d0 + Exception.toEither : '{ε, Exception} a + ->{ε} Either Failure a + + 732. -- #li2h4hncbgmfi5scuah06rtdt8rjcipiv2t95hos15ol63usv78ti3vng7o9862a70906rum7nrrs9qd9q8iqu1rdcfe292r0al7n38 + Exception.toEither.handler : Request {Exception} a + -> Either Failure a + + 733. -- #5fi0ep8mufag822f18ukaffakrmm3ddg8a83dkj4gh2ks4e2c60sk9s8pmk92p69bvkcflql3rgoalp8ruth7fapqrks3kbmdl61b00 + Exception.unsafeRun! : '{g, Exception} a ->{g} a + + 734. -- #qdcih6h4dmf9a2tn2ndvn0br9ef41ubhcniadou1m6ro641gm2tn79m6boh5sr4q271oiui6ehbdqe53r0gobdeagotkjr67kieq3ro + expect : Text + -> (a -> a -> Boolean) + -> a + -> a + ->{Stream Result} () + + 735. -- #ngmnbge6f7nkehkkhj6rkit60rp3qlt0vij33itch1el3ta2ukrit4gvpn2n0j0s43sj9af53kphgs0h2n65bnqcr9pmasud2r7klsg + expectU : Text -> a -> a ->{Stream Result} () + + 736. -- #f54plhut9f6mg77r1f033vubik89irq1eri79d5pd6mqi03rq9em99mc90plurvjnmvho73ssof5fvndgmcg4fgrpvuuil7hb5qmebo + fail : Text -> b ->{Exception} c + + 737. -- #mpe805fs330vqp5l5mg73deahken20dub4hrfvmuutfo97dikgagvimncfr6mfp1l24bjqes1m1dp11a3hop92u49b1fb45j8qs9hoo + fileExists : Text ->{IO, Exception} Boolean + + 738. 
-- #cft2pjc05jljtlefm4osg96k5t2look2ujq1tgg5hoc5i3fkkatt9pf79g2ka461kq8nbmsggrvo2675ocl599to9e8nre5oef4scdo + fromB32 : Bytes ->{Exception} Bytes + + 739. -- #13fpchr37ua0pr38ssr7j22pudmseuedf490aok18upagh0f00kg40guj9pgl916v9qurqrvu53f3lpsvi0s82hg3dtjacanrpjvs38 + fromHex : Text -> Bytes + + 740. -- #b36oslvh534s82lda0ghc5ql7p7nir0tknsluigulmpso22tjh62uiiq4lq9s3m97a2grkso0qofpb423p06olkkikrt4mfn15vpkug + getBuffering : Handle ->{IO, Exception} BufferMode + + 741. -- #9vijttgmba0ui9cshmhmmvgn6ve2e95t168766h2n6pkviddebiimgipic5dbg5lmiht12g6np8a7e06jpk03rnue3ln5mbo4prde0g + getBytes : Handle -> Nat ->{IO, Exception} Bytes + + 742. -- #c5oeqqglf28ungtq1im4fjdh317eeoba4537l1ntq3ob22v07rpgj9307udscbghlrior398hqm1ci099qmriim8cs975kocacsd9r0 + getChar : Handle ->{IO, Exception} Char + + 743. -- #j9jdo2pqvi4aktcfsb0n4ns1tk2be7dtckqdeedqp7n52oghsq82cgc1tv562rj1sf1abq2h0vta4uo6873cdbgrtrvd5cvollu3ovo + getEcho : Handle ->{IO, Exception} Boolean + + 744. -- #0hj09gufk8fs2hvr6qij6pie8bp0h6hmm6hpsi8d5fvl1fp1dbk6u8c9p6h4eu2hle6ctgpdbepo9vit5atllkodogn6r0csar9fn1g + getLine : Handle ->{IO, Exception} Text + + 745. -- #ck1nfg5fainelng0694jkdf9e06pmn60h7kvble1ff7hkc6jdgqtf7g5o3qevr7ic1bdhfn5n2rc3gde5bh6o9fpbit3ocs0av0scdg + getSomeBytes : Handle -> Nat ->{IO, Exception} Bytes + + 746. -- #bk29bjnrcuh55usf3vocm4j1aml161p6ila7t82cpr3ub9vu0g9lsg2mspmfuefc4ig0qtdqk7nds4t3f68jp6o77e0h4ltbitqjpno + getTempDirectory : '{IO, Exception} Text + + 747. -- #j8i534slc2rvakvmqcb6j28iatrh3d7btajai9qndutr0edi5aaoi2p5noditaococ4l104hdhhvjc5vr0rbcjoqrbng46fdeqtnf98 + handlePosition : Handle ->{IO, Exception} Nat + + 748. -- #bgf7sqs0h0p8bhm3t2ei8006oj1gjonvtkdejv2g9kar0kmvob9e88ceevdfh99jom9rs0hbalf1gut5juanudfcb8tpb1e9ta0vrm8 + handshake : Tls ->{IO, Exception} () + + 749. -- #128490j1tmitiu3vesv97sqspmefobg1am38vos9p0vt4s1bhki87l7kj4cctquffkp40eanmr9ummfglj9i7s25jrpb32ob5sf2tio + hex : Bytes -> Text + + 750. -- #ttjui80dbufvf3vgaddmcr065dpgl0rtp68i5cdht6tq4t2vk3i2vg60hi77rug368qijgijf8oui27te7o5oq0t0osm6dg65c080i0 + id : a -> a + + 751. -- #9qnapjbbdhcc2mjf1b0slm7mefu0idnj1bs4c5bckq42ruodftolnd193uehr31lc01air6d6b3j4ihurnks13n85h3r8rs16nqvj2g + isDirectory : Text ->{IO, Exception} Boolean + + 752. -- #vb1e252fqt0q63hpmtkq2bkg5is2n6thejofnev96040thle5o1ia8dtq7dc6v359gtoqugbqg5tb340aqovrfticb63jgei4ncq3j8 + isFileEOF : Handle ->{IO, Exception} Boolean + + 753. -- #ahkhlm9sd7arpevos99sqc90g7k5nn9bj5n0lhh82c1uva52ltv0295ugc123l17vd1orkng061e11knqjnmk087qjg3vug3rs6mv60 + isFileOpen : Handle ->{IO, Exception} Boolean + + 754. -- #2a11371klrv2i8726knma0l3g14on4m2ucihpg65cjj9k930aefg65ovvg0ak4uv3i9evtnu0a5249q3i8ugheqd65cnmgquc1a88n0 + isNone : Optional a -> Boolean + + 755. -- #ln4avnqpdk7813vsrrr414hg0smcmufrl1c7b87nb7nb0h9cogp6arqa7fbgd7rgolffmgue698ovvefo18j1k8g30t4hbp23onm3l8 + isSeekable : Handle ->{IO, Exception} Boolean + + 756. -- #gop2v9s6l24ii1v6bf1nks2h0h18pato0vbsf4u3el18s7mp1jfnp4c7fesdf9sunnlv5f5a9fjr1s952pte87mf63l1iqki9bp0mio + List.all : (a ->{ε} Boolean) -> [a] ->{ε} Boolean + + 757. -- #m2g5korqq5etr0qk1qrgjbaqktj4ks4bu9m3c4v3j9g8ktsd2e218nml6q8vo45bi3meb53csack40mle6clfrfep073e313b3jagt0 + List.filter : (a ->{g} Boolean) -> [a] ->{g} [a] + + 758. -- #8s836vq5jggucs6bj3bear30uhe6h9cskudjrdc772ghiec6ce2jqft09l1n05kd1n6chekrbgt0h8mkc9drgscjvgghacojm9e8c5o + List.foldLeft : (b ->{g} a ->{g} b) -> b -> [a] ->{g} b + + 759. -- #m5tlb5a0m4kp5b4m9oq9vhda9d7nhu2obn2lpmosal0ebij9gon4gkd1aq0b3b61jtsc1go0hi7b2sm2memtil55ijq32b2n0k39vko + List.forEach : [a] -> (a ->{e} ()) ->{e} () + + 760. 
-- #j9ve4ionu2sn7f814t0t4gc75objke2drgnfvvvb50v2f57ss0hlsa3ai5g5jsk2t4b8s37ocrtmte7nktfb2vjf8508ksvrc6llu30 + listen : Socket ->{IO, Exception} () + + 761. -- #s0f4et1o1ns8cmmvp3i0cm6cmmv5qaf99qm2q4jmgpciof6ntmuh3mpr4epns3ocskn8raacbvm30ovvj2b6arv0ff7iks31rannbf0 + loadCodeBytes : Bytes ->{Exception} Code + + 762. -- #gvaed1m07qihc9c216125sur1q9a7i5ita44qnevongg4jrbd8k2plsqhdur45nn6h3drn6lc3iidp1b208ht8s73fg2711l76c7j4g + loadSelfContained : Text ->{IO, Exception} a + + 763. -- #g1hqlq27e3stamnnfp6q178pleeml9sbo2d6scj2ikubocane5cvf8ctausoqrgj9co9h56ttgt179sgktc0bei2r37dmtj51jg0ou8 + loadValueBytes : Bytes + ->{IO, Exception} ([(Link.Term, Code)], Value) + + 764. -- #tlllu51stumo77vi2e5m0e8m05qletfbr3nea3d84dcgh66dq4s3bt7kdbf8mpdqh16mmnoh11kr3n43m8b5g4pf95l9gfbhhok1h20 + MVar.put : MVar i -> i ->{IO, Exception} () + + 765. -- #3b7lp7s9m31mcvh73nh4gfj1kal6onrmppf35esvmma4jsg7bbm7a8tsrfcb4te88f03r97dkf7n1f2kcc6o7ng4vurp95svfj2fg7o + MVar.read : MVar o ->{IO, Exception} o + + 766. -- #be8m7lsjnf31u87pt5rvn04c9ellhbm3p56jgapbp8k7qp0v3mm7beh81luoifp17681l0ldjj46gthmmu32lkn0jnejr3tedjotntg + MVar.swap : MVar o -> o ->{IO, Exception} o + + 767. -- #c2qb0ca2dj3rronbp4slj3ph56p0iopaos7ib37hjunpkl1rcl1gp820dpg8qflhvt9cm2l1bfm40rkdslce2sr6f0oru5lr5cl5nu0 + MVar.take : MVar o ->{IO, Exception} o + + 768. -- #ht0k9hb3k1cnjsgmtu9klivo074a2uro4csh63m1sqr2483rkojlj7abcf0jfmssbfig98i6is1osr2djoqubg3bp6articvq9o8090 + newClient : ClientConfig -> Socket ->{IO, Exception} Tls + + 769. -- #coeloqmjin6lais8u6j0plh5f1601lpcue4ejfcute46opams4vsbkplqj6jg6af0uecjie3mbclv40b3jumghsf09aavvucrc0d148 + newServer : ServerConfig -> Socket ->{IO, Exception} Tls + + 770. -- #ocvo5mvs8fghsf715tt4mhpj1pu8e8r7pq9nue63ut0ol2vnv70k7t6tavtsljlmdib9lo3bt669qac94dk53ldcgtukvotvrlfkan0 + openFile : Text -> FileMode ->{IO, Exception} Handle + + 771. -- #c58qbcgd90d965dokk7bu82uehegkbe8jttm7lv4j0ohgi2qm3e3p4v1qfr8vc2dlsmsl9tv0v71kco8c18mneule0ntrhte4ks1090 + printLine : Text ->{IO, Exception} () + + 772. -- #dck7pb7qv05ol3b0o76l88a22bc7enl781ton5qbs2umvgsua3p16n22il02m29592oohsnbt3cr7hnlumpdhv2ibjp6iji9te4iot0 + printText : Text ->{IO} Either Failure () + + 773. -- #i9lm1g1j0p4qtakg164jdlgac409sgj1cb91k86k0c44ssajbluovuu7ptm5uc20sjgedjbak3iji8o859ek871ul51b8l30s4uf978 + putBytes : Handle -> Bytes ->{IO, Exception} () + + 774. -- #84j6ua3924v85vh2a581de7sd8pee1lqbp1ibvatvjtui9hvk36sv2riabu0v2r0s25p62ipnvv4aeadpg0u8m5ffqrc202i71caopg + readFile : Text ->{IO, Exception} Bytes + + 775. -- #pk003cv7lvidkbmsnne4mpt20254gh4hd7vvretnbk8na8bhr9fg9776rp8pt9srhiucrd1c7sjl006vmil9e78p40gdcir81ujil2o + ready : Handle ->{IO, Exception} Boolean + + 776. -- #unn7qak4qe0nbbpf62uesu0fe8i68o83l4o7f6jcblefbla53fef7a63ts28fh6ql81o5c04j44g7m5rq9aouo73dpeprbl5lka8170 + receive : Tls ->{IO, Exception} Bytes + + 777. -- #ugs4208vpm97jr2ecmr7l9h4e22r1ije6v379m4v6229c8o7hk669ba63bor4pe6n1bm24il87iq2d99sj78lt6n5eqa1fre0grn93g + removeDirectory : Text ->{IO, Exception} () + + 778. -- #6pia69u5u5rja1jk04v3i9ke24gf4b1t7vnaj0noogord6ekiqhf72qfkc1n08rd11f2cbkofni5rd5u7t1qkgslbi40hut35pfi1v0 + renameDirectory : Text -> Text ->{IO, Exception} () + + 779. -- #amtsq2jq1k75r309esfp800a8slelm4d3q9i1pq1qqs3pil13at916958sf9ucb4607kpktbnup7nc58ecoq8mcs01e2a03d08agn18 + runTest : '{IO, TempDirs, Exception, Stream Result} a + ->{IO} [Result] + + 780. -- #b59q94bf9mrvv4gl8gqjd04dc3ahq373ka5esh4grtjupkm8ov7o7h0n56q2dg3ocdsreqvm973rlhs4etua1tbrsuabc398e5pvs0o + saveSelfContained : a -> Text ->{IO, Exception} () + + 781. 
-- #f55p4o2hlhka9olk8a9dnan57o51605g4q26jtpsbkt0g652s322779sej71182ntb6lkh01gom3g26cmngqq7vtl7m7oovdi0koc70 + saveTestCase : Text + -> (a -> Text) + -> a + ->{IO, Exception} () + + 782. -- #v2otbk1e0e81d6ea9i3j1kivnfam6rk6earsjbjljv4mmrk1mgfals6jhfd74evor6al9mkb5gv8hf15f02807f0aa0hnsg9fas1qco + seekHandle : Handle + -> SeekMode + -> Int + ->{IO, Exception} () + + 783. -- #a98jlos4rj2um55iksdin9p5djo6j70qmuitoe2ff3uvkefb8pqensorln5flr3pm8hkc0lqkchbd63cf9tl0kqnqu3i17kvqnm35g0 + send : Tls -> Bytes ->{IO, Exception} () + + 784. -- #qrdia2sc9vuoi7u3a4ukjk8lv0rlhn2i2bbin1adbhcuj79jn366dv3a8t52hpil0jtgkhhuiavibmdev63j5ndriod33rkktjekqv8 + serverSocket : Optional Text + -> Text + ->{IO, Exception} Socket + + 785. -- #3vft70875p42eao55rhb61siobuei4h0e9vlu4bbgucjo296c2vfjpucacovnu9538tvup5c7lo9123se8v4fe7m8q9aiqbkjpumkao + setBuffering : Handle -> BufferMode ->{IO, Exception} () + + 786. -- #erqshamlurgahpd4rroild36cc5e4rk56r38r53vcbg8cblr82c6gfji3um8f09ffgjlg58g7r32mtsbvjlcq4c65v0jn3va9888mao + setEcho : Handle -> Boolean ->{IO, Exception} () + + 787. -- #ugar51qqij4ur24frdi84eqdkvqa0fbsi4v6e2586hi3tai52ovtpm3f2dc9crnfv8pk0ppq6b5tv3utl4sl49n5aecorgkqddr7i38 + snd : ∀ a a1. (a1, a) -> a + + 788. -- #leoq6smeq8to5ej3314uuujmh6rfbcsdb9q8ah8h3ohg9jq5kftc93mq671o0qh2he9vqgd288k0ecea3h7eerpbgjt6a8p843tmon8 + socketAccept : Socket ->{IO, Exception} Socket + + 789. -- #s43jbp19k91qq704tidpue2vs2re1lh4mtv46rdmdnurkdndst7u0k712entcvip160vh9cilmpamikmflbprg5up0k6cl15b8tr5l0 + socketPort : Socket ->{IO, Exception} Nat + + 790. -- #3rp8h0dt7g60nrjdehuhqga9dmomti5rdqho7r1rm5rg5moet7kt3ieempo7c9urur752njachq6k48ggbic4ugbbv75jl2mfbk57a0 + startsWith : Text -> Text -> Boolean + + 791. -- #elsab3sc7p4c6bj73pgvklv0j7qu268rn5isv6micfp7ib8grjoustpqdq0pkd4a379mr5ijb8duu2q0n040osfurppp8pt8vaue2fo + stdout : Handle + + 792. -- #rfi1v9429f9qluv533l2iba77aadttilrpmnhljfapfnfa6sru2nr8ibpqvib9nc4s4nb9s1as45upsfqfqe6ivqi2p82b2vd866it8 + structural ability Stream a + + 793. -- #2jl99er43tnksj8r8oveap5ger9uqlvj0u0ghfs0uqa7i6m45jk976n7a726jb7rtusjdu2p8hbbcgmoacvke7k5o3kdgoj57c3v2v8 + Stream.collect : '{e, Stream a} r ->{e} ([a], r) + + 794. -- #rnuje46fvuqa4a8sdgl9e250a2gcmhtsscr8bdonj2bduhrst38ur7dorv3ahr2ghf9cufkfit7ndh9qb9gspbfapcnn3sol0l2moqg + Stream.collect.handler : Request {Stream a} r -> ([a], r) + + 795. -- #rfi1v9429f9qluv533l2iba77aadttilrpmnhljfapfnfa6sru2nr8ibpqvib9nc4s4nb9s1as45upsfqfqe6ivqi2p82b2vd866it8#0 + Stream.emit : a ->{Stream a} () + + 796. -- #c70gf5m1blvh8tg4kvt1taee036fr7r22bbtqcupac5r5igs102nj077vdl0nimef94u951kfcl9a5hcevo01j04v9o6v3cpndq41bo + Stream.toList : '{Stream a} r -> [a] + + 797. -- #ul69cgsrsspjni8b0hqnt4kt4bk7sjtp6jvlhhofom7bemu9nb2kimm6tt1raigr7j86afgmnjnrfabn6a5l5v1t219uidiu22ueiv0 + Stream.toList.handler : Request {Stream a} r -> [a] + + 798. -- #58d8kfuq8sqbipa1aaijjhm28pa6a844h19mgg5s4a1h160etbulig21cm0pcnfla8fisqvrp80840g9luid5u8amvcc8sf46pd25h8 + systemTime : '{IO, Exception} Nat + + 799. -- #11mhfqj6rts8lm3im7saf44tn3km5bboqtu1td0udnaiit4qqg6ar1ecmccosl6gufsnp6sug3vcmgapsc58sgj7dh7rg8msq2qkj18 + structural ability TempDirs + + 800. -- #11mhfqj6rts8lm3im7saf44tn3km5bboqtu1td0udnaiit4qqg6ar1ecmccosl6gufsnp6sug3vcmgapsc58sgj7dh7rg8msq2qkj18#0 + TempDirs.newTempDir : Text ->{TempDirs} Text + + 801. -- #11mhfqj6rts8lm3im7saf44tn3km5bboqtu1td0udnaiit4qqg6ar1ecmccosl6gufsnp6sug3vcmgapsc58sgj7dh7rg8msq2qkj18#1 + TempDirs.removeDir : Text ->{TempDirs} () + + 802. 
-- #natgur73q6b4c3tp5jcor0v1cdnplh0n3fhm4qvhg4v74u3e3ff1352shs1lveot83lj82qqbl78n40qi9a132fhkmaa6g5s1ja91go + terminate : Tls ->{IO, Exception} () + + 803. -- #i3pbnc98rbfug5dnnvpd4uahm2e5fld2fu0re9r305isffr1r43048h7ql6ojdbjcsvjr6h91s6i026na046ltg5ff59klla6e7vq98 + testAutoClean : '{IO} [Result] + + 804. -- #spepthutvs3p6je794h520665rh8abl36qg43i7ipvj0mtg5sb0sbemjp2vpu9j3feithk2ae0sdtcmb8afoglo9rnvl350380t21h0 + Text.fromUtf8 : Bytes ->{Exception} Text + + 805. -- #32q9jqhmi8f08pec3hj0je4u7k52f9f1hdfsmn9ncg2kpki5da9dabigplvdcot3a00k7s5npc4n78psd6ojaumqjla259e9pqd4ov8 + structural ability Throw e + + 806. -- #32q9jqhmi8f08pec3hj0je4u7k52f9f1hdfsmn9ncg2kpki5da9dabigplvdcot3a00k7s5npc4n78psd6ojaumqjla259e9pqd4ov8#0 + Throw.throw : e ->{Throw e} a + + 807. -- #vri6fsnl704n6aqs346p6ijcbkcsv9875edr6b74enumrhbjiuon94ir4ufmrrn84k9b2jka4f05o16mcvsjrjav6gpskpiu4sknd1g + uncurry : ∀ o g1 i g i1. + (i1 ->{g} i ->{g1} o) -> (i1, i) ->{g1, g} o + + 808. -- #rhak55ntto40n4utgv5o93jvlmv82lceb625slrt8tsmg74vin5bclf10vkl1sgpau3thqsa6guiihog74qoknlsqbuce5th60bu2eg + Value.transitiveDeps : Value ->{IO} [(Link.Term, Code)] + + 809. -- #o5bg5el7ckak28ib98j5b6rt26bqbprpddd1brrg3s18qahhbbe3uohufjjnt5eenvtjg0hrvnvpra95jmdppqrovvmcfm1ih2k7guo + void : x -> () + + 810. -- #8ugamqlp7a4g0dmbcvipqfi8gnuuj23pjbdfbof11naiun1qf8otjcap80epaom2kl9fv5rhjaudt4558n38dovrc0lhipubqjgm8mg + writeFile : Text -> Bytes ->{IO, Exception} () + + 811. -- #lcmj2envm11lrflvvcl290lplhvbccv82utoej0lg0eomhmsf2vrv8af17k6if7ff98fp1b13rkseng3fng4stlr495c8dn3gn4k400 + |> : a -> (a ->{g} t) ->{g} t + + + +``` diff --git a/unison-src/transcripts-using-base/base.u b/unison-src/transcripts-using-base/base.u index daaa0e5aa..84afda95c 100644 --- a/unison-src/transcripts-using-base/base.u +++ b/unison-src/transcripts-using-base/base.u @@ -274,7 +274,7 @@ bSearch m k = Some (k', v) | k == k' -> Some v | k > k' -> find (i+1) u - | k < k' -> find l i + | otherwise -> find l i None -> None find 0 (size m) @@ -299,7 +299,7 @@ bSplit m k = Some (k', _) | k == k' -> (List.take i m, List.drop (i+1) m) | k > k' -> find (i+1) u - | k < k' -> find l i + | otherwise -> find l i None -> (m, []) find 0 (size m) diff --git a/unison-src/transcripts-using-base/fix2049.md b/unison-src/transcripts-using-base/fix2049.md index 4fa2edf70..5ccb89a46 100644 --- a/unison-src/transcripts-using-base/fix2049.md +++ b/unison-src/transcripts-using-base/fix2049.md @@ -13,8 +13,6 @@ catcher act = tests _ = [ catcher do - match None with Some x -> x - , catcher do _ = 1/0 () , catcher '(bug "testing") diff --git a/unison-src/transcripts-using-base/fix2049.output.md b/unison-src/transcripts-using-base/fix2049.output.md index bbc29e767..9ed56a65d 100644 --- a/unison-src/transcripts-using-base/fix2049.output.md +++ b/unison-src/transcripts-using-base/fix2049.output.md @@ -9,8 +9,6 @@ catcher act = tests _ = [ catcher do - match None with Some x -> x - , catcher do _ = 1/0 () , catcher '(bug "testing") @@ -45,12 +43,11 @@ tests _ = New test results: - ◉ tests caught ◉ tests caught ◉ tests caught ◉ tests got the right answer - ✅ 4 test(s) passing + ✅ 3 test(s) passing Tip: Use view tests to view the source of a test. 
diff --git a/unison-src/transcripts-using-base/hashing.md b/unison-src/transcripts-using-base/hashing.md index 123b5ae5a..fc6f64dc8 100644 --- a/unison-src/transcripts-using-base/hashing.md +++ b/unison-src/transcripts-using-base/hashing.md @@ -225,6 +225,29 @@ test> hmac_sha2_512.tests.ex2 = "164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b636e070a38bce737" ``` +## MD5 tests + +Test vectors here pulled from [Wikipedia's writeup](https://en.wikipedia.org/wiki/MD5). + +```unison +ex alg input expected = checks [hashBytes alg (ascii input) == fromHex expected] + +test> md5.tests.ex1 = + ex Md5 + "" + "d41d8cd98f00b204e9800998ecf8427e" + +test> md5.tests.ex2 = + ex Md5 + "The quick brown fox jumps over the lazy dog" + "9e107d9d372bb6826bd81d3542a419d6" + +test> md5.tests.ex3 = + ex Md5 + "The quick brown fox jumps over the lazy dog." + "e4d909c290d0fb1ca068ffaddf22cbd0" +``` + ```ucm:hide .> add ``` diff --git a/unison-src/transcripts-using-base/hashing.output.md b/unison-src/transcripts-using-base/hashing.output.md index ab631472b..2db485884 100644 --- a/unison-src/transcripts-using-base/hashing.output.md +++ b/unison-src/transcripts-using-base/hashing.output.md @@ -124,14 +124,15 @@ And here's the full API: 3. HashAlgorithm.Blake2b_256 : HashAlgorithm 4. HashAlgorithm.Blake2b_512 : HashAlgorithm 5. HashAlgorithm.Blake2s_256 : HashAlgorithm - 6. HashAlgorithm.Sha1 : HashAlgorithm - 7. HashAlgorithm.Sha2_256 : HashAlgorithm - 8. HashAlgorithm.Sha2_512 : HashAlgorithm - 9. HashAlgorithm.Sha3_256 : HashAlgorithm - 10. HashAlgorithm.Sha3_512 : HashAlgorithm - 11. hashBytes : HashAlgorithm -> Bytes -> Bytes - 12. hmac : HashAlgorithm -> Bytes -> a -> Bytes - 13. hmacBytes : HashAlgorithm -> Bytes -> Bytes -> Bytes + 6. HashAlgorithm.Md5 : HashAlgorithm + 7. HashAlgorithm.Sha1 : HashAlgorithm + 8. HashAlgorithm.Sha2_256 : HashAlgorithm + 9. HashAlgorithm.Sha2_512 : HashAlgorithm + 10. HashAlgorithm.Sha3_256 : HashAlgorithm + 11. HashAlgorithm.Sha3_512 : HashAlgorithm + 12. hashBytes : HashAlgorithm -> Bytes -> Bytes + 13. hmac : HashAlgorithm -> Bytes -> a -> Bytes + 14. hmacBytes : HashAlgorithm -> Bytes -> Bytes -> Bytes .> cd . @@ -389,42 +390,94 @@ test> hmac_sha2_512.tests.ex2 = ✅ Passed Passed +``` +## MD5 tests + +Test vectors here pulled from [Wikipedia's writeup](https://en.wikipedia.org/wiki/MD5). + +```unison +ex alg input expected = checks [hashBytes alg (ascii input) == fromHex expected] + +test> md5.tests.ex1 = + ex Md5 + "" + "d41d8cd98f00b204e9800998ecf8427e" + +test> md5.tests.ex2 = + ex Md5 + "The quick brown fox jumps over the lazy dog" + "9e107d9d372bb6826bd81d3542a419d6" + +test> md5.tests.ex3 = + ex Md5 + "The quick brown fox jumps over the lazy dog." + "e4d909c290d0fb1ca068ffaddf22cbd0" +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⊡ Previously added definitions will be ignored: ex + + ⍟ These new definitions are ok to `add`: + + md5.tests.ex1 : [Result] + md5.tests.ex2 : [Result] + md5.tests.ex3 : [Result] + + Now evaluating any watch expressions (lines starting with + `>`)... Ctrl+C cancels. 
+ + 4 | ex Md5 + + ✅ Passed Passed + + 9 | ex Md5 + + ✅ Passed Passed + + 14 | ex Md5 + + ✅ Passed Passed + ``` ```ucm .> test Cached test results (`help testcache` to learn more) - ◉ blake2b_512.tests.ex1 Passed - ◉ blake2b_512.tests.ex2 Passed - ◉ blake2b_512.tests.ex3 Passed - ◉ blake2s_256.tests.ex1 Passed - ◉ hmac_sha2_256.tests.ex1 Passed - ◉ hmac_sha2_256.tests.ex2 Passed - ◉ hmac_sha2_512.tests.ex1 Passed - ◉ hmac_sha2_512.tests.ex2 Passed - ◉ sha1.tests.ex1 Passed - ◉ sha1.tests.ex2 Passed - ◉ sha1.tests.ex3 Passed - ◉ sha1.tests.ex4 Passed - ◉ sha2_256.tests.ex1 Passed - ◉ sha2_256.tests.ex2 Passed - ◉ sha2_256.tests.ex3 Passed - ◉ sha2_256.tests.ex4 Passed - ◉ sha2_512.tests.ex1 Passed - ◉ sha2_512.tests.ex2 Passed - ◉ sha2_512.tests.ex3 Passed - ◉ sha2_512.tests.ex4 Passed - ◉ sha3_256.tests.ex1 Passed - ◉ sha3_256.tests.ex2 Passed - ◉ sha3_256.tests.ex3 Passed - ◉ sha3_256.tests.ex4 Passed - ◉ sha3_512.tests.ex1 Passed - ◉ sha3_512.tests.ex2 Passed - ◉ sha3_512.tests.ex3 Passed - ◉ sha3_512.tests.ex4 Passed + ◉ blake2b_512.tests.ex1 Passed + ◉ blake2b_512.tests.ex2 Passed + ◉ blake2b_512.tests.ex3 Passed + ◉ blake2s_256.tests.ex1 Passed + ◉ md5.tests.ex1 Passed + ◉ md5.tests.ex2 Passed + ◉ md5.tests.ex3 Passed + ◉ sha1.tests.ex1 Passed + ◉ sha1.tests.ex2 Passed + ◉ sha1.tests.ex3 Passed + ◉ sha1.tests.ex4 Passed + ◉ sha2_256.tests.ex1 Passed + ◉ sha2_256.tests.ex2 Passed + ◉ sha2_256.tests.ex3 Passed + ◉ sha2_256.tests.ex4 Passed + ◉ sha2_512.tests.ex1 Passed + ◉ sha2_512.tests.ex2 Passed + ◉ sha2_512.tests.ex3 Passed + ◉ sha2_512.tests.ex4 Passed + ◉ sha3_256.tests.ex1 Passed + ◉ sha3_256.tests.ex2 Passed + ◉ sha3_256.tests.ex3 Passed + ◉ sha3_256.tests.ex4 Passed + ◉ sha3_512.tests.ex1 Passed + ◉ sha3_512.tests.ex2 Passed + ◉ sha3_512.tests.ex3 Passed + ◉ sha3_512.tests.ex4 Passed - ✅ 28 test(s) passing + ✅ 27 test(s) passing Tip: Use view blake2b_512.tests.ex1 to view the source of a test. diff --git a/unison-src/transcripts-using-base/random-deserial.md b/unison-src/transcripts-using-base/random-deserial.md index 526705b3b..9b0672764 100644 --- a/unison-src/transcripts-using-base/random-deserial.md +++ b/unison-src/transcripts-using-base/random-deserial.md @@ -23,6 +23,7 @@ shuffle = | otherwise -> match gen seed (size l) with (k, seed) -> match (take k l, drop k l) with (pre, x +: post) -> pick (acc :+ x) seed (pre ++ post) + (pre, []) -> pick acc seed pre pick [] diff --git a/unison-src/transcripts-using-base/random-deserial.output.md b/unison-src/transcripts-using-base/random-deserial.output.md index d7f278426..24f21d771 100644 --- a/unison-src/transcripts-using-base/random-deserial.output.md +++ b/unison-src/transcripts-using-base/random-deserial.output.md @@ -19,6 +19,7 @@ shuffle = | otherwise -> match gen seed (size l) with (k, seed) -> match (take k l, drop k l) with (pre, x +: post) -> pick (acc :+ x) seed (pre ++ post) + (pre, []) -> pick acc seed pre pick [] diff --git a/unison-src/transcripts-using-base/ref-promise.md b/unison-src/transcripts-using-base/ref-promise.md new file mode 100644 index 000000000..dd54328ec --- /dev/null +++ b/unison-src/transcripts-using-base/ref-promise.md @@ -0,0 +1,128 @@ +# tests for Promise and CAS on Refs + +Ref support a CAS operation that can be used as a building block to +change state atomically without locks. 
+ +```unison +casTest: '{io2.IO} [Result] +casTest = do + test = do + ref = IO.ref 0 + ticket = Ref.readForCas ref + v1 = Ref.cas ref ticket 5 + check "CAS is successful is there were no conflicting writes" v1 + Ref.write ref 10 + v2 = Ref.cas ref ticket 15 + check "CAS fails when there was an intervening write" (not v2) + + runTest test +``` + +```ucm +.> add +.> io.test casTest +``` + +Promise is a simple one-shot awaitable condition. + +```unison +promiseSequentialTest : '{IO} [Result] +promiseSequentialTest = do + test = do + use Nat eq + use Promise read write + p = !Promise.new + write p 0 |> void + v1 = read p + check "Should read a value that's been written" (eq v1 0) + write p 1 |> void + v2 = read p + check "Promise can only be written to once" (eq v2 0) + + runTest test + +promiseConcurrentTest : '{IO} [Result] +promiseConcurrentTest = do + use Nat eq + test = do + p = !Promise.new + _ = forkComp '(Promise.write p 5) + v = Promise.read p + check "Reads awaits for completion of the Promise" (eq v 5) + + runTest test +``` + +```ucm +.> add +.> io.test promiseSequentialTest +.> io.test promiseConcurrentTest +``` + +CAS can be used to write an atomic update function. + +```unison +atomicUpdate : Ref {IO} a -> (a -> a) ->{IO} () +atomicUpdate ref f = + ticket = Ref.readForCas ref + value = f (Ticket.read ticket) + if Ref.cas ref ticket value then () else atomicUpdate ref f +``` + +```ucm +.> add +``` + +Promise can be used to write an operation that spawns N concurrent +tasks and collects their results + +```unison +spawnN : Nat -> '{IO} a ->{IO} [a] +spawnN n fa = + use Nat eq drop + go i acc = + if eq i 0 + then acc + else + value = !Promise.new + _ = forkComp do Promise.write value !fa + go (drop i 1) (acc :+ value) + + map Promise.read (go n []) +``` +```ucm +.> add +``` + +We can use these primitives to write a more interesting example, where +multiple threads repeatedly update an atomic counter, we check that +the value of the counter is correct after all threads are done. + +```unison +fullTest : '{IO} [Result] +fullTest = do + use Nat * + eq drop + + numThreads = 100 + iterations = 100 + expected = numThreads * iterations + + test = do + state = IO.ref 0 + thread n = + if eq n 0 + then () + else + atomicUpdate state (v -> v + 1) + thread (drop n 1) + void (spawnN numThreads '(thread iterations)) + result = Ref.read state + check "The state of the counter is consistent "(eq result expected) + + runTest test +``` + +```ucm +.> add +.> io.test fullTest +``` diff --git a/unison-src/transcripts-using-base/ref-promise.output.md b/unison-src/transcripts-using-base/ref-promise.output.md new file mode 100644 index 000000000..444c0dcaa --- /dev/null +++ b/unison-src/transcripts-using-base/ref-promise.output.md @@ -0,0 +1,247 @@ +# tests for Promise and CAS on Refs + +Ref support a CAS operation that can be used as a building block to +change state atomically without locks. + +```unison +casTest: '{io2.IO} [Result] +casTest = do + test = do + ref = IO.ref 0 + ticket = Ref.readForCas ref + v1 = Ref.cas ref ticket 5 + check "CAS is successful is there were no conflicting writes" v1 + Ref.write ref 10 + v2 = Ref.cas ref ticket 15 + check "CAS fails when there was an intervening write" (not v2) + + runTest test +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + casTest : '{IO} [Result] + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + casTest : '{IO} [Result] + +.> io.test casTest + + New test results: + + ◉ casTest CAS is successful is there were no conflicting writes + ◉ casTest CAS fails when there was an intervening write + + ✅ 2 test(s) passing + + Tip: Use view casTest to view the source of a test. + +``` +Promise is a simple one-shot awaitable condition. + +```unison +promiseSequentialTest : '{IO} [Result] +promiseSequentialTest = do + test = do + use Nat eq + use Promise read write + p = !Promise.new + write p 0 |> void + v1 = read p + check "Should read a value that's been written" (eq v1 0) + write p 1 |> void + v2 = read p + check "Promise can only be written to once" (eq v2 0) + + runTest test + +promiseConcurrentTest : '{IO} [Result] +promiseConcurrentTest = do + use Nat eq + test = do + p = !Promise.new + _ = forkComp '(Promise.write p 5) + v = Promise.read p + check "Reads awaits for completion of the Promise" (eq v 5) + + runTest test +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + promiseConcurrentTest : '{IO} [Result] + promiseSequentialTest : '{IO} [Result] + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + promiseConcurrentTest : '{IO} [Result] + promiseSequentialTest : '{IO} [Result] + +.> io.test promiseSequentialTest + + New test results: + + ◉ promiseSequentialTest Should read a value that's been written + ◉ promiseSequentialTest Promise can only be written to once + + ✅ 2 test(s) passing + + Tip: Use view promiseSequentialTest to view the source of a + test. + +.> io.test promiseConcurrentTest + + New test results: + + ◉ promiseConcurrentTest Reads awaits for completion of the Promise + + ✅ 1 test(s) passing + + Tip: Use view promiseConcurrentTest to view the source of a + test. + +``` +CAS can be used to write an atomic update function. + +```unison +atomicUpdate : Ref {IO} a -> (a -> a) ->{IO} () +atomicUpdate ref f = + ticket = Ref.readForCas ref + value = f (Ticket.read ticket) + if Ref.cas ref ticket value then () else atomicUpdate ref f +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + atomicUpdate : Ref {IO} a -> (a -> a) ->{IO} () + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + atomicUpdate : Ref {IO} a -> (a -> a) ->{IO} () + +``` +Promise can be used to write an operation that spawns N concurrent +tasks and collects their results + +```unison +spawnN : Nat -> '{IO} a ->{IO} [a] +spawnN n fa = + use Nat eq drop + go i acc = + if eq i 0 + then acc + else + value = !Promise.new + _ = forkComp do Promise.write value !fa + go (drop i 1) (acc :+ value) + + map Promise.read (go n []) +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + spawnN : Nat -> '{IO} a ->{IO} [a] + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + spawnN : Nat -> '{IO} a ->{IO} [a] + +``` +We can use these primitives to write a more interesting example, where +multiple threads repeatedly update an atomic counter, we check that +the value of the counter is correct after all threads are done. + +```unison +fullTest : '{IO} [Result] +fullTest = do + use Nat * + eq drop + + numThreads = 100 + iterations = 100 + expected = numThreads * iterations + + test = do + state = IO.ref 0 + thread n = + if eq n 0 + then () + else + atomicUpdate state (v -> v + 1) + thread (drop n 1) + void (spawnN numThreads '(thread iterations)) + result = Ref.read state + check "The state of the counter is consistent "(eq result expected) + + runTest test +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + fullTest : '{IO} [Result] + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + fullTest : '{IO} [Result] + +.> io.test fullTest + + New test results: + + ◉ fullTest The state of the counter is consistent + + ✅ 1 test(s) passing + + Tip: Use view fullTest to view the source of a test. + +``` diff --git a/unison-src/transcripts-using-base/tls.md b/unison-src/transcripts-using-base/tls.md index fff979ede..7da872603 100644 --- a/unison-src/transcripts-using-base/tls.md +++ b/unison-src/transcripts-using-base/tls.md @@ -61,7 +61,9 @@ serverThread portVar toSend = 'let cert = decodeCert (toUtf8 self_signed_cert_pem2) -- assume there is exactly one key decoded from our Bytes - key = match (decodePrivateKey (toUtf8 self_signed_key_pem)) with k +: _ -> k + key = match (decodePrivateKey (toUtf8 self_signed_key_pem)) with + k +: _ -> k + [] -> bug "oh no" -- create a default configuration using our credentials (certificate chain and key) tlsconfig = Tls.ServerConfig.default [cert] key diff --git a/unison-src/transcripts-using-base/tls.output.md b/unison-src/transcripts-using-base/tls.output.md index 1e4cb5120..52dae392d 100644 --- a/unison-src/transcripts-using-base/tls.output.md +++ b/unison-src/transcripts-using-base/tls.output.md @@ -78,7 +78,9 @@ serverThread portVar toSend = 'let cert = decodeCert (toUtf8 self_signed_cert_pem2) -- assume there is exactly one key decoded from our Bytes - key = match (decodePrivateKey (toUtf8 self_signed_key_pem)) with k +: _ -> k + key = match (decodePrivateKey (toUtf8 self_signed_key_pem)) with + k +: _ -> k + [] -> bug "oh no" -- create a default configuration using our credentials (certificate chain and key) tlsconfig = Tls.ServerConfig.default [cert] key diff --git a/unison-src/transcripts-using-base/utf8.md b/unison-src/transcripts-using-base/utf8.md index 127f46660..07cd53a4b 100644 --- a/unison-src/transcripts-using-base/utf8.md +++ b/unison-src/transcripts-using-base/utf8.md @@ -55,5 +55,6 @@ greek_bytes = Bytes.fromList [206, 145, 206, 146, 206, 147, 206, 148, 206] -- Its an error if we drop the first byte > match fromUtf8.impl (drop 1 greek_bytes) with Left (Failure _ t _) -> t + _ -> bug "expected a left" ``` diff --git a/unison-src/transcripts-using-base/utf8.output.md b/unison-src/transcripts-using-base/utf8.output.md index 372185222..ab99fd866 100644 --- a/unison-src/transcripts-using-base/utf8.output.md +++ 
b/unison-src/transcripts-using-base/utf8.output.md @@ -111,6 +111,7 @@ greek_bytes = Bytes.fromList [206, 145, 206, 146, 206, 147, 206, 148, 206] -- Its an error if we drop the first byte > match fromUtf8.impl (drop 1 greek_bytes) with Left (Failure _ t _) -> t + _ -> bug "expected a left" ``` diff --git a/unison-src/transcripts/alias-many.output.md b/unison-src/transcripts/alias-many.output.md index 763978588..75fa93d8d 100644 --- a/unison-src/transcripts/alias-many.output.md +++ b/unison-src/transcripts/alias-many.output.md @@ -59,590 +59,636 @@ Let's try it! 39. Bytes.zlib.compress : Bytes -> Bytes 40. Bytes.zlib.decompress : Bytes -> Either Text Bytes 41. builtin type Char - 42. Char.fromNat : Nat -> Char - 43. Char.toNat : Char -> Nat - 44. Char.toText : Char -> Text - 45. builtin type Code - 46. Code.cache_ : [(Term, Code)] ->{IO} [Term] - 47. Code.dependencies : Code -> [Term] - 48. Code.deserialize : Bytes -> Either Text Code - 49. Code.display : Text -> Code -> Text - 50. Code.isMissing : Term ->{IO} Boolean - 51. Code.lookup : Term ->{IO} Optional Code - 52. Code.serialize : Code -> Bytes - 53. Code.validate : [(Term, Code)] ->{IO} Optional Failure - 54. crypto.hash : HashAlgorithm -> a -> Bytes - 55. builtin type crypto.HashAlgorithm - 56. crypto.HashAlgorithm.Blake2b_256 : HashAlgorithm - 57. crypto.HashAlgorithm.Blake2b_512 : HashAlgorithm - 58. crypto.HashAlgorithm.Blake2s_256 : HashAlgorithm - 59. crypto.HashAlgorithm.Sha1 : HashAlgorithm - 60. crypto.HashAlgorithm.Sha2_256 : HashAlgorithm - 61. crypto.HashAlgorithm.Sha2_512 : HashAlgorithm - 62. crypto.HashAlgorithm.Sha3_256 : HashAlgorithm - 63. crypto.HashAlgorithm.Sha3_512 : HashAlgorithm - 64. crypto.hashBytes : HashAlgorithm -> Bytes -> Bytes - 65. crypto.hmac : HashAlgorithm -> Bytes -> a -> Bytes - 66. crypto.hmacBytes : HashAlgorithm + 42. builtin type Char.Class + 43. Char.Class.alphanumeric : Class + 44. Char.Class.and : Class -> Class -> Class + 45. Char.Class.any : Class + 46. Char.Class.anyOf : [Char] -> Class + 47. Char.Class.control : Class + 48. Char.Class.is : Class -> Char -> Boolean + 49. Char.Class.letter : Class + 50. Char.Class.lower : Class + 51. Char.Class.mark : Class + 52. Char.Class.not : Class -> Class + 53. Char.Class.number : Class + 54. Char.Class.or : Class -> Class -> Class + 55. Char.Class.printable : Class + 56. Char.Class.punctuation : Class + 57. Char.Class.range : Char -> Char -> Class + 58. Char.Class.separator : Class + 59. Char.Class.symbol : Class + 60. Char.Class.upper : Class + 61. Char.Class.whitespace : Class + 62. Char.fromNat : Nat -> Char + 63. Char.toNat : Char -> Nat + 64. Char.toText : Char -> Text + 65. builtin type Code + 66. Code.cache_ : [(Term, Code)] ->{IO} [Term] + 67. Code.dependencies : Code -> [Term] + 68. Code.deserialize : Bytes -> Either Text Code + 69. Code.display : Text -> Code -> Text + 70. Code.isMissing : Term ->{IO} Boolean + 71. Code.lookup : Term ->{IO} Optional Code + 72. Code.serialize : Code -> Bytes + 73. Code.validate : [(Term, Code)] ->{IO} Optional Failure + 74. crypto.hash : HashAlgorithm -> a -> Bytes + 75. builtin type crypto.HashAlgorithm + 76. crypto.HashAlgorithm.Blake2b_256 : HashAlgorithm + 77. crypto.HashAlgorithm.Blake2b_512 : HashAlgorithm + 78. crypto.HashAlgorithm.Blake2s_256 : HashAlgorithm + 79. crypto.HashAlgorithm.Md5 : HashAlgorithm + 80. crypto.HashAlgorithm.Sha1 : HashAlgorithm + 81. crypto.HashAlgorithm.Sha2_256 : HashAlgorithm + 82. crypto.HashAlgorithm.Sha2_512 : HashAlgorithm + 83. 
crypto.HashAlgorithm.Sha3_256 : HashAlgorithm + 84. crypto.HashAlgorithm.Sha3_512 : HashAlgorithm + 85. crypto.hashBytes : HashAlgorithm -> Bytes -> Bytes + 86. crypto.hmac : HashAlgorithm -> Bytes -> a -> Bytes + 87. crypto.hmacBytes : HashAlgorithm -> Bytes -> Bytes -> Bytes - 67. Debug.trace : Text -> a -> () - 68. Debug.watch : Text -> a -> a - 69. unique type Doc - 70. Doc.Blob : Text -> Doc - 71. Doc.Evaluate : Term -> Doc - 72. Doc.Join : [Doc] -> Doc - 73. Doc.Link : Link -> Doc - 74. Doc.Signature : Term -> Doc - 75. Doc.Source : Link -> Doc - 76. structural type Either a b - 77. Either.Left : a -> Either a b - 78. Either.Right : b -> Either a b - 79. structural ability Exception - 80. Exception.raise : Failure ->{Exception} x - 81. builtin type Float - 82. Float.* : Float -> Float -> Float - 83. Float.+ : Float -> Float -> Float - 84. Float.- : Float -> Float -> Float - 85. Float./ : Float -> Float -> Float - 86. Float.abs : Float -> Float - 87. Float.acos : Float -> Float - 88. Float.acosh : Float -> Float - 89. Float.asin : Float -> Float - 90. Float.asinh : Float -> Float - 91. Float.atan : Float -> Float - 92. Float.atan2 : Float -> Float -> Float - 93. Float.atanh : Float -> Float - 94. Float.ceiling : Float -> Int - 95. Float.cos : Float -> Float - 96. Float.cosh : Float -> Float - 97. Float.eq : Float -> Float -> Boolean - 98. Float.exp : Float -> Float - 99. Float.floor : Float -> Int - 100. Float.fromRepresentation : Nat -> Float - 101. Float.fromText : Text -> Optional Float - 102. Float.gt : Float -> Float -> Boolean - 103. Float.gteq : Float -> Float -> Boolean - 104. Float.log : Float -> Float - 105. Float.logBase : Float -> Float -> Float - 106. Float.lt : Float -> Float -> Boolean - 107. Float.lteq : Float -> Float -> Boolean - 108. Float.max : Float -> Float -> Float - 109. Float.min : Float -> Float -> Float - 110. Float.pow : Float -> Float -> Float - 111. Float.round : Float -> Int - 112. Float.sin : Float -> Float - 113. Float.sinh : Float -> Float - 114. Float.sqrt : Float -> Float - 115. Float.tan : Float -> Float - 116. Float.tanh : Float -> Float - 117. Float.toRepresentation : Float -> Nat - 118. Float.toText : Float -> Text - 119. Float.truncate : Float -> Int - 120. Handle.toText : Handle -> Text - 121. builtin type ImmutableArray - 122. ImmutableArray.copyTo! : MutableArray g a + 88. Debug.toText : a -> Optional (Either Text Text) + 89. Debug.trace : Text -> a -> () + 90. Debug.watch : Text -> a -> a + 91. unique type Doc + 92. Doc.Blob : Text -> Doc + 93. Doc.Evaluate : Term -> Doc + 94. Doc.Join : [Doc] -> Doc + 95. Doc.Link : Link -> Doc + 96. Doc.Signature : Term -> Doc + 97. Doc.Source : Link -> Doc + 98. structural type Either a b + 99. Either.Left : a -> Either a b + 100. Either.Right : b -> Either a b + 101. structural ability Exception + 102. Exception.raise : Failure ->{Exception} x + 103. builtin type Float + 104. Float.* : Float -> Float -> Float + 105. Float.+ : Float -> Float -> Float + 106. Float.- : Float -> Float -> Float + 107. Float./ : Float -> Float -> Float + 108. Float.abs : Float -> Float + 109. Float.acos : Float -> Float + 110. Float.acosh : Float -> Float + 111. Float.asin : Float -> Float + 112. Float.asinh : Float -> Float + 113. Float.atan : Float -> Float + 114. Float.atan2 : Float -> Float -> Float + 115. Float.atanh : Float -> Float + 116. Float.ceiling : Float -> Int + 117. Float.cos : Float -> Float + 118. Float.cosh : Float -> Float + 119. Float.eq : Float -> Float -> Boolean + 120. Float.exp : Float -> Float + 121. 
Float.floor : Float -> Int + 122. Float.fromRepresentation : Nat -> Float + 123. Float.fromText : Text -> Optional Float + 124. Float.gt : Float -> Float -> Boolean + 125. Float.gteq : Float -> Float -> Boolean + 126. Float.log : Float -> Float + 127. Float.logBase : Float -> Float -> Float + 128. Float.lt : Float -> Float -> Boolean + 129. Float.lteq : Float -> Float -> Boolean + 130. Float.max : Float -> Float -> Float + 131. Float.min : Float -> Float -> Float + 132. Float.pow : Float -> Float -> Float + 133. Float.round : Float -> Int + 134. Float.sin : Float -> Float + 135. Float.sinh : Float -> Float + 136. Float.sqrt : Float -> Float + 137. Float.tan : Float -> Float + 138. Float.tanh : Float -> Float + 139. Float.toRepresentation : Float -> Nat + 140. Float.toText : Float -> Text + 141. Float.truncate : Float -> Int + 142. Handle.toText : Handle -> Text + 143. builtin type ImmutableArray + 144. ImmutableArray.copyTo! : MutableArray g a -> Nat -> ImmutableArray a -> Nat -> Nat ->{g, Exception} () - 123. ImmutableArray.read : ImmutableArray a + 145. ImmutableArray.read : ImmutableArray a -> Nat ->{Exception} a - 124. ImmutableArray.size : ImmutableArray a -> Nat - 125. builtin type ImmutableByteArray - 126. ImmutableByteArray.copyTo! : MutableByteArray g + 146. ImmutableArray.size : ImmutableArray a -> Nat + 147. builtin type ImmutableByteArray + 148. ImmutableByteArray.copyTo! : MutableByteArray g -> Nat -> ImmutableByteArray -> Nat -> Nat ->{g, Exception} () - 127. ImmutableByteArray.read16be : ImmutableByteArray + 149. ImmutableByteArray.read16be : ImmutableByteArray -> Nat ->{Exception} Nat - 128. ImmutableByteArray.read24be : ImmutableByteArray + 150. ImmutableByteArray.read24be : ImmutableByteArray -> Nat ->{Exception} Nat - 129. ImmutableByteArray.read32be : ImmutableByteArray + 151. ImmutableByteArray.read32be : ImmutableByteArray -> Nat ->{Exception} Nat - 130. ImmutableByteArray.read40be : ImmutableByteArray + 152. ImmutableByteArray.read40be : ImmutableByteArray -> Nat ->{Exception} Nat - 131. ImmutableByteArray.read64be : ImmutableByteArray + 153. ImmutableByteArray.read64be : ImmutableByteArray -> Nat ->{Exception} Nat - 132. ImmutableByteArray.read8 : ImmutableByteArray + 154. ImmutableByteArray.read8 : ImmutableByteArray -> Nat ->{Exception} Nat - 133. ImmutableByteArray.size : ImmutableByteArray -> Nat - 134. builtin type Int - 135. Int.* : Int -> Int -> Int - 136. Int.+ : Int -> Int -> Int - 137. Int.- : Int -> Int -> Int - 138. Int./ : Int -> Int -> Int - 139. Int.and : Int -> Int -> Int - 140. Int.complement : Int -> Int - 141. Int.eq : Int -> Int -> Boolean - 142. Int.fromRepresentation : Nat -> Int - 143. Int.fromText : Text -> Optional Int - 144. Int.gt : Int -> Int -> Boolean - 145. Int.gteq : Int -> Int -> Boolean - 146. Int.increment : Int -> Int - 147. Int.isEven : Int -> Boolean - 148. Int.isOdd : Int -> Boolean - 149. Int.leadingZeros : Int -> Nat - 150. Int.lt : Int -> Int -> Boolean - 151. Int.lteq : Int -> Int -> Boolean - 152. Int.mod : Int -> Int -> Int - 153. Int.negate : Int -> Int - 154. Int.or : Int -> Int -> Int - 155. Int.popCount : Int -> Nat - 156. Int.pow : Int -> Nat -> Int - 157. Int.shiftLeft : Int -> Nat -> Int - 158. Int.shiftRight : Int -> Nat -> Int - 159. Int.signum : Int -> Int - 160. Int.toFloat : Int -> Float - 161. Int.toRepresentation : Int -> Nat - 162. Int.toText : Int -> Text - 163. Int.trailingZeros : Int -> Nat - 164. Int.truncate0 : Int -> Nat - 165. Int.xor : Int -> Int -> Int - 166. 
unique type io2.ArithmeticFailure - 167. unique type io2.ArrayFailure - 168. unique type io2.BufferMode - 169. io2.BufferMode.BlockBuffering : BufferMode - 170. io2.BufferMode.LineBuffering : BufferMode - 171. io2.BufferMode.NoBuffering : BufferMode - 172. io2.BufferMode.SizedBlockBuffering : Nat -> BufferMode - 173. io2.Clock.internals.monotonic : '{IO} Either + 155. ImmutableByteArray.size : ImmutableByteArray -> Nat + 156. builtin type Int + 157. Int.* : Int -> Int -> Int + 158. Int.+ : Int -> Int -> Int + 159. Int.- : Int -> Int -> Int + 160. Int./ : Int -> Int -> Int + 161. Int.and : Int -> Int -> Int + 162. Int.complement : Int -> Int + 163. Int.eq : Int -> Int -> Boolean + 164. Int.fromRepresentation : Nat -> Int + 165. Int.fromText : Text -> Optional Int + 166. Int.gt : Int -> Int -> Boolean + 167. Int.gteq : Int -> Int -> Boolean + 168. Int.increment : Int -> Int + 169. Int.isEven : Int -> Boolean + 170. Int.isOdd : Int -> Boolean + 171. Int.leadingZeros : Int -> Nat + 172. Int.lt : Int -> Int -> Boolean + 173. Int.lteq : Int -> Int -> Boolean + 174. Int.mod : Int -> Int -> Int + 175. Int.negate : Int -> Int + 176. Int.or : Int -> Int -> Int + 177. Int.popCount : Int -> Nat + 178. Int.pow : Int -> Nat -> Int + 179. Int.shiftLeft : Int -> Nat -> Int + 180. Int.shiftRight : Int -> Nat -> Int + 181. Int.signum : Int -> Int + 182. Int.toFloat : Int -> Float + 183. Int.toRepresentation : Int -> Nat + 184. Int.toText : Int -> Text + 185. Int.trailingZeros : Int -> Nat + 186. Int.truncate0 : Int -> Nat + 187. Int.xor : Int -> Int -> Int + 188. unique type io2.ArithmeticFailure + 189. unique type io2.ArrayFailure + 190. unique type io2.BufferMode + 191. io2.BufferMode.BlockBuffering : BufferMode + 192. io2.BufferMode.LineBuffering : BufferMode + 193. io2.BufferMode.NoBuffering : BufferMode + 194. io2.BufferMode.SizedBlockBuffering : Nat -> BufferMode + 195. io2.Clock.internals.monotonic : '{IO} Either Failure TimeSpec - 174. io2.Clock.internals.nsec : TimeSpec -> Nat - 175. io2.Clock.internals.processCPUTime : '{IO} Either + 196. io2.Clock.internals.nsec : TimeSpec -> Nat + 197. io2.Clock.internals.processCPUTime : '{IO} Either Failure TimeSpec - 176. io2.Clock.internals.realtime : '{IO} Either + 198. io2.Clock.internals.realtime : '{IO} Either Failure TimeSpec - 177. io2.Clock.internals.sec : TimeSpec -> Int - 178. io2.Clock.internals.threadCPUTime : '{IO} Either + 199. io2.Clock.internals.sec : TimeSpec -> Int + 200. io2.Clock.internals.threadCPUTime : '{IO} Either Failure TimeSpec - 179. builtin type io2.Clock.internals.TimeSpec - 180. unique type io2.Failure - 181. io2.Failure.Failure : Type -> Text -> Any -> Failure - 182. unique type io2.FileMode - 183. io2.FileMode.Append : FileMode - 184. io2.FileMode.Read : FileMode - 185. io2.FileMode.ReadWrite : FileMode - 186. io2.FileMode.Write : FileMode - 187. builtin type io2.Handle - 188. builtin type io2.IO - 189. io2.IO.array : Nat ->{IO} MutableArray {IO} a - 190. io2.IO.arrayOf : a -> Nat ->{IO} MutableArray {IO} a - 191. io2.IO.bytearray : Nat ->{IO} MutableByteArray {IO} - 192. io2.IO.bytearrayOf : Nat + 201. builtin type io2.Clock.internals.TimeSpec + 202. unique type io2.Failure + 203. io2.Failure.Failure : Type -> Text -> Any -> Failure + 204. unique type io2.FileMode + 205. io2.FileMode.Append : FileMode + 206. io2.FileMode.Read : FileMode + 207. io2.FileMode.ReadWrite : FileMode + 208. io2.FileMode.Write : FileMode + 209. builtin type io2.Handle + 210. builtin type io2.IO + 211. 
io2.IO.array : Nat ->{IO} MutableArray {IO} a + 212. io2.IO.arrayOf : a -> Nat ->{IO} MutableArray {IO} a + 213. io2.IO.bytearray : Nat ->{IO} MutableByteArray {IO} + 214. io2.IO.bytearrayOf : Nat -> Nat ->{IO} MutableByteArray {IO} - 193. io2.IO.clientSocket.impl : Text + 215. io2.IO.clientSocket.impl : Text -> Text ->{IO} Either Failure Socket - 194. io2.IO.closeFile.impl : Handle ->{IO} Either Failure () - 195. io2.IO.closeSocket.impl : Socket ->{IO} Either Failure () - 196. io2.IO.createDirectory.impl : Text + 216. io2.IO.closeFile.impl : Handle ->{IO} Either Failure () + 217. io2.IO.closeSocket.impl : Socket ->{IO} Either Failure () + 218. io2.IO.createDirectory.impl : Text ->{IO} Either Failure () - 197. io2.IO.createTempDirectory.impl : Text + 219. io2.IO.createTempDirectory.impl : Text ->{IO} Either Failure Text - 198. io2.IO.delay.impl : Nat ->{IO} Either Failure () - 199. io2.IO.directoryContents.impl : Text + 220. io2.IO.delay.impl : Nat ->{IO} Either Failure () + 221. io2.IO.directoryContents.impl : Text ->{IO} Either Failure [Text] - 200. io2.IO.fileExists.impl : Text + 222. io2.IO.fileExists.impl : Text ->{IO} Either Failure Boolean - 201. io2.IO.forkComp : '{IO} a ->{IO} ThreadId - 202. io2.IO.getArgs.impl : '{IO} Either Failure [Text] - 203. io2.IO.getBuffering.impl : Handle + 223. io2.IO.forkComp : '{IO} a ->{IO} ThreadId + 224. io2.IO.getArgs.impl : '{IO} Either Failure [Text] + 225. io2.IO.getBuffering.impl : Handle ->{IO} Either Failure BufferMode - 204. io2.IO.getBytes.impl : Handle + 226. io2.IO.getBytes.impl : Handle -> Nat ->{IO} Either Failure Bytes - 205. io2.IO.getChar.impl : Handle ->{IO} Either Failure Char - 206. io2.IO.getCurrentDirectory.impl : '{IO} Either + 227. io2.IO.getChar.impl : Handle ->{IO} Either Failure Char + 228. io2.IO.getCurrentDirectory.impl : '{IO} Either Failure Text - 207. io2.IO.getEcho.impl : Handle + 229. io2.IO.getEcho.impl : Handle ->{IO} Either Failure Boolean - 208. io2.IO.getEnv.impl : Text ->{IO} Either Failure Text - 209. io2.IO.getFileSize.impl : Text ->{IO} Either Failure Nat - 210. io2.IO.getFileTimestamp.impl : Text + 230. io2.IO.getEnv.impl : Text ->{IO} Either Failure Text + 231. io2.IO.getFileSize.impl : Text ->{IO} Either Failure Nat + 232. io2.IO.getFileTimestamp.impl : Text ->{IO} Either Failure Nat - 211. io2.IO.getLine.impl : Handle ->{IO} Either Failure Text - 212. io2.IO.getSomeBytes.impl : Handle + 233. io2.IO.getLine.impl : Handle ->{IO} Either Failure Text + 234. io2.IO.getSomeBytes.impl : Handle -> Nat ->{IO} Either Failure Bytes - 213. io2.IO.getTempDirectory.impl : '{IO} Either Failure Text - 214. io2.IO.handlePosition.impl : Handle + 235. io2.IO.getTempDirectory.impl : '{IO} Either Failure Text + 236. io2.IO.handlePosition.impl : Handle ->{IO} Either Failure Nat - 215. io2.IO.isDirectory.impl : Text + 237. io2.IO.isDirectory.impl : Text ->{IO} Either Failure Boolean - 216. io2.IO.isFileEOF.impl : Handle + 238. io2.IO.isFileEOF.impl : Handle ->{IO} Either Failure Boolean - 217. io2.IO.isFileOpen.impl : Handle + 239. io2.IO.isFileOpen.impl : Handle ->{IO} Either Failure Boolean - 218. io2.IO.isSeekable.impl : Handle + 240. io2.IO.isSeekable.impl : Handle ->{IO} Either Failure Boolean - 219. io2.IO.kill.impl : ThreadId ->{IO} Either Failure () - 220. io2.IO.listen.impl : Socket ->{IO} Either Failure () - 221. io2.IO.openFile.impl : Text + 241. io2.IO.kill.impl : ThreadId ->{IO} Either Failure () + 242. io2.IO.listen.impl : Socket ->{IO} Either Failure () + 243. 
io2.IO.openFile.impl : Text -> FileMode ->{IO} Either Failure Handle - 222. io2.IO.putBytes.impl : Handle + 244. io2.IO.process.call : Text -> [Text] ->{IO} Nat + 245. io2.IO.process.exitCode : ProcessHandle + ->{IO} Optional Nat + 246. io2.IO.process.kill : ProcessHandle ->{IO} () + 247. io2.IO.process.start : Text + -> [Text] + ->{IO} ( Handle, + Handle, + Handle, + ProcessHandle) + 248. io2.IO.process.wait : ProcessHandle ->{IO} Nat + 249. io2.IO.putBytes.impl : Handle -> Bytes ->{IO} Either Failure () - 223. io2.IO.ready.impl : Handle ->{IO} Either Failure Boolean - 224. io2.IO.ref : a ->{IO} Ref {IO} a - 225. io2.IO.removeDirectory.impl : Text + 250. io2.IO.ready.impl : Handle ->{IO} Either Failure Boolean + 251. io2.IO.ref : a ->{IO} Ref {IO} a + 252. io2.IO.removeDirectory.impl : Text ->{IO} Either Failure () - 226. io2.IO.removeFile.impl : Text ->{IO} Either Failure () - 227. io2.IO.renameDirectory.impl : Text + 253. io2.IO.removeFile.impl : Text ->{IO} Either Failure () + 254. io2.IO.renameDirectory.impl : Text -> Text ->{IO} Either Failure () - 228. io2.IO.renameFile.impl : Text + 255. io2.IO.renameFile.impl : Text -> Text ->{IO} Either Failure () - 229. io2.IO.seekHandle.impl : Handle + 256. io2.IO.seekHandle.impl : Handle -> SeekMode -> Int ->{IO} Either Failure () - 230. io2.IO.serverSocket.impl : Optional Text + 257. io2.IO.serverSocket.impl : Optional Text -> Text ->{IO} Either Failure Socket - 231. io2.IO.setBuffering.impl : Handle + 258. io2.IO.setBuffering.impl : Handle -> BufferMode ->{IO} Either Failure () - 232. io2.IO.setCurrentDirectory.impl : Text + 259. io2.IO.setCurrentDirectory.impl : Text ->{IO} Either Failure () - 233. io2.IO.setEcho.impl : Handle + 260. io2.IO.setEcho.impl : Handle -> Boolean ->{IO} Either Failure () - 234. io2.IO.socketAccept.impl : Socket + 261. io2.IO.socketAccept.impl : Socket ->{IO} Either Failure Socket - 235. io2.IO.socketPort.impl : Socket ->{IO} Either Failure Nat - 236. io2.IO.socketReceive.impl : Socket + 262. io2.IO.socketPort.impl : Socket ->{IO} Either Failure Nat + 263. io2.IO.socketReceive.impl : Socket -> Nat ->{IO} Either Failure Bytes - 237. io2.IO.socketSend.impl : Socket + 264. io2.IO.socketSend.impl : Socket -> Bytes ->{IO} Either Failure () - 238. io2.IO.stdHandle : StdHandle -> Handle - 239. io2.IO.systemTime.impl : '{IO} Either Failure Nat - 240. io2.IO.systemTimeMicroseconds : '{IO} Int - 241. io2.IO.tryEval : '{IO} a ->{IO, Exception} a - 242. unique type io2.IOError - 243. io2.IOError.AlreadyExists : IOError - 244. io2.IOError.EOF : IOError - 245. io2.IOError.IllegalOperation : IOError - 246. io2.IOError.NoSuchThing : IOError - 247. io2.IOError.PermissionDenied : IOError - 248. io2.IOError.ResourceBusy : IOError - 249. io2.IOError.ResourceExhausted : IOError - 250. io2.IOError.UserError : IOError - 251. unique type io2.IOFailure - 252. unique type io2.MiscFailure - 253. builtin type io2.MVar - 254. io2.MVar.isEmpty : MVar a ->{IO} Boolean - 255. io2.MVar.new : a ->{IO} MVar a - 256. io2.MVar.newEmpty : '{IO} MVar a - 257. io2.MVar.put.impl : MVar a -> a ->{IO} Either Failure () - 258. io2.MVar.read.impl : MVar a ->{IO} Either Failure a - 259. io2.MVar.swap.impl : MVar a -> a ->{IO} Either Failure a - 260. io2.MVar.take.impl : MVar a ->{IO} Either Failure a - 261. io2.MVar.tryPut.impl : MVar a + 265. io2.IO.stdHandle : StdHandle -> Handle + 266. io2.IO.systemTime.impl : '{IO} Either Failure Nat + 267. io2.IO.systemTimeMicroseconds : '{IO} Int + 268. io2.IO.tryEval : '{IO} a ->{IO, Exception} a + 269. 
unique type io2.IOError + 270. io2.IOError.AlreadyExists : IOError + 271. io2.IOError.EOF : IOError + 272. io2.IOError.IllegalOperation : IOError + 273. io2.IOError.NoSuchThing : IOError + 274. io2.IOError.PermissionDenied : IOError + 275. io2.IOError.ResourceBusy : IOError + 276. io2.IOError.ResourceExhausted : IOError + 277. io2.IOError.UserError : IOError + 278. unique type io2.IOFailure + 279. unique type io2.MiscFailure + 280. builtin type io2.MVar + 281. io2.MVar.isEmpty : MVar a ->{IO} Boolean + 282. io2.MVar.new : a ->{IO} MVar a + 283. io2.MVar.newEmpty : '{IO} MVar a + 284. io2.MVar.put.impl : MVar a -> a ->{IO} Either Failure () + 285. io2.MVar.read.impl : MVar a ->{IO} Either Failure a + 286. io2.MVar.swap.impl : MVar a -> a ->{IO} Either Failure a + 287. io2.MVar.take.impl : MVar a ->{IO} Either Failure a + 288. io2.MVar.tryPut.impl : MVar a -> a ->{IO} Either Failure Boolean - 262. io2.MVar.tryRead.impl : MVar a + 289. io2.MVar.tryRead.impl : MVar a ->{IO} Either Failure (Optional a) - 263. io2.MVar.tryTake : MVar a ->{IO} Optional a - 264. unique type io2.RuntimeFailure - 265. unique type io2.SeekMode - 266. io2.SeekMode.AbsoluteSeek : SeekMode - 267. io2.SeekMode.RelativeSeek : SeekMode - 268. io2.SeekMode.SeekFromEnd : SeekMode - 269. builtin type io2.Socket - 270. unique type io2.StdHandle - 271. io2.StdHandle.StdErr : StdHandle - 272. io2.StdHandle.StdIn : StdHandle - 273. io2.StdHandle.StdOut : StdHandle - 274. builtin type io2.STM - 275. io2.STM.atomically : '{STM} a ->{IO} a - 276. io2.STM.retry : '{STM} a - 277. unique type io2.STMFailure - 278. builtin type io2.ThreadId - 279. builtin type io2.Tls - 280. builtin type io2.Tls.Cipher - 281. builtin type io2.Tls.ClientConfig - 282. io2.Tls.ClientConfig.certificates.set : [SignedCert] + 290. io2.MVar.tryTake : MVar a ->{IO} Optional a + 291. builtin type io2.ProcessHandle + 292. builtin type io2.Promise + 293. io2.Promise.new : '{IO} Promise a + 294. io2.Promise.read : Promise a ->{IO} a + 295. io2.Promise.tryRead : Promise a ->{IO} Optional a + 296. io2.Promise.write : Promise a -> a ->{IO} Boolean + 297. io2.Ref.cas : Ref {IO} a -> Ticket a -> a ->{IO} Boolean + 298. io2.Ref.readForCas : Ref {IO} a ->{IO} Ticket a + 299. builtin type io2.Ref.Ticket + 300. io2.Ref.Ticket.read : Ticket a -> a + 301. unique type io2.RuntimeFailure + 302. unique type io2.SeekMode + 303. io2.SeekMode.AbsoluteSeek : SeekMode + 304. io2.SeekMode.RelativeSeek : SeekMode + 305. io2.SeekMode.SeekFromEnd : SeekMode + 306. builtin type io2.Socket + 307. unique type io2.StdHandle + 308. io2.StdHandle.StdErr : StdHandle + 309. io2.StdHandle.StdIn : StdHandle + 310. io2.StdHandle.StdOut : StdHandle + 311. builtin type io2.STM + 312. io2.STM.atomically : '{STM} a ->{IO} a + 313. io2.STM.retry : '{STM} a + 314. unique type io2.STMFailure + 315. builtin type io2.ThreadId + 316. unique type io2.ThreadKilledFailure + 317. builtin type io2.Tls + 318. builtin type io2.Tls.Cipher + 319. builtin type io2.Tls.ClientConfig + 320. io2.Tls.ClientConfig.certificates.set : [SignedCert] -> ClientConfig -> ClientConfig - 283. io2.TLS.ClientConfig.ciphers.set : [Cipher] + 321. io2.TLS.ClientConfig.ciphers.set : [Cipher] -> ClientConfig -> ClientConfig - 284. io2.Tls.ClientConfig.default : Text + 322. io2.Tls.ClientConfig.default : Text -> Bytes -> ClientConfig - 285. io2.Tls.ClientConfig.versions.set : [Version] + 323. io2.Tls.ClientConfig.versions.set : [Version] -> ClientConfig -> ClientConfig - 286. io2.Tls.decodeCert.impl : Bytes + 324. 
io2.Tls.decodeCert.impl : Bytes -> Either Failure SignedCert - 287. io2.Tls.decodePrivateKey : Bytes -> [PrivateKey] - 288. io2.Tls.encodeCert : SignedCert -> Bytes - 289. io2.Tls.encodePrivateKey : PrivateKey -> Bytes - 290. io2.Tls.handshake.impl : Tls ->{IO} Either Failure () - 291. io2.Tls.newClient.impl : ClientConfig + 325. io2.Tls.decodePrivateKey : Bytes -> [PrivateKey] + 326. io2.Tls.encodeCert : SignedCert -> Bytes + 327. io2.Tls.encodePrivateKey : PrivateKey -> Bytes + 328. io2.Tls.handshake.impl : Tls ->{IO} Either Failure () + 329. io2.Tls.newClient.impl : ClientConfig -> Socket ->{IO} Either Failure Tls - 292. io2.Tls.newServer.impl : ServerConfig + 330. io2.Tls.newServer.impl : ServerConfig -> Socket ->{IO} Either Failure Tls - 293. builtin type io2.Tls.PrivateKey - 294. io2.Tls.receive.impl : Tls ->{IO} Either Failure Bytes - 295. io2.Tls.send.impl : Tls -> Bytes ->{IO} Either Failure () - 296. builtin type io2.Tls.ServerConfig - 297. io2.Tls.ServerConfig.certificates.set : [SignedCert] + 331. builtin type io2.Tls.PrivateKey + 332. io2.Tls.receive.impl : Tls ->{IO} Either Failure Bytes + 333. io2.Tls.send.impl : Tls -> Bytes ->{IO} Either Failure () + 334. builtin type io2.Tls.ServerConfig + 335. io2.Tls.ServerConfig.certificates.set : [SignedCert] -> ServerConfig -> ServerConfig - 298. io2.Tls.ServerConfig.ciphers.set : [Cipher] + 336. io2.Tls.ServerConfig.ciphers.set : [Cipher] -> ServerConfig -> ServerConfig - 299. io2.Tls.ServerConfig.default : [SignedCert] + 337. io2.Tls.ServerConfig.default : [SignedCert] -> PrivateKey -> ServerConfig - 300. io2.Tls.ServerConfig.versions.set : [Version] + 338. io2.Tls.ServerConfig.versions.set : [Version] -> ServerConfig -> ServerConfig - 301. builtin type io2.Tls.SignedCert - 302. io2.Tls.terminate.impl : Tls ->{IO} Either Failure () - 303. builtin type io2.Tls.Version - 304. unique type io2.TlsFailure - 305. builtin type io2.TVar - 306. io2.TVar.new : a ->{STM} TVar a - 307. io2.TVar.newIO : a ->{IO} TVar a - 308. io2.TVar.read : TVar a ->{STM} a - 309. io2.TVar.readIO : TVar a ->{IO} a - 310. io2.TVar.swap : TVar a -> a ->{STM} a - 311. io2.TVar.write : TVar a -> a ->{STM} () - 312. io2.validateSandboxed : [Term] -> a -> Boolean - 313. unique type IsPropagated - 314. IsPropagated.IsPropagated : IsPropagated - 315. unique type IsTest - 316. IsTest.IsTest : IsTest - 317. unique type Link - 318. builtin type Link.Term - 319. Link.Term : Term -> Link - 320. Link.Term.toText : Term -> Text - 321. builtin type Link.Type - 322. Link.Type : Type -> Link - 323. builtin type List - 324. List.++ : [a] -> [a] -> [a] - 325. List.+: : a -> [a] -> [a] - 326. List.:+ : [a] -> a -> [a] - 327. List.at : Nat -> [a] -> Optional a - 328. List.cons : a -> [a] -> [a] - 329. List.drop : Nat -> [a] -> [a] - 330. List.empty : [a] - 331. List.size : [a] -> Nat - 332. List.snoc : [a] -> a -> [a] - 333. List.take : Nat -> [a] -> [a] - 334. metadata.isPropagated : IsPropagated - 335. metadata.isTest : IsTest - 336. builtin type MutableArray - 337. MutableArray.copyTo! : MutableArray g a + 339. builtin type io2.Tls.SignedCert + 340. io2.Tls.terminate.impl : Tls ->{IO} Either Failure () + 341. builtin type io2.Tls.Version + 342. unique type io2.TlsFailure + 343. builtin type io2.TVar + 344. io2.TVar.new : a ->{STM} TVar a + 345. io2.TVar.newIO : a ->{IO} TVar a + 346. io2.TVar.read : TVar a ->{STM} a + 347. io2.TVar.readIO : TVar a ->{IO} a + 348. io2.TVar.swap : TVar a -> a ->{STM} a + 349. io2.TVar.write : TVar a -> a ->{STM} () + 350. 
io2.validateSandboxed : [Term] -> a -> Boolean + 351. unique type IsPropagated + 352. IsPropagated.IsPropagated : IsPropagated + 353. unique type IsTest + 354. IsTest.IsTest : IsTest + 355. unique type Link + 356. builtin type Link.Term + 357. Link.Term : Term -> Link + 358. Link.Term.toText : Term -> Text + 359. builtin type Link.Type + 360. Link.Type : Type -> Link + 361. builtin type List + 362. List.++ : [a] -> [a] -> [a] + 363. List.+: : a -> [a] -> [a] + 364. List.:+ : [a] -> a -> [a] + 365. List.at : Nat -> [a] -> Optional a + 366. List.cons : a -> [a] -> [a] + 367. List.drop : Nat -> [a] -> [a] + 368. List.empty : [a] + 369. List.size : [a] -> Nat + 370. List.snoc : [a] -> a -> [a] + 371. List.take : Nat -> [a] -> [a] + 372. metadata.isPropagated : IsPropagated + 373. metadata.isTest : IsTest + 374. builtin type MutableArray + 375. MutableArray.copyTo! : MutableArray g a -> Nat -> MutableArray g a -> Nat -> Nat ->{g, Exception} () - 338. MutableArray.freeze : MutableArray g a + 376. MutableArray.freeze : MutableArray g a -> Nat -> Nat ->{g} ImmutableArray a - 339. MutableArray.freeze! : MutableArray g a + 377. MutableArray.freeze! : MutableArray g a ->{g} ImmutableArray a - 340. MutableArray.read : MutableArray g a + 378. MutableArray.read : MutableArray g a -> Nat ->{g, Exception} a - 341. MutableArray.size : MutableArray g a -> Nat - 342. MutableArray.write : MutableArray g a + 379. MutableArray.size : MutableArray g a -> Nat + 380. MutableArray.write : MutableArray g a -> Nat -> a ->{g, Exception} () - 343. builtin type MutableByteArray - 344. MutableByteArray.copyTo! : MutableByteArray g + 381. builtin type MutableByteArray + 382. MutableByteArray.copyTo! : MutableByteArray g -> Nat -> MutableByteArray g -> Nat -> Nat ->{g, Exception} () - 345. MutableByteArray.freeze : MutableByteArray g + 383. MutableByteArray.freeze : MutableByteArray g -> Nat -> Nat ->{g} ImmutableByteArray - 346. MutableByteArray.freeze! : MutableByteArray g + 384. MutableByteArray.freeze! : MutableByteArray g ->{g} ImmutableByteArray - 347. MutableByteArray.read16be : MutableByteArray g + 385. MutableByteArray.read16be : MutableByteArray g -> Nat ->{g, Exception} Nat - 348. MutableByteArray.read24be : MutableByteArray g + 386. MutableByteArray.read24be : MutableByteArray g -> Nat ->{g, Exception} Nat - 349. MutableByteArray.read32be : MutableByteArray g + 387. MutableByteArray.read32be : MutableByteArray g -> Nat ->{g, Exception} Nat - 350. MutableByteArray.read40be : MutableByteArray g + 388. MutableByteArray.read40be : MutableByteArray g -> Nat ->{g, Exception} Nat - 351. MutableByteArray.read64be : MutableByteArray g + 389. MutableByteArray.read64be : MutableByteArray g -> Nat ->{g, Exception} Nat - 352. MutableByteArray.read8 : MutableByteArray g + 390. MutableByteArray.read8 : MutableByteArray g -> Nat ->{g, Exception} Nat - 353. MutableByteArray.size : MutableByteArray g -> Nat - 354. MutableByteArray.write16be : MutableByteArray g + 391. MutableByteArray.size : MutableByteArray g -> Nat + 392. MutableByteArray.write16be : MutableByteArray g -> Nat -> Nat ->{g, Exception} () - 355. MutableByteArray.write32be : MutableByteArray g + 393. MutableByteArray.write32be : MutableByteArray g -> Nat -> Nat ->{g, Exception} () - 356. MutableByteArray.write64be : MutableByteArray g + 394. MutableByteArray.write64be : MutableByteArray g -> Nat -> Nat ->{g, Exception} () - 357. MutableByteArray.write8 : MutableByteArray g + 395. MutableByteArray.write8 : MutableByteArray g -> Nat -> Nat ->{g, Exception} () - 358. 
builtin type Nat - 359. Nat.* : Nat -> Nat -> Nat - 360. Nat.+ : Nat -> Nat -> Nat - 361. Nat./ : Nat -> Nat -> Nat - 362. Nat.and : Nat -> Nat -> Nat - 363. Nat.complement : Nat -> Nat - 364. Nat.drop : Nat -> Nat -> Nat - 365. Nat.eq : Nat -> Nat -> Boolean - 366. Nat.fromText : Text -> Optional Nat - 367. Nat.gt : Nat -> Nat -> Boolean - 368. Nat.gteq : Nat -> Nat -> Boolean - 369. Nat.increment : Nat -> Nat - 370. Nat.isEven : Nat -> Boolean - 371. Nat.isOdd : Nat -> Boolean - 372. Nat.leadingZeros : Nat -> Nat - 373. Nat.lt : Nat -> Nat -> Boolean - 374. Nat.lteq : Nat -> Nat -> Boolean - 375. Nat.mod : Nat -> Nat -> Nat - 376. Nat.or : Nat -> Nat -> Nat - 377. Nat.popCount : Nat -> Nat - 378. Nat.pow : Nat -> Nat -> Nat - 379. Nat.shiftLeft : Nat -> Nat -> Nat - 380. Nat.shiftRight : Nat -> Nat -> Nat - 381. Nat.sub : Nat -> Nat -> Int - 382. Nat.toFloat : Nat -> Float - 383. Nat.toInt : Nat -> Int - 384. Nat.toText : Nat -> Text - 385. Nat.trailingZeros : Nat -> Nat - 386. Nat.xor : Nat -> Nat -> Nat - 387. structural type Optional a - 388. Optional.None : Optional a - 389. Optional.Some : a -> Optional a - 390. builtin type Pattern - 391. Pattern.capture : Pattern a -> Pattern a - 392. Pattern.isMatch : Pattern a -> a -> Boolean - 393. Pattern.join : [Pattern a] -> Pattern a - 394. Pattern.many : Pattern a -> Pattern a - 395. Pattern.or : Pattern a -> Pattern a -> Pattern a - 396. Pattern.replicate : Nat -> Nat -> Pattern a -> Pattern a - 397. Pattern.run : Pattern a -> a -> Optional ([a], a) - 398. builtin type Ref - 399. Ref.read : Ref g a ->{g} a - 400. Ref.write : Ref g a -> a ->{g} () - 401. builtin type Request - 402. builtin type Scope - 403. Scope.array : Nat ->{Scope s} MutableArray (Scope s) a - 404. Scope.arrayOf : a + 396. builtin type Nat + 397. Nat.* : Nat -> Nat -> Nat + 398. Nat.+ : Nat -> Nat -> Nat + 399. Nat./ : Nat -> Nat -> Nat + 400. Nat.and : Nat -> Nat -> Nat + 401. Nat.complement : Nat -> Nat + 402. Nat.drop : Nat -> Nat -> Nat + 403. Nat.eq : Nat -> Nat -> Boolean + 404. Nat.fromText : Text -> Optional Nat + 405. Nat.gt : Nat -> Nat -> Boolean + 406. Nat.gteq : Nat -> Nat -> Boolean + 407. Nat.increment : Nat -> Nat + 408. Nat.isEven : Nat -> Boolean + 409. Nat.isOdd : Nat -> Boolean + 410. Nat.leadingZeros : Nat -> Nat + 411. Nat.lt : Nat -> Nat -> Boolean + 412. Nat.lteq : Nat -> Nat -> Boolean + 413. Nat.mod : Nat -> Nat -> Nat + 414. Nat.or : Nat -> Nat -> Nat + 415. Nat.popCount : Nat -> Nat + 416. Nat.pow : Nat -> Nat -> Nat + 417. Nat.shiftLeft : Nat -> Nat -> Nat + 418. Nat.shiftRight : Nat -> Nat -> Nat + 419. Nat.sub : Nat -> Nat -> Int + 420. Nat.toFloat : Nat -> Float + 421. Nat.toInt : Nat -> Int + 422. Nat.toText : Nat -> Text + 423. Nat.trailingZeros : Nat -> Nat + 424. Nat.xor : Nat -> Nat -> Nat + 425. structural type Optional a + 426. Optional.None : Optional a + 427. Optional.Some : a -> Optional a + 428. builtin type Pattern + 429. Pattern.capture : Pattern a -> Pattern a + 430. Pattern.isMatch : Pattern a -> a -> Boolean + 431. Pattern.join : [Pattern a] -> Pattern a + 432. Pattern.many : Pattern a -> Pattern a + 433. Pattern.or : Pattern a -> Pattern a -> Pattern a + 434. Pattern.replicate : Nat -> Nat -> Pattern a -> Pattern a + 435. Pattern.run : Pattern a -> a -> Optional ([a], a) + 436. builtin type Ref + 437. Ref.read : Ref g a ->{g} a + 438. Ref.write : Ref g a -> a ->{g} () + 439. builtin type Request + 440. builtin type Scope + 441. Scope.array : Nat ->{Scope s} MutableArray (Scope s) a + 442. 
Scope.arrayOf : a -> Nat ->{Scope s} MutableArray (Scope s) a - 405. Scope.bytearray : Nat + 443. Scope.bytearray : Nat ->{Scope s} MutableByteArray (Scope s) - 406. Scope.bytearrayOf : Nat + 444. Scope.bytearrayOf : Nat -> Nat ->{Scope s} MutableByteArray (Scope s) - 407. Scope.ref : a ->{Scope s} Ref {Scope s} a - 408. Scope.run : (∀ s. '{g, Scope s} r) ->{g} r - 409. structural type SeqView a b - 410. SeqView.VElem : a -> b -> SeqView a b - 411. SeqView.VEmpty : SeqView a b - 412. Socket.toText : Socket -> Text - 413. unique type Test.Result - 414. Test.Result.Fail : Text -> Result - 415. Test.Result.Ok : Text -> Result - 416. builtin type Text - 417. Text.!= : Text -> Text -> Boolean - 418. Text.++ : Text -> Text -> Text - 419. Text.drop : Nat -> Text -> Text - 420. Text.empty : Text - 421. Text.eq : Text -> Text -> Boolean - 422. Text.fromCharList : [Char] -> Text - 423. Text.fromUtf8.impl : Bytes -> Either Failure Text - 424. Text.gt : Text -> Text -> Boolean - 425. Text.gteq : Text -> Text -> Boolean - 426. Text.lt : Text -> Text -> Boolean - 427. Text.lteq : Text -> Text -> Boolean - 428. Text.patterns.anyChar : Pattern Text - 429. Text.patterns.charIn : [Char] -> Pattern Text - 430. Text.patterns.charRange : Char -> Char -> Pattern Text - 431. Text.patterns.digit : Pattern Text - 432. Text.patterns.eof : Pattern Text - 433. Text.patterns.letter : Pattern Text - 434. Text.patterns.literal : Text -> Pattern Text - 435. Text.patterns.notCharIn : [Char] -> Pattern Text - 436. Text.patterns.notCharRange : Char -> Char -> Pattern Text - 437. Text.patterns.punctuation : Pattern Text - 438. Text.patterns.space : Pattern Text - 439. Text.repeat : Nat -> Text -> Text - 440. Text.reverse : Text -> Text - 441. Text.size : Text -> Nat - 442. Text.take : Nat -> Text -> Text - 443. Text.toCharList : Text -> [Char] - 444. Text.toLowercase : Text -> Text - 445. Text.toUppercase : Text -> Text - 446. Text.toUtf8 : Text -> Bytes - 447. Text.uncons : Text -> Optional (Char, Text) - 448. Text.unsnoc : Text -> Optional (Text, Char) - 449. ThreadId.toText : ThreadId -> Text - 450. todo : a -> b - 451. structural type Tuple a b - 452. Tuple.Cons : a -> b -> Tuple a b - 453. structural type Unit - 454. Unit.Unit : () - 455. Universal.< : a -> a -> Boolean - 456. Universal.<= : a -> a -> Boolean - 457. Universal.== : a -> a -> Boolean - 458. Universal.> : a -> a -> Boolean - 459. Universal.>= : a -> a -> Boolean - 460. Universal.compare : a -> a -> Int - 461. unsafe.coerceAbilities : (a ->{e1} b) -> a ->{e2} b - 462. builtin type Value - 463. Value.dependencies : Value -> [Term] - 464. Value.deserialize : Bytes -> Either Text Value - 465. Value.load : Value ->{IO} Either [Term] a - 466. Value.serialize : Value -> Bytes - 467. Value.value : a -> Value + 445. Scope.ref : a ->{Scope s} Ref {Scope s} a + 446. Scope.run : (∀ s. '{g, Scope s} r) ->{g} r + 447. structural type SeqView a b + 448. SeqView.VElem : a -> b -> SeqView a b + 449. SeqView.VEmpty : SeqView a b + 450. Socket.toText : Socket -> Text + 451. unique type Test.Result + 452. Test.Result.Fail : Text -> Result + 453. Test.Result.Ok : Text -> Result + 454. builtin type Text + 455. Text.!= : Text -> Text -> Boolean + 456. Text.++ : Text -> Text -> Text + 457. Text.drop : Nat -> Text -> Text + 458. Text.empty : Text + 459. Text.eq : Text -> Text -> Boolean + 460. Text.fromCharList : [Char] -> Text + 461. Text.fromUtf8.impl : Bytes -> Either Failure Text + 462. Text.gt : Text -> Text -> Boolean + 463. Text.gteq : Text -> Text -> Boolean + 464. 
Text.lt : Text -> Text -> Boolean + 465. Text.lteq : Text -> Text -> Boolean + 466. Text.patterns.anyChar : Pattern Text + 467. Text.patterns.char : Class -> Pattern Text + 468. Text.patterns.charIn : [Char] -> Pattern Text + 469. Text.patterns.charRange : Char -> Char -> Pattern Text + 470. Text.patterns.digit : Pattern Text + 471. Text.patterns.eof : Pattern Text + 472. Text.patterns.letter : Pattern Text + 473. Text.patterns.literal : Text -> Pattern Text + 474. Text.patterns.notCharIn : [Char] -> Pattern Text + 475. Text.patterns.notCharRange : Char -> Char -> Pattern Text + 476. Text.patterns.punctuation : Pattern Text + 477. Text.patterns.space : Pattern Text + 478. Text.repeat : Nat -> Text -> Text + 479. Text.reverse : Text -> Text + 480. Text.size : Text -> Nat + 481. Text.take : Nat -> Text -> Text + 482. Text.toCharList : Text -> [Char] + 483. Text.toLowercase : Text -> Text + 484. Text.toUppercase : Text -> Text + 485. Text.toUtf8 : Text -> Bytes + 486. Text.uncons : Text -> Optional (Char, Text) + 487. Text.unsnoc : Text -> Optional (Text, Char) + 488. ThreadId.toText : ThreadId -> Text + 489. todo : a -> b + 490. structural type Tuple a b + 491. Tuple.Cons : a -> b -> Tuple a b + 492. structural type Unit + 493. Unit.Unit : () + 494. Universal.< : a -> a -> Boolean + 495. Universal.<= : a -> a -> Boolean + 496. Universal.== : a -> a -> Boolean + 497. Universal.> : a -> a -> Boolean + 498. Universal.>= : a -> a -> Boolean + 499. Universal.compare : a -> a -> Int + 500. Universal.murmurHash : a -> Nat + 501. unsafe.coerceAbilities : (a ->{e1} b) -> a ->{e2} b + 502. builtin type Value + 503. Value.dependencies : Value -> [Term] + 504. Value.deserialize : Bytes -> Either Text Value + 505. Value.load : Value ->{IO} Either [Term] a + 506. Value.serialize : Value -> Bytes + 507. Value.value : a -> Value .builtin> alias.many 94-104 .mylib @@ -651,17 +697,17 @@ Let's try it! Added definitions: - 1. Float.ceiling : Float -> Int - 2. Float.cos : Float -> Float - 3. Float.cosh : Float -> Float - 4. Float.eq : Float -> Float -> Boolean - 5. Float.exp : Float -> Float - 6. Float.floor : Float -> Int - 7. Float.fromRepresentation : Nat -> Float - 8. Float.fromText : Text -> Optional Float - 9. Float.gt : Float -> Float -> Boolean - 10. Float.gteq : Float -> Float -> Boolean - 11. Float.log : Float -> Float + 1. structural type Either a b + 2. structural ability Exception + 3. builtin type Float + 4. Doc.Join : [Doc] -> Doc + 5. Either.Left : a -> Either a b + 6. Doc.Link : Link -> Doc + 7. Either.Right : b -> Either a b + 8. Doc.Signature : Term -> Doc + 9. Doc.Source : Link -> Doc + 10. Exception.raise : Failure ->{Exception} x + 11. Float.* : Float -> Float -> Float Tip: You can use `undo` or `reflog` to undo this change. @@ -721,17 +767,17 @@ I want to incorporate a few more from another namespace: .mylib> find - 1. Float.ceiling : Float -> Int - 2. Float.cos : Float -> Float - 3. Float.cosh : Float -> Float - 4. Float.eq : Float -> Float -> Boolean - 5. Float.exp : Float -> Float - 6. Float.floor : Float -> Int - 7. Float.fromRepresentation : Nat -> Float - 8. Float.fromText : Text -> Optional Float - 9. Float.gt : Float -> Float -> Boolean - 10. Float.gteq : Float -> Float -> Boolean - 11. Float.log : Float -> Float + 1. Doc.Join : [Doc] -> Doc + 2. Doc.Link : Link -> Doc + 3. Doc.Signature : Term -> Doc + 4. Doc.Source : Link -> Doc + 5. structural type Either a b + 6. Either.Left : a -> Either a b + 7. Either.Right : b -> Either a b + 8. structural ability Exception + 9. 
Exception.raise : Failure ->{Exception} x + 10. builtin type Float + 11. Float.* : Float -> Float -> Float 12. List.adjacentPairs : [a] -> [(a, a)] 13. List.all : (a ->{g} Boolean) -> [a] ->{g} Boolean 14. List.any : (a ->{g} Boolean) -> [a] ->{g} Boolean diff --git a/unison-src/transcripts/bug-strange-closure.output.md b/unison-src/transcripts/bug-strange-closure.output.md index d4b723282..495a2d57b 100644 --- a/unison-src/transcripts/bug-strange-closure.output.md +++ b/unison-src/transcripts/bug-strange-closure.output.md @@ -2669,9 +2669,10 @@ rendered = Pretty.get (docFormatConsole doc.guide) (Eval (Term.Term (Any - ('(match 1 with + (do + match 1 with 1 -> "hi" - _ -> "goodbye")))))))), + _ -> "goodbye"))))))), Lit () (Right (Plain "\n")), Lit () (Right (Plain "\n")), Indent diff --git a/unison-src/transcripts/builtins-merge.output.md b/unison-src/transcripts/builtins-merge.output.md index 98ad8a0e1..feb39b578 100644 --- a/unison-src/transcripts/builtins-merge.output.md +++ b/unison-src/transcripts/builtins-merge.output.md @@ -16,10 +16,10 @@ The `builtins.merge` command adds the known builtins to a `builtin` subnamespace 5. Bytes (builtin type) 6. Bytes/ (33 terms) 7. Char (builtin type) - 8. Char/ (3 terms) + 8. Char/ (22 terms, 1 type) 9. Code (builtin type) 10. Code/ (8 terms) - 11. Debug/ (2 terms) + 11. Debug/ (3 terms) 12. Doc (type) 13. Doc/ (6 terms) 14. Either (type) @@ -63,18 +63,18 @@ The `builtins.merge` command adds the known builtins to a `builtin` subnamespace 52. Socket/ (1 term) 53. Test/ (2 terms, 1 type) 54. Text (builtin type) - 55. Text/ (32 terms) + 55. Text/ (33 terms) 56. ThreadId/ (1 term) 57. Tuple (type) 58. Tuple/ (1 term) 59. Unit (type) 60. Unit/ (1 term) - 61. Universal/ (6 terms) + 61. Universal/ (7 terms) 62. Value (builtin type) 63. Value/ (5 terms) 64. bug (a -> b) - 65. crypto/ (12 terms, 1 type) - 66. io2/ (119 terms, 28 types) + 65. crypto/ (13 terms, 1 type) + 66. io2/ (131 terms, 32 types) 67. metadata/ (2 terms) 68. todo (a -> b) 69. unsafe/ (1 term) diff --git a/unison-src/transcripts/builtins.md b/unison-src/transcripts/builtins.md index bcd652022..9ea848ebf 100644 --- a/unison-src/transcripts/builtins.md +++ b/unison-src/transcripts/builtins.md @@ -14,6 +14,9 @@ This transcript defines unit tests for builtin functions. 
There's a single `.> t ```unison:hide use Int +-- used for some take/drop tests later +bigN = Nat.shiftLeft 1 63 + -- Note: you can make the tests more fine-grained if you -- want to be able to tell which one is failing test> Int.tests.arithmetic = @@ -78,6 +81,8 @@ test> Int.tests.conversions = fromText "+0" == Some +0, fromText "a8f9djasdlfkj" == None, fromText "3940" == Some +3940, + fromText "1000000000000000000000000000" == None, + fromText "-1000000000000000000000000000" == None, toFloat +9394 == 9394.0, toFloat -20349 == -20349.0 ] @@ -147,6 +152,8 @@ test> Nat.tests.conversions = toText 10 == "10", fromText "ooga" == None, fromText "90" == Some 90, + fromText "-1" == None, + fromText "100000000000000000000000000" == None, unsnoc "abc" == Some ("ab", ?c), uncons "abc" == Some (?a, "bc"), unsnoc "" == None, @@ -198,7 +205,9 @@ test> Text.tests.takeDropAppend = Text.take 99 "yabba" == "yabba", Text.drop 0 "yabba" == "yabba", Text.drop 2 "yabba" == "bba", - Text.drop 99 "yabba" == "" + Text.drop 99 "yabba" == "", + Text.take bigN "yabba" == "yabba", + Text.drop bigN "yabba" == "" ] test> Text.tests.repeat = @@ -255,7 +264,9 @@ test> Bytes.tests.at = checks [ Bytes.at 1 bs == Some 13, Bytes.at 0 bs == Some 77, - Bytes.at 99 bs == None + Bytes.at 99 bs == None, + Bytes.take bigN bs == bs, + Bytes.drop bigN bs == empty ] test> Bytes.tests.compression = @@ -306,6 +317,14 @@ test> checks [ .> add ``` +Other list functions +```unison:hide +test> checks [ + List.take bigN [1,2,3] == [1,2,3], + List.drop bigN [1,2,3] == [] + ] +``` + ## `Any` functions ```unison @@ -341,6 +360,19 @@ openFile] .> add ``` +## Universal hash functions + +Just exercises the function + +```unison +> Universal.murmurHash 1 +test> Universal.murmurHash.tests = checks [Universal.murmurHash [1,2,3] == Universal.murmurHash [1,2,3]] +``` + +```ucm:hide +.> add +``` + ## Run the tests Now that all the tests have been added to the codebase, let's view the test report. This will fail the transcript (with a nice message) if any of the tests are failing. diff --git a/unison-src/transcripts/builtins.output.md b/unison-src/transcripts/builtins.output.md index f3f8f21a7..2c1475799 100644 --- a/unison-src/transcripts/builtins.output.md +++ b/unison-src/transcripts/builtins.output.md @@ -7,6 +7,9 @@ This transcript defines unit tests for builtin functions. 
There's a single `.> t ```unison use Int +-- used for some take/drop tests later +bigN = Nat.shiftLeft 1 63 + -- Note: you can make the tests more fine-grained if you -- want to be able to tell which one is failing test> Int.tests.arithmetic = @@ -71,6 +74,8 @@ test> Int.tests.conversions = fromText "+0" == Some +0, fromText "a8f9djasdlfkj" == None, fromText "3940" == Some +3940, + fromText "1000000000000000000000000000" == None, + fromText "-1000000000000000000000000000" == None, toFloat +9394 == 9394.0, toFloat -20349 == -20349.0 ] @@ -136,6 +141,8 @@ test> Nat.tests.conversions = toText 10 == "10", fromText "ooga" == None, fromText "90" == Some 90, + fromText "-1" == None, + fromText "100000000000000000000000000" == None, unsnoc "abc" == Some ("ab", ?c), uncons "abc" == Some (?a, "bc"), unsnoc "" == None, @@ -179,7 +186,9 @@ test> Text.tests.takeDropAppend = Text.take 99 "yabba" == "yabba", Text.drop 0 "yabba" == "yabba", Text.drop 2 "yabba" == "bba", - Text.drop 99 "yabba" == "" + Text.drop 99 "yabba" == "", + Text.take bigN "yabba" == "yabba", + Text.drop bigN "yabba" == "" ] test> Text.tests.repeat = @@ -232,7 +241,9 @@ test> Bytes.tests.at = checks [ Bytes.at 1 bs == Some 13, Bytes.at 0 bs == Some 77, - Bytes.at 99 bs == None + Bytes.at 99 bs == None, + Bytes.take bigN bs == bs, + Bytes.drop bigN bs == empty ] test> Bytes.tests.compression = @@ -275,6 +286,14 @@ test> checks [ ] ``` +Other list functions +```unison +test> checks [ + List.take bigN [1,2,3] == [1,2,3], + List.drop bigN [1,2,3] == [] + ] +``` + ## `Any` functions ```unison @@ -359,6 +378,37 @@ openFile] ✅ Passed Passed +``` +## Universal hash functions + +Just exercises the function + +```unison +> Universal.murmurHash 1 +test> Universal.murmurHash.tests = checks [Universal.murmurHash [1,2,3] == Universal.murmurHash [1,2,3]] +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + Universal.murmurHash.tests : [Result] + + Now evaluating any watch expressions (lines starting with + `>`)... Ctrl+C cancels. + + 1 | > Universal.murmurHash 1 + ⧩ + 5006114823290027883 + + 2 | test> Universal.murmurHash.tests = checks [Universal.murmurHash [1,2,3] == Universal.murmurHash [1,2,3]] + + ✅ Passed Passed + ``` ## Run the tests @@ -392,8 +442,9 @@ Now that all the tests have been added to the codebase, let's view the test repo ◉ Text.tests.patterns Passed ◉ Text.tests.repeat Passed ◉ Text.tests.takeDropAppend Passed + ◉ Universal.murmurHash.tests Passed - ✅ 23 test(s) passing + ✅ 24 test(s) passing Tip: Use view Any.test1 to view the source of a test. diff --git a/unison-src/transcripts/delete-namespace.md b/unison-src/transcripts/delete-namespace.md index 447bf53da..fe8f34630 100644 --- a/unison-src/transcripts/delete-namespace.md +++ b/unison-src/transcripts/delete-namespace.md @@ -47,11 +47,15 @@ Deleting the root namespace should require confirmation if not forced. ```ucm .> delete.namespace . .> delete.namespace . +-- Should have an empty history +.> history . ``` Deleting the root namespace shouldn't require confirmation if forced. ```ucm .> delete.namespace.force . +-- Should have an empty history +.> history . 
``` diff --git a/unison-src/transcripts/delete-namespace.output.md b/unison-src/transcripts/delete-namespace.output.md index 0acdacbbb..36f143147 100644 --- a/unison-src/transcripts/delete-namespace.output.md +++ b/unison-src/transcripts/delete-namespace.output.md @@ -86,6 +86,11 @@ Deleting the root namespace should require confirmation if not forced. undo, or `builtins.merge` to restore the absolute basics to the current path. +-- Should have an empty history +.> history . + + ☝️ The namespace . is empty. + ``` Deleting the root namespace shouldn't require confirmation if forced. @@ -96,4 +101,9 @@ Deleting the root namespace shouldn't require confirmation if forced. undo, or `builtins.merge` to restore the absolute basics to the current path. +-- Should have an empty history +.> history . + + ☝️ The namespace . is empty. + ``` diff --git a/unison-src/transcripts/delete-silent.output.md b/unison-src/transcripts/delete-silent.output.md index 4f0b8590a..7ea6d420d 100644 --- a/unison-src/transcripts/delete-silent.output.md +++ b/unison-src/transcripts/delete-silent.output.md @@ -3,7 +3,8 @@ ⚠️ - I don't know about that name. + The following names were not found in the codebase. Check your spelling. + foo ``` ```unison diff --git a/unison-src/transcripts/delete.md b/unison-src/transcripts/delete.md index f0d3f061d..fdacbf6e1 100644 --- a/unison-src/transcripts/delete.md +++ b/unison-src/transcripts/delete.md @@ -98,3 +98,112 @@ structural type foo = Foo () ```ucm .> delete.verbose foo ``` + +We want to be able to delete multiple terms at once + +```unison:hide +a = "a" +b = "b" +c = "c" +``` + +```ucm +.> add +.> delete.verbose a b c +``` + +We can delete terms and types in the same invocation of delete + +```unison:hide +structural type Foo = Foo () +a = "a" +b = "b" +c = "c" +``` + +```ucm +.> add +.> delete.verbose a b c Foo +.> delete.verbose Foo.Foo +``` + +We can delete a type and its constructors + +```unison:hide +structural type Foo = Foo () +``` + +```ucm +.> add +.> delete.verbose Foo Foo.Foo +``` + +You should not be able to delete terms which are referenced by other terms + +```unison:hide +a = 1 +b = 2 +c = 3 +d = a + b + c +``` + +```ucm:error +.> add +.> delete.verbose a b c +``` + +But you should be able to delete all terms which reference each other in a single command + +```unison:hide +e = 11 +f = 12 + e +g = 13 + f +h = e + f + g +``` + +```ucm +.> add +.> delete.verbose e f g h +``` + +You should be able to delete a type and all the functions that reference it in a single command + +```unison:hide +structural type Foo = Foo Nat + +incrementFoo : Foo -> Nat +incrementFoo = cases + (Foo n) -> n + 1 +``` + +```ucm +.> add +.> delete.verbose Foo Foo.Foo incrementFoo +``` + +If you mess up on one of the names of your command, delete short circuits + +```unison:hide +e = 11 +f = 12 + e +g = 13 + f +h = e + f + g +``` + +```ucm:error +.> add +.> delete.verbose e f gg +``` + +Cyclical terms which are guarded by a lambda are allowed to be deleted + +```unison:hide +ping _ = 1 Nat.+ !pong +pong _ = 4 Nat.+ !ping +``` + +```ucm +.> add +.> delete.verbose ping +.> view pong +``` diff --git a/unison-src/transcripts/delete.output.md b/unison-src/transcripts/delete.output.md index 9013563f8..1b4d862aa 100644 --- a/unison-src/transcripts/delete.output.md +++ b/unison-src/transcripts/delete.output.md @@ -10,7 +10,8 @@ exist. ⚠️ - I don't know about that name. + The following names were not found in the codebase. Check your spelling. + foo ``` Now for some easy cases. 
Deleting an unambiguous term, then deleting an @@ -237,3 +238,251 @@ structural type foo = Foo () Tip: You can use `undo` or `reflog` to undo this change. ``` +We want to be able to delete multiple terms at once + +```unison +a = "a" +b = "b" +c = "c" +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + a : Text + b : Text + c : Text + +.> delete.verbose a b c + + Removed definitions: + + 1. a : Text + 2. b : Text + 3. c : Text + + Tip: You can use `undo` or `reflog` to undo this change. + +``` +We can delete terms and types in the same invocation of delete + +```unison +structural type Foo = Foo () +a = "a" +b = "b" +c = "c" +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + structural type Foo + a : Text + b : Text + c : Text + +.> delete.verbose a b c Foo + + Removed definitions: + + 1. structural type Foo + 2. a : Text + 3. b : Text + 4. c : Text + + Tip: You can use `undo` or `reflog` to undo this change. + +.> delete.verbose Foo.Foo + + Name changes: + + Original Changes + 1. Foo.Foo ┐ 2. Foo.Foo (removed) + 3. foo.Foo ┘ + + Tip: You can use `undo` or `reflog` to undo this change. + +``` +We can delete a type and its constructors + +```unison +structural type Foo = Foo () +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + structural type Foo + +.> delete.verbose Foo Foo.Foo + + Removed definitions: + + 1. structural type Foo + + Name changes: + + Original Changes + 2. Foo.Foo ┐ 3. Foo.Foo (removed) + 4. foo.Foo ┘ + + Tip: You can use `undo` or `reflog` to undo this change. + +``` +You should not be able to delete terms which are referenced by other terms + +```unison +a = 1 +b = 2 +c = 3 +d = a + b + c +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + a : Nat + b : Nat + (also named b.foo) + c : Nat + d : Nat + +.> delete.verbose a b c + + ⚠️ + + I didn't delete the following definitions because they are + still in use: + + Dependency Referenced In + c 1. d + + a 2. d + +``` +But you should be able to delete all terms which reference each other in a single command + +```unison +e = 11 +f = 12 + e +g = 13 + f +h = e + f + g +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + e : Nat + f : Nat + g : Nat + h : Nat + +.> delete.verbose e f g h + + Removed definitions: + + 1. e : Nat + 2. f : Nat + 3. g : Nat + 4. h : Nat + + Tip: You can use `undo` or `reflog` to undo this change. + +``` +You should be able to delete a type and all the functions that reference it in a single command + +```unison +structural type Foo = Foo Nat + +incrementFoo : Foo -> Nat +incrementFoo = cases + (Foo n) -> n + 1 +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + structural type Foo + incrementFoo : Foo -> Nat + +.> delete.verbose Foo Foo.Foo incrementFoo + + Removed definitions: + + 1. structural type Foo + 2. Foo.Foo : Nat -> #68k40ra7l7 + 3. incrementFoo : #68k40ra7l7 -> Nat + + Tip: You can use `undo` or `reflog` to undo this change. + +``` +If you mess up on one of the names of your command, delete short circuits + +```unison +e = 11 +f = 12 + e +g = 13 + f +h = e + f + g +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + e : Nat + f : Nat + g : Nat + h : Nat + +.> delete.verbose e f gg + + ⚠️ + + The following names were not found in the codebase. Check your spelling. 
+ gg + +``` +Cyclical terms which are guarded by a lambda are allowed to be deleted + +```unison +ping _ = 1 Nat.+ !pong +pong _ = 4 Nat.+ !ping +``` + +```ucm +.> add + + ⍟ I've added these definitions: + + ping : 'Nat + pong : 'Nat + +.> delete.verbose ping + + Removed definitions: + + 1. ping : 'Nat + + Tip: You can use `undo` or `reflog` to undo this change. + +.> view pong + + pong : 'Nat + pong _ = + use Nat + + 4 + !#l9uq1dpl5v.1 + +``` diff --git a/unison-src/transcripts/destructuring-binds.md b/unison-src/transcripts/destructuring-binds.md index 0170860d7..f9a1eef97 100644 --- a/unison-src/transcripts/destructuring-binds.md +++ b/unison-src/transcripts/destructuring-binds.md @@ -33,14 +33,6 @@ ex2 tup = match tup with (a, b, (c,d)) -> c + d ``` -Syntactically, the left-hand side of the bind can be any pattern and can even include guards, for instance, see below. Because a destructuring bind desugars to a regular pattern match, pattern match coverage will eventually cause this to not typecheck: - -```unison:hide -ex3 = - Some x | x > 10 = Some 19 - x + 1 -``` - ## Corner cases Destructuring binds can't be recursive: the left-hand side bound variables aren't available on the right hand side. For instance, this doesn't typecheck: @@ -57,10 +49,12 @@ Even though the parser accepts any pattern on the LHS of a bind, it looks pretty ex5 : 'Text ex5 _ = match 99 + 1 with 12 -> "Hi" + _ -> "Bye" ex5a : 'Text ex5a _ = match (99 + 1, "hi") with (x, "hi") -> "Not printed as a destructuring bind." + _ -> "impossible" ``` ```ucm diff --git a/unison-src/transcripts/destructuring-binds.output.md b/unison-src/transcripts/destructuring-binds.output.md index 58a186ae0..1ca633ea7 100644 --- a/unison-src/transcripts/destructuring-binds.output.md +++ b/unison-src/transcripts/destructuring-binds.output.md @@ -71,14 +71,6 @@ ex2 tup = match tup with (also named ex1) ``` -Syntactically, the left-hand side of the bind can be any pattern and can even include guards, for instance, see below. Because a destructuring bind desugars to a regular pattern match, pattern match coverage will eventually cause this to not typecheck: - -```unison -ex3 = - Some x | x > 10 = Some 19 - x + 1 -``` - ## Corner cases Destructuring binds can't be recursive: the left-hand side bound variables aren't available on the right hand side. For instance, this doesn't typecheck: @@ -114,10 +106,12 @@ Even though the parser accepts any pattern on the LHS of a bind, it looks pretty ex5 : 'Text ex5 _ = match 99 + 1 with 12 -> "Hi" + _ -> "Bye" ex5a : 'Text ex5a _ = match (99 + 1, "hi") with (x, "hi") -> "Not printed as a destructuring bind." + _ -> "impossible" ``` ```ucm @@ -145,13 +139,16 @@ ex5a _ = match (99 + 1, "hi") with ex5 : 'Text ex5 _ = use Nat + - match 99 + 1 with 12 -> "Hi" + match 99 + 1 with + 12 -> "Hi" + _ -> "Bye" ex5a : 'Text ex5a _ = use Nat + match (99 + 1, "hi") with (x, "hi") -> "Not printed as a destructuring bind." + _ -> "impossible" ``` Notice how it prints both an ordinary match. diff --git a/unison-src/transcripts/empty-namespaces.md b/unison-src/transcripts/empty-namespaces.md index 9b552bdd5..e73424c4a 100644 --- a/unison-src/transcripts/empty-namespaces.md +++ b/unison-src/transcripts/empty-namespaces.md @@ -22,15 +22,15 @@ The deleted namespace shouldn't appear in `ls` output. ## history -The history of the namespace should still exist if requested explicitly. +The history of the namespace should be empty. 
```ucm .> history mynamespace ``` -Merging an empty namespace should still copy its history if it has some. +Merging an empty namespace should be a no-op -```ucm +```ucm:error .empty> history .empty> merge .mynamespace .empty> history diff --git a/unison-src/transcripts/empty-namespaces.output.md b/unison-src/transcripts/empty-namespaces.output.md index 7c682827f..7bc7f00d7 100644 --- a/unison-src/transcripts/empty-namespaces.output.md +++ b/unison-src/transcripts/empty-namespaces.output.md @@ -47,24 +47,15 @@ The deleted namespace shouldn't appear in `ls` output. ``` ## history -The history of the namespace should still exist if requested explicitly. +The history of the namespace should be empty. ```ucm .> history mynamespace - Note: The most recent namespace hash is immediately below this - message. - - ⊙ 1. #nvh8d4j0fm - - - Deletes: - - x - - □ 2. #i52j9fd57b (start of history) + ☝️ The namespace .mynamespace is empty. ``` -Merging an empty namespace should still copy its history if it has some. +Merging an empty namespace should be a no-op ```ucm ☝️ The namespace .empty is empty. @@ -75,20 +66,13 @@ Merging an empty namespace should still copy its history if it has some. .empty> merge .mynamespace - Nothing changed as a result of the merge. + ⚠️ + + The namespace .mynamespace doesn't exist. .empty> history - Note: The most recent namespace hash is immediately below this - message. - - ⊙ 1. #nvh8d4j0fm - - - Deletes: - - x - - □ 2. #i52j9fd57b (start of history) + ☝️ The namespace .empty is empty. ``` Add and then delete a term to add some history to a deleted namespace. diff --git a/unison-src/transcripts/emptyCodebase.output.md b/unison-src/transcripts/emptyCodebase.output.md index 4fb8635fc..665a8192b 100644 --- a/unison-src/transcripts/emptyCodebase.output.md +++ b/unison-src/transcripts/emptyCodebase.output.md @@ -23,7 +23,7 @@ Technically, the definitions all exist, but they have no names. `builtins.merge` .foo> ls - 1. builtin/ (406 terms, 61 types) + 1. builtin/ (441 terms, 66 types) ``` And for a limited time, you can get even more builtin goodies: @@ -35,7 +35,7 @@ And for a limited time, you can get even more builtin goodies: .foo> ls - 1. builtin/ (576 terms, 77 types) + 1. builtin/ (613 terms, 84 types) ``` More typically, you'd start out by pulling `base. diff --git a/unison-src/transcripts/error-messages.md b/unison-src/transcripts/error-messages.md index 1f11e0de8..de58eb43b 100644 --- a/unison-src/transcripts/error-messages.md +++ b/unison-src/transcripts/error-messages.md @@ -61,6 +61,11 @@ foo = with -- unclosed ### Matching +```unison:error +-- No cases +foo = match 1 with +``` + ```unison:error foo = match 1 with 2 -- no right-hand-side @@ -73,6 +78,29 @@ foo = cases 3 -> () ``` +```unison:error +-- Missing a '->' +x = match Some a with + None -> + 1 + Some _ + 2 +``` + +```unison:error +-- Missing patterns +x = match Some a with + None -> 1 + -> 2 + -> 3 +``` + +```unison:error +-- Guards following an unguarded case +x = match Some a with + None -> 1 + | true -> 2 +``` ### Watches diff --git a/unison-src/transcripts/error-messages.output.md b/unison-src/transcripts/error-messages.output.md index 2fc4bb092..ee9b7c08d 100644 --- a/unison-src/transcripts/error-messages.output.md +++ b/unison-src/transcripts/error-messages.output.md @@ -162,20 +162,33 @@ foo = with -- unclosed ### Matching ```unison +-- No cases foo = match 1 with - 2 -- no right-hand-side ``` ```ucm 😶 - I expected some patterns after a match / with but I didn't - find any. 
+ I expected some patterns after a match / with or cases but I + didn't find any. - 1 | foo = match 1 with + 2 | foo = match 1 with +``` +```unison +foo = match 1 with + 2 -- no right-hand-side +``` + +```ucm + + offset=8: + unexpected + expecting ",", case match, or pattern guard + 3 | + ``` ```unison -- Mismatched arities @@ -195,6 +208,56 @@ foo = cases 4 | 3 -> () +``` +```unison +-- Missing a '->' +x = match Some a with + None -> + 1 + Some _ + 2 +``` + +```ucm + + offset=16: + unexpected + expecting ",", blank, case match, false, pattern guard, or true + 7 | + +``` +```unison +-- Missing patterns +x = match Some a with + None -> 1 + -> 2 + -> 3 +``` + +```ucm + + offset=12: + unexpected -> + expecting newline or semicolon + 4 | -> 2 + + +``` +```unison +-- Guards following an unguarded case +x = match Some a with + None -> 1 + | true -> 2 +``` + +```ucm + + offset=12: + unexpected | + expecting newline or semicolon + 4 | | true -> 2 + + ``` ### Watches diff --git a/unison-src/transcripts/fix1063.md b/unison-src/transcripts/fix1063.md index 1806b0e30..4a876a6b1 100644 --- a/unison-src/transcripts/fix1063.md +++ b/unison-src/transcripts/fix1063.md @@ -7,6 +7,8 @@ Tests that functions named `.` are rendered correctly. ``` unison (.) f g x = f (g x) +use Boolean not + noop = not . not ``` diff --git a/unison-src/transcripts/fix1063.output.md b/unison-src/transcripts/fix1063.output.md index 38065343b..790b54a0f 100644 --- a/unison-src/transcripts/fix1063.output.md +++ b/unison-src/transcripts/fix1063.output.md @@ -3,6 +3,8 @@ Tests that functions named `.` are rendered correctly. ```unison (.) f g x = f (g x) +use Boolean not + noop = not . not ``` @@ -31,6 +33,8 @@ noop = not . not .> view noop noop : Boolean -> Boolean - noop = not . not + noop = + use Boolean not + not . not ``` diff --git a/unison-src/transcripts/fix2053.output.md b/unison-src/transcripts/fix2053.output.md index 1009a11a5..f0f2df91b 100644 --- a/unison-src/transcripts/fix2053.output.md +++ b/unison-src/transcripts/fix2053.output.md @@ -2,12 +2,13 @@ .> display List.map f a -> - go i as acc = - match List.at i as with - None -> acc - Some a -> - use Nat + - go (i + 1) as (acc :+ f a) - go 0 a [] + let + go i as acc = + match List.at i as with + None -> acc + Some a -> + use Nat + + go (i + 1) as (acc :+ f a) + go 0 a [] ``` diff --git a/unison-src/transcripts/fix3265.output.md b/unison-src/transcripts/fix3265.output.md index 6cdf798ae..5d68e8e55 100644 --- a/unison-src/transcripts/fix3265.output.md +++ b/unison-src/transcripts/fix3265.output.md @@ -33,19 +33,20 @@ are three cases that need to be 'fixed up.' ⧩ Any (w x -> - use Nat + drop - f1 y = - match y with - 0 -> w + x - n -> 1 + f0 (drop y 1) - f0 y = - match y with - 0 -> x - n -> 1 + f1 (drop y 1) - f2 x = f2 x - f3 x y = 1 + y + f2 x - g h = h 1 + x - g (z -> x + f0 z)) + let + use Nat + drop + f1 y = + match y with + 0 -> w + x + n -> 1 + f0 (drop y 1) + f0 y = + match y with + 0 -> x + n -> 1 + f1 (drop y 1) + f2 x = f2 x + f3 x y = 1 + y + f2 x + g h = h 1 + x + g (z -> x + f0 z)) ``` Also check for some possible corner cases. @@ -77,10 +78,11 @@ discard its arguments, where `f` also occurs. 
⧩ Any (x -> - f x y = - match y with - 0 -> 0 - _ -> f x (f y (Nat.drop y 1)) - f x 20) + let + f x y = + match y with + 0 -> 0 + _ -> f x (f y (Nat.drop y 1)) + f x 20) ``` diff --git a/unison-src/transcripts/lambdacase.md b/unison-src/transcripts/lambdacase.md index dcbc2559d..e2e3a557e 100644 --- a/unison-src/transcripts/lambdacase.md +++ b/unison-src/transcripts/lambdacase.md @@ -75,13 +75,14 @@ Here's another example: ```unison structural type B = T | F +blah : B -> B -> Text blah = cases T, x -> "hi" - x, F -> "bye" + x, y -> "bye" blorf = cases x, T -> x - T, x -> x + x, y -> y > blah T F > blah F F diff --git a/unison-src/transcripts/lambdacase.output.md b/unison-src/transcripts/lambdacase.output.md index 573138be6..6fc1d5387 100644 --- a/unison-src/transcripts/lambdacase.output.md +++ b/unison-src/transcripts/lambdacase.output.md @@ -119,13 +119,14 @@ Here's another example: ```unison structural type B = T | F +blah : B -> B -> Text blah = cases T, x -> "hi" - x, F -> "bye" + x, y -> "bye" blorf = cases x, T -> x - T, x -> x + x, y -> y > blah T F > blah F F @@ -147,15 +148,15 @@ blorf = cases Now evaluating any watch expressions (lines starting with `>`)... Ctrl+C cancels. - 11 | > blah T F + 12 | > blah T F ⧩ "hi" - 12 | > blah F F + 13 | > blah F F ⧩ "bye" - 13 | > blorf T F + 14 | > blorf T F ⧩ F diff --git a/unison-src/transcripts/merges.md b/unison-src/transcripts/merges.md index 2292a8877..1c0f28cf8 100644 --- a/unison-src/transcripts/merges.md +++ b/unison-src/transcripts/merges.md @@ -49,7 +49,8 @@ y = "hello" Notice that `master` now has the definition of `y` we wrote. -We can also delete the fork if we're done with it. (Don't worry, it's still in the `history` and can be resurrected at any time.) +We can also delete the fork if we're done with it. (Don't worry, even though the history at that path is now empty, +it's still in the `history` of the parent namespace and can be resurrected at any time.) ```ucm .> delete.namespace .feature1 diff --git a/unison-src/transcripts/merges.output.md b/unison-src/transcripts/merges.output.md index 9c38043f5..a23dd5541 100644 --- a/unison-src/transcripts/merges.output.md +++ b/unison-src/transcripts/merges.output.md @@ -96,7 +96,8 @@ y = "hello" Notice that `master` now has the definition of `y` we wrote. -We can also delete the fork if we're done with it. (Don't worry, it's still in the `history` and can be resurrected at any time.) +We can also delete the fork if we're done with it. (Don't worry, even though the history at that path is now empty, +it's still in the `history` of the parent namespace and can be resurrected at any time.) ```ucm .> delete.namespace .feature1 @@ -105,29 +106,20 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t .> history .feature1 - Note: The most recent namespace hash is immediately below this - message. - - ⊙ 1. #hsbtlt2og6 - - - Deletes: - - y - - □ 2. #q95r47tc4l (start of history) + ☝️ The namespace .feature1 is empty. .> history Note: The most recent namespace hash is immediately below this message. - ⊙ 1. #jvvtvqg91i + ⊙ 1. #nkba2hklaj - Deletes: feature1.y - ⊙ 2. #pdn0nrdikc + ⊙ 2. #5rvucutrqs + Adds / updates: @@ -138,26 +130,26 @@ We can also delete the fork if we're done with it. (Don't worry, it's still in t Original name New name(s) feature1.y master.y - ⊙ 3. #j275561d72 + ⊙ 3. #s37mv81ocj + Adds / updates: feature1.y - ⊙ 4. #aib93cgn8r + ⊙ 4. #9lpfbm6ug1 > Moves: Original name New name x master.x - ⊙ 5. #22gtrovg7e + ⊙ 5. 
#i9jun1cl1a + Adds / updates: x - □ 6. #qehn7jqmaf (start of history) + □ 6. #rcu3lukmgn (start of history) ``` To resurrect an old version of a namespace, you can learn its hash via the `history` command, then use `fork #namespacehash .newname`. diff --git a/unison-src/transcripts/move-namespace.md b/unison-src/transcripts/move-namespace.md index 6496cd8c8..8728be1da 100644 --- a/unison-src/transcripts/move-namespace.md +++ b/unison-src/transcripts/move-namespace.md @@ -58,7 +58,9 @@ b.termInB = 11 .history> update ``` -Now, if we soft-delete a namespace, but move another over it we expect the history to be replaced, and we expect the history from the source to be wiped out. +Deleting a namespace should not leave behind any history, +if we move another to that location we expect the history to simply be the history +of the moved namespace. ```ucm .history> delete.namespace b diff --git a/unison-src/transcripts/move-namespace.output.md b/unison-src/transcripts/move-namespace.output.md index 0b2db3295..4018c58fd 100644 --- a/unison-src/transcripts/move-namespace.output.md +++ b/unison-src/transcripts/move-namespace.output.md @@ -150,7 +150,9 @@ b.termInB = 11 b.termInB : Nat ``` -Now, if we soft-delete a namespace, but move another over it we expect the history to be replaced, and we expect the history from the source to be wiped out. +Deleting a namespace should not leave behind any history, +if we move another to that location we expect the history to simply be the history +of the moved namespace. ```ucm .history> delete.namespace b @@ -267,7 +269,7 @@ I should be able to move the root into a sub-namespace .> ls - 1. root/ (581 terms, 78 types) + 1. root/ (618 terms, 85 types) .> history @@ -276,13 +278,13 @@ I should be able to move the root into a sub-namespace - □ 1. #q3966917gb (start of history) + □ 1. #59k30lbgtv (start of history) ``` ```ucm .> ls .root.at.path - 1. builtin/ (576 terms, 77 types) + 1. builtin/ (613 terms, 84 types) 2. existing/ (1 term) 3. happy/ (3 terms, 1 type) 4. history/ (1 term) @@ -292,7 +294,7 @@ I should be able to move the root into a sub-namespace Note: The most recent namespace hash is immediately below this message. - ⊙ 1. #k5qut7l83d + ⊙ 1. #3fqubj2dfq - Deletes: @@ -303,7 +305,7 @@ I should be able to move the root into a sub-namespace Original name New name existing.a.termInA existing.b.termInA - ⊙ 2. #ufgblvn6fs + ⊙ 2. #4oifqrjksd + Adds / updates: @@ -315,26 +317,26 @@ I should be able to move the root into a sub-namespace happy.b.termInA existing.a.termInA history.b.termInA existing.a.termInA - ⊙ 3. #4jti34auic + ⊙ 3. #vmtrlijaub + Adds / updates: existing.a.termInA existing.b.termInB - ⊙ 4. #c3arv0etko + ⊙ 4. #qp72kiv6qv > Moves: Original name New name history.a.termInA history.b.termInA - ⊙ 5. #hpqjvcomfm + ⊙ 5. #rgcqdg78el - Deletes: history.b.termInB - ⊙ 6. #i22kafeklo + ⊙ 6. #3ep162o7o5 + Adds / updates: @@ -345,13 +347,13 @@ I should be able to move the root into a sub-namespace Original name New name(s) happy.b.termInA history.a.termInA - ⊙ 7. #itf9pe81hk + ⊙ 7. #icou0jar0d + Adds / updates: history.a.termInA history.b.termInB - ⊙ 8. #e40v255vc3 + ⊙ 8. #utbaohifbg > Moves: @@ -361,7 +363,7 @@ I should be able to move the root into a sub-namespace happy.a.T.T2 happy.b.T.T2 happy.a.termInA happy.b.termInA - ⊙ 9. #n19k3oti8l + ⊙ 9. #3tgqm3g16k + Adds / updates: @@ -371,7 +373,7 @@ I should be able to move the root into a sub-namespace happy.a.T.T - ⊙ 10. #42rrfc9heu + ⊙ 10. 
#5f0h338k67 + Adds / updates: @@ -383,7 +385,7 @@ I should be able to move the root into a sub-namespace ⠇ - ⊙ 11. #l7cnk7raag + ⊙ 11. #u0kujjj8n2 ``` diff --git a/unison-src/transcripts/name-selection.output.md b/unison-src/transcripts/name-selection.output.md index f65b342f8..44e94b095 100644 --- a/unison-src/transcripts/name-selection.output.md +++ b/unison-src/transcripts/name-selection.output.md @@ -100,322 +100,348 @@ d = c + 10 12. builtin type builtin.Bytes 13. builtin type builtin.Char 14. builtin type builtin.io2.Tls.Cipher - 15. builtin type builtin.io2.Tls.ClientConfig - 16. builtin type builtin.Code - 17. unique type builtin.Doc - 18. structural type builtin.Either a b - 19. structural ability builtin.Exception - 20. unique type builtin.io2.Failure - 21. unique type builtin.io2.FileMode - 22. builtin type builtin.Float - 23. builtin type builtin.io2.Handle - 24. builtin type builtin.crypto.HashAlgorithm - 25. builtin ability builtin.io2.IO - 26. unique type builtin.io2.IOError - 27. unique type builtin.io2.IOFailure - 28. builtin type builtin.ImmutableArray - 29. builtin type builtin.ImmutableByteArray - 30. builtin type builtin.Int - 31. unique type builtin.IsPropagated - 32. unique type builtin.IsTest - 33. unique type builtin.Link - 34. builtin type builtin.List - 35. builtin type builtin.io2.MVar - 36. unique type builtin.io2.MiscFailure - 37. builtin type builtin.MutableArray - 38. builtin type builtin.MutableByteArray - 39. builtin type builtin.Nat - 40. structural type builtin.Optional a - 41. builtin type builtin.Pattern - 42. builtin type builtin.io2.Tls.PrivateKey - 43. builtin type builtin.Ref - 44. builtin type builtin.Request - 45. unique type builtin.Test.Result - 46. unique type builtin.io2.RuntimeFailure - 47. builtin ability builtin.io2.STM - 48. unique type builtin.io2.STMFailure - 49. builtin ability builtin.Scope - 50. unique type builtin.io2.SeekMode - 51. structural type builtin.SeqView a b - 52. builtin type builtin.io2.Tls.ServerConfig - 53. builtin type builtin.io2.Tls.SignedCert - 54. builtin type builtin.io2.Socket - 55. unique type builtin.io2.StdHandle - 56. builtin type builtin.io2.TVar - 57. builtin type builtin.Link.Term - 58. builtin type builtin.Text - 59. builtin type builtin.io2.ThreadId - 60. builtin type builtin.io2.Clock.internals.TimeSpec - 61. builtin type builtin.io2.Tls - 62. unique type builtin.io2.TlsFailure - 63. structural type builtin.Tuple a b - 64. builtin type builtin.Link.Type - 65. structural type builtin.Unit - 66. builtin type builtin.Value - 67. builtin type builtin.io2.Tls.Version - 68. builtin.io2.SeekMode.AbsoluteSeek : SeekMode - 69. builtin.io2.IOError.AlreadyExists : IOError - 70. builtin.io2.FileMode.Append : FileMode - 71. builtin.Doc.Blob : Text + 15. builtin type builtin.Char.Class + 16. builtin type builtin.io2.Tls.ClientConfig + 17. builtin type builtin.Code + 18. unique type builtin.Doc + 19. structural type builtin.Either a b + 20. structural ability builtin.Exception + 21. unique type builtin.io2.Failure + 22. unique type builtin.io2.FileMode + 23. builtin type builtin.Float + 24. builtin type builtin.io2.Handle + 25. builtin type builtin.crypto.HashAlgorithm + 26. builtin ability builtin.io2.IO + 27. unique type builtin.io2.IOError + 28. unique type builtin.io2.IOFailure + 29. builtin type builtin.ImmutableArray + 30. builtin type builtin.ImmutableByteArray + 31. builtin type builtin.Int + 32. unique type builtin.IsPropagated + 33. unique type builtin.IsTest + 34. unique type builtin.Link + 35. 
builtin type builtin.List + 36. builtin type builtin.io2.MVar + 37. unique type builtin.io2.MiscFailure + 38. builtin type builtin.MutableArray + 39. builtin type builtin.MutableByteArray + 40. builtin type builtin.Nat + 41. structural type builtin.Optional a + 42. builtin type builtin.Pattern + 43. builtin type builtin.io2.Tls.PrivateKey + 44. builtin type builtin.io2.ProcessHandle + 45. builtin type builtin.io2.Promise + 46. builtin type builtin.Ref + 47. builtin type builtin.Request + 48. unique type builtin.Test.Result + 49. unique type builtin.io2.RuntimeFailure + 50. builtin ability builtin.io2.STM + 51. unique type builtin.io2.STMFailure + 52. builtin ability builtin.Scope + 53. unique type builtin.io2.SeekMode + 54. structural type builtin.SeqView a b + 55. builtin type builtin.io2.Tls.ServerConfig + 56. builtin type builtin.io2.Tls.SignedCert + 57. builtin type builtin.io2.Socket + 58. unique type builtin.io2.StdHandle + 59. builtin type builtin.io2.TVar + 60. builtin type builtin.Link.Term + 61. builtin type builtin.Text + 62. builtin type builtin.io2.ThreadId + 63. unique type builtin.io2.ThreadKilledFailure + 64. builtin type builtin.io2.Ref.Ticket + 65. builtin type builtin.io2.Clock.internals.TimeSpec + 66. builtin type builtin.io2.Tls + 67. unique type builtin.io2.TlsFailure + 68. structural type builtin.Tuple a b + 69. builtin type builtin.Link.Type + 70. structural type builtin.Unit + 71. builtin type builtin.Value + 72. builtin type builtin.io2.Tls.Version + 73. builtin.io2.SeekMode.AbsoluteSeek : SeekMode + 74. builtin.io2.IOError.AlreadyExists : IOError + 75. builtin.io2.FileMode.Append : FileMode + 76. builtin.Doc.Blob : Text -> Doc - 72. builtin.io2.BufferMode.BlockBuffering : BufferMode - 73. builtin.Tuple.Cons : a + 77. builtin.io2.BufferMode.BlockBuffering : BufferMode + 78. builtin.Tuple.Cons : a -> b -> Tuple a b - 74. builtin.io2.IOError.EOF : IOError - 75. builtin.Doc.Evaluate : Term + 79. builtin.io2.IOError.EOF : IOError + 80. builtin.Doc.Evaluate : Term -> Doc - 76. builtin.Test.Result.Fail : Text + 81. builtin.Test.Result.Fail : Text -> Result - 77. builtin.io2.Failure.Failure : Type + 82. builtin.io2.Failure.Failure : Type -> Text -> Any -> Failure - 78. builtin.io2.IOError.IllegalOperation : IOError - 79. builtin.IsPropagated.IsPropagated : IsPropagated - 80. builtin.IsTest.IsTest : IsTest - 81. builtin.Doc.Join : [Doc] + 83. builtin.io2.IOError.IllegalOperation : IOError + 84. builtin.IsPropagated.IsPropagated : IsPropagated + 85. builtin.IsTest.IsTest : IsTest + 86. builtin.Doc.Join : [Doc] -> Doc - 82. builtin.Either.Left : a + 87. builtin.Either.Left : a -> Either a b - 83. builtin.io2.BufferMode.LineBuffering : BufferMode - 84. builtin.Doc.Link : Link + 88. builtin.io2.BufferMode.LineBuffering : BufferMode + 89. builtin.Doc.Link : Link -> Doc - 85. builtin.io2.BufferMode.NoBuffering : BufferMode - 86. builtin.io2.IOError.NoSuchThing : IOError - 87. builtin.Optional.None : Optional + 90. builtin.io2.BufferMode.NoBuffering : BufferMode + 91. builtin.io2.IOError.NoSuchThing : IOError + 92. builtin.Optional.None : Optional a - 88. builtin.Test.Result.Ok : Text + 93. builtin.Test.Result.Ok : Text -> Result - 89. builtin.io2.IOError.PermissionDenied : IOError - 90. builtin.io2.FileMode.Read : FileMode - 91. builtin.io2.FileMode.ReadWrite : FileMode - 92. builtin.io2.SeekMode.RelativeSeek : SeekMode - 93. builtin.io2.IOError.ResourceBusy : IOError - 94. builtin.io2.IOError.ResourceExhausted : IOError - 95. builtin.Either.Right : b + 94. 
builtin.io2.IOError.PermissionDenied : IOError + 95. builtin.io2.FileMode.Read : FileMode + 96. builtin.io2.FileMode.ReadWrite : FileMode + 97. builtin.io2.SeekMode.RelativeSeek : SeekMode + 98. builtin.io2.IOError.ResourceBusy : IOError + 99. builtin.io2.IOError.ResourceExhausted : IOError + 100. builtin.Either.Right : b -> Either a b - 96. builtin.io2.SeekMode.SeekFromEnd : SeekMode - 97. builtin.Doc.Signature : Term + 101. builtin.io2.SeekMode.SeekFromEnd : SeekMode + 102. builtin.Doc.Signature : Term -> Doc - 98. builtin.io2.BufferMode.SizedBlockBuffering : Nat + 103. builtin.io2.BufferMode.SizedBlockBuffering : Nat -> BufferMode - 99. builtin.Optional.Some : a + 104. builtin.Optional.Some : a -> Optional a - 100. builtin.Doc.Source : Link + 105. builtin.Doc.Source : Link -> Doc - 101. builtin.io2.StdHandle.StdErr : StdHandle - 102. builtin.io2.StdHandle.StdIn : StdHandle - 103. builtin.io2.StdHandle.StdOut : StdHandle - 104. builtin.Link.Term : Term + 106. builtin.io2.StdHandle.StdErr : StdHandle + 107. builtin.io2.StdHandle.StdIn : StdHandle + 108. builtin.io2.StdHandle.StdOut : StdHandle + 109. builtin.Link.Term : Term -> Link - 105. builtin.Link.Type : Type + 110. builtin.Link.Type : Type -> Link - 106. builtin.Unit.Unit : () - 107. builtin.io2.IOError.UserError : IOError - 108. builtin.SeqView.VElem : a + 111. builtin.Unit.Unit : () + 112. builtin.io2.IOError.UserError : IOError + 113. builtin.SeqView.VElem : a -> b -> SeqView a b - 109. builtin.SeqView.VEmpty : SeqView + 114. builtin.SeqView.VEmpty : SeqView a b - 110. builtin.io2.FileMode.Write : FileMode - 111. builtin.Exception.raise : Failure + 115. builtin.io2.FileMode.Write : FileMode + 116. builtin.Exception.raise : Failure ->{Exception} x - 112. builtin.Text.!= : Text + 117. builtin.Text.!= : Text -> Text -> Boolean - 113. builtin.Float.* : Float + 118. builtin.Float.* : Float -> Float -> Float - 114. builtin.Int.* : Int + 119. builtin.Int.* : Int -> Int -> Int - 115. builtin.Nat.* : Nat + 120. builtin.Nat.* : Nat -> Nat -> Nat - 116. builtin.Float.+ : Float + 121. builtin.Float.+ : Float -> Float -> Float - 117. builtin.Int.+ : Int + 122. builtin.Int.+ : Int -> Int -> Int - 118. builtin.Nat.+ : Nat + 123. builtin.Nat.+ : Nat -> Nat -> Nat - 119. builtin.Bytes.++ : Bytes + 124. builtin.Bytes.++ : Bytes -> Bytes -> Bytes - 120. builtin.List.++ : [a] + 125. builtin.List.++ : [a] -> [a] -> [a] - 121. builtin.Text.++ : Text + 126. builtin.Text.++ : Text -> Text -> Text - 122. ┌ builtin.List.+: : a + 127. ┌ builtin.List.+: : a -> [a] -> [a] - 123. └ builtin.List.cons : a + 128. └ builtin.List.cons : a -> [a] -> [a] - 124. builtin.Float.- : Float + 129. builtin.Float.- : Float -> Float -> Float - 125. builtin.Int.- : Int + 130. builtin.Int.- : Int -> Int -> Int - 126. builtin.Float./ : Float + 131. builtin.Float./ : Float -> Float -> Float - 127. builtin.Int./ : Int + 132. builtin.Int./ : Int -> Int -> Int - 128. builtin.Nat./ : Nat + 133. builtin.Nat./ : Nat -> Nat -> Nat - 129. ┌ builtin.List.:+ : [a] + 134. ┌ builtin.List.:+ : [a] -> a -> [a] - 130. └ builtin.List.snoc : [a] + 135. └ builtin.List.snoc : [a] -> a -> [a] - 131. builtin.Universal.< : a + 136. builtin.Universal.< : a -> a -> Boolean - 132. builtin.Universal.<= : a + 137. builtin.Universal.<= : a -> a -> Boolean - 133. builtin.Universal.== : a + 138. builtin.Universal.== : a -> a -> Boolean - 134. builtin.Universal.> : a + 139. builtin.Universal.> : a -> a -> Boolean - 135. builtin.Universal.>= : a + 140. builtin.Universal.>= : a -> a -> Boolean - 136. 
builtin.Any.Any : a + 141. builtin.Any.Any : a -> Any - 137. builtin.crypto.HashAlgorithm.Blake2b_256 : HashAlgorithm - 138. builtin.crypto.HashAlgorithm.Blake2b_512 : HashAlgorithm - 139. builtin.crypto.HashAlgorithm.Blake2s_256 : HashAlgorithm - 140. builtin.crypto.HashAlgorithm.Sha1 : HashAlgorithm - 141. builtin.crypto.HashAlgorithm.Sha2_256 : HashAlgorithm - 142. builtin.crypto.HashAlgorithm.Sha2_512 : HashAlgorithm - 143. builtin.crypto.HashAlgorithm.Sha3_256 : HashAlgorithm - 144. builtin.crypto.HashAlgorithm.Sha3_512 : HashAlgorithm - 145. builtin.Float.abs : Float + 142. builtin.crypto.HashAlgorithm.Blake2b_256 : HashAlgorithm + 143. builtin.crypto.HashAlgorithm.Blake2b_512 : HashAlgorithm + 144. builtin.crypto.HashAlgorithm.Blake2s_256 : HashAlgorithm + 145. builtin.crypto.HashAlgorithm.Md5 : HashAlgorithm + 146. builtin.crypto.HashAlgorithm.Sha1 : HashAlgorithm + 147. builtin.crypto.HashAlgorithm.Sha2_256 : HashAlgorithm + 148. builtin.crypto.HashAlgorithm.Sha2_512 : HashAlgorithm + 149. builtin.crypto.HashAlgorithm.Sha3_256 : HashAlgorithm + 150. builtin.crypto.HashAlgorithm.Sha3_512 : HashAlgorithm + 151. builtin.Float.abs : Float -> Float - 146. builtin.Float.acos : Float + 152. builtin.Float.acos : Float -> Float - 147. builtin.Float.acosh : Float + 153. builtin.Float.acosh : Float -> Float - 148. builtin.Int.and : Int + 154. builtin.Char.Class.alphanumeric : Class + 155. builtin.Char.Class.and : Class + -> Class + -> Class + 156. builtin.Int.and : Int -> Int -> Int - 149. builtin.Nat.and : Nat + 157. builtin.Nat.and : Nat -> Nat -> Nat - 150. builtin.Text.patterns.anyChar : Pattern + 158. builtin.Char.Class.any : Class + 159. builtin.Text.patterns.anyChar : Pattern Text - 151. builtin.io2.IO.array : Nat + 160. builtin.Char.Class.anyOf : [Char] + -> Class + 161. builtin.io2.IO.array : Nat ->{IO} MutableArray {IO} a - 152. builtin.Scope.array : Nat + 162. builtin.Scope.array : Nat ->{Scope s} MutableArray (Scope s) a - 153. builtin.io2.IO.arrayOf : a + 163. builtin.io2.IO.arrayOf : a -> Nat ->{IO} MutableArray {IO} a - 154. builtin.Scope.arrayOf : a + 164. builtin.Scope.arrayOf : a -> Nat ->{Scope s} MutableArray (Scope s) a - 155. builtin.Float.asin : Float + 165. builtin.Float.asin : Float -> Float - 156. builtin.Float.asinh : Float + 166. builtin.Float.asinh : Float -> Float - 157. builtin.Bytes.at : Nat + 167. builtin.Bytes.at : Nat -> Bytes -> Optional Nat - 158. builtin.List.at : Nat + 168. builtin.List.at : Nat -> [a] -> Optional a - 159. builtin.Float.atan : Float + 169. builtin.Float.atan : Float -> Float - 160. builtin.Float.atan2 : Float + 170. builtin.Float.atan2 : Float -> Float -> Float - 161. builtin.Float.atanh : Float + 171. builtin.Float.atanh : Float -> Float - 162. builtin.io2.STM.atomically : '{STM} a + 172. builtin.io2.STM.atomically : '{STM} a ->{IO} a - 163. builtin.bug : a -> b - 164. builtin.io2.IO.bytearray : Nat + 173. builtin.bug : a -> b + 174. builtin.io2.IO.bytearray : Nat ->{IO} MutableByteArray {IO} - 165. builtin.Scope.bytearray : Nat + 175. builtin.Scope.bytearray : Nat ->{Scope s} MutableByteArray (Scope s) - 166. builtin.io2.IO.bytearrayOf : Nat + 176. builtin.io2.IO.bytearrayOf : Nat -> Nat ->{IO} MutableByteArray {IO} - 167. builtin.Scope.bytearrayOf : Nat + 177. builtin.Scope.bytearrayOf : Nat -> Nat ->{Scope s} MutableByteArray (Scope s) - 168. ┌ c#gjmq673r1v : Nat - 169. └ long.name.but.shortest.suffixification : Nat - 170. builtin.Code.cache_ : [( Term, + 178. ┌ c#gjmq673r1v : Nat + 179. 
└ long.name.but.shortest.suffixification : Nat + 180. builtin.Code.cache_ : [( Term, Code)] ->{IO} [Term] - 171. builtin.Pattern.capture : Pattern + 181. builtin.io2.IO.process.call : Text + -> [Text] + ->{IO} Nat + 182. builtin.Pattern.capture : Pattern a -> Pattern a - 172. builtin.Float.ceiling : Float + 183. builtin.io2.Ref.cas : Ref + {IO} a + -> Ticket + a + -> a + ->{IO} Boolean + 184. builtin.Float.ceiling : Float -> Int - 173. builtin.Text.patterns.charIn : [Char] + 185. builtin.Text.patterns.char : Class -> Pattern Text - 174. builtin.Text.patterns.charRange : Char + 186. builtin.Text.patterns.charIn : [Char] + -> Pattern + Text + 187. builtin.Text.patterns.charRange : Char -> Char -> Pattern Text - 175. builtin.unsafe.coerceAbilities : (a + 188. builtin.unsafe.coerceAbilities : (a ->{e1} b) -> a ->{e2} b - 176. builtin.Universal.compare : a + 189. builtin.Universal.compare : a -> a -> Int - 177. builtin.Int.complement : Int + 190. builtin.Int.complement : Int -> Int - 178. builtin.Nat.complement : Nat + 191. builtin.Nat.complement : Nat -> Nat - 179. builtin.Bytes.gzip.compress : Bytes + 192. builtin.Bytes.gzip.compress : Bytes -> Bytes - 180. builtin.Bytes.zlib.compress : Bytes + 193. builtin.Bytes.zlib.compress : Bytes -> Bytes - 181. builtin.ImmutableArray.copyTo! : MutableArray + 194. builtin.Char.Class.control : Class + 195. builtin.ImmutableArray.copyTo! : MutableArray g a -> Nat -> ImmutableArray @@ -424,7 +450,7 @@ d = c + 10 -> Nat ->{g, Exception} () - 182. builtin.ImmutableByteArray.copyTo! : MutableByteArray + 196. builtin.ImmutableByteArray.copyTo! : MutableByteArray g -> Nat -> ImmutableByteArray @@ -432,7 +458,7 @@ d = c + 10 -> Nat ->{g, Exception} () - 183. builtin.MutableArray.copyTo! : MutableArray + 197. builtin.MutableArray.copyTo! : MutableArray g a -> Nat -> MutableArray @@ -441,7 +467,7 @@ d = c + 10 -> Nat ->{g, Exception} () - 184. builtin.MutableByteArray.copyTo! : MutableByteArray + 198. builtin.MutableByteArray.copyTo! : MutableByteArray g -> Nat -> MutableByteArray @@ -450,918 +476,979 @@ d = c + 10 -> Nat ->{g, Exception} () - 185. builtin.Float.cos : Float + 199. builtin.Float.cos : Float -> Float - 186. builtin.Float.cosh : Float + 200. builtin.Float.cosh : Float -> Float - 187. builtin.Bytes.decodeNat16be : Bytes + 201. builtin.Bytes.decodeNat16be : Bytes -> Optional ( Nat, Bytes) - 188. builtin.Bytes.decodeNat16le : Bytes + 202. builtin.Bytes.decodeNat16le : Bytes -> Optional ( Nat, Bytes) - 189. builtin.Bytes.decodeNat32be : Bytes + 203. builtin.Bytes.decodeNat32be : Bytes -> Optional ( Nat, Bytes) - 190. builtin.Bytes.decodeNat32le : Bytes + 204. builtin.Bytes.decodeNat32le : Bytes -> Optional ( Nat, Bytes) - 191. builtin.Bytes.decodeNat64be : Bytes + 205. builtin.Bytes.decodeNat64be : Bytes -> Optional ( Nat, Bytes) - 192. builtin.Bytes.decodeNat64le : Bytes + 206. builtin.Bytes.decodeNat64le : Bytes -> Optional ( Nat, Bytes) - 193. builtin.io2.Tls.decodePrivateKey : Bytes + 207. builtin.io2.Tls.decodePrivateKey : Bytes -> [PrivateKey] - 194. builtin.Bytes.gzip.decompress : Bytes + 208. builtin.Bytes.gzip.decompress : Bytes -> Either Text Bytes - 195. builtin.Bytes.zlib.decompress : Bytes + 209. builtin.Bytes.zlib.decompress : Bytes -> Either Text Bytes - 196. builtin.io2.Tls.ClientConfig.default : Text + 210. builtin.io2.Tls.ClientConfig.default : Text -> Bytes -> ClientConfig - 197. builtin.io2.Tls.ServerConfig.default : [SignedCert] + 211. builtin.io2.Tls.ServerConfig.default : [SignedCert] -> PrivateKey -> ServerConfig - 198. 
builtin.Code.dependencies : Code + 212. builtin.Code.dependencies : Code -> [Term] - 199. builtin.Value.dependencies : Value + 213. builtin.Value.dependencies : Value -> [Term] - 200. builtin.Code.deserialize : Bytes + 214. builtin.Code.deserialize : Bytes -> Either Text Code - 201. builtin.Value.deserialize : Bytes + 215. builtin.Value.deserialize : Bytes -> Either Text Value - 202. builtin.Text.patterns.digit : Pattern + 216. builtin.Text.patterns.digit : Pattern Text - 203. builtin.Code.display : Text + 217. builtin.Code.display : Text -> Code -> Text - 204. builtin.Bytes.drop : Nat + 218. builtin.Bytes.drop : Nat -> Bytes -> Bytes - 205. builtin.List.drop : Nat + 219. builtin.List.drop : Nat -> [a] -> [a] - 206. builtin.Nat.drop : Nat + 220. builtin.Nat.drop : Nat -> Nat -> Nat - 207. builtin.Text.drop : Nat + 221. builtin.Text.drop : Nat -> Text -> Text - 208. builtin.Bytes.empty : Bytes - 209. builtin.List.empty : [a] - 210. builtin.Text.empty : Text - 211. builtin.io2.Tls.encodeCert : SignedCert + 222. builtin.Bytes.empty : Bytes + 223. builtin.List.empty : [a] + 224. builtin.Text.empty : Text + 225. builtin.io2.Tls.encodeCert : SignedCert -> Bytes - 212. builtin.Bytes.encodeNat16be : Nat + 226. builtin.Bytes.encodeNat16be : Nat -> Bytes - 213. builtin.Bytes.encodeNat16le : Nat + 227. builtin.Bytes.encodeNat16le : Nat -> Bytes - 214. builtin.Bytes.encodeNat32be : Nat + 228. builtin.Bytes.encodeNat32be : Nat -> Bytes - 215. builtin.Bytes.encodeNat32le : Nat + 229. builtin.Bytes.encodeNat32le : Nat -> Bytes - 216. builtin.Bytes.encodeNat64be : Nat + 230. builtin.Bytes.encodeNat64be : Nat -> Bytes - 217. builtin.Bytes.encodeNat64le : Nat + 231. builtin.Bytes.encodeNat64le : Nat -> Bytes - 218. builtin.io2.Tls.encodePrivateKey : PrivateKey + 232. builtin.io2.Tls.encodePrivateKey : PrivateKey -> Bytes - 219. builtin.Text.patterns.eof : Pattern + 233. builtin.Text.patterns.eof : Pattern Text - 220. builtin.Float.eq : Float + 234. builtin.Float.eq : Float -> Float -> Boolean - 221. builtin.Int.eq : Int + 235. builtin.Int.eq : Int -> Int -> Boolean - 222. builtin.Nat.eq : Nat + 236. builtin.Nat.eq : Nat -> Nat -> Boolean - 223. builtin.Text.eq : Text + 237. builtin.Text.eq : Text -> Text -> Boolean - 224. builtin.Float.exp : Float + 238. builtin.io2.IO.process.exitCode : ProcessHandle + ->{IO} Optional + Nat + 239. builtin.Float.exp : Float -> Float - 225. builtin.Bytes.flatten : Bytes + 240. builtin.Bytes.flatten : Bytes -> Bytes - 226. builtin.Float.floor : Float + 241. builtin.Float.floor : Float -> Int - 227. builtin.io2.IO.forkComp : '{IO} a + 242. builtin.io2.IO.forkComp : '{IO} a ->{IO} ThreadId - 228. builtin.MutableArray.freeze : MutableArray + 243. builtin.MutableArray.freeze : MutableArray g a -> Nat -> Nat ->{g} ImmutableArray a - 229. builtin.MutableByteArray.freeze : MutableByteArray + 244. builtin.MutableByteArray.freeze : MutableByteArray g -> Nat -> Nat ->{g} ImmutableByteArray - 230. builtin.MutableArray.freeze! : MutableArray + 245. builtin.MutableArray.freeze! : MutableArray g a ->{g} ImmutableArray a - 231. builtin.MutableByteArray.freeze! : MutableByteArray + 246. builtin.MutableByteArray.freeze! : MutableByteArray g ->{g} ImmutableByteArray - 232. builtin.Bytes.fromBase16 : Bytes + 247. builtin.Bytes.fromBase16 : Bytes -> Either Text Bytes - 233. builtin.Bytes.fromBase32 : Bytes + 248. builtin.Bytes.fromBase32 : Bytes -> Either Text Bytes - 234. builtin.Bytes.fromBase64 : Bytes + 249. builtin.Bytes.fromBase64 : Bytes -> Either Text Bytes - 235. 
builtin.Bytes.fromBase64UrlUnpadded : Bytes + 250. builtin.Bytes.fromBase64UrlUnpadded : Bytes -> Either Text Bytes - 236. builtin.Text.fromCharList : [Char] + 251. builtin.Text.fromCharList : [Char] -> Text - 237. builtin.Bytes.fromList : [Nat] + 252. builtin.Bytes.fromList : [Nat] -> Bytes - 238. builtin.Char.fromNat : Nat + 253. builtin.Char.fromNat : Nat -> Char - 239. builtin.Float.fromRepresentation : Nat + 254. builtin.Float.fromRepresentation : Nat -> Float - 240. builtin.Int.fromRepresentation : Nat + 255. builtin.Int.fromRepresentation : Nat -> Int - 241. builtin.Float.fromText : Text + 256. builtin.Float.fromText : Text -> Optional Float - 242. builtin.Int.fromText : Text + 257. builtin.Int.fromText : Text -> Optional Int - 243. builtin.Nat.fromText : Text + 258. builtin.Nat.fromText : Text -> Optional Nat - 244. builtin.Float.gt : Float + 259. builtin.Float.gt : Float -> Float -> Boolean - 245. builtin.Int.gt : Int + 260. builtin.Int.gt : Int -> Int -> Boolean - 246. builtin.Nat.gt : Nat + 261. builtin.Nat.gt : Nat -> Nat -> Boolean - 247. builtin.Text.gt : Text + 262. builtin.Text.gt : Text -> Text -> Boolean - 248. builtin.Float.gteq : Float + 263. builtin.Float.gteq : Float -> Float -> Boolean - 249. builtin.Int.gteq : Int + 264. builtin.Int.gteq : Int -> Int -> Boolean - 250. builtin.Nat.gteq : Nat + 265. builtin.Nat.gteq : Nat -> Nat -> Boolean - 251. builtin.Text.gteq : Text + 266. builtin.Text.gteq : Text -> Text -> Boolean - 252. builtin.crypto.hash : HashAlgorithm + 267. builtin.crypto.hash : HashAlgorithm -> a -> Bytes - 253. builtin.crypto.hashBytes : HashAlgorithm + 268. builtin.crypto.hashBytes : HashAlgorithm -> Bytes -> Bytes - 254. builtin.crypto.hmac : HashAlgorithm + 269. builtin.crypto.hmac : HashAlgorithm -> Bytes -> a -> Bytes - 255. builtin.crypto.hmacBytes : HashAlgorithm + 270. builtin.crypto.hmacBytes : HashAlgorithm -> Bytes -> Bytes -> Bytes - 256. builtin.io2.IO.clientSocket.impl : Text + 271. builtin.io2.IO.clientSocket.impl : Text -> Text ->{IO} Either Failure Socket - 257. builtin.io2.IO.closeFile.impl : Handle + 272. builtin.io2.IO.closeFile.impl : Handle ->{IO} Either Failure () - 258. builtin.io2.IO.closeSocket.impl : Socket + 273. builtin.io2.IO.closeSocket.impl : Socket ->{IO} Either Failure () - 259. builtin.io2.IO.createDirectory.impl : Text + 274. builtin.io2.IO.createDirectory.impl : Text ->{IO} Either Failure () - 260. builtin.io2.IO.createTempDirectory.impl : Text + 275. builtin.io2.IO.createTempDirectory.impl : Text ->{IO} Either Failure Text - 261. builtin.io2.Tls.decodeCert.impl : Bytes + 276. builtin.io2.Tls.decodeCert.impl : Bytes -> Either Failure SignedCert - 262. builtin.io2.IO.delay.impl : Nat + 277. builtin.io2.IO.delay.impl : Nat ->{IO} Either Failure () - 263. builtin.io2.IO.directoryContents.impl : Text + 278. builtin.io2.IO.directoryContents.impl : Text ->{IO} Either Failure [Text] - 264. builtin.io2.IO.fileExists.impl : Text + 279. builtin.io2.IO.fileExists.impl : Text ->{IO} Either Failure Boolean - 265. builtin.Text.fromUtf8.impl : Bytes + 280. builtin.Text.fromUtf8.impl : Bytes -> Either Failure Text - 266. builtin.io2.IO.getArgs.impl : '{IO} Either + 281. builtin.io2.IO.getArgs.impl : '{IO} Either Failure [Text] - 267. builtin.io2.IO.getBuffering.impl : Handle + 282. builtin.io2.IO.getBuffering.impl : Handle ->{IO} Either Failure BufferMode - 268. builtin.io2.IO.getBytes.impl : Handle + 283. builtin.io2.IO.getBytes.impl : Handle -> Nat ->{IO} Either Failure Bytes - 269. builtin.io2.IO.getChar.impl : Handle + 284. 
builtin.io2.IO.getChar.impl : Handle ->{IO} Either Failure Char - 270. builtin.io2.IO.getCurrentDirectory.impl : '{IO} Either + 285. builtin.io2.IO.getCurrentDirectory.impl : '{IO} Either Failure Text - 271. builtin.io2.IO.getEcho.impl : Handle + 286. builtin.io2.IO.getEcho.impl : Handle ->{IO} Either Failure Boolean - 272. builtin.io2.IO.getEnv.impl : Text + 287. builtin.io2.IO.getEnv.impl : Text ->{IO} Either Failure Text - 273. builtin.io2.IO.getFileSize.impl : Text + 288. builtin.io2.IO.getFileSize.impl : Text ->{IO} Either Failure Nat - 274. builtin.io2.IO.getFileTimestamp.impl : Text + 289. builtin.io2.IO.getFileTimestamp.impl : Text ->{IO} Either Failure Nat - 275. builtin.io2.IO.getLine.impl : Handle + 290. builtin.io2.IO.getLine.impl : Handle ->{IO} Either Failure Text - 276. builtin.io2.IO.getSomeBytes.impl : Handle + 291. builtin.io2.IO.getSomeBytes.impl : Handle -> Nat ->{IO} Either Failure Bytes - 277. builtin.io2.IO.getTempDirectory.impl : '{IO} Either + 292. builtin.io2.IO.getTempDirectory.impl : '{IO} Either Failure Text - 278. builtin.io2.IO.handlePosition.impl : Handle + 293. builtin.io2.IO.handlePosition.impl : Handle ->{IO} Either Failure Nat - 279. builtin.io2.Tls.handshake.impl : Tls + 294. builtin.io2.Tls.handshake.impl : Tls ->{IO} Either Failure () - 280. builtin.io2.IO.isDirectory.impl : Text + 295. builtin.io2.IO.isDirectory.impl : Text ->{IO} Either Failure Boolean - 281. builtin.io2.IO.isFileEOF.impl : Handle + 296. builtin.io2.IO.isFileEOF.impl : Handle ->{IO} Either Failure Boolean - 282. builtin.io2.IO.isFileOpen.impl : Handle + 297. builtin.io2.IO.isFileOpen.impl : Handle ->{IO} Either Failure Boolean - 283. builtin.io2.IO.isSeekable.impl : Handle + 298. builtin.io2.IO.isSeekable.impl : Handle ->{IO} Either Failure Boolean - 284. builtin.io2.IO.kill.impl : ThreadId + 299. builtin.io2.IO.kill.impl : ThreadId ->{IO} Either Failure () - 285. builtin.io2.IO.listen.impl : Socket + 300. builtin.io2.IO.listen.impl : Socket ->{IO} Either Failure () - 286. builtin.io2.Tls.newClient.impl : ClientConfig + 301. builtin.io2.Tls.newClient.impl : ClientConfig -> Socket ->{IO} Either Failure Tls - 287. builtin.io2.Tls.newServer.impl : ServerConfig + 302. builtin.io2.Tls.newServer.impl : ServerConfig -> Socket ->{IO} Either Failure Tls - 288. builtin.io2.IO.openFile.impl : Text + 303. builtin.io2.IO.openFile.impl : Text -> FileMode ->{IO} Either Failure Handle - 289. builtin.io2.MVar.put.impl : MVar a + 304. builtin.io2.MVar.put.impl : MVar a -> a ->{IO} Either Failure () - 290. builtin.io2.IO.putBytes.impl : Handle + 305. builtin.io2.IO.putBytes.impl : Handle -> Bytes ->{IO} Either Failure () - 291. builtin.io2.MVar.read.impl : MVar a + 306. builtin.io2.MVar.read.impl : MVar a ->{IO} Either Failure a - 292. builtin.io2.IO.ready.impl : Handle + 307. builtin.io2.IO.ready.impl : Handle ->{IO} Either Failure Boolean - 293. builtin.io2.Tls.receive.impl : Tls + 308. builtin.io2.Tls.receive.impl : Tls ->{IO} Either Failure Bytes - 294. builtin.io2.IO.removeDirectory.impl : Text + 309. builtin.io2.IO.removeDirectory.impl : Text ->{IO} Either Failure () - 295. builtin.io2.IO.removeFile.impl : Text + 310. builtin.io2.IO.removeFile.impl : Text ->{IO} Either Failure () - 296. builtin.io2.IO.renameDirectory.impl : Text + 311. builtin.io2.IO.renameDirectory.impl : Text -> Text ->{IO} Either Failure () - 297. builtin.io2.IO.renameFile.impl : Text + 312. builtin.io2.IO.renameFile.impl : Text -> Text ->{IO} Either Failure () - 298. builtin.io2.IO.seekHandle.impl : Handle + 313. 
builtin.io2.IO.seekHandle.impl : Handle -> SeekMode -> Int ->{IO} Either Failure () - 299. builtin.io2.Tls.send.impl : Tls + 314. builtin.io2.Tls.send.impl : Tls -> Bytes ->{IO} Either Failure () - 300. builtin.io2.IO.serverSocket.impl : Optional + 315. builtin.io2.IO.serverSocket.impl : Optional Text -> Text ->{IO} Either Failure Socket - 301. builtin.io2.IO.setBuffering.impl : Handle + 316. builtin.io2.IO.setBuffering.impl : Handle -> BufferMode ->{IO} Either Failure () - 302. builtin.io2.IO.setCurrentDirectory.impl : Text + 317. builtin.io2.IO.setCurrentDirectory.impl : Text ->{IO} Either Failure () - 303. builtin.io2.IO.setEcho.impl : Handle + 318. builtin.io2.IO.setEcho.impl : Handle -> Boolean ->{IO} Either Failure () - 304. builtin.io2.IO.socketAccept.impl : Socket + 319. builtin.io2.IO.socketAccept.impl : Socket ->{IO} Either Failure Socket - 305. builtin.io2.IO.socketPort.impl : Socket + 320. builtin.io2.IO.socketPort.impl : Socket ->{IO} Either Failure Nat - 306. builtin.io2.IO.socketReceive.impl : Socket + 321. builtin.io2.IO.socketReceive.impl : Socket -> Nat ->{IO} Either Failure Bytes - 307. builtin.io2.IO.socketSend.impl : Socket + 322. builtin.io2.IO.socketSend.impl : Socket -> Bytes ->{IO} Either Failure () - 308. builtin.io2.MVar.swap.impl : MVar a + 323. builtin.io2.MVar.swap.impl : MVar a -> a ->{IO} Either Failure a - 309. builtin.io2.IO.systemTime.impl : '{IO} Either + 324. builtin.io2.IO.systemTime.impl : '{IO} Either Failure Nat - 310. builtin.io2.MVar.take.impl : MVar a + 325. builtin.io2.MVar.take.impl : MVar a ->{IO} Either Failure a - 311. builtin.io2.Tls.terminate.impl : Tls + 326. builtin.io2.Tls.terminate.impl : Tls ->{IO} Either Failure () - 312. builtin.io2.MVar.tryPut.impl : MVar a + 327. builtin.io2.MVar.tryPut.impl : MVar a -> a ->{IO} Either Failure Boolean - 313. builtin.io2.MVar.tryRead.impl : MVar a + 328. builtin.io2.MVar.tryRead.impl : MVar a ->{IO} Either Failure (Optional a) - 314. builtin.Int.increment : Int + 329. builtin.Int.increment : Int -> Int - 315. builtin.Nat.increment : Nat + 330. builtin.Nat.increment : Nat -> Nat - 316. builtin.io2.MVar.isEmpty : MVar a + 331. builtin.Char.Class.is : Class + -> Char + -> Boolean + 332. builtin.io2.MVar.isEmpty : MVar a ->{IO} Boolean - 317. builtin.Int.isEven : Int + 333. builtin.Int.isEven : Int -> Boolean - 318. builtin.Nat.isEven : Nat + 334. builtin.Nat.isEven : Nat -> Boolean - 319. builtin.Pattern.isMatch : Pattern + 335. builtin.Pattern.isMatch : Pattern a -> a -> Boolean - 320. builtin.Code.isMissing : Term + 336. builtin.Code.isMissing : Term ->{IO} Boolean - 321. builtin.Int.isOdd : Int + 337. builtin.Int.isOdd : Int -> Boolean - 322. builtin.Nat.isOdd : Nat + 338. builtin.Nat.isOdd : Nat -> Boolean - 323. builtin.metadata.isPropagated : IsPropagated - 324. builtin.metadata.isTest : IsTest - 325. builtin.Pattern.join : [Pattern + 339. builtin.metadata.isPropagated : IsPropagated + 340. builtin.metadata.isTest : IsTest + 341. builtin.Pattern.join : [Pattern a] -> Pattern a - 326. builtin.Int.leadingZeros : Int + 342. builtin.io2.IO.process.kill : ProcessHandle + ->{IO} () + 343. builtin.Int.leadingZeros : Int -> Nat - 327. builtin.Nat.leadingZeros : Nat + 344. builtin.Nat.leadingZeros : Nat -> Nat - 328. builtin.Text.patterns.letter : Pattern + 345. builtin.Char.Class.letter : Class + 346. builtin.Text.patterns.letter : Pattern Text - 329. builtin.Text.patterns.literal : Text + 347. builtin.Text.patterns.literal : Text -> Pattern Text - 330. builtin.Value.load : Value + 348. 
builtin.Value.load : Value ->{IO} Either [Term] a - 331. builtin.Float.log : Float + 349. builtin.Float.log : Float -> Float - 332. builtin.Float.logBase : Float + 350. builtin.Float.logBase : Float -> Float -> Float - 333. builtin.Code.lookup : Term + 351. builtin.Code.lookup : Term ->{IO} Optional Code - 334. builtin.Float.lt : Float + 352. builtin.Char.Class.lower : Class + 353. builtin.Float.lt : Float -> Float -> Boolean - 335. builtin.Int.lt : Int + 354. builtin.Int.lt : Int -> Int -> Boolean - 336. builtin.Nat.lt : Nat + 355. builtin.Nat.lt : Nat -> Nat -> Boolean - 337. builtin.Text.lt : Text + 356. builtin.Text.lt : Text -> Text -> Boolean - 338. builtin.Float.lteq : Float + 357. builtin.Float.lteq : Float -> Float -> Boolean - 339. builtin.Int.lteq : Int + 358. builtin.Int.lteq : Int -> Int -> Boolean - 340. builtin.Nat.lteq : Nat + 359. builtin.Nat.lteq : Nat -> Nat -> Boolean - 341. builtin.Text.lteq : Text + 360. builtin.Text.lteq : Text -> Text -> Boolean - 342. builtin.Pattern.many : Pattern + 361. builtin.Pattern.many : Pattern a -> Pattern a - 343. builtin.Float.max : Float + 362. builtin.Char.Class.mark : Class + 363. builtin.Float.max : Float -> Float -> Float - 344. builtin.Float.min : Float + 364. builtin.Float.min : Float -> Float -> Float - 345. builtin.Int.mod : Int + 365. builtin.Int.mod : Int -> Int -> Int - 346. builtin.Nat.mod : Nat + 366. builtin.Nat.mod : Nat -> Nat -> Nat - 347. builtin.io2.Clock.internals.monotonic : '{IO} Either + 367. builtin.io2.Clock.internals.monotonic : '{IO} Either Failure TimeSpec - 348. builtin.Int.negate : Int + 368. builtin.Universal.murmurHash : a + -> Nat + 369. builtin.Int.negate : Int -> Int - 349. builtin.io2.MVar.new : a + 370. builtin.io2.MVar.new : a ->{IO} MVar a - 350. builtin.io2.TVar.new : a + 371. builtin.io2.Promise.new : '{IO} Promise + a + 372. builtin.io2.TVar.new : a ->{STM} TVar a - 351. builtin.io2.MVar.newEmpty : '{IO} MVar + 373. builtin.io2.MVar.newEmpty : '{IO} MVar a - 352. builtin.io2.TVar.newIO : a + 374. builtin.io2.TVar.newIO : a ->{IO} TVar a - 353. builtin.Boolean.not : Boolean + 375. builtin.Boolean.not : Boolean -> Boolean - 354. builtin.Text.patterns.notCharIn : [Char] + 376. builtin.Char.Class.not : Class + -> Class + 377. builtin.Text.patterns.notCharIn : [Char] -> Pattern Text - 355. builtin.Text.patterns.notCharRange : Char + 378. builtin.Text.patterns.notCharRange : Char -> Char -> Pattern Text - 356. builtin.io2.Clock.internals.nsec : TimeSpec + 379. builtin.io2.Clock.internals.nsec : TimeSpec -> Nat - 357. builtin.Int.or : Int + 380. builtin.Char.Class.number : Class + 381. builtin.Char.Class.or : Class + -> Class + -> Class + 382. builtin.Int.or : Int -> Int -> Int - 358. builtin.Nat.or : Nat + 383. builtin.Nat.or : Nat -> Nat -> Nat - 359. builtin.Pattern.or : Pattern + 384. builtin.Pattern.or : Pattern a -> Pattern a -> Pattern a - 360. builtin.Int.popCount : Int + 385. builtin.Int.popCount : Int -> Nat - 361. builtin.Nat.popCount : Nat + 386. builtin.Nat.popCount : Nat -> Nat - 362. builtin.Float.pow : Float + 387. builtin.Float.pow : Float -> Float -> Float - 363. builtin.Int.pow : Int + 388. builtin.Int.pow : Int -> Nat -> Int - 364. builtin.Nat.pow : Nat + 389. builtin.Nat.pow : Nat -> Nat -> Nat - 365. builtin.io2.Clock.internals.processCPUTime : '{IO} Either + 390. builtin.Char.Class.printable : Class + 391. builtin.io2.Clock.internals.processCPUTime : '{IO} Either Failure TimeSpec - 366. builtin.Text.patterns.punctuation : Pattern + 392. 
builtin.Char.Class.punctuation : Class + 393. builtin.Text.patterns.punctuation : Pattern Text - 367. builtin.ImmutableArray.read : ImmutableArray + 394. builtin.Char.Class.range : Char + -> Char + -> Class + 395. builtin.ImmutableArray.read : ImmutableArray a -> Nat ->{Exception} a - 368. builtin.MutableArray.read : MutableArray + 396. builtin.MutableArray.read : MutableArray g a -> Nat ->{g, Exception} a - 369. builtin.Ref.read : Ref g a - ->{g} a - 370. builtin.io2.TVar.read : TVar a - ->{STM} a - 371. builtin.ImmutableByteArray.read16be : ImmutableByteArray - -> Nat - ->{Exception} Nat - 372. builtin.MutableByteArray.read16be : MutableByteArray - g - -> Nat - ->{g, - Exception} Nat - 373. builtin.ImmutableByteArray.read24be : ImmutableByteArray - -> Nat - ->{Exception} Nat - 374. builtin.MutableByteArray.read24be : MutableByteArray - g - -> Nat - ->{g, - Exception} Nat - 375. builtin.ImmutableByteArray.read32be : ImmutableByteArray - -> Nat - ->{Exception} Nat - 376. builtin.MutableByteArray.read32be : MutableByteArray - g - -> Nat - ->{g, - Exception} Nat - 377. builtin.ImmutableByteArray.read40be : ImmutableByteArray - -> Nat - ->{Exception} Nat - 378. builtin.MutableByteArray.read40be : MutableByteArray - g - -> Nat - ->{g, - Exception} Nat - 379. builtin.ImmutableByteArray.read64be : ImmutableByteArray - -> Nat - ->{Exception} Nat - 380. builtin.MutableByteArray.read64be : MutableByteArray - g - -> Nat - ->{g, - Exception} Nat - 381. builtin.ImmutableByteArray.read8 : ImmutableByteArray - -> Nat - ->{Exception} Nat - 382. builtin.MutableByteArray.read8 : MutableByteArray - g - -> Nat - ->{g, - Exception} Nat - 383. builtin.io2.TVar.readIO : TVar a + 397. builtin.io2.Promise.read : Promise + a ->{IO} a - 384. builtin.io2.Clock.internals.realtime : '{IO} Either + 398. builtin.Ref.read : Ref g a + ->{g} a + 399. builtin.io2.TVar.read : TVar a + ->{STM} a + 400. builtin.io2.Ref.Ticket.read : Ticket + a + -> a + 401. builtin.ImmutableByteArray.read16be : ImmutableByteArray + -> Nat + ->{Exception} Nat + 402. builtin.MutableByteArray.read16be : MutableByteArray + g + -> Nat + ->{g, + Exception} Nat + 403. builtin.ImmutableByteArray.read24be : ImmutableByteArray + -> Nat + ->{Exception} Nat + 404. builtin.MutableByteArray.read24be : MutableByteArray + g + -> Nat + ->{g, + Exception} Nat + 405. builtin.ImmutableByteArray.read32be : ImmutableByteArray + -> Nat + ->{Exception} Nat + 406. builtin.MutableByteArray.read32be : MutableByteArray + g + -> Nat + ->{g, + Exception} Nat + 407. builtin.ImmutableByteArray.read40be : ImmutableByteArray + -> Nat + ->{Exception} Nat + 408. builtin.MutableByteArray.read40be : MutableByteArray + g + -> Nat + ->{g, + Exception} Nat + 409. builtin.ImmutableByteArray.read64be : ImmutableByteArray + -> Nat + ->{Exception} Nat + 410. builtin.MutableByteArray.read64be : MutableByteArray + g + -> Nat + ->{g, + Exception} Nat + 411. builtin.ImmutableByteArray.read8 : ImmutableByteArray + -> Nat + ->{Exception} Nat + 412. builtin.MutableByteArray.read8 : MutableByteArray + g + -> Nat + ->{g, + Exception} Nat + 413. builtin.io2.Ref.readForCas : Ref + {IO} a + ->{IO} Ticket + a + 414. builtin.io2.TVar.readIO : TVar a + ->{IO} a + 415. builtin.io2.Clock.internals.realtime : '{IO} Either Failure TimeSpec - 385. builtin.io2.IO.ref : a + 416. builtin.io2.IO.ref : a ->{IO} Ref {IO} a - 386. builtin.Scope.ref : a + 417. builtin.Scope.ref : a ->{Scope s} Ref {Scope s} a - 387. builtin.Text.repeat : Nat + 418. builtin.Text.repeat : Nat -> Text -> Text - 388. 
builtin.Pattern.replicate : Nat + 419. builtin.Pattern.replicate : Nat -> Nat -> Pattern a -> Pattern a - 389. builtin.io2.STM.retry : '{STM} a - 390. builtin.Text.reverse : Text + 420. builtin.io2.STM.retry : '{STM} a + 421. builtin.Text.reverse : Text -> Text - 391. builtin.Float.round : Float + 422. builtin.Float.round : Float -> Int - 392. builtin.Pattern.run : Pattern + 423. builtin.Pattern.run : Pattern a -> a -> Optional ( [a], a) - 393. builtin.Scope.run : (∀ s. + 424. builtin.Scope.run : (∀ s. '{g, Scope s} r) ->{g} r - 394. builtin.io2.Clock.internals.sec : TimeSpec + 425. builtin.io2.Clock.internals.sec : TimeSpec -> Int - 395. builtin.Code.serialize : Code + 426. builtin.Char.Class.separator : Class + 427. builtin.Code.serialize : Code -> Bytes - 396. builtin.Value.serialize : Value + 428. builtin.Value.serialize : Value -> Bytes - 397. builtin.io2.Tls.ClientConfig.certificates.set : [SignedCert] + 429. builtin.io2.Tls.ClientConfig.certificates.set : [SignedCert] -> ClientConfig -> ClientConfig - 398. builtin.io2.Tls.ServerConfig.certificates.set : [SignedCert] + 430. builtin.io2.Tls.ServerConfig.certificates.set : [SignedCert] -> ServerConfig -> ServerConfig - 399. builtin.io2.TLS.ClientConfig.ciphers.set : [Cipher] + 431. builtin.io2.TLS.ClientConfig.ciphers.set : [Cipher] -> ClientConfig -> ClientConfig - 400. builtin.io2.Tls.ServerConfig.ciphers.set : [Cipher] + 432. builtin.io2.Tls.ServerConfig.ciphers.set : [Cipher] -> ServerConfig -> ServerConfig - 401. builtin.io2.Tls.ClientConfig.versions.set : [Version] + 433. builtin.io2.Tls.ClientConfig.versions.set : [Version] -> ClientConfig -> ClientConfig - 402. builtin.io2.Tls.ServerConfig.versions.set : [Version] + 434. builtin.io2.Tls.ServerConfig.versions.set : [Version] -> ServerConfig -> ServerConfig - 403. builtin.Int.shiftLeft : Int + 435. builtin.Int.shiftLeft : Int -> Nat -> Int - 404. builtin.Nat.shiftLeft : Nat + 436. builtin.Nat.shiftLeft : Nat -> Nat -> Nat - 405. builtin.Int.shiftRight : Int + 437. builtin.Int.shiftRight : Int -> Nat -> Int - 406. builtin.Nat.shiftRight : Nat + 438. builtin.Nat.shiftRight : Nat -> Nat -> Nat - 407. builtin.Int.signum : Int + 439. builtin.Int.signum : Int -> Int - 408. builtin.Float.sin : Float + 440. builtin.Float.sin : Float -> Float - 409. builtin.Float.sinh : Float + 441. builtin.Float.sinh : Float -> Float - 410. builtin.Bytes.size : Bytes + 442. builtin.Bytes.size : Bytes -> Nat - 411. builtin.ImmutableArray.size : ImmutableArray + 443. builtin.ImmutableArray.size : ImmutableArray a -> Nat - 412. builtin.ImmutableByteArray.size : ImmutableByteArray + 444. builtin.ImmutableByteArray.size : ImmutableByteArray -> Nat - 413. builtin.List.size : [a] + 445. builtin.List.size : [a] -> Nat - 414. builtin.MutableArray.size : MutableArray + 446. builtin.MutableArray.size : MutableArray g a -> Nat - 415. builtin.MutableByteArray.size : MutableByteArray + 447. builtin.MutableByteArray.size : MutableByteArray g -> Nat - 416. builtin.Text.size : Text + 448. builtin.Text.size : Text -> Nat - 417. builtin.Text.patterns.space : Pattern + 449. builtin.Text.patterns.space : Pattern Text - 418. builtin.Float.sqrt : Float + 450. builtin.Float.sqrt : Float -> Float - 419. builtin.io2.IO.stdHandle : StdHandle + 451. builtin.io2.IO.process.start : Text + -> [Text] + ->{IO} ( Handle, + Handle, + Handle, + ProcessHandle) + 452. builtin.io2.IO.stdHandle : StdHandle -> Handle - 420. builtin.Nat.sub : Nat + 453. builtin.Nat.sub : Nat -> Nat -> Int - 421. builtin.io2.TVar.swap : TVar a + 454. 
builtin.io2.TVar.swap : TVar a -> a ->{STM} a - 422. builtin.io2.IO.systemTimeMicroseconds : '{IO} Int - 423. builtin.Bytes.take : Nat + 455. builtin.Char.Class.symbol : Class + 456. builtin.io2.IO.systemTimeMicroseconds : '{IO} Int + 457. builtin.Bytes.take : Nat -> Bytes -> Bytes - 424. builtin.List.take : Nat + 458. builtin.List.take : Nat -> [a] -> [a] - 425. builtin.Text.take : Nat + 459. builtin.Text.take : Nat -> Text -> Text - 426. builtin.Float.tan : Float + 460. builtin.Float.tan : Float -> Float - 427. builtin.Float.tanh : Float + 461. builtin.Float.tanh : Float -> Float - 428. builtin.io2.Clock.internals.threadCPUTime : '{IO} Either + 462. builtin.io2.Clock.internals.threadCPUTime : '{IO} Either Failure TimeSpec - 429. builtin.Bytes.toBase16 : Bytes + 463. builtin.Bytes.toBase16 : Bytes -> Bytes - 430. builtin.Bytes.toBase32 : Bytes + 464. builtin.Bytes.toBase32 : Bytes -> Bytes - 431. builtin.Bytes.toBase64 : Bytes + 465. builtin.Bytes.toBase64 : Bytes -> Bytes - 432. builtin.Bytes.toBase64UrlUnpadded : Bytes + 466. builtin.Bytes.toBase64UrlUnpadded : Bytes -> Bytes - 433. builtin.Text.toCharList : Text + 467. builtin.Text.toCharList : Text -> [Char] - 434. builtin.Int.toFloat : Int + 468. builtin.Int.toFloat : Int -> Float - 435. builtin.Nat.toFloat : Nat + 469. builtin.Nat.toFloat : Nat -> Float - 436. builtin.Nat.toInt : Nat + 470. builtin.Nat.toInt : Nat -> Int - 437. builtin.Bytes.toList : Bytes + 471. builtin.Bytes.toList : Bytes -> [Nat] - 438. builtin.Text.toLowercase : Text + 472. builtin.Text.toLowercase : Text -> Text - 439. builtin.Char.toNat : Char + 473. builtin.Char.toNat : Char -> Nat - 440. builtin.Float.toRepresentation : Float + 474. builtin.Float.toRepresentation : Float -> Nat - 441. builtin.Int.toRepresentation : Int + 475. builtin.Int.toRepresentation : Int -> Nat - 442. builtin.Char.toText : Char + 476. builtin.Char.toText : Char -> Text - 443. builtin.Float.toText : Float + 477. builtin.Debug.toText : a + -> Optional + (Either + Text + Text) + 478. builtin.Float.toText : Float -> Text - 444. builtin.Handle.toText : Handle + 479. builtin.Handle.toText : Handle -> Text - 445. builtin.Int.toText : Int + 480. builtin.Int.toText : Int -> Text - 446. builtin.Nat.toText : Nat + 481. builtin.Nat.toText : Nat -> Text - 447. builtin.Socket.toText : Socket + 482. builtin.Socket.toText : Socket -> Text - 448. builtin.Link.Term.toText : Term + 483. builtin.Link.Term.toText : Term -> Text - 449. builtin.ThreadId.toText : ThreadId + 484. builtin.ThreadId.toText : ThreadId -> Text - 450. builtin.Text.toUppercase : Text + 485. builtin.Text.toUppercase : Text -> Text - 451. builtin.Text.toUtf8 : Text + 486. builtin.Text.toUtf8 : Text -> Bytes - 452. builtin.todo : a -> b - 453. builtin.Debug.trace : Text + 487. builtin.todo : a -> b + 488. builtin.Debug.trace : Text -> a -> () - 454. builtin.Int.trailingZeros : Int + 489. builtin.Int.trailingZeros : Int -> Nat - 455. builtin.Nat.trailingZeros : Nat + 490. builtin.Nat.trailingZeros : Nat -> Nat - 456. builtin.Float.truncate : Float + 491. builtin.Float.truncate : Float -> Int - 457. builtin.Int.truncate0 : Int + 492. builtin.Int.truncate0 : Int -> Nat - 458. builtin.io2.IO.tryEval : '{IO} a + 493. builtin.io2.IO.tryEval : '{IO} a ->{IO, Exception} a - 459. builtin.io2.MVar.tryTake : MVar a + 494. builtin.io2.Promise.tryRead : Promise + a ->{IO} Optional a - 460. builtin.Text.uncons : Text + 495. builtin.io2.MVar.tryTake : MVar a + ->{IO} Optional + a + 496. builtin.Text.uncons : Text -> Optional ( Char, Text) - 461. 
builtin.Any.unsafeExtract : Any + 497. builtin.Any.unsafeExtract : Any -> a - 462. builtin.Text.unsnoc : Text + 498. builtin.Text.unsnoc : Text -> Optional ( Text, Char) - 463. builtin.Code.validate : [( Term, + 499. builtin.Char.Class.upper : Class + 500. builtin.Code.validate : [( Term, Code)] ->{IO} Optional Failure - 464. builtin.io2.validateSandboxed : [Term] + 501. builtin.io2.validateSandboxed : [Term] -> a -> Boolean - 465. builtin.Value.value : a + 502. builtin.Value.value : a -> Value - 466. builtin.Debug.watch : Text + 503. builtin.io2.IO.process.wait : ProcessHandle + ->{IO} Nat + 504. builtin.Debug.watch : Text -> a -> a - 467. builtin.MutableArray.write : MutableArray + 505. builtin.Char.Class.whitespace : Class + 506. builtin.MutableArray.write : MutableArray g a -> Nat -> a ->{g, Exception} () - 468. builtin.Ref.write : Ref g a + 507. builtin.io2.Promise.write : Promise + a + -> a + ->{IO} Boolean + 508. builtin.Ref.write : Ref g a -> a ->{g} () - 469. builtin.io2.TVar.write : TVar a + 509. builtin.io2.TVar.write : TVar a -> a ->{STM} () - 470. builtin.MutableByteArray.write16be : MutableByteArray + 510. builtin.MutableByteArray.write16be : MutableByteArray g -> Nat -> Nat ->{g, Exception} () - 471. builtin.MutableByteArray.write32be : MutableByteArray + 511. builtin.MutableByteArray.write32be : MutableByteArray g -> Nat -> Nat ->{g, Exception} () - 472. builtin.MutableByteArray.write64be : MutableByteArray + 512. builtin.MutableByteArray.write64be : MutableByteArray g -> Nat -> Nat ->{g, Exception} () - 473. builtin.MutableByteArray.write8 : MutableByteArray + 513. builtin.MutableByteArray.write8 : MutableByteArray g -> Nat -> Nat ->{g, Exception} () - 474. builtin.Int.xor : Int + 514. builtin.Int.xor : Int -> Int -> Int - 475. builtin.Nat.xor : Nat + 515. 
builtin.Nat.xor : Nat -> Nat -> Nat diff --git a/unison-src/transcripts/pattern-match-coverage.md b/unison-src/transcripts/pattern-match-coverage.md new file mode 100644 index 000000000..88d3fe822 --- /dev/null +++ b/unison-src/transcripts/pattern-match-coverage.md @@ -0,0 +1,345 @@ +```ucm:hide +.> builtins.merge +``` + +# Basics +## non-exhaustive patterns +```unison:error +unique type T = A | B | C + +test : T -> () +test = cases + A -> () +``` + +```unison:error +unique type T = A | B + +test : (T, Optional T) -> () +test = cases + (A, Some _) -> () + (A, None) -> () + (B, Some A) -> () + (B, None) -> () +``` + +## redundant patterns +```unison:error +unique type T = A | B | C + +test : T -> () +test = cases + A -> () + B -> () + C -> () + _ -> () +``` + +```unison:error +unique type T = A | B + +test : (T, Optional T) -> () +test = cases + (A, Some _) -> () + (A, None) -> () + (B, Some _) -> () + (B, None) -> () + (A, Some A) -> () +``` + +# Uninhabited patterns + +match is complete without covering uninhabited patterns +```unison +unique type V = + +test : Optional (Optional V) -> () +test = cases + None -> () + Some None -> () +``` + +uninhabited patterns are reported as redundant +```unison:error +unique type V = + +test0 : V -> () +test0 = cases + _ -> () +``` + +```unison:error +unique type V = + +test : Optional (Optional V) -> () +test = cases + None -> () + Some None -> () + Some _ -> () +``` + +# Guards + +## Incomplete patterns due to guards should be reported +```unison:error +test : () -> () +test = cases + () | false -> () +``` + +```unison:error +test : Optional Nat -> Nat +test = cases + None -> 0 + Some x + | isEven x -> x +``` + +## Complete patterns with guards should be accepted +```unison:error +test : Optional Nat -> Nat +test = cases + None -> 0 + Some x + | isEven x -> x + | otherwise -> 0 +``` + +# Pattern instantiation depth + +Uncovered patterns are only instantiated as deeply as necessary to +distinguish them from existing patterns. 
+```unison:error +unique type T = A | B | C + +test : Optional (Optional T) -> () +test = cases + None -> () + Some None -> () +``` + +```unison:error +unique type T = A | B | C + +test : Optional (Optional T) -> () +test = cases + None -> () + Some None -> () + Some (Some A) -> () +``` + +# Literals + +## Non-exhaustive + +Nat +```unison:error +test : Nat -> () +test = cases + 0 -> () +``` + +Boolean +```unison:error +test : Boolean -> () +test = cases + true -> () +``` + +## Exhaustive + +Nat +```unison +test : Nat -> () +test = cases + 0 -> () + _ -> () +``` + +Boolean +```unison +test : Boolean -> () +test = cases + true -> () + false -> () +``` + +# Redundant + +Nat +```unison:error +test : Nat -> () +test = cases + 0 -> () + 0 -> () + _ -> () +``` + +Boolean +```unison:error +test : Boolean -> () +test = cases + true -> () + false -> () + _ -> () +``` + +# Sequences + +## Exhaustive +```unison +test : [()] -> () +test = cases + [] -> () + x +: xs -> () +``` + +## Non-exhaustive +```unison:error +test : [()] -> () +test = cases + [] -> () +``` + +```unison:error +test : [()] -> () +test = cases + x +: xs -> () +``` + +```unison:error +test : [()] -> () +test = cases + xs :+ x -> () +``` + +```unison:error +test : [()] -> () +test = cases + x0 +: (x1 +: xs) -> () + [] -> () +``` + +```unison:error +test : [()] -> () +test = cases + [] -> () + x0 +: [] -> () +``` + +## Uninhabited + +`Cons` is not expected since `V` is uninhabited +```unison +unique type V = + +test : [V] -> () +test = cases + [] -> () +``` + +## Length restrictions can equate cons and nil patterns + +Here the first pattern matches lists of length two or greater, the +second pattern matches lists of length 0. The third case matches when the +final element is `false`, while the fourth pattern matches when the +first element is `true`. However, the only possible list length at +the third or fourth clause is 1, so the first and final element must +be equal. Thus, the pattern match is exhaustive. +```unison +test : [Boolean] -> () +test = cases + [a, b] ++ xs -> () + [] -> () + xs :+ false -> () + true +: xs -> () +``` + +This is the same idea as above but shows that fourth match is redundant. +```unison:error +test : [Boolean] -> () +test = cases + [a, b] ++ xs -> () + [] -> () + xs :+ true -> () + true +: xs -> () + _ -> () +``` + +This is another similar example. The first pattern matches lists of +length 5 or greater. The second matches lists of length 4 or greater where the +first and third element are true. The third matches lists of length 4 +or greater where the final 4 elements are `true, false, true, false`. +The list must be exactly of length 4 to arrive at the second or third +clause, so the third pattern is redundant. +```unison:error +test : [Boolean] -> () +test = cases + [a, b, c, d, f] ++ xs -> () + [true, _, true, _] ++ _ -> () + _ ++ [true, false, true, false] -> () + _ -> () +``` + +# bugfix: Sufficient data decl map + +```unison +unique type T = A + +unit2t : Unit -> T +unit2t = cases + () -> A +``` + +```ucm +.> add +``` + +Pattern coverage checking needs the data decl map to contain all +transitive type dependencies of the scrutinee type. We do this +before typechecking begins in a roundabout way: fetching all +transitive type dependencies of references that appear in the expression. + +This test ensures that we have fetched the `T` type although there is +no data decl reference to `T` in `witht`. 
+```unison +witht : Unit +witht = match unit2t () with + x -> () +``` + +```unison +unique type V = + +evil : Unit -> V +evil = bug "" +``` + +```ucm +.> add +``` + +```unison:error +withV : Unit +withV = match evil () with + x -> () +``` + +```unison +unique type SomeType = A +``` + +```ucm +.> add +``` + +```unison +unique type R = R SomeType + +get x = match x with + R y -> y +``` + +```unison +unique type R = { someType : SomeType } +``` diff --git a/unison-src/transcripts/pattern-match-coverage.output.md b/unison-src/transcripts/pattern-match-coverage.output.md new file mode 100644 index 000000000..e7c76c900 --- /dev/null +++ b/unison-src/transcripts/pattern-match-coverage.output.md @@ -0,0 +1,727 @@ +# Basics +## non-exhaustive patterns +```unison +unique type T = A | B | C + +test : T -> () +test = cases + A -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 4 | test = cases + 5 | A -> () + + + Patterns not matched: + + * B + * C + +``` +```unison +unique type T = A | B + +test : (T, Optional T) -> () +test = cases + (A, Some _) -> () + (A, None) -> () + (B, Some A) -> () + (B, None) -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 4 | test = cases + 5 | (A, Some _) -> () + 6 | (A, None) -> () + 7 | (B, Some A) -> () + 8 | (B, None) -> () + + + Patterns not matched: + * (B, Some B) + +``` +## redundant patterns +```unison +unique type T = A | B | C + +test : T -> () +test = cases + A -> () + B -> () + C -> () + _ -> () +``` + +```ucm + + This case would be ignored because it's already covered by the preceding case(s): + 8 | _ -> () + + +``` +```unison +unique type T = A | B + +test : (T, Optional T) -> () +test = cases + (A, Some _) -> () + (A, None) -> () + (B, Some _) -> () + (B, None) -> () + (A, Some A) -> () +``` + +```ucm + + This case would be ignored because it's already covered by the preceding case(s): + 9 | (A, Some A) -> () + + +``` +# Uninhabited patterns + +match is complete without covering uninhabited patterns +```unison +unique type V = + +test : Optional (Optional V) -> () +test = cases + None -> () + Some None -> () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type V + test : Optional (Optional V) -> () + +``` +uninhabited patterns are reported as redundant +```unison +unique type V = + +test0 : V -> () +test0 = cases + _ -> () +``` + +```ucm + + This case would be ignored because it's already covered by the preceding case(s): + 5 | _ -> () + + +``` +```unison +unique type V = + +test : Optional (Optional V) -> () +test = cases + None -> () + Some None -> () + Some _ -> () +``` + +```ucm + + This case would be ignored because it's already covered by the preceding case(s): + 7 | Some _ -> () + + +``` +# Guards + +## Incomplete patterns due to guards should be reported +```unison +test : () -> () +test = cases + () | false -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 2 | test = cases + 3 | () | false -> () + + + Patterns not matched: + * () + +``` +```unison +test : Optional Nat -> Nat +test = cases + None -> 0 + Some x + | isEven x -> x +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 2 | test = cases + 3 | None -> 0 + 4 | Some x + 5 | | isEven x -> x + + + Patterns not matched: + * Some _ + +``` +## Complete patterns with guards should be accepted +```unison +test : Optional Nat -> Nat +test = cases + None -> 0 + Some x + | isEven x -> x + | otherwise -> 0 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + test : Optional Nat -> Nat + +``` +# Pattern instantiation depth + +Uncovered patterns are only instantiated as deeply as necessary to +distinguish them from existing patterns. +```unison +unique type T = A | B | C + +test : Optional (Optional T) -> () +test = cases + None -> () + Some None -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 4 | test = cases + 5 | None -> () + 6 | Some None -> () + + + Patterns not matched: + * Some (Some _) + +``` +```unison +unique type T = A | B | C + +test : Optional (Optional T) -> () +test = cases + None -> () + Some None -> () + Some (Some A) -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 4 | test = cases + 5 | None -> () + 6 | Some None -> () + 7 | Some (Some A) -> () + + + Patterns not matched: + + * Some (Some B) + * Some (Some C) + +``` +# Literals + +## Non-exhaustive + +Nat +```unison +test : Nat -> () +test = cases + 0 -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 2 | test = cases + 3 | 0 -> () + + + Patterns not matched: + * _ + +``` +Boolean +```unison +test : Boolean -> () +test = cases + true -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 2 | test = cases + 3 | true -> () + + + Patterns not matched: + * false + +``` +## Exhaustive + +Nat +```unison +test : Nat -> () +test = cases + 0 -> () + _ -> () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + test : Nat -> () + +``` +Boolean +```unison +test : Boolean -> () +test = cases + true -> () + false -> () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + test : Boolean -> () + +``` +# Redundant + +Nat +```unison +test : Nat -> () +test = cases + 0 -> () + 0 -> () + _ -> () +``` + +```ucm + + This case would be ignored because it's already covered by the preceding case(s): + 4 | 0 -> () + + +``` +Boolean +```unison +test : Boolean -> () +test = cases + true -> () + false -> () + _ -> () +``` + +```ucm + + This case would be ignored because it's already covered by the preceding case(s): + 5 | _ -> () + + +``` +# Sequences + +## Exhaustive +```unison +test : [()] -> () +test = cases + [] -> () + x +: xs -> () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + test : [()] -> () + +``` +## Non-exhaustive +```unison +test : [()] -> () +test = cases + [] -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 2 | test = cases + 3 | [] -> () + + + Patterns not matched: + * (() +: _) + +``` +```unison +test : [()] -> () +test = cases + x +: xs -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 2 | test = cases + 3 | x +: xs -> () + + + Patterns not matched: + * [] + +``` +```unison +test : [()] -> () +test = cases + xs :+ x -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 2 | test = cases + 3 | xs :+ x -> () + + + Patterns not matched: + * [] + +``` +```unison +test : [()] -> () +test = cases + x0 +: (x1 +: xs) -> () + [] -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 2 | test = cases + 3 | x0 +: (x1 +: xs) -> () + 4 | [] -> () + + + Patterns not matched: + * (() +: []) + +``` +```unison +test : [()] -> () +test = cases + [] -> () + x0 +: [] -> () +``` + +```ucm + + Pattern match doesn't cover all possible cases: + 2 | test = cases + 3 | [] -> () + 4 | x0 +: [] -> () + + + Patterns not matched: + * (() +: (() +: _)) + +``` +## Uninhabited + +`Cons` is not expected since `V` is uninhabited +```unison +unique type V = + +test : [V] -> () +test = cases + [] -> () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type V + test : [V] -> () + +``` +## Length restrictions can equate cons and nil patterns + +Here the first pattern matches lists of length two or greater, the +second pattern matches lists of length 0. The third case matches when the +final element is `false`, while the fourth pattern matches when the +first element is `true`. However, the only possible list length at +the third or fourth clause is 1, so the first and final element must +be equal. Thus, the pattern match is exhaustive. +```unison +test : [Boolean] -> () +test = cases + [a, b] ++ xs -> () + [] -> () + xs :+ false -> () + true +: xs -> () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + test : [Boolean] -> () + +``` +This is the same idea as above but shows that fourth match is redundant. 
+```unison +test : [Boolean] -> () +test = cases + [a, b] ++ xs -> () + [] -> () + xs :+ true -> () + true +: xs -> () + _ -> () +``` + +```ucm + + This case would be ignored because it's already covered by the preceding case(s): + 6 | true +: xs -> () + + +``` +This is another similar example. The first pattern matches lists of +length 5 or greater. The second matches lists of length 4 or greater where the +first and third element are true. The third matches lists of length 4 +or greater where the final 4 elements are `true, false, true, false`. +The list must be exactly of length 4 to arrive at the second or third +clause, so the third pattern is redundant. +```unison +test : [Boolean] -> () +test = cases + [a, b, c, d, f] ++ xs -> () + [true, _, true, _] ++ _ -> () + _ ++ [true, false, true, false] -> () + _ -> () +``` + +```ucm + + This case would be ignored because it's already covered by the preceding case(s): + 5 | _ ++ [true, false, true, false] -> () + + +``` +# bugfix: Sufficient data decl map + +```unison +unique type T = A + +unit2t : Unit -> T +unit2t = cases + () -> A +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type T + unit2t : 'T + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + unique type T + unit2t : 'T + +``` +Pattern coverage checking needs the data decl map to contain all +transitive type dependencies of the scrutinee type. We do this +before typechecking begins in a roundabout way: fetching all +transitive type dependencies of references that appear in the expression. + +This test ensures that we have fetched the `T` type although there is +no data decl reference to `T` in `witht`. +```unison +witht : Unit +witht = match unit2t () with + x -> () +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + witht : () + +``` +```unison +unique type V = + +evil : Unit -> V +evil = bug "" +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type V + evil : 'V + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + unique type V + evil : 'V + +``` +```unison +withV : Unit +withV = match evil () with + x -> () +``` + +```ucm + + This case would be ignored because it's already covered by the preceding case(s): + 3 | x -> () + + +``` +```unison +unique type SomeType = A +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type SomeType + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + unique type SomeType + +``` +```unison +unique type R = R SomeType + +get x = match x with + R y -> y +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type R + get : R -> SomeType + +``` +```unison +unique type R = { someType : SomeType } +``` + +```ucm + + I found and typechecked these definitions in scratch.u. 
If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + unique type R + R.someType : R -> SomeType + R.someType.modify : (SomeType ->{g} SomeType) -> R ->{g} R + R.someType.set : SomeType -> R -> R + +``` diff --git a/unison-src/transcripts/pattern-pretty-print-2345.md b/unison-src/transcripts/pattern-pretty-print-2345.md index 83cb13d7a..8f77046d3 100644 --- a/unison-src/transcripts/pattern-pretty-print-2345.md +++ b/unison-src/transcripts/pattern-pretty-print-2345.md @@ -11,45 +11,56 @@ structural ability Ab where dopey = cases ?0 -> () + _ -> () grumpy = cases d -> () happy = cases true -> () + false -> () sneezy = cases +1 -> () + _ -> () bashful = cases Some a -> () + _ -> () mouthy = cases [] -> () + _ -> () pokey = cases h +: t -> () + _ -> () sleepy = cases i :+ l -> () + _ -> () demure = cases [0] -> () + _ -> () angry = cases a ++ [] -> () tremulous = cases (0,1) -> () + _ -> () throaty = cases { Ab.a a -> k } -> () agitated = cases a | a == 2 -> () + _ -> () doc = cases y@4 -> () + _ -> () ``` ```ucm diff --git a/unison-src/transcripts/pattern-pretty-print-2345.output.md b/unison-src/transcripts/pattern-pretty-print-2345.output.md index a764580fa..610c75699 100644 --- a/unison-src/transcripts/pattern-pretty-print-2345.output.md +++ b/unison-src/transcripts/pattern-pretty-print-2345.output.md @@ -7,45 +7,56 @@ structural ability Ab where dopey = cases ?0 -> () + _ -> () grumpy = cases d -> () happy = cases true -> () + false -> () sneezy = cases +1 -> () + _ -> () bashful = cases Some a -> () + _ -> () mouthy = cases [] -> () + _ -> () pokey = cases h +: t -> () + _ -> () sleepy = cases i :+ l -> () + _ -> () demure = cases [0] -> () + _ -> () angry = cases a ++ [] -> () tremulous = cases (0,1) -> () + _ -> () throaty = cases { Ab.a a -> k } -> () agitated = cases a | a == 2 -> () + _ -> () doc = cases y@4 -> () + _ -> () ``` ```ucm @@ -63,7 +74,7 @@ doc = cases demure : [Nat] -> () doc : Nat -> () dopey : Char -> () - grumpy : p4kl4dn7b41 -> () + grumpy : ff284oqf651 -> () happy : Boolean -> () mouthy : [t] -> () pokey : [t] -> () @@ -85,7 +96,7 @@ doc = cases demure : [Nat] -> () doc : Nat -> () dopey : Char -> () - grumpy : p4kl4dn7b41 -> () + grumpy : ff284oqf651 -> () happy : Boolean -> () mouthy : [t] -> () pokey : [t] -> () @@ -97,47 +108,63 @@ doc = cases .> view dopey dopey : Char -> () - dopey = cases ?0 -> () + dopey = cases + ?0 -> () + _ -> () .> view grumpy - grumpy : p4kl4dn7b41 -> () + grumpy : ff284oqf651 -> () grumpy = cases d -> () .> view happy happy : Boolean -> () - happy = cases true -> () + happy = cases + true -> () + false -> () .> view sneezy sneezy : Int -> () - sneezy = cases +1 -> () + sneezy = cases + +1 -> () + _ -> () .> view bashful bashful : Optional a -> () - bashful = cases Some a -> () + bashful = cases + Some a -> () + _ -> () .> view mouthy mouthy : [t] -> () - mouthy = cases [] -> () + mouthy = cases + [] -> () + _ -> () .> view pokey pokey : [t] -> () - pokey = cases h +: t -> () + pokey = cases + h +: t -> () + _ -> () .> view sleepy sleepy : [t] -> () - sleepy = cases i :+ l -> () + sleepy = cases + i :+ l -> () + _ -> () .> view demure demure : [Nat] -> () - demure = cases [0] -> () + demure = cases + [0] -> () + _ -> () .> view angry @@ -147,7 +174,9 @@ doc = cases .> view tremulous tremulous : (Nat, Nat) -> () - tremulous = cases (0, 1) -> () + tremulous = cases + (0, 1) -> () + _ -> () .> view throaty @@ -157,11 +186,15 @@ doc = cases .> view agitated agitated : Nat 
-> () - agitated = cases a | a == 2 -> () + agitated = cases + a | a == 2 -> () + _ -> () .> view doc doc : Nat -> () - doc = cases y@4 -> () + doc = cases + y@4 -> () + _ -> () ``` diff --git a/unison-src/transcripts/patternMatchTls.md b/unison-src/transcripts/patternMatchTls.md new file mode 100644 index 000000000..1fe2c295b --- /dev/null +++ b/unison-src/transcripts/patternMatchTls.md @@ -0,0 +1,35 @@ +```ucm:hide +.> builtins.merge +.> builtins.mergeio +``` + +We had bugs in the calling conventions for both send and terminate which would +cause pattern matching on the resulting (Right ()) would cause a runtime error. + + + +```unison +use builtin.io2.Tls newClient send handshake terminate + +frank: '{IO} () +frank = do + socket = assertRight (clientSocket.impl "example.com" "443") + config = ClientConfig.default "example.com" 0xs + tls = assertRight (newClient.impl config socket) + () = assertRight (handshake.impl tls) + () = assertRight (send.impl tls 0xs) + () = assertRight (terminate.impl tls) + () + +assertRight : Either a b -> b +assertRight = cases + Right x -> x + Left _ -> bug "expected a right but got a left" +``` + + + +```ucm +.> add +.> run frank +``` diff --git a/unison-src/transcripts/patternMatchTls.output.md b/unison-src/transcripts/patternMatchTls.output.md new file mode 100644 index 000000000..726af9f86 --- /dev/null +++ b/unison-src/transcripts/patternMatchTls.output.md @@ -0,0 +1,49 @@ +We had bugs in the calling conventions for both send and terminate which would +cause pattern matching on the resulting (Right ()) would cause a runtime error. + + + +```unison +use builtin.io2.Tls newClient send handshake terminate + +frank: '{IO} () +frank = do + socket = assertRight (clientSocket.impl "example.com" "443") + config = ClientConfig.default "example.com" 0xs + tls = assertRight (newClient.impl config socket) + () = assertRight (handshake.impl tls) + () = assertRight (send.impl tls 0xs) + () = assertRight (terminate.impl tls) + () + +assertRight : Either a b -> b +assertRight = cases + Right x -> x + Left _ -> bug "expected a right but got a left" +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + assertRight : Either a b -> b + frank : '{IO} () + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + assertRight : Either a b -> b + frank : '{IO} () + +.> run frank + + () + +``` diff --git a/unison-src/transcripts/reflog.output.md b/unison-src/transcripts/reflog.output.md index 472829e82..ee84f1c01 100644 --- a/unison-src/transcripts/reflog.output.md +++ b/unison-src/transcripts/reflog.output.md @@ -59,17 +59,17 @@ y = 2 most recent, along with the command that got us there. Try: `fork 2 .old` - `fork #k4l5pp6m04 .old` to make an old namespace + `fork #upgj8h6ju3 .old` to make an old namespace accessible again, - `reset-root #k4l5pp6m04` to reset the root namespace and + `reset-root #upgj8h6ju3` to reset the root namespace and its history to that of the specified namespace. When Root Hash Action - 1. now #90f8seam5j add - 2. now #k4l5pp6m04 add - 3. now #v4vfn849gt builtins.merge + 1. now #58jmfch7o7 add + 2. now #upgj8h6ju3 add + 3. now #acegso70di builtins.merge 4. 
#sg60bvjo91 history starts here Tip: Use `diff.namespace 1 7` to compare namespaces between diff --git a/unison-src/transcripts/squash.output.md b/unison-src/transcripts/squash.output.md index cf59f51fa..fb6206977 100644 --- a/unison-src/transcripts/squash.output.md +++ b/unison-src/transcripts/squash.output.md @@ -13,7 +13,7 @@ Let's look at some examples. We'll start with a namespace with just the builtins - □ 1. #jgei2u1mk0 (start of history) + □ 1. #l6nvab7prj (start of history) .> fork builtin builtin2 @@ -42,21 +42,21 @@ Now suppose we `fork` a copy of builtin, then rename `Nat.+` to `frobnicate`, th Note: The most recent namespace hash is immediately below this message. - ⊙ 1. #qf2na62l7i + ⊙ 1. #la42o8m5tq > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ 2. #b3docj1m3t + ⊙ 2. #fqkpt5ogt2 > Moves: Original name New name Nat.+ Nat.frobnicate - □ 3. #jgei2u1mk0 (start of history) + □ 3. #l6nvab7prj (start of history) ``` If we merge that back into `builtin`, we get that same chain of history: @@ -71,21 +71,21 @@ If we merge that back into `builtin`, we get that same chain of history: Note: The most recent namespace hash is immediately below this message. - ⊙ 1. #qf2na62l7i + ⊙ 1. #la42o8m5tq > Moves: Original name New name Nat.frobnicate Nat.+ - ⊙ 2. #b3docj1m3t + ⊙ 2. #fqkpt5ogt2 > Moves: Original name New name Nat.+ Nat.frobnicate - □ 3. #jgei2u1mk0 (start of history) + □ 3. #l6nvab7prj (start of history) ``` Let's try again, but using a `merge.squash` (or just `squash`) instead. The history will be unchanged: @@ -106,7 +106,7 @@ Let's try again, but using a `merge.squash` (or just `squash`) instead. The hist - □ 1. #jgei2u1mk0 (start of history) + □ 1. #l6nvab7prj (start of history) ``` The churn that happened in `mybuiltin` namespace ended up back in the same spot, so the squash merge of that namespace with our original namespace had no effect. @@ -485,13 +485,13 @@ This checks to see that squashing correctly preserves deletions: Note: The most recent namespace hash is immediately below this message. - ⊙ 1. #vinfl9l054 + ⊙ 1. #ae3oc8cikb - Deletes: Nat.* Nat.+ - □ 2. #jgei2u1mk0 (start of history) + □ 2. #l6nvab7prj (start of history) ``` Notice that `Nat.+` and `Nat.*` are deleted by the squash, and we see them deleted in one atomic step in the history. diff --git a/unison-src/transcripts/text-literals.md b/unison-src/transcripts/text-literals.md new file mode 100644 index 000000000..f9336b040 --- /dev/null +++ b/unison-src/transcripts/text-literals.md @@ -0,0 +1,37 @@ + +```ucm:hide +.> builtins.merge +``` + +This transcript shows some syntax for raw text literals. + +```unison +lit1 = """ +This is a raw text literal. +It can start with 3 or more ", +and is terminated by the same number of quotes. +Nothing is escaped. \n + +The initial newline, if it exists, is ignored. +""" + +> lit1 +> Some lit1 + +lit2 = """" + This is a raw text literal, indented. + It can start with 3 or more ", + and is terminated by the same number of quotes. + Nothing is escaped. \n + + This doesn't terminate the literal - """ + """" + +> lit2 +> Some lit2 +``` + +```ucm +.> add +.> view lit1 lit2 +``` \ No newline at end of file diff --git a/unison-src/transcripts/text-literals.output.md b/unison-src/transcripts/text-literals.output.md new file mode 100644 index 000000000..9ca0f7619 --- /dev/null +++ b/unison-src/transcripts/text-literals.output.md @@ -0,0 +1,109 @@ + +This transcript shows some syntax for raw text literals. + +```unison +lit1 = """ +This is a raw text literal. 
+It can start with 3 or more ", +and is terminated by the same number of quotes. +Nothing is escaped. \n + +The initial newline, if it exists, is ignored. +""" + +> lit1 +> Some lit1 + +lit2 = """" + This is a raw text literal, indented. + It can start with 3 or more ", + and is terminated by the same number of quotes. + Nothing is escaped. \n + + This doesn't terminate the literal - """ + """" + +> lit2 +> Some lit2 +``` + +```ucm + + I found and typechecked these definitions in scratch.u. If you + do an `add` or `update`, here's how your codebase would + change: + + ⍟ These new definitions are ok to `add`: + + lit1 : Text + lit2 : Text + + Now evaluating any watch expressions (lines starting with + `>`)... Ctrl+C cancels. + + 10 | > lit1 + ⧩ + """ + This is a raw text literal. + It can start with 3 or more ", + and is terminated by the same number of quotes. + Nothing is escaped. \n + + The initial newline, if it exists, is ignored. + """ + + 11 | > Some lit1 + ⧩ + Some + "This is a raw text literal.\nIt can start with 3 or more \",\nand is terminated by the same number of quotes.\nNothing is escaped. \\n\n\nThe initial newline, if it exists, is ignored.\n" + + 22 | > lit2 + ⧩ + """" + This is a raw text literal, indented. + It can start with 3 or more ", + and is terminated by the same number of quotes. + Nothing is escaped. \n + + This doesn't terminate the literal - """ + """" + + 23 | > Some lit2 + ⧩ + Some + "This is a raw text literal, indented.\nIt can start with 3 or more \",\nand is terminated by the same number of quotes.\nNothing is escaped. \\n\n\nThis doesn't terminate the literal - \"\"\"\n" + +``` +```ucm +.> add + + ⍟ I've added these definitions: + + lit1 : Text + lit2 : Text + +.> view lit1 lit2 + + lit1 : Text + lit1 = + """ + This is a raw text literal. + It can start with 3 or more ", + and is terminated by the same number of quotes. + Nothing is escaped. \n + + The initial newline, if it exists, is ignored. + """ + + lit2 : Text + lit2 = + """" + This is a raw text literal, indented. + It can start with 3 or more ", + and is terminated by the same number of quotes. + Nothing is escaped. \n + + This doesn't terminate the literal - """ + """" + +``` diff --git a/unison-src/transcripts/transcript-parser-commands.output.md b/unison-src/transcripts/transcript-parser-commands.output.md index 1a1cdbc91..8f9abb7e3 100644 --- a/unison-src/transcripts/transcript-parser-commands.output.md +++ b/unison-src/transcripts/transcript-parser-commands.output.md @@ -39,7 +39,8 @@ z ⚠️ - I don't know about that name. + The following names were not found in the codebase. Check your spelling. + foo ``` ```ucm @@ -47,7 +48,8 @@ z ⚠️ - I don't know about that name. + The following names were not found in the codebase. Check your spelling. + lineToken.call ``` However handling of blocks of other languages should be supported. 
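The raw text literal behaviour shown in the `text-literals` transcripts is implemented by the new `quotedRaw` parser in the `Unison/Syntax/Lexer.hs` changes further down. As a hedged illustration of the dedenting rules only — `dedentRaw` and its `indent` parameter are invented for this sketch, whereas the real lexer derives the indentation from the column of the closing quotes — the following standalone Haskell captures the two observable rules from the transcript: the first newline after the opening quotes is dropped, and common leading indentation is stripped only when every non-blank line shares it.

```haskell
-- Simplified standalone model (not the actual quotedRaw lexer code) of
-- how a raw """ literal body can be dedented.
import Data.Char (isSpace)
import Data.List (intercalate, isPrefixOf)

-- 'indent' stands in for the column of the closing quotes, which the
-- real lexer computes from the token position.
dedentRaw :: Int -> String -> String
dedentRaw indent body0 =
  let body = case body0 of
        '\n' : rest -> rest -- the initial newline, if present, is ignored
        _ -> body0
      leading = replicate indent ' '
      ls = lines body
   in if all (\l -> leading `isPrefixOf` l || all isSpace l) ls
        then intercalate "\n" (map (drop indent) ls) -- strip shared indentation
        else body -- some line is less indented: leave the body untouched

-- e.g. dedentRaw 2 "\n  one\n  two"  ==  "one\ntwo"
```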
diff --git a/unison-syntax/package.yaml b/unison-syntax/package.yaml index ba9f76c45..d65dac942 100644 --- a/unison-syntax/package.yaml +++ b/unison-syntax/package.yaml @@ -15,6 +15,7 @@ dependencies: - text - unison-core - unison-core1 + - unison-hash - unison-prelude - unison-util-base32hex - unison-util-bytes diff --git a/unison-syntax/src/Unison/Parser/Ann.hs b/unison-syntax/src/Unison/Parser/Ann.hs index 3964270b3..e5a598e08 100644 --- a/unison-syntax/src/Unison/Parser/Ann.hs +++ b/unison-syntax/src/Unison/Parser/Ann.hs @@ -46,3 +46,26 @@ contains :: Ann -> L.Pos -> Bool contains Intrinsic _ = False contains External _ = False contains (Ann start end) p = start <= p && p < end + +-- | Checks whether an annotation contains another annotation. +-- +-- i.e. pos ∈ [start, end) +-- +-- >>> Intrinsic `encompasses` Ann (L.Pos 1 1) (L.Pos 2 1) +-- Nothing +-- +-- >>> External `encompasses` Ann (L.Pos 1 1) (L.Pos 2 1) +-- Nothing +-- +-- >>> Ann (L.Pos 0 0) (L.Pos 0 10) `encompasses` Ann (L.Pos 0 1) (L.Pos 0 5) +-- Just True +-- +-- >>> Ann (L.Pos 1 0) (L.Pos 1 10) `encompasses` Ann (L.Pos 0 0) (L.Pos 2 0) +-- Just False +encompasses :: Ann -> Ann -> Maybe Bool +encompasses Intrinsic _ = Nothing +encompasses External _ = Nothing +encompasses _ Intrinsic = Nothing +encompasses _ External = Nothing +encompasses (Ann start1 end1) (Ann start2 end2) = + Just $ start1 <= start2 && end1 >= end2 diff --git a/unison-syntax/src/Unison/Syntax/HashQualified'.hs b/unison-syntax/src/Unison/Syntax/HashQualified'.hs index 00a97c567..20b6cd996 100644 --- a/unison-syntax/src/Unison/Syntax/HashQualified'.hs +++ b/unison-syntax/src/Unison/Syntax/HashQualified'.hs @@ -34,7 +34,7 @@ fromText t = case Text.breakOn "#" t of (name, "") -> Just $ HQ'.NameOnly (Name.unsafeFromText name) -- safe bc breakOn # (name, hash) -> HQ'.HashQualified (Name.unsafeFromText name) <$> SH.fromText hash -unsafeFromText :: HasCallStack => Text -> HQ'.HashQualified Name +unsafeFromText :: (HasCallStack) => Text -> HQ'.HashQualified Name unsafeFromText txt = fromMaybe msg (fromText txt) where msg = error ("HashQualified.unsafeFromText " <> show txt) diff --git a/unison-syntax/src/Unison/Syntax/HashQualified.hs b/unison-syntax/src/Unison/Syntax/HashQualified.hs index 4ae865dd9..c312d3636 100644 --- a/unison-syntax/src/Unison/Syntax/HashQualified.hs +++ b/unison-syntax/src/Unison/Syntax/HashQualified.hs @@ -52,7 +52,7 @@ unsafeFromText txt = fromMaybe msg . fromText $ txt where msg = error $ "HashQualified.unsafeFromText " <> show txt -unsafeFromVar :: Var v => v -> HashQualified Name +unsafeFromVar :: (Var v) => v -> HashQualified Name unsafeFromVar = unsafeFromText . Var.name toString :: HashQualified Name -> String @@ -63,6 +63,6 @@ toText :: HashQualified Name -> Text toText = HashQualified.toTextWith Name.toText -toVar :: Var v => HashQualified Name -> v +toVar :: (Var v) => HashQualified Name -> v toVar = Var.named . 
toText diff --git a/unison-syntax/src/Unison/Syntax/Lexer.hs b/unison-syntax/src/Unison/Syntax/Lexer.hs index 68f6ded23..4707b3c1c 100644 --- a/unison-syntax/src/Unison/Syntax/Lexer.hs +++ b/unison-syntax/src/Unison/Syntax/Lexer.hs @@ -31,6 +31,7 @@ module Unison.Syntax.Lexer wordyIdStartChar, wordyId, symbolyId, + symbolyIdChar, wordyId0, symbolyId0, ) @@ -128,7 +129,6 @@ data Lexeme | Reserved String -- reserved tokens such as `{`, `(`, `type`, `of`, etc | Textual String -- text literals, `"foo bar"` | Character Char -- character literals, `?X` - | Backticks String (Maybe ShortHash) -- an identifier in backticks | WordyId String (Maybe ShortHash) -- a (non-infix) identifier | SymbolyId String (Maybe ShortHash) -- an infix identifier | Blank String -- a typed hole or placeholder @@ -248,10 +248,11 @@ token'' tok p = do topHasClosePair ((name, _) : _) = name `elem` ["{", "(", "[", "handle", "match", "if", "then"] -showErrorFancy :: P.ShowErrorComponent e => P.ErrorFancy e -> String +showErrorFancy :: (P.ShowErrorComponent e) => P.ErrorFancy e -> String showErrorFancy (P.ErrorFail msg) = msg showErrorFancy (P.ErrorIndentation ord ref actual) = - "incorrect indentation (got " <> show (P.unPos actual) + "incorrect indentation (got " + <> show (P.unPos actual) <> ", should be " <> p <> show (P.unPos ref) @@ -300,19 +301,19 @@ lexer0' scope rem = tweak (h@(payload -> Reserved _) : t) = h : tweak t tweak (t1 : t2@(payload -> Numeric num) : rem) | notLayout t1 && touches t1 t2 && isSigned num = - t1 : - Token - (SymbolyId (take 1 num) Nothing) - (start t2) - (inc $ start t2) : - Token (Numeric (drop 1 num)) (inc $ start t2) (end t2) : - tweak rem + t1 + : Token + (SymbolyId (take 1 num) Nothing) + (start t2) + (inc $ start t2) + : Token (Numeric (drop 1 num)) (inc $ start t2) (end t2) + : tweak rem tweak (h : t) = h : tweak t isSigned num = all (\ch -> ch == '-' || ch == '+') $ take 1 num infixl 2 <+> -(<+>) :: Monoid a => P a -> P a -> P a +(<+>) :: (Monoid a) => P a -> P a -> P a p1 <+> p2 = do a1 <- p1; a2 <- p2; pure (a1 <> a2) lexemes :: P [Token Lexeme] @@ -349,7 +350,11 @@ lexemes' eof = pure $ hd <> tl where toks = - doc2 <|> doc <|> token numeric <|> token character <|> reserved + doc2 + <|> doc + <|> token numeric + <|> token character + <|> reserved <|> token symbolyId <|> token blank <|> token wordyId @@ -418,7 +423,10 @@ lexemes' eof = leafy closing = groupy closing gs where gs = - link <|> externalLink <|> exampleInline <|> expr + link + <|> externalLink + <|> exampleInline + <|> expr <|> boldOrItalicOrStrikethrough closing <|> verbatim <|> atDoc @@ -528,7 +536,8 @@ lexemes' eof = link = P.label "link (examples: {type List}, {Nat.+})" $ wrap "syntax.docLink" $ - P.try $ lit "{" *> (typeLink <|> termLink) <* lit "}" + P.try $ + lit "{" *> (typeLink <|> termLink) <* lit "}" expr = P.label "transclusion (examples: {{ doc2 }}, {{ sepBy s [doc1, doc2] }})" $ @@ -587,7 +596,8 @@ lexemes' eof = boldOrItalicOrStrikethrough closing = do let start = - some (P.satisfy (== '*')) <|> some (P.satisfy (== '_')) + some (P.satisfy (== '*')) + <|> some (P.satisfy (== '_')) <|> some (P.satisfy (== '~')) name s = @@ -773,7 +783,24 @@ lexemes' eof = semi = char ';' $> Semi False textual = Textual <$> quoted - quoted = char '"' *> P.manyTill (LP.charLiteral <|> sp) (char '"') + quoted = quotedRaw <|> quotedSingleLine + quotedRaw = do + _ <- lit "\"\"\"" + n <- many (char '"') + _ <- optional (char '\n') -- initial newline is skipped + s <- P.manyTill P.anySingle (lit (replicate (length n + 3) '"')) + 
col0 <- column <$> pos + let col = col0 - (length n) - 3 + let leading = replicate (max 0 (col - 1)) ' ' + -- lines "foo\n" will produce ["foo"] (ignoring last newline), + -- lines' "foo\n" will produce ["foo",""] (preserving trailing newline) + let lines' s = lines s <> (if take 1 (reverse s) == "\n" then [""] else []) + pure $ case lines' s of + [] -> s + ls + | all (\l -> isPrefixOf leading l || all isSpace l) ls -> intercalate "\n" (drop (length leading) <$> ls) + | otherwise -> s + quotedSingleLine = char '"' *> P.manyTill (LP.charLiteral <|> sp) (char '"') where sp = lit "\\s" $> ' ' character = Character <$> (char '?' *> (spEsc <|> LP.charLiteral)) @@ -925,14 +952,21 @@ lexemes' eof = layoutKeywords :: P [Token Lexeme] layoutKeywords = - ifElse <|> withKw <|> openKw "match" <|> openKw "handle" <|> typ <|> arr <|> eq + ifElse + <|> withKw + <|> openKw "match" + <|> openKw "handle" + <|> typ + <|> arr + <|> eq <|> openKw "cases" <|> openKw "where" <|> openKw "let" <|> openKw "do" where ifElse = - openKw "if" <|> closeKw' (Just "then") ["if"] (lit "then") + openKw "if" + <|> closeKw' (Just "then") ["if"] (lit "then") <|> closeKw' (Just "else") ["then"] (lit "else") modKw = typeModifiersAlt (openKw1 wordySep) typeOrAbilityKw = typeOrAbilityAlt openTypeKw1 @@ -1114,10 +1148,11 @@ headToken :: T a -> a headToken (T a _ _) = a headToken (L a) = a -instance Show a => Show (T a) where +instance (Show a) => Show (T a) where show (L a) = show a show (T open mid close) = - show open ++ "\n" + show open + ++ "\n" ++ indent " " (intercalateMap "\n" show mid) ++ "\n" ++ intercalateMap "" show close @@ -1294,14 +1329,14 @@ keywords = typeOrAbility :: Set String typeOrAbility = Set.fromList ["type", "ability"] -typeOrAbilityAlt :: Alternative f => (String -> f a) -> f a +typeOrAbilityAlt :: (Alternative f) => (String -> f a) -> f a typeOrAbilityAlt f = asum $ map f (toList typeOrAbility) typeModifiers :: Set String typeModifiers = Set.fromList ["structural", "unique"] -typeModifiersAlt :: Alternative f => (String -> f a) -> f a +typeModifiersAlt :: (Alternative f) => (String -> f a) -> f a typeModifiersAlt f = asum $ map f (toList typeModifiers) @@ -1329,7 +1364,10 @@ debugLex'' [Token (Err (Opaque msg)) start end] = where msg1 = "Error on line " <> show (line start) <> ", column " <> show (column start) msg2 = - "Error on line " <> show (line start) <> ", column " <> show (column start) + "Error on line " + <> show (line start) + <> ", column " + <> show (column start) <> " - line " <> show (line end) <> ", column " @@ -1373,8 +1411,6 @@ instance P.VisualStream [Token Lexeme] where case showEscapeChar c of Just c -> "?\\" ++ [c] Nothing -> '?' : [c] - pretty (Backticks n h) = - '`' : n ++ (toList h >>= SH.toString) ++ ['`'] pretty (WordyId n h) = n ++ (toList h >>= SH.toString) pretty (SymbolyId n h) = n ++ (toList h >>= SH.toString) pretty (Blank s) = "_" ++ s diff --git a/unison-syntax/src/Unison/Syntax/Name.hs b/unison-syntax/src/Unison/Syntax/Name.hs index 56f154696..052b34f92 100644 --- a/unison-syntax/src/Unison/Syntax/Name.hs +++ b/unison-syntax/src/Unison/Syntax/Name.hs @@ -55,7 +55,7 @@ toText (Name pos (x0 :| xs)) = Relative -> "" -- | Convert a name to a string representation, then parse that as a var. -toVar :: Var v => Name -> v +toVar :: (Var v) => Name -> v toVar = Var.named . 
toText @@ -102,12 +102,12 @@ unsafeFromString = -- -- Performs very minor validation (a name can't be empty, nor contain a '#' character [at least currently?]) but makes -- no attempt at rejecting bogus names like "foo...bar...baz". -unsafeFromText :: HasCallStack => Text -> Name +unsafeFromText :: (HasCallStack) => Text -> Name unsafeFromText = either (error . Text.unpack) id . fromTextEither -- | Unsafely parse a name from a var, by first rendering the var as a string. -- -- See 'unsafeFromText'. -unsafeFromVar :: Var v => v -> Name +unsafeFromVar :: (Var v) => v -> Name unsafeFromVar = unsafeFromText . Var.name diff --git a/unison-syntax/src/Unison/Syntax/Parser.hs b/unison-syntax/src/Unison/Syntax/Parser.hs index 35f22cea5..ae5efe95f 100644 --- a/unison-syntax/src/Unison/Syntax/Parser.hs +++ b/unison-syntax/src/Unison/Syntax/Parser.hs @@ -73,7 +73,7 @@ instance Semigroup UniqueName where instance Monoid UniqueName where mempty = UniqueName (\_ _ -> Nothing) -uniqueBase32Namegen :: forall gen. Random.DRG gen => gen -> UniqueName +uniqueBase32Namegen :: forall gen. (Random.DRG gen) => gen -> UniqueName uniqueBase32Namegen rng = UniqueName $ \pos lenInBase32Hex -> go pos lenInBase32Hex rng where @@ -86,12 +86,12 @@ uniqueBase32Namegen rng = serialize $ VarInt (L.line pos) serialize $ VarInt (L.column pos) h = Hashable.accumulate' $ bytes <> posBytes - b58 = Hash.base32Hex h + b58 = Hash.toBase32HexText h in if Char.isDigit (Text.head b58) then go pos lenInBase32Hex rng else Just . Text.take lenInBase32Hex $ b58 -uniqueName :: Var v => Int -> P v Text +uniqueName :: (Var v) => Int -> P v Text uniqueName lenInBase32Hex = do UniqueName mkName <- asks uniqueNames pos <- L.start <$> P.lookAhead anyToken @@ -108,7 +108,8 @@ data Error v | UnknownType (L.Token (HQ.HashQualified Name)) (Set Reference) | UnknownId (L.Token (HQ.HashQualified Name)) (Set Referent) (Set Reference) | ExpectedBlockOpen String (L.Token L.Lexeme) - | EmptyMatch (L.Token ()) + | -- Indicates a cases or match/with which doesn't have any patterns + EmptyMatch (L.Token ()) | EmptyWatch Ann | UseInvalidPrefixSuffix (Either (L.Token Name) (L.Token Name)) (Maybe [L.Token Name]) | UseEmpty (L.Token String) -- an empty `use` statement @@ -143,13 +144,13 @@ instance Annotated Ann where instance Annotated (L.Token a) where ann (L.Token _ s e) = Ann s e -instance Annotated a => Annotated (ABT.Term f v a) where +instance (Annotated a) => Annotated (ABT.Term f v a) where ann = ann . ABT.annotation -instance Annotated a => Annotated (Pattern a) where +instance (Annotated a) => Annotated (Pattern a) where ann = ann . 
Pattern.loc -instance Annotated a => Annotated [a] where +instance (Annotated a) => Annotated [a] where ann [] = mempty ann (h : t) = foldl' (\acc a -> acc <> ann a) (ann h) t @@ -161,7 +162,7 @@ label = P.label -- label = P.dbg -traceRemainingTokens :: Ord v => String -> P v () +traceRemainingTokens :: (Ord v) => String -> P v () traceRemainingTokens label = do remainingTokens <- lookAhead $ many anyToken let _ = @@ -174,16 +175,16 @@ mkAnn x y = ann x <> ann y tok :: (Ann -> a -> b) -> L.Token a -> b tok f (L.Token a start end) = f (Ann start end) a -peekAny :: Ord v => P v (L.Token L.Lexeme) +peekAny :: (Ord v) => P v (L.Token L.Lexeme) peekAny = P.lookAhead P.anySingle -lookAhead :: Ord v => P v a -> P v a +lookAhead :: (Ord v) => P v a -> P v a lookAhead = P.lookAhead -anyToken :: Ord v => P v (L.Token L.Lexeme) +anyToken :: (Ord v) => P v (L.Token L.Lexeme) anyToken = P.anySingle -failCommitted :: Ord v => Error v -> P v x +failCommitted :: (Ord v) => Error v -> P v x failCommitted e = do void anyToken <|> void P.eof P.customFailure e @@ -191,13 +192,13 @@ failCommitted e = do proxy :: Proxy Input proxy = Proxy -root :: Ord v => P v a -> P v a +root :: (Ord v) => P v a -> P v a root p = (openBlock *> p) <* closeBlock <* P.eof -rootFile :: Ord v => P v a -> P v a +rootFile :: (Ord v) => P v a -> P v a rootFile p = p <* P.eof -run' :: Ord v => P v a -> String -> String -> ParsingEnv -> Either (Err v) a +run' :: (Ord v) => P v a -> String -> String -> ParsingEnv -> Either (Err v) a run' p s name env = let lex = if debug @@ -208,101 +209,101 @@ run' p s name env = Left err -> Left (Nel.head (P.bundleErrors err)) Right x -> Right x -run :: Ord v => P v a -> String -> ParsingEnv -> Either (Err v) a +run :: (Ord v) => P v a -> String -> ParsingEnv -> Either (Err v) a run p s = run' p s "" -- Virtual pattern match on a lexeme. -queryToken :: Ord v => (L.Lexeme -> Maybe a) -> P v (L.Token a) +queryToken :: (Ord v) => (L.Lexeme -> Maybe a) -> P v (L.Token a) queryToken f = P.token (traverse f) Set.empty -- Consume a block opening and return the string that opens the block. -openBlock :: Ord v => P v (L.Token String) +openBlock :: (Ord v) => P v (L.Token String) openBlock = queryToken getOpen where getOpen (L.Open s) = Just s getOpen _ = Nothing -openBlockWith :: Ord v => String -> P v (L.Token ()) +openBlockWith :: (Ord v) => String -> P v (L.Token ()) openBlockWith s = void <$> P.satisfy ((L.Open s ==) . L.payload) -- Match a particular lexeme exactly, and consume it. -matchToken :: Ord v => L.Lexeme -> P v (L.Token L.Lexeme) +matchToken :: (Ord v) => L.Lexeme -> P v (L.Token L.Lexeme) matchToken x = P.satisfy ((==) x . L.payload) -- The package name that refers to the root, literally just `.` -importDotId :: Ord v => P v (L.Token Name) +importDotId :: (Ord v) => P v (L.Token Name) importDotId = queryToken go where go (L.SymbolyId "." Nothing) = Just (Name.unsafeFromString ".") go _ = Nothing -- Consume a virtual semicolon -semi :: Ord v => P v (L.Token ()) -semi = queryToken go +semi :: (Ord v) => P v (L.Token ()) +semi = label "newline or semicolon" $ queryToken go where go (L.Semi _) = Just () go _ = Nothing -- Consume the end of a block -closeBlock :: Ord v => P v (L.Token ()) +closeBlock :: (Ord v) => P v (L.Token ()) closeBlock = void <$> matchToken L.Close -wordyPatternName :: Var v => P v (L.Token v) +wordyPatternName :: (Var v) => P v (L.Token v) wordyPatternName = queryToken $ \case L.WordyId s Nothing -> Just $ Var.nameds s _ -> Nothing -- Parse an prefix identifier e.g. 
Foo or (+), discarding any hash -prefixDefinitionName :: Var v => P v (L.Token v) +prefixDefinitionName :: (Var v) => P v (L.Token v) prefixDefinitionName = wordyDefinitionName <|> parenthesize symbolyDefinitionName -- Parse a wordy identifier e.g. Foo, discarding any hash -wordyDefinitionName :: Var v => P v (L.Token v) +wordyDefinitionName :: (Var v) => P v (L.Token v) wordyDefinitionName = queryToken $ \case L.WordyId s _ -> Just $ Var.nameds s L.Blank s -> Just $ Var.nameds ("_" <> s) _ -> Nothing -- Parse a wordyId as a String, rejecting any hash -wordyIdString :: Ord v => P v (L.Token String) +wordyIdString :: (Ord v) => P v (L.Token String) wordyIdString = queryToken $ \case L.WordyId s Nothing -> Just s _ -> Nothing -- Parse a wordyId as a Name, rejecting any hash -importWordyId :: Ord v => P v (L.Token Name) +importWordyId :: (Ord v) => P v (L.Token Name) importWordyId = (fmap . fmap) Name.unsafeFromString wordyIdString -- The `+` in: use Foo.bar + as a Name -importSymbolyId :: Ord v => P v (L.Token Name) +importSymbolyId :: (Ord v) => P v (L.Token Name) importSymbolyId = (fmap . fmap) Name.unsafeFromString symbolyIdString -- Parse a symbolyId as a String, rejecting any hash -symbolyIdString :: Ord v => P v (L.Token String) +symbolyIdString :: (Ord v) => P v (L.Token String) symbolyIdString = queryToken $ \case L.SymbolyId s Nothing -> Just s _ -> Nothing -- Parse an infix id e.g. + or Docs.++, discarding any hash -infixDefinitionName :: Var v => P v (L.Token v) +infixDefinitionName :: (Var v) => P v (L.Token v) infixDefinitionName = symbolyDefinitionName -- Parse a symboly ID like >>= or &&, discarding any hash -symbolyDefinitionName :: Var v => P v (L.Token v) +symbolyDefinitionName :: (Var v) => P v (L.Token v) symbolyDefinitionName = queryToken $ \case L.SymbolyId s _ -> Just $ Var.nameds s _ -> Nothing -parenthesize :: Ord v => P v a -> P v a +parenthesize :: (Ord v) => P v a -> P v a parenthesize p = P.try (openBlockWith "(" *> p) <* closeBlock -hqPrefixId, hqInfixId :: Ord v => P v (L.Token (HQ.HashQualified Name)) +hqPrefixId, hqInfixId :: (Ord v) => P v (L.Token (HQ.HashQualified Name)) hqPrefixId = hqWordyId_ <|> parenthesize hqSymbolyId_ hqInfixId = hqSymbolyId_ -- Parse a hash-qualified alphanumeric identifier -hqWordyId_ :: Ord v => P v (L.Token (HQ.HashQualified Name)) +hqWordyId_ :: (Ord v) => P v (L.Token (HQ.HashQualified Name)) hqWordyId_ = queryToken $ \case L.WordyId "" (Just h) -> Just $ HQ.HashOnly h L.WordyId s (Just h) -> Just $ HQ.HashQualified (Name.unsafeFromString s) h @@ -312,7 +313,7 @@ hqWordyId_ = queryToken $ \case _ -> Nothing -- Parse a hash-qualified symboly ID like >>=#foo or && -hqSymbolyId_ :: Ord v => P v (L.Token (HQ.HashQualified Name)) +hqSymbolyId_ :: (Ord v) => P v (L.Token (HQ.HashQualified Name)) hqSymbolyId_ = queryToken $ \case L.SymbolyId "" (Just h) -> Just $ HQ.HashOnly h L.SymbolyId s (Just h) -> Just $ HQ.HashQualified (Name.unsafeFromString s) h @@ -320,62 +321,62 @@ hqSymbolyId_ = queryToken $ \case _ -> Nothing -- Parse a reserved word -reserved :: Ord v => String -> P v (L.Token String) +reserved :: (Ord v) => String -> P v (L.Token String) reserved w = label w $ queryToken getReserved where getReserved (L.Reserved w') | w == w' = Just w getReserved _ = Nothing -- Parse a placeholder or typed hole -blank :: Ord v => P v (L.Token String) +blank :: (Ord v) => P v (L.Token String) blank = label "blank" $ queryToken getBlank where getBlank (L.Blank s) = Just ('_' : s) getBlank _ = Nothing -numeric :: Ord v => P v (L.Token 
String) +numeric :: (Ord v) => P v (L.Token String) numeric = queryToken getNumeric where getNumeric (L.Numeric s) = Just s getNumeric _ = Nothing -bytesToken :: Ord v => P v (L.Token Bytes) +bytesToken :: (Ord v) => P v (L.Token Bytes) bytesToken = queryToken getBytes where getBytes (L.Bytes bs) = Just bs getBytes _ = Nothing -sepBy :: Ord v => P v a -> P v b -> P v [b] +sepBy :: (Ord v) => P v a -> P v b -> P v [b] sepBy sep pb = P.sepBy pb sep -sepBy1 :: Ord v => P v a -> P v b -> P v [b] +sepBy1 :: (Ord v) => P v a -> P v b -> P v [b] sepBy1 sep pb = P.sepBy1 pb sep -sepEndBy :: Ord v => P v a -> P v b -> P v [b] +sepEndBy :: (Ord v) => P v a -> P v b -> P v [b] sepEndBy sep pb = P.sepEndBy pb sep -character :: Ord v => P v (L.Token Char) +character :: (Ord v) => P v (L.Token Char) character = queryToken getChar where getChar (L.Character c) = Just c getChar _ = Nothing -string :: Ord v => P v (L.Token Text) +string :: (Ord v) => P v (L.Token Text) string = queryToken getString where getString (L.Textual s) = Just (Text.pack s) getString _ = Nothing -tupleOrParenthesized :: Ord v => P v a -> (Ann -> a) -> (a -> a -> a) -> P v a +tupleOrParenthesized :: (Ord v) => P v a -> (Ann -> a) -> (a -> a -> a) -> P v a tupleOrParenthesized p unit pair = seq' "(" go p where go _ [t] = t go a xs = foldr pair (unit a) xs -seq :: Ord v => (Ann -> [a] -> a) -> P v a -> P v a +seq :: (Ord v) => (Ann -> [a] -> a) -> P v a -> P v a seq = seq' "[" -seq' :: Ord v => String -> (Ann -> [a] -> a) -> P v a -> P v a +seq' :: (Ord v) => String -> (Ann -> [a] -> a) -> P v a -> P v a seq' openStr f p = do open <- openBlockWith openStr <* redundant es <- sepEndBy (P.try $ optional semi *> reserved "," <* redundant) p @@ -385,19 +386,19 @@ seq' openStr f p = do go open elems close = f (ann open <> ann close) elems redundant = P.skipMany (P.eitherP (reserved ",") semi) -chainr1 :: Ord v => P v a -> P v (a -> a -> a) -> P v a +chainr1 :: (Ord v) => P v a -> P v (a -> a -> a) -> P v a chainr1 p op = go1 where go1 = p >>= go2 go2 hd = do { op <- op; op hd <$> go1 } <|> pure hd -- Parse `p` 1+ times, combining with `op` -chainl1 :: Ord v => P v a -> P v (a -> a -> a) -> P v a +chainl1 :: (Ord v) => P v a -> P v (a -> a -> a) -> P v a chainl1 p op = foldl (flip ($)) <$> p <*> P.many (flip <$> op <*> p) -- If `p` would succeed, this fails uncommitted. -- Otherwise, `failIfOk` used to produce the output -failureIf :: Ord v => P v (P v b) -> P v a -> P v b +failureIf :: (Ord v) => P v (P v b) -> P v a -> P v b failureIf failIfOk p = do dontwant <- P.try . 
P.lookAhead $ failIfOk p <- P.try $ P.lookAhead (optional p) diff --git a/unison-syntax/unison-syntax.cabal b/unison-syntax/unison-syntax.cabal index 65add9b98..5fbdadd76 100644 --- a/unison-syntax/unison-syntax.cabal +++ b/unison-syntax/unison-syntax.cabal @@ -68,6 +68,7 @@ library , text , unison-core , unison-core1 + , unison-hash , unison-prelude , unison-util-base32hex , unison-util-bytes @@ -121,6 +122,7 @@ test-suite syntax-tests , text , unison-core , unison-core1 + , unison-hash , unison-prelude , unison-syntax , unison-util-base32hex diff --git a/yaks/easytest/src/EasyTest.hs b/yaks/easytest/src/EasyTest.hs index da89129f8..c277ac178 100644 --- a/yaks/easytest/src/EasyTest.hs +++ b/yaks/easytest/src/EasyTest.hs @@ -54,11 +54,11 @@ atomicLogger = do let dummy = foldl' (\_ ch -> ch == 'a') True msg in dummy `seq` bracket (takeMVar lock) (\_ -> putMVar lock ()) (\_ -> putStrLn msg) -expect' :: HasCallStack => Bool -> Test () +expect' :: (HasCallStack) => Bool -> Test () expect' False = crash "unexpected" expect' True = pure () -expect :: HasCallStack => Bool -> Test () +expect :: (HasCallStack) => Bool -> Test () expect False = crash "unexpected" expect True = ok @@ -80,15 +80,15 @@ expectNotEqual forbidden actual = then ok else crash $ unlines ["", show actual, "** did equal the forbidden value **", show forbidden] -expectJust :: HasCallStack => Maybe a -> Test a +expectJust :: (HasCallStack) => Maybe a -> Test a expectJust Nothing = crash "expected Just, got Nothing" expectJust (Just a) = ok >> pure a -expectRight :: HasCallStack => Either e a -> Test a +expectRight :: (HasCallStack) => Either e a -> Test a expectRight (Left _) = crash "expected Right, got Left" expectRight (Right a) = ok >> pure a -expectLeft :: HasCallStack => Either e a -> Test e +expectLeft :: (HasCallStack) => Either e a -> Test e expectLeft (Left e) = ok >> pure e expectLeft (Right _) = crash "expected Left, got Right" @@ -197,11 +197,11 @@ note msg = do pure () -- | Log a showable value -note' :: Show s => s -> Test () +note' :: (Show s) => s -> Test () note' = note . show -- | Generate a random value -random :: Random a => Test a +random :: (Random a) => Test a random = do rng <- asks rng liftIO . atomically $ do @@ -211,7 +211,7 @@ random = do pure a -- | Generate a bounded random value. Inclusive on both sides. -random' :: Random a => a -> a -> Test a +random' :: (Random a) => a -> a -> Test a random' lower upper = do rng <- asks rng liftIO . atomically $ do @@ -318,11 +318,11 @@ tuple4 = (,,,) <$> random <*> random <*> random <*> random -- | Generate a `Data.Map k v` of the given size. -mapOf :: Ord k => Int -> Test k -> Test v -> Test (Map k v) +mapOf :: (Ord k) => Int -> Test k -> Test v -> Test (Map k v) mapOf n k v = Map.fromList <$> listOf n (pair k v) -- | Generate a `[Data.Map k v]` of the given sizes. 
-mapsOf :: Ord k => [Int] -> Test k -> Test v -> Test [Map k v] +mapsOf :: (Ord k) => [Int] -> Test k -> Test v -> Test [Map k v] mapsOf sizes k v = sizes `forM` \n -> mapOf n k v -- | Catch all exceptions that could occur in the given `Test` @@ -374,14 +374,14 @@ skip :: Test () skip = Test (Nothing <$ putResult Skipped) -- | Record a failure at the current scope -crash :: HasCallStack => String -> Test a +crash :: (HasCallStack) => String -> Test a crash msg = do let trace = callStack msg' = msg ++ " " ++ prettyCallStack trace Test (Just <$> putResult Failed) >> noteScoped ("FAILURE " ++ msg') >> Test (pure Nothing) -- | Overwrites the env so that note_ (the logger) is a no op -nologging :: HasCallStack => Test a -> Test a +nologging :: (HasCallStack) => Test a -> Test a nologging (Test t) = Test $ do env <- ask liftIO $ runWrap (env {note_ = \_ -> pure ()}) t @@ -396,7 +396,7 @@ attempt (Test t) = nologging $ do -- | Placeholder wrapper for a failing test. The test being wrapped is expected/known to fail. -- Will produce a failure if the test being wrapped suddenly becomes a success. -pending :: HasCallStack => Test a -> Test a +pending :: (HasCallStack) => Test a -> Test a pending test = do m <- attempt test case m of