⅄ trunk → 23-06-26-reuse-unique-type-guids

Mitchell Rosen 2023-07-11 14:43:56 -04:00
commit e7535ab704
112 changed files with 2744 additions and 1699 deletions

View File

@ -1,7 +1,5 @@
**Choose your PR title well:** Your pull request title is what's used to create release notes, so please make it descriptive of the change itself, which may be different from the initial motivation to make the change.
Note: CI will check that all code has been formatted with Ormolu. See [development.markdown](https://github.com/unisonweb/unison/blob/trunk/development.markdown) for details of how to set this up.
## Overview
What does this change accomplish and why?
@ -18,12 +16,12 @@ How does it accomplish it, in broad strokes? i.e. How does it change the Haskell
## Interesting/controversial decisions
Include anything that you thought twice about, debated, chose arbitrarily, etc.
What could have been done differently, but wasn't? And why?
## Test coverage
Have you included tests (which could be a transcript) for this change, or is it somehow covered by existing tests?
Would you recommend improving the test coverage (either as part of this PR or as a separate issue), or do you think it's adequate?

View File

@ -22,11 +22,25 @@ jobs:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v37
with:
path: unison
- uses: mrkkrp/ormolu-action@v11
# globs copied from default settings for run-ormolu
files: |
**/*.hs
**/*.hs-boot
separator: "\n"
- uses: haskell-actions/run-ormolu@v14
with:
version: "0.5.0.1"
mode: inplace
pattern: ${{ steps.changed-files.outputs.all_changed_files }}
- name: apply formatting changes
uses: stefanzweifel/git-auto-commit-action@v4
if: ${{ always() }}
with:
commit_message: automatically run ormolu
build:
name: ${{ matrix.os }}
@ -34,7 +48,6 @@ jobs:
needs: ormolu
defaults:
run:
working-directory: unison
shell: bash
strategy:
# Run each build to completion, regardless of if any have failed
@ -47,8 +60,6 @@ jobs:
- windows-2019
steps:
- uses: actions/checkout@v2
with:
path: unison
# The number towards the beginning of the cache keys allows you to manually avoid using a previous cache.
# GitHub will automatically delete caches that haven't been accessed in 7 days, but there is no way to
@ -121,14 +132,14 @@ jobs:
# The installation process differs by OS.
- name: install stack (Linux)
if: runner.os == 'Linux'
working-directory: ${{ github.workspace }}
working-directory: ${{ runner.temp }}
run: |
mkdir stack && cd stack
curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: install stack (macOS)
working-directory: ${{ github.workspace }}
working-directory: ${{ runner.temp }}
if: runner.os == 'macOS'
run: |
mkdir stack && cd stack
@ -136,7 +147,7 @@ jobs:
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: install stack (windows)
working-directory: ${{ github.workspace }}
working-directory: ${{ runner.temp }}
if: runner.os == 'Windows'
run: |
mkdir stack && cd stack
@ -189,8 +200,14 @@ jobs:
run: stack --no-terminal build --fast --no-run-tests --test
# Run each test suite (tests and transcripts)
- name: check disk space before
if: ${{ always() }}
run: df -h
- name: unison-cli test
run: stack --no-terminal build --fast --test unison-cli
- name: check disk space after
if: ${{ always() }}
run: df -h
- name: unison-core tests
run: stack --no-terminal build --fast --test unison-core
- name: unison-parser-typechecker tests
@ -247,8 +264,8 @@ jobs:
if: runner.os == 'Linux'
with:
path: ~/.cache/unisonlanguage/base.unison
key: base.unison_${{hashFiles('**/unison-src/builtin-tests/base.md')}}-${{github.sha}}
restore-keys: base.unison_${{hashFiles('**/unison-src/builtin-tests/base.md')}}-
key: base.unison_${{hashFiles('**/unison-src/builtin-tests/base.md','**/unison-cli/src/Unison/JitInfo.hs')}}-${{github.sha}}
restore-keys: base.unison_${{hashFiles('**/unison-src/builtin-tests/base.md','**/unison-cli/src/Unison/JitInfo.hs')}}-
- name: set up `base` codebase
if: runner.os == 'Linux'

View File

@ -75,3 +75,4 @@ The format for this list: name, GitHub handle
* Andrii Uvarov (@unorsk)
* Mario Bašić (@mabasic)
* Chris Krycho (@chriskrycho)
* Hatim Khambati (@hatimkhambati26)

View File

@ -27,8 +27,6 @@ library:
source-dirs: src
exposed-modules:
- U.Codebase.Sqlite.V2.HashHandle
- U.Codebase.Branch.Hashing
- U.Codebase.Causal.Hashing
when:
- condition: false
other-modules: Paths_unison_codebase_sqlite_hashing_v2

View File

@ -5,8 +5,8 @@ import Data.Set qualified as Set
import U.Codebase.HashTags (BranchHash (..), CausalHash (..))
import Unison.Hashing.V2 qualified as Hashing
hashCausal :: Set CausalHash -> BranchHash -> CausalHash
hashCausal ancestors branchHash =
hashCausal :: BranchHash -> Set CausalHash -> CausalHash
hashCausal branchHash ancestors =
CausalHash . Hashing.contentHash $
Hashing.Causal
{ Hashing.branchHash = unBranchHash branchHash,

View File

@ -4,6 +4,8 @@ module U.Codebase.Sqlite.V2.HashHandle
where
import Data.Set qualified as Set
import U.Codebase.Branch.Hashing qualified as H2
import U.Codebase.Causal.Hashing qualified as H2
import U.Codebase.Sqlite.HashHandle
import U.Util.Type (removeAllEffectVars)
import Unison.Hashing.V2 qualified as H2
@ -15,5 +17,7 @@ v2HashHandle =
{ toReference = h2ToV2Reference . H2.typeToReference . v2ToH2Type . removeAllEffectVars,
toReferenceMentions = Set.map h2ToV2Reference . H2.typeToReferenceMentions . v2ToH2Type . removeAllEffectVars,
toReferenceDecl = \h -> h2ToV2Reference . H2.typeToReference . v2ToH2TypeD h . removeAllEffectVars,
toReferenceDeclMentions = \h -> Set.map h2ToV2Reference . H2.typeToReferenceMentions . v2ToH2TypeD h . removeAllEffectVars
toReferenceDeclMentions = \h -> Set.map h2ToV2Reference . H2.typeToReferenceMentions . v2ToH2TypeD h . removeAllEffectVars,
hashBranch = H2.hashBranch,
hashCausal = H2.hashCausal
}

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.1.
-- This file has been generated from package.yaml by hpack version 0.35.2.
--
-- see: https://github.com/sol/hpack
@ -18,9 +18,9 @@ source-repository head
library
exposed-modules:
U.Codebase.Sqlite.V2.HashHandle
other-modules:
U.Codebase.Branch.Hashing
U.Codebase.Causal.Hashing
other-modules:
Unison.Hashing.V2.Convert2
hs-source-dirs:
src

View File

@ -0,0 +1,31 @@
module U.Codebase.Causal.Squash (squashCausal) where
import U.Codebase.Branch.Type
import U.Codebase.Causal (Causal (..))
import U.Codebase.Sqlite.HashHandle qualified as HH
import U.Codebase.Sqlite.Operations qualified as SqliteOps
import Unison.Prelude
import Unison.Sqlite qualified as Sqlite
-- Recursively discards history, resulting in a namespace tree with only a single
-- Causal node at every level.
squashCausal :: HH.HashHandle -> CausalBranch Sqlite.Transaction -> Sqlite.Transaction (CausalBranch Sqlite.Transaction)
squashCausal hashHandle@HH.HashHandle {hashCausal, hashBranch} Causal {valueHash = unsquashedBranchHash, value} = do
runMaybeT (MaybeT (SqliteOps.tryGetSquashResult unsquashedBranchHash) >>= MaybeT . SqliteOps.loadCausalBranchByCausalHash) >>= \case
Just cb -> pure cb
Nothing -> do
branch@Branch {children} <- value
squashedChildren <- traverse (squashCausal hashHandle) children
let squashedBranchHead = branch {children = squashedChildren}
squashedBranchHash <- hashBranch squashedBranchHead
let squashedCausalHash = hashCausal squashedBranchHash mempty
let squashedCausalBranch =
Causal
{ causalHash = squashedCausalHash,
valueHash = squashedBranchHash,
parents = mempty,
value = pure squashedBranchHead
}
SqliteOps.saveBranch hashHandle squashedCausalBranch
SqliteOps.saveSquashResult unsquashedBranchHash squashedCausalHash
pure squashedCausalBranch
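
For orientation, here is a minimal sketch of how `squashCausal` might be invoked with the sqlite v2 hash handle defined elsewhere in this commit; `squashRoot` is an illustrative name, not a function added by this change, and the imports assume the modules shown here export these definitions.

```
-- Hypothetical caller (illustrative only): squash a loaded causal branch using
-- the sqlite v2 hash handle, all within a single Sqlite transaction.
import U.Codebase.Branch.Type (CausalBranch)
import U.Codebase.Causal.Squash (squashCausal)
import U.Codebase.Sqlite.V2.HashHandle (v2HashHandle)
import Unison.Sqlite qualified as Sqlite

squashRoot :: CausalBranch Sqlite.Transaction -> Sqlite.Transaction (CausalBranch Sqlite.Transaction)
squashRoot = squashCausal v2HashHandle
```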

View File

@ -3,6 +3,8 @@ module U.Codebase.Sqlite.HashHandle
)
where
import U.Codebase.Branch.Type (Branch)
import U.Codebase.HashTags
import U.Codebase.Reference qualified as C
import U.Codebase.Sqlite.Symbol (Symbol)
import U.Codebase.Term qualified as C.Term
@ -18,5 +20,12 @@ data HashHandle = HashHandle
-- | Hash the type of a single constructor in a decl component. The provided hash argument is the hash of the decl component.
toReferenceDecl :: Hash -> C.Type.TypeD Symbol -> C.Reference,
-- | Hash decl's mentions
toReferenceDeclMentions :: Hash -> C.Type.TypeD Symbol -> Set C.Reference
toReferenceDeclMentions :: Hash -> C.Type.TypeD Symbol -> Set C.Reference,
hashBranch :: forall m. Monad m => Branch m -> m BranchHash,
hashCausal ::
-- The causal's namespace hash
BranchHash ->
-- The causal's parents
Set CausalHash ->
CausalHash
}
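
A rough sketch of how the two new fields are intended to compose, mirroring the squashing code above: hash the branch first, then hash a parentless causal over it. `hashSquashed` is an illustrative name, and the field-pun pattern assumes the record's fields are exported.

```
-- Illustrative only: produce the causal hash of a history-free causal over a
-- branch, using the two new HashHandle fields (requires NamedFieldPuns).
import Data.Set qualified as Set
import U.Codebase.Branch.Type (Branch)
import U.Codebase.HashTags (CausalHash)
import U.Codebase.Sqlite.HashHandle (HashHandle (..))

hashSquashed :: Monad m => HashHandle -> Branch m -> m CausalHash
hashSquashed HashHandle {hashBranch, hashCausal} branch = do
  branchHash <- hashBranch branch
  pure (hashCausal branchHash Set.empty)
```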

View File

@ -14,6 +14,8 @@ module U.Codebase.Sqlite.Operations
expectBranchByBranchHashId,
expectNamespaceStatsByHash,
expectNamespaceStatsByHashId,
tryGetSquashResult,
saveSquashResult,
-- * terms
Q.saveTermComponent,
@ -1404,6 +1406,21 @@ deleteNameLookupsExceptFor reachable = do
bhIds <- for (Set.toList reachable) Q.expectBranchHashId
Q.deleteNameLookupsExceptFor bhIds
-- | Get the causal hash which would be the result of squashing the provided branch hash.
-- Returns Nothing if we haven't computed it before.
tryGetSquashResult :: BranchHash -> Transaction (Maybe CausalHash)
tryGetSquashResult bh = do
bhId <- Q.expectBranchHashId bh
chId <- Q.tryGetSquashResult bhId
traverse Q.expectCausalHash chId
-- | Saves the result of a squash
saveSquashResult :: BranchHash -> CausalHash -> Transaction ()
saveSquashResult bh ch = do
bhId <- Q.expectBranchHashId bh
chId <- Q.saveCausalHash ch
Q.saveSquashResult bhId chId
-- | Search for term or type names which contain the provided list of segments in order.
-- Search is case insensitive.
fuzzySearchDefinitions ::

View File

@ -86,6 +86,8 @@ module U.Codebase.Sqlite.Queries
loadBranchObjectIdByBranchHashId,
expectBranchObjectIdByCausalHashId,
expectBranchObjectIdByBranchHashId,
tryGetSquashResult,
saveSquashResult,
-- ** causal_parent table
saveCausalParents,
@ -234,6 +236,8 @@ module U.Codebase.Sqlite.Queries
fixScopedNameLookupTables,
addNameLookupMountTables,
addMostRecentNamespaceTable,
addSquashResultTable,
addSquashResultTableIfNotExists,
-- ** schema version
currentSchemaVersion,
@ -387,11 +391,11 @@ type TextPathSegments = [Text]
-- * main squeeze
currentSchemaVersion :: SchemaVersion
currentSchemaVersion = 13
currentSchemaVersion = 15
createSchema :: Transaction ()
createSchema = do
executeStatements (Text.pack [hereFile|unison/sql/create.sql|])
executeStatements [hereFile|unison/sql/create.sql|]
addTempEntityTables
addNamespaceStatsTables
addReflogTable
@ -401,6 +405,7 @@ createSchema = do
addNameLookupMountTables
addMostRecentNamespaceTable
execute insertSchemaVersionSql
addSquashResultTable
where
insertSchemaVersionSql =
[sql|
@ -410,35 +415,45 @@ createSchema = do
addTempEntityTables :: Transaction ()
addTempEntityTables =
executeStatements (Text.pack [hereFile|unison/sql/001-temp-entity-tables.sql|])
executeStatements [hereFile|unison/sql/001-temp-entity-tables.sql|]
addNamespaceStatsTables :: Transaction ()
addNamespaceStatsTables =
executeStatements (Text.pack [hereFile|unison/sql/003-namespace-statistics.sql|])
executeStatements [hereFile|unison/sql/003-namespace-statistics.sql|]
addReflogTable :: Transaction ()
addReflogTable =
executeStatements (Text.pack [hereFile|unison/sql/002-reflog-table.sql|])
executeStatements [hereFile|unison/sql/002-reflog-table.sql|]
fixScopedNameLookupTables :: Transaction ()
fixScopedNameLookupTables =
executeStatements (Text.pack [hereFile|unison/sql/004-fix-scoped-name-lookup-tables.sql|])
executeStatements [hereFile|unison/sql/004-fix-scoped-name-lookup-tables.sql|]
addProjectTables :: Transaction ()
addProjectTables =
executeStatements (Text.pack [hereFile|unison/sql/005-project-tables.sql|])
executeStatements [hereFile|unison/sql/005-project-tables.sql|]
addMostRecentBranchTable :: Transaction ()
addMostRecentBranchTable =
executeStatements (Text.pack [hereFile|unison/sql/006-most-recent-branch-table.sql|])
executeStatements [hereFile|unison/sql/006-most-recent-branch-table.sql|]
addNameLookupMountTables :: Transaction ()
addNameLookupMountTables =
executeStatements (Text.pack [hereFile|unison/sql/007-add-name-lookup-mounts.sql|])
executeStatements [hereFile|unison/sql/007-add-name-lookup-mounts.sql|]
addMostRecentNamespaceTable :: Transaction ()
addMostRecentNamespaceTable =
executeStatements (Text.pack [hereFile|unison/sql/008-add-most-recent-namespace-table.sql|])
executeStatements [hereFile|unison/sql/008-add-most-recent-namespace-table.sql|]
addSquashResultTable :: Transaction ()
addSquashResultTable =
executeStatements [hereFile|unison/sql/009-add-squash-cache-table.sql|]
-- | Added as a fix because 'addSquashResultTable' was missed in the createSchema action
-- for a portion of time.
addSquashResultTableIfNotExists :: Transaction ()
addSquashResultTableIfNotExists =
executeStatements [hereFile|unison/sql/010-ensure-squash-cache-table.sql|]
schemaVersion :: Transaction SchemaVersion
schemaVersion =
@ -2219,9 +2234,9 @@ termNamesForRefWithinNamespace bhId namespaceRoot ref maySuffix = do
[sql|
$transitive_dependency_mounts
SELECT (reversed_name || reversed_mount_path) AS reversed_name
FROM all_in_scope_roots
FROM transitive_dependency_mounts
INNER JOIN scoped_term_name_lookup
ON scoped_term_name_lookup.root_branch_hash_id = all_in_scope_roots.root_branch_hash_id
ON scoped_term_name_lookup.root_branch_hash_id = transitive_dependency_mounts.root_branch_hash_id
WHERE referent_builtin IS @ref AND referent_component_hash IS @ AND referent_component_index IS @ AND referent_constructor_index IS @
AND reversed_name GLOB :suffixGlob
LIMIT 1
@ -4036,3 +4051,32 @@ setMostRecentNamespace namespace =
json :: Text
json =
Text.Lazy.toStrict (Aeson.encodeToLazyText namespace)
-- | Get the causal hash result from squashing the provided branch hash if we've squashed it
-- at some point in the past.
tryGetSquashResult :: BranchHashId -> Transaction (Maybe CausalHashId)
tryGetSquashResult bhId = do
queryMaybeCol
[sql|
SELECT
squashed_causal_hash_id
FROM
squash_results
WHERE
branch_hash_id = :bhId
|]
-- | Save the result of running a squash on the provided branch hash id.
saveSquashResult :: BranchHashId -> CausalHashId -> Transaction ()
saveSquashResult bhId chId =
execute
[sql|
INSERT INTO squash_results (
branch_hash_id,
squashed_causal_hash_id)
VALUES (
:bhId,
:chId
)
ON CONFLICT DO NOTHING
|]

View File

@ -0,0 +1,10 @@
-- A table for tracking the results of squashes we've performed.
-- This is used to avoid re-squashing the same branch multiple times.
CREATE TABLE "squash_results" (
-- The branch hash of the namespace to be squashed.
-- There should only ever be one result for each unsquashed value hash.
branch_hash_id INTEGER PRIMARY KEY NOT NULL REFERENCES hash(id),
-- The causal hash id which is the result of squashing the 'branch_hash_id's causal.
squashed_causal_hash_id INTEGER NOT NULL REFERENCES causal(self_hash_id)
) WITHOUT ROWID;

View File

@ -0,0 +1,14 @@
-- NOTE: This table was added in 009-add-squash-cache-table.sql, but was mistakenly
-- not added to the 'createSchema' action, so any new codebases created on version 14
-- will be missing it and we need to add it to them.
-- A table for tracking the results of squashes we've performed.
-- This is used to avoid re-squashing the same branch multiple times.
CREATE TABLE IF NOT EXISTS "squash_results" (
-- The branch hash of the namespace to be squashed.
-- There should only ever be one result for each unsquashed value hash.
branch_hash_id INTEGER PRIMARY KEY NOT NULL REFERENCES hash(id),
-- The causal hash id which is the result of squashing the 'branch_hash_id's causal.
squashed_causal_hash_id INTEGER NOT NULL REFERENCES causal(self_hash_id)
) WITHOUT ROWID;

View File

@ -18,6 +18,8 @@ extra-source-files:
sql/006-most-recent-branch-table.sql
sql/007-add-name-lookup-mounts.sql
sql/008-add-most-recent-namespace-table.sql
sql/009-add-squash-cache-table.sql
sql/010-ensure-squash-cache-table.sql
sql/create.sql
source-repository head
@ -27,6 +29,7 @@ source-repository head
library
exposed-modules:
U.Codebase.Branch
U.Codebase.Causal.Squash
U.Codebase.Sqlite.Branch.Diff
U.Codebase.Sqlite.Branch.Format
U.Codebase.Sqlite.Branch.Full

View File

@ -18,34 +18,7 @@ On startup, Unison prints a url for the codebase UI. If you did step 3 above, th
## Autoformatting your code with Ormolu
We use version 0.5.0.1 of Ormolu, and CI will fail if your code isn't properly formatted.
```
ghcup install ghc 9.2.7 # if not already installed
ghcup install cabal # if not already installed
cabal unpack ormolu-0.5.0.1
cd ormolu-0.5.0.1
cabal install -w ghc-9.2.7
```
You can then add the following to `.git/hooks/pre-commit` to make sure all your commits get formatted:
```
#!/bin/bash
set -e
if [[ -z "${SKIP_FORMATTING}" ]]; then
ormolu -i $(git diff --cached --name-only | grep '\.hs$')
git add $(git diff --cached --name-only)
fi
```
If you've got an existing PR that somehow hasn't been formatted correctly, you can install the correct version of Ormolu locally, then do:
```
ormolu -i $(git ls-files | grep '\.hs$')
```
We use version 0.5.0.1 of Ormolu, and CI will add an extra commit, if needed, to autoformat your code.
Also note that you can always wrap a comment around some code you don't want Ormolu to touch, using:
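
A sketch of what that looks like, using Ormolu's standard disable/enable magic comments (the definition between them is just an example):

```
{- ORMOLU_DISABLE -}
-- Hand-formatted code between these magic comments is left alone by Ormolu.
lookupTable = [ (1, "one")
              , (2, "two") ]
{- ORMOLU_ENABLE -}
```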
@ -137,3 +110,80 @@ More context at: https://stackoverflow.com/a/59761201/310162
### I get an error about `removeDirectoryRecursive`/`removeContentsRecursive`/`removePathRecursive`/`permission denied (Access is denied.)`
Stack doesn't work deterministically in Windows due to mismatched expectations about how file deletion works. If you get this error, you can just retry the build and it will probably make more progress than the last time.
## Building with Nix
## Building package components with Nix
### Build the unison executable
```
nix build
```
### Build a specific component
This is specified with the normal
`<package>:<component-type>:<component-name>` triple.
Some examples:
```
nix build '.#unison-cli:lib:unison-cli'
nix build '.#unison-syntax:test:syntax-tests'
nix build '.#unison-cli:exe:transcripts'
```
### Development environments
#### Get into a development environment for building with stack
This gets you into a development environment with the preferred
versions of the compiler and other development tools. These
include:
- ghc
- stack
- ormolu
- haskell-language-server
```
nix develop
```
#### Get into a development environment for building with cabal
This gets you into a development environment with the preferred
versions of the compiler and other development tools. Additionally,
all non-local haskell dependencies (including profiling dependencies)
are provided in the nix shell.
```
nix develop '.#local'
```
#### Get into a development environment for building a specific package
This gets you into a development environment with the preferred
versions of the compiler and other development tools. Additionally,
all haskell dependencies of this package are provided by the nix shell
(including profiling dependencies).
```
nix develop '.#<package-name>'
```
for example:
```
nix develop '.#unison-cli'
```
or
```
nix develop '.#unison-parser-typechecker'
```
This is useful if you want to profile a package. For example, if you
want to profile `unison-cli:exe:unison`, you could get into one of these
shells, cd into its directory, then run the program with
profiling.
```
nix develop '.#unison-parser-typechecker'
cd unison-cli
cabal run --enable-profiling unison-cli:exe:unison -- +RTS -p
```

View File

@ -1,5 +1,121 @@
{
"nodes": {
"HTTP": {
"flake": false,
"locked": {
"lastModified": 1451647621,
"narHash": "sha256-oHIyw3x0iKBexEo49YeUDV1k74ZtyYKGR2gNJXXRxts=",
"owner": "phadej",
"repo": "HTTP",
"rev": "9bc0996d412fef1787449d841277ef663ad9a915",
"type": "github"
},
"original": {
"owner": "phadej",
"repo": "HTTP",
"type": "github"
}
},
"cabal-32": {
"flake": false,
"locked": {
"lastModified": 1603716527,
"narHash": "sha256-X0TFfdD4KZpwl0Zr6x+PLxUt/VyKQfX7ylXHdmZIL+w=",
"owner": "haskell",
"repo": "cabal",
"rev": "48bf10787e27364730dd37a42b603cee8d6af7ee",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "3.2",
"repo": "cabal",
"type": "github"
}
},
"cabal-34": {
"flake": false,
"locked": {
"lastModified": 1645834128,
"narHash": "sha256-wG3d+dOt14z8+ydz4SL7pwGfe7SiimxcD/LOuPCV6xM=",
"owner": "haskell",
"repo": "cabal",
"rev": "5ff598c67f53f7c4f48e31d722ba37172230c462",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "3.4",
"repo": "cabal",
"type": "github"
}
},
"cabal-36": {
"flake": false,
"locked": {
"lastModified": 1669081697,
"narHash": "sha256-I5or+V7LZvMxfbYgZATU4awzkicBwwok4mVoje+sGmU=",
"owner": "haskell",
"repo": "cabal",
"rev": "8fd619e33d34924a94e691c5fea2c42f0fc7f144",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "3.6",
"repo": "cabal",
"type": "github"
}
},
"cardano-shell": {
"flake": false,
"locked": {
"lastModified": 1608537748,
"narHash": "sha256-PulY1GfiMgKVnBci3ex4ptk2UNYMXqGjJOxcPy2KYT4=",
"owner": "input-output-hk",
"repo": "cardano-shell",
"rev": "9392c75087cb9a3d453998f4230930dea3a95725",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"repo": "cardano-shell",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1673956053,
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-compat_2": {
"flake": false,
"locked": {
"lastModified": 1672831974,
"narHash": "sha256-z9k3MfslLjWQfnjBtEtJZdq3H7kyi2kQtUThfTgdRk0=",
"owner": "input-output-hk",
"repo": "flake-compat",
"rev": "45f2638735f8cdc40fe302742b79f248d23eb368",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"ref": "hkm/gitlab-fix",
"repo": "flake-compat",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
@ -18,26 +134,412 @@
"type": "github"
}
},
"nixpkgs": {
"flake-utils_2": {
"locked": {
"lastModified": 1684935479,
"narHash": "sha256-6QMMsXMr2nhmOPHdti2j3KRHt+bai2zw+LJfdCl97Mk=",
"lastModified": 1679360468,
"narHash": "sha256-LGnza3cfXF10Biw3ZTg0u9o9t7s680Ww200t5KkHTh8=",
"owner": "hamishmack",
"repo": "flake-utils",
"rev": "e1ea268ff47ad475443dbabcd54744b4e5b9d4f5",
"type": "github"
},
"original": {
"owner": "hamishmack",
"ref": "hkm/nested-hydraJobs",
"repo": "flake-utils",
"type": "github"
}
},
"ghc-8.6.5-iohk": {
"flake": false,
"locked": {
"lastModified": 1600920045,
"narHash": "sha256-DO6kxJz248djebZLpSzTGD6s8WRpNI9BTwUeOf5RwY8=",
"owner": "input-output-hk",
"repo": "ghc",
"rev": "95713a6ecce4551240da7c96b6176f980af75cae",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"ref": "release/8.6.5-iohk",
"repo": "ghc",
"type": "github"
}
},
"hackage": {
"flake": false,
"locked": {
"lastModified": 1688689629,
"narHash": "sha256-hNkTA2oaMSnhkvSFnOc76yN0CUl+EyHbxLXFPOJhOlk=",
"owner": "input-output-hk",
"repo": "hackage.nix",
"rev": "902793475a701a03a31411381ee17a6885b76c0b",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"repo": "hackage.nix",
"type": "github"
}
},
"haskellNix": {
"inputs": {
"HTTP": "HTTP",
"cabal-32": "cabal-32",
"cabal-34": "cabal-34",
"cabal-36": "cabal-36",
"cardano-shell": "cardano-shell",
"flake-compat": "flake-compat_2",
"flake-utils": "flake-utils_2",
"ghc-8.6.5-iohk": "ghc-8.6.5-iohk",
"hackage": "hackage",
"hls-1.10": "hls-1.10",
"hls-2.0": "hls-2.0",
"hpc-coveralls": "hpc-coveralls",
"hydra": "hydra",
"iserv-proxy": "iserv-proxy",
"nixpkgs": [
"haskellNix",
"nixpkgs-unstable"
],
"nixpkgs-2003": "nixpkgs-2003",
"nixpkgs-2105": "nixpkgs-2105",
"nixpkgs-2111": "nixpkgs-2111",
"nixpkgs-2205": "nixpkgs-2205",
"nixpkgs-2211": "nixpkgs-2211",
"nixpkgs-2305": "nixpkgs-2305",
"nixpkgs-unstable": "nixpkgs-unstable",
"old-ghc-nix": "old-ghc-nix",
"stackage": "stackage"
},
"locked": {
"lastModified": 1688713029,
"narHash": "sha256-bK2RwnBLaJgtXYwPfpWL3XQJwlRkOimhsieg13Ve5bM=",
"owner": "input-output-hk",
"repo": "haskell.nix",
"rev": "f629a8abac1bbb2168c1c763b9a80effef7156ea",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"repo": "haskell.nix",
"type": "github"
}
},
"hls-1.10": {
"flake": false,
"locked": {
"lastModified": 1680000865,
"narHash": "sha256-rc7iiUAcrHxwRM/s0ErEsSPxOR3u8t7DvFeWlMycWgo=",
"owner": "haskell",
"repo": "haskell-language-server",
"rev": "b08691db779f7a35ff322b71e72a12f6e3376fd9",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "1.10.0.0",
"repo": "haskell-language-server",
"type": "github"
}
},
"hls-2.0": {
"flake": false,
"locked": {
"lastModified": 1684398654,
"narHash": "sha256-RW44up2BIyBBYN6tZur5f9kDDR3kr0Rd+TgPbLTfwB4=",
"owner": "haskell",
"repo": "haskell-language-server",
"rev": "20c6d1e731cd9c0beef7338e2fc7a8126ba9b6fb",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "2.0.0.0",
"repo": "haskell-language-server",
"type": "github"
}
},
"hpc-coveralls": {
"flake": false,
"locked": {
"lastModified": 1607498076,
"narHash": "sha256-8uqsEtivphgZWYeUo5RDUhp6bO9j2vaaProQxHBltQk=",
"owner": "sevanspowell",
"repo": "hpc-coveralls",
"rev": "14df0f7d229f4cd2e79f8eabb1a740097fdfa430",
"type": "github"
},
"original": {
"owner": "sevanspowell",
"repo": "hpc-coveralls",
"type": "github"
}
},
"hydra": {
"inputs": {
"nix": "nix",
"nixpkgs": [
"haskellNix",
"hydra",
"nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1671755331,
"narHash": "sha256-hXsgJj0Cy0ZiCiYdW2OdBz5WmFyOMKuw4zyxKpgUKm4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "f91ee3065de91a3531329a674a45ddcb3467a650",
"repo": "hydra",
"rev": "f48f00ee6d5727ae3e488cbf9ce157460853fea8",
"type": "github"
},
"original": {
"id": "hydra",
"type": "indirect"
}
},
"iserv-proxy": {
"flake": false,
"locked": {
"lastModified": 1670983692,
"narHash": "sha256-avLo34JnI9HNyOuauK5R69usJm+GfW3MlyGlYxZhTgY=",
"ref": "hkm/remote-iserv",
"rev": "50d0abb3317ac439a4e7495b185a64af9b7b9300",
"revCount": 10,
"type": "git",
"url": "https://gitlab.haskell.org/hamishmack/iserv-proxy.git"
},
"original": {
"ref": "hkm/remote-iserv",
"type": "git",
"url": "https://gitlab.haskell.org/hamishmack/iserv-proxy.git"
}
},
"lowdown-src": {
"flake": false,
"locked": {
"lastModified": 1633514407,
"narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
"owner": "kristapsdz",
"repo": "lowdown",
"rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
"type": "github"
},
"original": {
"owner": "kristapsdz",
"repo": "lowdown",
"type": "github"
}
},
"nix": {
"inputs": {
"lowdown-src": "lowdown-src",
"nixpkgs": "nixpkgs",
"nixpkgs-regression": "nixpkgs-regression"
},
"locked": {
"lastModified": 1661606874,
"narHash": "sha256-9+rpYzI+SmxJn+EbYxjGv68Ucp22bdFUSy/4LkHkkDQ=",
"owner": "NixOS",
"repo": "nix",
"rev": "11e45768b34fdafdcf019ddbd337afa16127ff0f",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"ref": "2.11.0",
"repo": "nix",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1657693803,
"narHash": "sha256-G++2CJ9u0E7NNTAi9n5G8TdDmGJXcIjkJ3NF8cetQB8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "365e1b3a859281cf11b94f87231adeabbdd878a2",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-22.05-small",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-2003": {
"locked": {
"lastModified": 1620055814,
"narHash": "sha256-8LEHoYSJiL901bTMVatq+rf8y7QtWuZhwwpKE2fyaRY=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "1db42b7fe3878f3f5f7a4f2dc210772fd080e205",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-20.03-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-2105": {
"locked": {
"lastModified": 1659914493,
"narHash": "sha256-lkA5X3VNMKirvA+SUzvEhfA7XquWLci+CGi505YFAIs=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "022caabb5f2265ad4006c1fa5b1ebe69fb0c3faf",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-21.05-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-2111": {
"locked": {
"lastModified": 1659446231,
"narHash": "sha256-hekabNdTdgR/iLsgce5TGWmfIDZ86qjPhxDg/8TlzhE=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "eabc38219184cc3e04a974fe31857d8e0eac098d",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-21.11-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-2205": {
"locked": {
"lastModified": 1682600000,
"narHash": "sha256-ha4BehR1dh8EnXSoE1m/wyyYVvHI9txjW4w5/oxsW5Y=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "50fc86b75d2744e1ab3837ef74b53f103a9b55a0",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-22.05-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-2211": {
"locked": {
"lastModified": 1685314633,
"narHash": "sha256-8LXBPqTQXl5ofkjpJ18JcbmLJ/lWDoMxtUwiDYv0wro=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "c8a17ce7abc03c50cd072e9e6c9b389c5f61836b",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-22.11-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-2305": {
"locked": {
"lastModified": 1685338297,
"narHash": "sha256-+Aq4O0Jn1W1q927ZHc3Zn6RO7bwQGmb6O8xYoGy0KrM=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "6287b47dbfabbb8bfbb9b1b53d198ad58a774de4",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-23.05-darwin",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-regression": {
"locked": {
"lastModified": 1643052045,
"narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
"type": "github"
},
"original": {
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
"type": "github"
}
},
"nixpkgs-unstable": {
"locked": {
"lastModified": 1685347552,
"narHash": "sha256-9woSppRyUFo26yUffORTzttJ+apOt8MmCv6RxpPNTU4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "f2f1ec390714d303cf84ba086e34e45b450dd8c4",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"old-ghc-nix": {
"flake": false,
"locked": {
"lastModified": 1631092763,
"narHash": "sha256-sIKgO+z7tj4lw3u6oBZxqIhDrzSkvpHtv0Kki+lh9Fg=",
"owner": "angerman",
"repo": "old-ghc-nix",
"rev": "af48a7a7353e418119b6dfe3cd1463a657f342b8",
"type": "github"
},
"original": {
"owner": "angerman",
"ref": "master",
"repo": "old-ghc-nix",
"type": "github"
}
},
"root": {
"inputs": {
"flake-compat": "flake-compat",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
"haskellNix": "haskellNix",
"nixpkgs": [
"haskellNix",
"nixpkgs-unstable"
]
}
},
"stackage": {
"flake": false,
"locked": {
"lastModified": 1688688652,
"narHash": "sha256-HHTZ2N1qLL029/ucCidOeSNW61khhesMa062bYWBKCU=",
"owner": "input-output-hk",
"repo": "stackage.nix",
"rev": "e00911e5f687ee2fa69cba203881e31d3dedd888",
"type": "github"
},
"original": {
"owner": "input-output-hk",
"repo": "stackage.nix",
"type": "github"
}
},
"systems": {

233 flake.nix
View File

@ -1,86 +1,167 @@
{
description = "A common environment for unison development";
inputs = {
flake-utils.url = "github:numtide/flake-utils";
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
description = "Unison";
nixConfig = {
extra-substituters = [ "https://cache.iog.io" "https://unison.cachix.org" ];
extra-trusted-public-keys = [
"hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ="
"unison.cachix.org-1:gFuvOrYJX5lXoSoYm6Na3xwUbb9q+S5JFL+UAsWbmzQ="
];
};
outputs = { self, flake-utils, nixpkgs }:
let
ghc-version = "927";
systemAttrs = flake-utils.lib.eachDefaultSystem (system:
inputs = {
haskellNix.url = "github:input-output-hk/haskell.nix";
nixpkgs.follows = "haskellNix/nixpkgs-unstable";
flake-utils.url = "github:numtide/flake-utils";
flake-compat = {
url = "github:edolstra/flake-compat";
flake = false;
};
};
outputs = { self, nixpkgs, flake-utils, haskellNix, flake-compat }:
flake-utils.lib.eachSystem [
"x86_64-linux"
"x86_64-darwin"
"aarch64-darwin"
]
(system:
let
pkgs = nixpkgs.legacyPackages."${system}".extend self.overlay;
ghc = pkgs.haskell.packages."ghc${ghc-version}";
nativePackages = pkgs.lib.optionals pkgs.stdenv.isDarwin
(with pkgs.darwin.apple_sdk.frameworks; [ Cocoa ]);
unison-env = pkgs.mkShell {
packages = let exports = self.packages."${system}";
in with pkgs;
[
exports.stack
exports.hls
exports.ormolu
exports.ghc
pkg-config
zlib
glibcLocales
] ++ nativePackages;
# workaround for https://gitlab.haskell.org/ghc/ghc/-/issues/11042
shellHook = ''
export LD_LIBRARY_PATH=${pkgs.zlib}/lib:$LD_LIBRARY_PATH
'';
overlays = [
haskellNix.overlay
(final: prev: {
unison-project = with prev.lib.strings;
let
cleanSource = pth:
let
src' = prev.lib.cleanSourceWith {
filter = filt;
src = pth;
};
filt = path: type:
let
bn = baseNameOf path;
isHiddenFile = hasPrefix "." bn;
isFlakeLock = bn == "flake.lock";
isNix = hasSuffix ".nix" bn;
in
!isHiddenFile && !isFlakeLock && !isNix;
in
src';
in
final.haskell-nix.project' {
src = cleanSource ./.;
projectFileName = "stack.yaml";
modules = [
# enable profiling
{
enableLibraryProfiling = true;
profilingDetail = "none";
}
# remove buggy build tool dependencies
({ lib, ... }: {
# this component has the build tool
# `unison-cli:unison` and somehow haskell.nix
# decides to add some file sharing package
# `unison` as a build-tool dependency.
packages.unison-cli.components.exes.cli-integration-tests.build-tools =
lib.mkForce [ ];
})
];
branchMap = {
"https://github.com/unisonweb/configurator.git"."e47e9e9fe1f576f8c835183b9def52d73c01327a" =
"unison";
"https://github.com/unisonweb/shellmet.git"."2fd348592c8f51bb4c0ca6ba4bc8e38668913746" =
"topic/avoid-callCommand";
};
};
})
(final: prev: {
unison-stack = prev.symlinkJoin {
name = "stack";
paths = [ final.stack ];
buildInputs = [ final.makeWrapper ];
postBuild =
let
flags = [ "--no-nix" "--system-ghc" "--no-install-ghc" ];
add-flags =
"--add-flags '${prev.lib.concatStringsSep " " flags}'";
in
''
wrapProgram "$out/bin/stack" ${add-flags}
'';
};
})
];
pkgs = import nixpkgs {
inherit system overlays;
inherit (haskellNix) config;
};
in {
flake = pkgs.unison-project.flake { };
apps.repl = flake-utils.lib.mkApp {
drv =
nixpkgs.legacyPackages."${system}".writeShellScriptBin "repl" ''
confnix=$(mktemp)
echo "builtins.getFlake (toString $(git rev-parse --show-toplevel))" >$confnix
trap "rm $confnix" EXIT
nix repl $confnix
commonShellArgs = args:
args // {
# workaround:
# https://github.com/input-output-hk/haskell.nix/issues/1793
# https://github.com/input-output-hk/haskell.nix/issues/1885
allToolDeps = false;
additional = hpkgs: with hpkgs; [ Cabal stm exceptions ghc ghc-heap ];
buildInputs =
let
native-packages = pkgs.lib.optionals pkgs.stdenv.isDarwin
(with pkgs.darwin.apple_sdk.frameworks; [ Cocoa ]);
in
(args.buildInputs or [ ]) ++ (with pkgs; [ unison-stack pkg-config zlib glibcLocales ]) ++ native-packages;
# workaround for https://gitlab.haskell.org/ghc/ghc/-/issues/11042
shellHook = ''
export LD_LIBRARY_PATH=${pkgs.zlib}/lib:$LD_LIBRARY_PATH
'';
};
tools =
let ormolu-ver = "0.5.2.0";
in (args.tools or { }) // {
cabal = { };
ormolu = { version = ormolu-ver; };
haskell-language-server = {
version = "latest";
# specify flags via project file rather than a module override
# https://github.com/input-output-hk/haskell.nix/issues/1509
cabalProject = ''
packages: .
package haskell-language-server
flags: -brittany -fourmolu -stylishhaskell -hlint
constraints: ormolu == ${ormolu-ver}
'';
};
};
};
pkgs = pkgs;
shellFor = args: pkgs.unison-project.shellFor (commonShellArgs args);
devShells.default = unison-env;
packages = {
hls = pkgs.unison-hls;
hls-call-hierarchy-plugin = ghc.hls-call-hierarchy-plugin;
ormolu = pkgs.ormolu;
ghc = pkgs.haskell.compiler."ghc${ghc-version}";
stack = pkgs.unison-stack;
devShell = self.devShells."${system}".default;
};
defaultPackage = self.packages."${system}".devShell;
localPackages = with pkgs.lib;
filterAttrs (k: v: v.isLocal or false) pkgs.unison-project.hsPkgs;
localPackageNames = builtins.attrNames localPackages;
devShells =
let
mkDevShell = pkgName:
shellFor {
packages = hpkgs: [ hpkgs."${pkgName}" ];
withHoogle = true;
};
localPackageDevShells =
pkgs.lib.genAttrs localPackageNames mkDevShell;
in
{
default = devShells.only-tools;
only-tools = shellFor {
packages = _: [ ];
withHoogle = false;
};
local = shellFor {
packages = hpkgs: (map (p: hpkgs."${p}") localPackageNames);
withHoogle = true;
};
} // localPackageDevShells;
in
flake // {
defaultPackage = flake.packages."unison-cli:exe:unison";
inherit (pkgs) unison-project;
inherit devShells localPackageNames;
});
topLevelAttrs = {
overlay = final: prev: {
unison-hls = final.haskell-language-server.override {
haskellPackages = final.haskell.packages."ghc${ghc-version}";
dynamic = true;
supportedGhcVersions = [ ghc-version ];
};
unison-stack = prev.symlinkJoin {
name = "stack";
paths = [ final.stack ];
buildInputs = [ final.makeWrapper ];
postBuild = let
flags = [ "--no-nix" "--system-ghc" "--no-install-ghc" ];
add-flags =
"--add-flags '${prev.lib.concatStringsSep " " flags}'";
in ''
wrapProgram "$out/bin/stack" ${add-flags}
'';
};
};
};
in systemAttrs // topLevelAttrs;
}

View File

@ -50,7 +50,6 @@ dependencies:
- filelock
- filepath
- fingertree
- fsnotify
- fuzzyfind
- free
- generic-lens

View File

@ -875,7 +875,8 @@ ioBuiltins =
( "IO.tryEval",
forall1 "a" $ \a ->
(unit --> io a) --> Type.effect () [Type.builtinIO (), DD.exceptionType ()] a
)
),
("IO.randomBytes", nat --> io bytes)
]
mvarBuiltins :: [(Text, Type)]

View File

@ -22,13 +22,15 @@ import Unison.Type qualified as Type
import Unison.Var (Var)
import Unison.Var qualified as Var
builtinTermsSrc :: a -> [(Symbol, Term Symbol a, Type Symbol a)]
builtinTermsSrc :: a -> [(Symbol, a, Term Symbol a, Type Symbol a)]
builtinTermsSrc ann =
[ ( v "metadata.isPropagated",
ann,
Term.constructor ann (ConstructorReference Decls.isPropagatedRef Decls.isPropagatedConstructorId),
Type.ref ann Decls.isPropagatedRef
),
( v "metadata.isTest",
ann,
Term.constructor ann (ConstructorReference Decls.isTestRef Decls.isTestConstructorId),
Type.ref ann Decls.isTestRef
)
@ -39,8 +41,8 @@ v = Var.named
builtinTermsRef :: Map Symbol Reference.Id
builtinTermsRef =
fmap (\(refId, _, _) -> refId)
fmap (\(refId, _, _, _) -> refId)
. H.hashTermComponents
. Map.fromList
. fmap (\(v, tm, tp) -> (v, (tm, tp)))
. fmap (\(v, _a, tm, tp) -> (v, (tm, tp, ())))
$ builtinTermsSrc ()

View File

@ -317,8 +317,8 @@ addDefsToCodebase c uf = do
traverse_ goTerm (UF.hashTermsId uf)
where
goTerm t | debug && trace ("Codebase.addDefsToCodebase.goTerm " ++ show t) False = undefined
goTerm (r, Nothing, tm, tp) = putTerm c r tm tp
goTerm (r, Just WK.TestWatch, tm, tp) = putTerm c r tm tp
goTerm (_, r, Nothing, tm, tp) = putTerm c r tm tp
goTerm (_, r, Just WK.TestWatch, tm, tp) = putTerm c r tm tp
goTerm _ = pure ()
goType :: (Show t) => (t -> Decl v a) -> (Reference.Id, t) -> Sqlite.Transaction ()
goType _f pair | debug && trace ("Codebase.addDefsToCodebase.goType " ++ show pair) False = undefined

View File

@ -32,4 +32,4 @@ fromTypecheckedUnisonFile tuf = CodeLookup tm ty
Map.toList (UF.effectDeclarations' tuf)
]
termMap :: Map Reference.Id (Term.Term v a)
termMap = Map.fromList [(id, tm) | (id, _wk, tm, _tp) <- toList $ UF.hashTermsId tuf]
termMap = Map.fromList [(id, tm) | (_a, id, _wk, tm, _tp) <- toList $ UF.hashTermsId tuf]

View File

@ -27,6 +27,7 @@ import Unison.Codebase qualified as Codebase
import Unison.Codebase.FileCodebase qualified as FCC
import Unison.Codebase.Init.CreateCodebaseError
import Unison.Codebase.Init.OpenCodebaseError
import Unison.Codebase.Verbosity (Verbosity, isSilent)
import Unison.Parser.Ann (Ann (..))
import Unison.Prelude
import Unison.PrettyTerminal qualified as PT
@ -168,11 +169,11 @@ createCodebase cbInit debugName path lockOption action = do
-- previously: initCodebaseOrExit :: CodebasePath -> m (m (), Codebase m v a)
-- previously: FileCodebase.initCodebase :: CodebasePath -> m (m (), Codebase m v a)
withNewUcmCodebaseOrExit :: (MonadIO m) => Init m Symbol Ann -> DebugName -> CodebasePath -> CodebaseLockOption -> (Codebase m Symbol Ann -> m r) -> m r
withNewUcmCodebaseOrExit cbInit debugName path lockOption action = do
withNewUcmCodebaseOrExit :: (MonadIO m) => Init m Symbol Ann -> Verbosity -> DebugName -> CodebasePath -> CodebaseLockOption -> (Codebase m Symbol Ann -> m r) -> m r
withNewUcmCodebaseOrExit cbInit verbosity debugName path lockOption action = do
prettyDir <- P.string <$> canonicalizePath path
let codebaseSetup codebase = do
liftIO $ PT.putPrettyLn' . P.wrap $ "Initializing a new codebase in: " <> prettyDir
unless (isSilent verbosity) . liftIO $ PT.putPrettyLn' . P.wrap $ "Initializing a new codebase in: " <> prettyDir
Codebase.runTransaction codebase (Codebase.installUcmDependencies codebase)
createCodebase cbInit debugName path lockOption (\cb -> codebaseSetup cb *> action cb)
>>= \case
@ -180,19 +181,20 @@ withNewUcmCodebaseOrExit cbInit debugName path lockOption action = do
Right result -> pure result
-- | try to init a codebase where none exists and then exit regardless (i.e. `ucm --codebase dir init`)
initCodebaseAndExit :: (MonadIO m) => Init m Symbol Ann -> DebugName -> Maybe CodebasePath -> CodebaseLockOption -> m ()
initCodebaseAndExit i debugName mdir lockOption = do
initCodebaseAndExit :: (MonadIO m) => Init m Symbol Ann -> Verbosity -> DebugName -> Maybe CodebasePath -> CodebaseLockOption -> m ()
initCodebaseAndExit i verbosity debugName mdir lockOption = do
codebaseDir <- Codebase.getCodebaseDir mdir
withNewUcmCodebaseOrExit i debugName codebaseDir lockOption (const $ pure ())
withNewUcmCodebaseOrExit i verbosity debugName codebaseDir lockOption (const $ pure ())
withTemporaryUcmCodebase ::
(MonadUnliftIO m) =>
Init m Symbol Ann ->
Verbosity ->
DebugName ->
CodebaseLockOption ->
((CodebasePath, Codebase m Symbol Ann) -> m r) ->
m r
withTemporaryUcmCodebase cbInit debugName lockOption action = do
withTemporaryUcmCodebase cbInit verbosity debugName lockOption action = do
UnliftIO.withSystemTempDirectory debugName $ \tempDir -> do
withNewUcmCodebaseOrExit cbInit debugName tempDir lockOption $ \codebase -> do
withNewUcmCodebaseOrExit cbInit verbosity debugName tempDir lockOption $ \codebase -> do
action (tempDir, codebase)

View File

@ -80,12 +80,12 @@ evaluateWatches ::
evaluateWatches code ppe evaluationCache rt tuf = do
-- 1. compute hashes for everything in the file
let m :: Map v (Reference.Id, Term.Term v a)
m = fmap (\(id, _wk, tm, _tp) -> (id, tm)) (UF.hashTermsId tuf)
m = fmap (\(_a, id, _wk, tm, _tp) -> (id, tm)) (UF.hashTermsId tuf)
watches :: Set v = Map.keysSet watchKinds
watchKinds :: Map v WatchKind
watchKinds =
Map.fromList
[(v, k) | (k, ws) <- UF.watchComponents tuf, (v, _tm, _tp) <- ws]
[(v, k) | (k, ws) <- UF.watchComponents tuf, (v, _a, _tm, _tp) <- ws]
unann = Term.amap (const ())
-- 2. use the cache to lookup things already computed
m' <- fmap Map.fromList . for (Map.toList m) $ \(v, (r, t)) -> do
@ -96,8 +96,8 @@ evaluateWatches code ppe evaluationCache rt tuf = do
-- 3. create a big ol' let rec whose body is a big tuple of all watches
let rv :: Map Reference.Id v
rv = Map.fromList [(r, v) | (v, (r, _)) <- Map.toList m]
bindings :: [(v, Term v)]
bindings = [(v, unref rv b) | (v, (_, _, b, _)) <- Map.toList m']
bindings :: [(v, (), Term v)]
bindings = [(v, (), unref rv b) | (v, (_, _, b, _)) <- Map.toList m']
watchVars = [Term.var () v | v <- toList watches]
bigOl'LetRec = Term.letRec' True bindings (tupleTerm watchVars)
cl = void (CL.fromTypecheckedUnisonFile tuf) <> void code
@ -153,7 +153,7 @@ evaluateTerm' codeLookup cache ppe rt tm = do
mempty
mempty
mempty
[(WK.RegularWatch, [(Var.nameds "result", tm, mempty <$> mainType rt)])]
[(WK.RegularWatch, [(Var.nameds "result", mempty, tm, mempty <$> mainType rt)])]
r <- evaluateWatches (void codeLookup) ppe cache rt (void tuf)
pure $
r <&> \(_, map) ->

View File

@ -78,7 +78,9 @@ migrations getDeclType termBuffer declBuffer rootCodebasePath =
sqlMigration 10 Q.addProjectTables,
sqlMigration 11 Q.addMostRecentBranchTable,
(12, migrateSchema11To12),
sqlMigration 13 Q.addMostRecentNamespaceTable
sqlMigration 13 Q.addMostRecentNamespaceTable,
sqlMigration 14 Q.addSquashResultTable,
sqlMigration 15 Q.addSquashResultTableIfNotExists
]
where
sqlMigration :: SchemaVersion -> Sqlite.Transaction () -> (SchemaVersion, Sqlite.Transaction ())

View File

@ -579,9 +579,10 @@ migrateTermComponent getDeclType termBuffer declBuffer oldHash = fmap (either id
newTermComponents =
remappedReferences
& Map.elems
& fmap (\(v, trm, typ) -> (v, (trm, typ)))
& fmap (\(v, trm, typ) -> (v, (trm, typ, ())))
& Map.fromList
& Convert.hashTermComponents
& fmap (\(ref, trm, typ, _) -> (ref, trm, typ))
ifor newTermComponents $ \v (newReferenceId, trm, typ) -> do
let oldReferenceId = vToOldReferenceMapping ^?! ix v

View File

@ -5,7 +5,7 @@ module Unison.FileParsers
)
where
import Control.Lens (view, _3)
import Control.Lens
import Control.Monad.State (evalStateT)
import Control.Monad.Writer (tell)
import Data.Foldable qualified as Foldable
@ -37,12 +37,14 @@ import Unison.Typechecker qualified as Typechecker
import Unison.Typechecker.Context qualified as Context
import Unison.Typechecker.Extractor (RedundantTypeAnnotation)
import Unison.Typechecker.TypeLookup qualified as TL
import Unison.UnisonFile (definitionLocation)
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Names qualified as UF
import Unison.Util.List qualified as List
import Unison.Util.Relation qualified as Rel
import Unison.Var (Var)
import Unison.Var qualified as Var
import Unison.WatchKind (WatchKind)
type Term v = Term.Term v Ann
@ -189,14 +191,21 @@ synthesizeFile env0 uf = do
in traverse (traverse addTypesToTopLevelBindings) tlcsFromTypechecker
let doTdnr = applyTdnrDecisions infos
let doTdnrInComponent (v, t, tp) = (v, doTdnr t, tp)
let tdnredTlcs = (fmap . fmap) doTdnrInComponent topLevelComponents
let tdnredTlcs =
topLevelComponents
& (fmap . fmap)
( \vtt ->
vtt
& doTdnrInComponent
& \(v, t, tp) -> (v, fromMaybe (error $ "Symbol from typechecked file not present in parsed file" <> show v) (definitionLocation v uf), t, tp)
)
let (watches', terms') = partition isWatch tdnredTlcs
isWatch = all (\(v, _, _) -> Set.member v watchedVars)
watchedVars = Set.fromList [v | (v, _) <- UF.allWatches uf]
isWatch = all (\(v, _, _, _) -> Set.member v watchedVars)
watchedVars = Set.fromList [v | (v, _a, _) <- UF.allWatches uf]
tlcKind [] = error "empty TLC, should never occur"
tlcKind tlc@((v, _, _) : _) =
let hasE k =
elem v . fmap fst $ Map.findWithDefault [] k (UF.watches uf)
tlcKind tlc@((v, _, _, _) : _) =
let hasE :: WatchKind -> Bool
hasE k = elem v . fmap (view _1) $ Map.findWithDefault [] k (UF.watches uf)
in case Foldable.find hasE (Map.keys $ UF.watches uf) of
Nothing -> error "wat"
Just kind -> (kind, tlc)

View File

@ -16,13 +16,14 @@ module Unison.Hashing.V2.Convert
)
where
import Control.Applicative
import Control.Lens (over, _3)
import Control.Lens qualified as Lens
import Control.Monad.Trans.Writer.CPS (Writer)
import Control.Monad.Trans.Writer.CPS qualified as Writer
import Data.Bifunctor (bimap)
import Data.Bitraversable (bitraverse)
import Data.Foldable (toList)
import Data.Function ((&))
import Data.Functor ((<&>))
import Data.Map (Map)
import Data.Map qualified as Map
@ -64,22 +65,26 @@ typeToReferenceMentions =
-- TODO: remove non-prime version
-- include type in hash
hashTermComponents ::
forall v a.
forall v a extra.
(Var v) =>
Map v (Memory.Term.Term v a, Memory.Type.Type v a) ->
Map v (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a)
Map v (Memory.Term.Term v a, Memory.Type.Type v a, extra) ->
Map v (Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a, extra)
hashTermComponents mTerms =
case Writer.runWriter (traverse (bitraverse m2hTerm (pure . m2hType)) mTerms) of
case h2mTermMap mTerms of
(hTerms, constructorTypes) -> h2mTermResult (constructorTypes Map.!) <$> Hashing.hashTermComponents hTerms
where
h2mTermMap m =
m
& traverse (\(trm, typ, extra) -> liftA3 (,,) (m2hTerm trm) (pure $ m2hType typ) (pure extra))
& Writer.runWriter
h2mTermResult ::
(Ord v) =>
( Memory.Reference.Reference ->
Memory.ConstructorType.ConstructorType
) ->
(Hashing.ReferenceId, Hashing.Term v a, Hashing.Type v a) ->
(Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a)
h2mTermResult getCtorType (id, tm, typ) = (h2mReferenceId id, h2mTerm getCtorType tm, h2mType typ)
(Hashing.ReferenceId, Hashing.Term v a, Hashing.Type v a, extra) ->
(Memory.Reference.Id, Memory.Term.Term v a, Memory.Type.Type v a, extra)
h2mTermResult getCtorType (id, tm, typ, extra) = (h2mReferenceId id, h2mTerm getCtorType tm, h2mType typ, extra)
-- | This shouldn't be used when storing terms in the codebase, as it doesn't incorporate the type into the hash.
-- this should only be used in cases where you just need a way to identify some terms that you have, but won't be

View File

@ -1061,7 +1061,7 @@ renderTerm :: (IsString s, Var v) => Env -> Term.Term' (TypeVar.TypeVar loc0 v)
renderTerm env e =
let s = Color.toPlain $ TermPrinter.pretty' (Just 80) env (TypeVar.lowerTerm e)
in if length s > Settings.renderTermMaxLength
then fromString (take Settings.renderTermMaxLength s <> "...")
then fromString ("..." <> drop (length s - Settings.renderTermMaxLength) s)
else fromString s
renderPattern :: Env -> Pattern ann -> ColorText

View File

@ -179,7 +179,10 @@ enclose keep rec (LetRecNamedTop' top vbs bd) =
where
xpnd = expandRec keep' vbs
keep' = Set.union keep . Set.fromList . map fst $ vbs
lvbs = (map . fmap) (rec keep' . abstract keep' . ABT.substs xpnd) vbs
lvbs =
vbs
<&> \(v, trm) ->
(v, ABT.annotation trm, (rec keep' . abstract keep' . ABT.substs xpnd) trm)
lbd = rec keep' . ABT.substs xpnd $ bd
-- will be lifted, so keep this variable
enclose keep rec (Let1NamedTop' top v b@(unAnn -> LamsNamed' vs bd) e) =
@ -299,7 +302,7 @@ beta rec (LetRecNamedTop' top (fmap (fmap rec) -> vbs) (rec -> bd)) =
m = Map.map length $ Map.differenceWith f (Map.fromList $ map args vbs) m0
lvbs =
vbs <&> \(v, b0) -> (,) v $ case b0 of
vbs <&> \(v, b0) -> (v,ABT.annotation b0,) $ case b0 of
LamsNamed' vs b
| Just n <- Map.lookup v m ->
lam' (ABT.annotation b0) (drop n vs) (dropPrefixes m b)

View File

@ -37,6 +37,7 @@ import Control.Monad.Reader (ReaderT (..), ask, runReaderT)
import Control.Monad.State.Strict (State, execState, modify)
import Crypto.Hash qualified as Hash
import Crypto.MAC.HMAC qualified as HMAC
import Crypto.Random (getRandomBytes)
import Data.Bits (shiftL, shiftR, (.|.))
import Data.ByteArray qualified as BA
import Data.ByteString (hGet, hGetSome, hPut)
@ -2384,7 +2385,7 @@ declareForeigns = do
declareForeign Tracked "IO.listen.impl.v3" boxToEF0
. mkForeignIOF
$ \sk -> SYS.listenSock sk 2
$ \sk -> SYS.listenSock sk 2048
declareForeign Tracked "IO.clientSocket.impl.v3" boxBoxToEFBox
. mkForeignIOF
@ -2742,6 +2743,9 @@ declareForeigns = do
declareForeign Untracked "Universal.murmurHash" murmur'hash . mkForeign $
pure . asWord64 . hash64 . serializeValueLazy
declareForeign Tracked "IO.randomBytes" natToBox . mkForeign $
\n -> Bytes.fromArray <$> getRandomBytes @IO @ByteString n
declareForeign Untracked "Bytes.zlib.compress" boxDirect . mkForeign $ pure . Bytes.zlibCompress
declareForeign Untracked "Bytes.gzip.compress" boxDirect . mkForeign $ pure . Bytes.gzipCompress
declareForeign Untracked "Bytes.zlib.decompress" boxToEBoxBox . mkForeign $ \bs ->

View File

@ -3,7 +3,7 @@
module Unison.Runtime.IOSource where
import Control.Lens (view, _1)
import Control.Lens (view, _2)
import Control.Monad.Morph (hoist)
import Data.List (elemIndex, genericIndex)
import Data.Map qualified as Map
@ -54,7 +54,7 @@ typecheckedFile' =
Right file -> file
typecheckedFileTerms :: Map.Map Symbol R.Reference
typecheckedFileTerms = view _1 <$> UF.hashTerms typecheckedFile
typecheckedFileTerms = view _2 <$> UF.hashTerms typecheckedFile
termNamed :: String -> R.Reference
termNamed s =

View File

@ -159,7 +159,7 @@ fieldNames env r name dd = do
_ -> Nothing
let vars :: [v]
vars = [Var.freshenId (fromIntegral n) (Var.named "_") | n <- [0 .. Type.arity typ - 1]]
let accessors :: [(v, Term.Term v ())]
let accessors :: [(v, (), Term.Term v ())]
accessors = DD.generateRecordAccessors (map (,()) vars) (HQ.toVar name) r
let typeLookup :: TypeLookup v ()
typeLookup =
@ -176,14 +176,14 @@ fieldNames env r name dd = do
Typechecker._termsByShortname = mempty
}
accessorsWithTypes :: [(v, Term.Term v (), Type.Type v ())] <-
for accessors \(v, trm) ->
for accessors \(v, _a, trm) ->
case Result.result (Typechecker.synthesize env typecheckingEnv trm) of
Nothing -> Nothing
Just typ -> Just (v, trm, typ)
let hashes = Hashing.hashTermComponents (Map.fromList . fmap (\(v, trm, typ) -> (v, (trm, typ))) $ accessorsWithTypes)
let hashes = Hashing.hashTermComponents (Map.fromList . fmap (\(v, trm, typ) -> (v, (trm, typ, ()))) $ accessorsWithTypes)
let names =
[ (r, HQ.toString . PPE.termName env . Referent.Ref $ DerivedId r)
| r <- (\(refId, _trm, _typ) -> refId) <$> Map.elems hashes
| r <- (\(refId, _trm, _typ, _ann) -> refId) <$> Map.elems hashes
]
let fieldNames =
Map.fromList
@ -195,7 +195,7 @@ fieldNames env r name dd = do
Just
[ HQ.unsafeFromString name
| v <- vars,
Just (ref, _, _) <- [Map.lookup (Var.namespaced [HQ.toVar name, v]) hashes],
Just (ref, _, _, _) <- [Map.lookup (Var.namespaced [HQ.toVar name, v]) hashes],
Just name <- [Map.lookup ref fieldNames]
]
else Nothing

View File

@ -64,12 +64,12 @@ file = do
_ <- closeBlock
let (termsr, watchesr) = foldl' go ([], []) stanzas
go (terms, watches) s = case s of
WatchBinding kind _ ((_, v), at) ->
(terms, (kind, (v, Term.generalizeTypeSignatures at)) : watches)
WatchExpression kind guid _ at ->
(terms, (kind, (Var.unnamedTest guid, Term.generalizeTypeSignatures at)) : watches)
Binding ((_, v), at) -> ((v, Term.generalizeTypeSignatures at) : terms, watches)
Bindings bs -> ([(v, Term.generalizeTypeSignatures at) | ((_, v), at) <- bs] ++ terms, watches)
WatchBinding kind spanningAnn ((_, v), at) ->
(terms, (kind, (v, spanningAnn, Term.generalizeTypeSignatures at)) : watches)
WatchExpression kind guid spanningAnn at ->
(terms, (kind, (Var.unnamedTest guid, spanningAnn, Term.generalizeTypeSignatures at)) : watches)
Binding ((spanningAnn, v), at) -> ((v, spanningAnn, Term.generalizeTypeSignatures at) : terms, watches)
Bindings bs -> ([(v, spanningAnn, Term.generalizeTypeSignatures at) | ((spanningAnn, v), at) <- bs] ++ terms, watches)
let (terms, watches) = (reverse termsr, reverse watchesr)
-- suffixified local term bindings shadow any same-named thing from the outer codebase scope
-- example: `foo.bar` in local file scope will shadow `foo.bar` and `bar` in codebase scope
@ -109,13 +109,14 @@ file = do
let bindNames = Term.bindSomeNames Name.unsafeFromVar avoid curNames . resolveLocals
where
avoid = Set.fromList (stanzas0 >>= getVars)
terms <- case List.validate (traverse bindNames) terms of
terms <- case List.validate (traverseOf _3 bindNames) terms of
Left es -> resolutionFailures (toList es)
Right terms -> pure terms
watches <- case List.validate (traverse . traverse $ bindNames) watches of
watches <- case List.validate (traverseOf (traversed . _3) bindNames) watches of
Left es -> resolutionFailures (toList es)
Right ws -> pure ws
let toPair (tok, _) = (L.payload tok, ann tok)
let toPair (tok, typ) = (L.payload tok, ann tok <> ann typ)
accessors :: [[(v, Ann, Term v Ann)]]
accessors =
[ DD.generateRecordAccessors (toPair <$> fields) (L.payload typ) r
| (typ, fields) <- parsedAccessors,
@ -164,7 +165,7 @@ checkForDuplicateTermsAndConstructors uf = do
allTerms :: [(v, Ann)]
allTerms =
UF.terms uf
<&> (\(v, t) -> (v, ABT.annotation t))
<&> (\(v, bindingAnn, _t) -> (v, bindingAnn))
mergedTerms :: Map v (Set Ann)
mergedTerms =
(allConstructors <> allTerms)
@ -337,10 +338,13 @@ dataDeclaration mod = do
Type.foralls ctorAnn typeArgVs ctorType
)
prefixVar = TermParser.verifyRelativeVarName prefixDefinitionName
dataConstructor :: P v (Ann, v, Type v Ann)
dataConstructor = go <$> prefixVar <*> many TypeParser.valueTypeLeaf
record :: P v ([(Ann, v, Type v Ann)], [(L.Token v, [(L.Token v, Type v Ann)])])
record = do
_ <- openBlockWith "{"
let field = do
let field :: P v [(L.Token v, Type v Ann)]
field = do
f <- liftA2 (,) (prefixVar <* reserved ":") TypeParser.valueType
optional (reserved ",")
>>= ( \case

View File

@ -14,6 +14,7 @@ module Unison.UnisonFile
effectDeclarations,
typecheckingTerm,
watchesOfKind,
definitionLocation,
-- * TypecheckedUnisonFile
TypecheckedUnisonFile (..),
@ -65,23 +66,33 @@ dataDeclarations = fmap (first Reference.DerivedId) . dataDeclarationsId
effectDeclarations :: UnisonFile v a -> Map v (Reference, EffectDeclaration v a)
effectDeclarations = fmap (first Reference.DerivedId) . effectDeclarationsId
watchesOfKind :: WatchKind -> UnisonFile v a -> [(v, Term v a)]
watchesOfKind :: WatchKind -> UnisonFile v a -> [(v, a, Term v a)]
watchesOfKind kind uf = Map.findWithDefault [] kind (watches uf)
watchesOfOtherKinds :: WatchKind -> UnisonFile v a -> [(v, Term v a)]
watchesOfOtherKinds :: WatchKind -> UnisonFile v a -> [(v, a, Term v a)]
watchesOfOtherKinds kind uf =
join [ws | (k, ws) <- Map.toList (watches uf), k /= kind]
allWatches :: UnisonFile v a -> [(v, Term v a)]
allWatches :: UnisonFile v a -> [(v, a, Term v a)]
allWatches = join . Map.elems . watches
-- | Get the location of a given definition in the file.
definitionLocation :: (Var v) => v -> UnisonFile v a -> Maybe a
definitionLocation v uf =
terms uf ^? folded . filteredBy (_1 . only v) . _2
<|> watches uf ^? folded . folded . filteredBy (_1 . only v) . _2
<|> dataDeclarations uf ^? ix v . _2 . to DD.annotation
<|> effectDeclarations uf ^? ix v . _2 . to (DD.annotation . DD.toDataDecl)
-- Converts a file to a single let rec with a body of `()`, for
-- purposes of typechecking.
typecheckingTerm :: (Var v, Monoid a) => UnisonFile v a -> Term v a
typecheckingTerm uf =
Term.letRec' True (terms uf <> testWatches <> watchesOfOtherKinds TestWatch uf) $
Term.letRec' True bindings $
DD.unitTerm mempty
where
bindings =
terms uf <> testWatches <> watchesOfOtherKinds TestWatch uf
-- we make sure each test has type Test.Result
f w = let wa = ABT.annotation w in Term.ann wa w (DD.testResultType wa)
testWatches = map (second f) $ watchesOfKind TestWatch uf
@ -93,35 +104,37 @@ dataDeclarations' = fmap (first Reference.DerivedId) . dataDeclarationsId'
effectDeclarations' :: TypecheckedUnisonFile v a -> Map v (Reference, EffectDeclaration v a)
effectDeclarations' = fmap (first Reference.DerivedId) . effectDeclarationsId'
hashTerms :: TypecheckedUnisonFile v a -> Map v (Reference, Maybe WatchKind, Term v a, Type v a)
hashTerms = fmap (over _1 Reference.DerivedId) . hashTermsId
hashTerms :: TypecheckedUnisonFile v a -> Map v (a, Reference, Maybe WatchKind, Term v a, Type v a)
hashTerms = fmap (over _2 Reference.DerivedId) . hashTermsId
typecheckedUnisonFile ::
forall v a.
(Var v) =>
Map v (Reference.Id, DataDeclaration v a) ->
Map v (Reference.Id, EffectDeclaration v a) ->
[[(v, Term v a, Type v a)]] ->
[(WatchKind, [(v, Term v a, Type v a)])] ->
[[(v, a, Term v a, Type v a)]] ->
[(WatchKind, [(v, a, Term v a, Type v a)])] ->
TypecheckedUnisonFile v a
typecheckedUnisonFile datas effects tlcs watches =
TypecheckedUnisonFileId datas effects tlcs watches hashImpl
where
hashImpl :: (Map v (a, Reference.Id, Maybe WatchKind, Term v a, Type v a))
hashImpl =
let -- includes watches
allTerms :: [(v, Term v a, Type v a)]
allTerms :: [(v, a, Term v a, Type v a)]
allTerms = join tlcs ++ join (snd <$> watches)
types :: Map v (Type v a)
types = Map.fromList [(v, t) | (v, _, t) <- allTerms]
types = Map.fromList [(v, t) | (v, _a, _, t) <- allTerms]
watchKinds :: Map v (Maybe WatchKind)
watchKinds =
Map.fromList $
[(v, Nothing) | (v, _e, _t) <- join tlcs]
++ [(v, Just wk) | (wk, wkTerms) <- watches, (v, _e, _t) <- wkTerms]
hcs = Hashing.hashTermComponents $ Map.fromList $ (\(v, e, t) -> (v, (e, t))) <$> allTerms
[(v, Nothing) | (v, _a, _e, _t) <- join tlcs]
++ [(v, Just wk) | (wk, wkTerms) <- watches, (v, _a, _e, _t) <- wkTerms]
hcs :: Map v (Reference.Id, Term v a, Type v a, a)
hcs = Hashing.hashTermComponents $ Map.fromList $ (\(v, a, e, t) -> (v, (e, t, a))) <$> allTerms
in Map.fromList
[ (v, (r, wk, e, t))
| (v, (r, e, _typ)) <- Map.toList hcs,
[ (v, (a, r, wk, e, t))
| (v, (r, e, _typ, a)) <- Map.toList hcs,
Just t <- [Map.lookup v types],
wk <- [Map.findWithDefault (error $ show v ++ " missing from watchKinds") v watchKinds]
]
@ -137,7 +150,7 @@ lookupDecl v uf =
indexByReference ::
TypecheckedUnisonFile v a ->
(Map Reference.Id (Term v a, Type v a), Map Reference.Id (DD.Decl v a))
(Map Reference.Id (a, Term v a, Type v a), Map Reference.Id (DD.Decl v a))
indexByReference uf = (tms, tys)
where
tys =
@ -145,7 +158,7 @@ indexByReference uf = (tms, tys)
<> Map.fromList (over _2 Left <$> toList (effectDeclarationsId' uf))
tms =
Map.fromList
[ (r, (tm, ty)) | (Reference.DerivedId r, _wk, tm, ty) <- toList (hashTerms uf)
[ (r, (a, tm, ty)) | (a, Reference.DerivedId r, _wk, tm, ty) <- Map.elems (hashTerms uf)
]
-- | A mapping of all terms in the file by their var name.
@ -154,12 +167,12 @@ indexByReference uf = (tms, tys)
-- Includes test watches.
allTerms :: (Ord v) => TypecheckedUnisonFile v a -> Map v (Term v a)
allTerms uf =
Map.fromList [(v, t) | (v, t, _) <- join $ topLevelComponents uf]
Map.fromList [(v, t) | (v, _a, t, _) <- join $ topLevelComponents uf]
-- | the top level components (no watches) plus test watches.
topLevelComponents ::
TypecheckedUnisonFile v a ->
[[(v, Term v a, Type v a)]]
[[(v, a, Term v a, Type v a)]]
topLevelComponents file =
topLevelComponents' file ++ [comp | (TestWatch, comp) <- watchComponents file]
@ -171,7 +184,7 @@ termSignatureExternalLabeledDependencies
Set.difference
( Set.map LD.typeRef
. foldMap Type.dependencies
. fmap (\(_r, _wk, _e, t) -> t)
. fmap (\(_a, _r, _wk, _e, t) -> t)
. toList
$ hashTerms
)
@ -187,14 +200,14 @@ dependencies :: (Monoid a, Var v) => UnisonFile v a -> Set Reference
dependencies (UnisonFile ds es ts ws) =
foldMap (DD.dependencies . snd) ds
<> foldMap (DD.dependencies . DD.toDataDecl . snd) es
<> foldMap (Term.dependencies . snd) ts
<> foldMap (foldMap (Term.dependencies . snd)) ws
<> foldMap (Term.dependencies . view _3) ts
<> foldMap (foldMap (Term.dependencies . view _3)) ws
discardTypes :: TypecheckedUnisonFile v a -> UnisonFile v a
discardTypes (TypecheckedUnisonFileId datas effects terms watches _) =
let watches' = g . mconcat <$> List.multimap watches
g tup3s = [(v, e) | (v, e, _t) <- tup3s]
in UnisonFileId datas effects [(a, b) | (a, b, _) <- join terms] watches'
g tup3s = [(v, a, e) | (v, a, e, _t) <- tup3s]
in UnisonFileId datas effects [(v, a, trm) | (v, a, trm, _typ) <- join terms] watches'
declsToTypeLookup :: (Var v) => UnisonFile v a -> TL.TypeLookup v a
declsToTypeLookup uf =

View File

@ -1,5 +1,6 @@
module Unison.UnisonFile.Names where
import Control.Lens
import Data.Map qualified as Map
import Data.Set qualified as Set
import Unison.ABT qualified as ABT
@ -7,7 +8,7 @@ import Unison.DataDeclaration (DataDeclaration, EffectDeclaration (..))
import Unison.DataDeclaration qualified as DD
import Unison.DataDeclaration.Names qualified as DD.Names
import Unison.Hashing.V2.Convert qualified as Hashing
import Unison.Names (Names (Names))
import Unison.Names (Names (..))
import Unison.Names.ResolutionResult qualified as Names
import Unison.Prelude
import Unison.Reference qualified as Reference
@ -34,7 +35,7 @@ typecheckedToNames uf = Names (terms <> ctors) types
terms =
Relation.fromList
[ (Name.unsafeFromVar v, Referent.Ref r)
| (v, (r, wk, _, _)) <- Map.toList $ UF.hashTerms uf,
| (v, (_a, r, wk, _, _)) <- Map.toList $ UF.hashTerms uf,
wk == Nothing || wk == Just WK.TestWatch
]
types =
@ -72,11 +73,11 @@ bindNames names (UnisonFileId d e ts ws) = do
-- todo: consider having some kind of binding structure for terms & watches
-- so that you don't weirdly have free vars to tiptoe around.
-- The free vars should just be the things that need to be bound externally.
let termVars = (fst <$> ts) ++ (Map.elems ws >>= map fst)
let termVars = (view _1 <$> ts) ++ (Map.elems ws >>= map (view _1))
termVarsSet = Set.fromList termVars
-- todo: can we clean up this lambda using something like `second`
ts' <- traverse (\(v, t) -> (v,) <$> Term.bindNames Name.unsafeFromVar termVarsSet names t) ts
ws' <- traverse (traverse (\(v, t) -> (v,) <$> Term.bindNames Name.unsafeFromVar termVarsSet names t)) ws
ts' <- traverse (\(v, a, t) -> (v,a,) <$> Term.bindNames Name.unsafeFromVar termVarsSet names t) ts
ws' <- traverse (traverse (\(v, a, t) -> (v,a,) <$> Term.bindNames Name.unsafeFromVar termVarsSet names t)) ws
pure $ UnisonFileId d e ts' ws'
-- This function computes hashes for data and effect declarations, and

View File

@ -17,16 +17,16 @@ import Unison.WatchKind (WatchKind)
data UnisonFile v a = UnisonFileId
{ dataDeclarationsId :: Map v (TermReferenceId, DataDeclaration v a),
effectDeclarationsId :: Map v (TermReferenceId, EffectDeclaration v a),
terms :: [(v, Term v a)],
watches :: Map WatchKind [(v, Term v a)]
terms :: [(v, a {- ann for whole binding -}, Term v a)],
watches :: Map WatchKind [(v, a {- ann for whole watch -}, Term v a)]
}
deriving (Show)
pattern UnisonFile ::
Map v (TypeReference, DataDeclaration v a) ->
Map v (TypeReference, EffectDeclaration v a) ->
[(v, Term v a)] ->
Map WatchKind [(v, Term v a)] ->
[(v, a, Term v a)] ->
Map WatchKind [(v, a, Term v a)] ->
UnisonFile v a
pattern UnisonFile ds es tms ws <-
UnisonFileId
@ -42,9 +42,9 @@ pattern UnisonFile ds es tms ws <-
data TypecheckedUnisonFile v a = TypecheckedUnisonFileId
{ dataDeclarationsId' :: Map v (TypeReferenceId, DataDeclaration v a),
effectDeclarationsId' :: Map v (TypeReferenceId, EffectDeclaration v a),
topLevelComponents' :: [[(v, Term v a, Type v a)]],
watchComponents :: [(WatchKind, [(v, Term v a, Type v a)])],
hashTermsId :: Map v (TermReferenceId, Maybe WatchKind, Term v a, Type v a)
topLevelComponents' :: [[(v, a {- ann for whole binding -}, Term v a, Type v a)]],
watchComponents :: [(WatchKind, [(v, a {- ann for whole watch -}, Term v a, Type v a)])],
hashTermsId :: Map v (a {- ann for whole binding -}, TermReferenceId, Maybe WatchKind, Term v a, Type v a)
}
deriving stock (Generic, Show)
@ -53,11 +53,12 @@ data TypecheckedUnisonFile v a = TypecheckedUnisonFileId
pattern TypecheckedUnisonFile ::
Map v (TypeReference, DataDeclaration v a) ->
Map v (TypeReference, EffectDeclaration v a) ->
[[(v, Term v a, Type v a)]] ->
[(WatchKind, [(v, Term v a, Type v a)])] ->
[[(v, a, Term v a, Type v a)]] ->
[(WatchKind, [(v, a, Term v a, Type v a)])] ->
Map
v
( TermReference,
( a,
TermReference,
Maybe WatchKind,
ABT.Term (Term.F v a a) v a,
ABT.Term Type.F v a
@ -69,14 +70,16 @@ pattern TypecheckedUnisonFile ds es tlcs wcs hts <-
(fmap (first Reference.DerivedId) -> es)
tlcs
wcs
(fmap (over _1 Reference.DerivedId) -> hts)
(fmap (over _2 Reference.DerivedId) -> hts)
instance (Ord v) => Functor (TypecheckedUnisonFile v) where
fmap f (TypecheckedUnisonFileId ds es tlcs wcs hashTerms) =
TypecheckedUnisonFileId ds' es' tlcs' wcs' hashTerms'
where
ds' = fmap (\(id, dd) -> (id, fmap f dd)) ds
es' = fmap (\(id, ed) -> (id, fmap f ed)) es
tlcs' = (fmap . fmap) (\(v, tm, tp) -> (v, Term.amap f tm, fmap f tp)) tlcs
wcs' = map (\(wk, tms) -> (wk, map (\(v, tm, tp) -> (v, Term.amap f tm, fmap f tp)) tms)) wcs
hashTerms' = fmap (\(id, wk, tm, tp) -> (id, wk, Term.amap f tm, fmap f tp)) hashTerms
ds' = ds <&> \(refId, decl) -> (refId, fmap f decl)
es' = es <&> \(refId, effect) -> (refId, fmap f effect)
tlcs' =
tlcs
& (fmap . fmap) \(v, a, tm, tp) -> (v, f a, Term.amap f tm, fmap f tp)
wcs' = map (\(wk, tms) -> (wk, map (\(v, a, tm, tp) -> (v, f a, Term.amap f tm, fmap f tp)) tms)) wcs
hashTerms' = fmap (\(a, id, wk, tm, tp) -> (f a, id, wk, Term.amap f tm, fmap f tp)) hashTerms
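Since every term and watch tuple now carries its binding annotation, consumers can recover a definition's span directly from the file. Below is a minimal, self-contained sketch of that lookup in the spirit of `definitionLocation`; the `FileSketch` type and `annOf` function are hypothetical names and the term payload is simplified to `String`, so this is an illustration under those assumptions, not code from this commit.

import Control.Applicative ((<|>))
import Data.Map (Map)
import Data.Map qualified as Map

-- Hypothetical, simplified stand-ins for the terms and watches fields.
data FileSketch v a = FileSketch
  { sketchTerms :: [(v, a, String)], -- (var, ann for whole binding, term)
    sketchWatches :: Map String [(v, a, String)] -- keyed by watch kind
  }

-- Return the annotation of the first binding or watch named v,
-- mirroring the idea behind definitionLocation.
annOf :: (Eq v) => v -> FileSketch v a -> Maybe a
annOf v file =
  firstAnn (sketchTerms file)
    <|> firstAnn (concat (Map.elems (sketchWatches file)))
  where
    firstAnn xs = case [a | (v', a, _) <- xs, v' == v] of
      a : _ -> Just a
      [] -> Nothing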

View File

@ -148,7 +148,7 @@ resultTest rt uf filepath = do
Right tm -> do
-- compare the watch expression from the .u with the expr in .ur
let watchResult = head (view _5 <$> Map.elems watches)
tm' = Term.letRec' False bindings watchResult
tm' = Term.letRec' False (bindings <&> \(sym, tm) -> (sym, (), tm)) watchResult
-- note . show $ tm'
-- note . show $ Term.amap (const ()) tm
expectEqual tm' (Term.amap (const ()) tm)

View File

@ -91,7 +91,6 @@ library
Unison.Codebase.Type
Unison.Codebase.TypeEdit
Unison.Codebase.Verbosity
Unison.Codebase.Watch
Unison.CodebasePath
Unison.FileParsers
Unison.Hashing.V2.Convert
@ -239,7 +238,6 @@ library
, filepath
, fingertree
, free
, fsnotify
, fuzzyfind
, generic-lens
, generic-monoid
@ -430,7 +428,6 @@ test-suite parser-typechecker-tests
, filepath
, fingertree
, free
, fsnotify
, fuzzyfind
, generic-lens
, generic-monoid

View File

@ -3,7 +3,8 @@
racket/string
racket/file
rnrs/io/ports-6
(only-in racket empty?)
(only-in rnrs standard-error-port standard-input-port standard-output-port vector-map)
(only-in racket empty? with-output-to-string system/exit-code system false?)
compatibility/mlist
(only-in unison/boot data-case define-unison)
unison/data
@ -28,15 +29,23 @@
unison/concurrent
)
(provide
unison-FOp-IO.stdHandle
(prefix-out
builtin-IO.
(combine-out
seekHandle.impl.v3
getLine.impl.v1
getSomeBytes.impl.v1
getBuffering.impl.v3
setBuffering.impl.v3
getEcho.impl.v1
setEcho.impl.v1
getArgs.impl.v1
getEnv.impl.v1
getChar.impl.v1
process.call
getCurrentDirectory.impl.v3
))
; Still to implement:
@ -46,10 +55,6 @@
; ready.impl.v1
; isFileOpen.impl.v3
; isFileEOF.impl.v3
; setEcho.impl.v1
; getEcho.impl.v1
; - unsafe-port->file-descriptor
)
(define either-id (bytevector 6 15 103 128 65 126 44 164 169 154 106 164 187 86 33 156 155 89 79 64 71 158 119 151 142 79 121 206 247 92 41 13 151 250 243 205 13 193 134 218 198 145 193 96 55 87 92 215 34 52 161 162 226 22 169 43 228 184 86 77 149 58 66 125))
@ -64,6 +69,9 @@
[x8 (data (data 'Reference 1 (data 'Id 0 failure-ability-id 0)) 0 typeLink message x7)])
(data (data 'Reference 1 (data 'Id 0 either-id 0)) 1 x8)))
(define-unison (getCurrentDirectory.impl.v3 unit)
(Right (string->chunked-string (path->string (current-directory)))))
(define-unison (seekHandle.impl.v3 handle mode amount)
(data-case mode
(0 ()
@ -83,20 +91,37 @@
(Right (string->chunked-string line))
)))
(define-unison (getChar.impl.v1 handle)
(let* ([char (read-char handle)])
(if (eof-object? char)
(Exception 'isEOFError "End of file reached")
(Right char))))
(define-unison (getSomeBytes.impl.v1 handle bytes)
(let* ([buffer (make-bytes bytes)]
[line (read-bytes-avail! buffer handle)])
(if (eof-object? line)
(Right (bytes->chunked-bytes #""))
(Right (bytes->chunked-bytes buffer))
)))
(define BufferMode
(data 'Reference 1 (data 'Id 0 (bytevector 107 13 114 185 126 64 211 42 13 102 196 109 125 88 217 3 36 251 159 9 35 172 24 16 54 158 72 167 2 22 248 214 77 251 43 81 18 154 173 92 126 242 69 233 142 79 137 22 152 161 71 175 85 193 31 162 82 54 3 70 220 161 142 37) 0)))
(define BlockBuffering (data BufferMode 2))
(define LineBuffering (data BufferMode 1))
(define NoBuffering (data BufferMode 0))
(define Boolean (data 'Reference 0 (string->chunked-string "Boolean")))
(define True (data Boolean 1))
(define False (data Boolean 0))
(define-unison (getBuffering.impl.v3 handle)
(case (file-stream-buffer-mode handle)
[(none) (Right NoBuffering)]
[(line) (Right LineBuffering)]
[(block) (Right BlockBuffering)]
[(#f) (Exception 'IO "Unable to determine buffering mode of handle")]
[else (Exception 'IO "Unexpected response from file-stream-buffer-mode")]))
[(#f) (Exception 'IO "Unable to determine buffering mode of handle" '())]
[else (Exception 'IO "Unexpected response from file-stream-buffer-mode" '())]))
(define-unison (setBuffering.impl.v3 handle mode)
(data-case mode
@ -110,5 +135,65 @@
(file-stream-buffer-mode handle 'block)
(Right none))
(3 (size)
(Exception 'IO "Sized block buffering not supported"))))
(Exception 'IO "Sized block buffering not supported" '()))))
(define (with-buffer-mode port mode)
(file-stream-buffer-mode port mode)
port)
(define stdin (with-buffer-mode (standard-input-port) 'none))
(define stdout (with-buffer-mode (standard-output-port) 'line))
(define stderr (with-buffer-mode (standard-error-port) 'line))
(define (unison-FOp-IO.stdHandle n)
(case n
[(0) stdin]
[(1) stdout]
[(2) stderr]))
(define-unison (getEcho.impl.v1 handle)
(if (eq? handle stdin)
(Right (if (get-stdin-echo) True False))
(Exception 'IO "getEcho only supported on stdin" '())))
(define-unison (setEcho.impl.v1 handle echo)
(if (eq? handle stdin)
(begin
(data-case echo
(1 () (system "stty echo"))
(0 () (system "stty -echo")))
(Right none))
(Exception 'IO "setEcho only supported on stdin" '())))
(define (get-stdin-echo)
(let ([current (with-output-to-string (lambda () (system "stty -a")))])
(string-contains? current " echo ")))
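For comparison, a hypothetical Haskell analogue (not part of this commit) of the stty probe used by `get-stdin-echo` above: it shells out to `stty -a` and checks for " echo " in the output, so it assumes a POSIX environment with `stty` on the PATH.

import Data.List (isInfixOf)
import System.Process (readProcess)

-- True when terminal echo is currently enabled on stdin, according to `stty -a`.
stdinEchoEnabled :: IO Bool
stdinEchoEnabled = do
  settings <- readProcess "stty" ["-a"] ""
  pure (" echo " `isInfixOf` settings)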
(define-unison (getArgs.impl.v1 unit)
(Right (vector->chunked-list
(vector-map string->chunked-string (current-command-line-arguments)))))
(define-unison (getEnv.impl.v1 key)
(let ([value (environment-variables-ref (current-environment-variables) (string->bytes/utf-8 (chunked-string->string key)))])
(if (false? value)
(Exception 'IO "environmental variable not found" key)
(Right (string->chunked-string (bytes->string/utf-8 value))))))
;; From https://github.com/sorawee/shlex/blob/5de06500e8c831cfc8dffb99d57a76decc02c569/main.rkt (MIT License)
;; which is a port of https://github.com/python/cpython/blob/bf2f76ec0976c09de79c8827764f30e3b6fba776/Lib/shlex.py#L325
(define unsafe-pattern #rx"[^a-zA-Z0-9_@%+=:,./-]")
(define (quote-arg s)
(if (non-empty-string? s)
(if (regexp-match unsafe-pattern s)
(string-append "'" (string-replace s "'" "'\"'\"'") "'")
s)
"''"))
(define-unison (process.call command arguments)
(system/exit-code
(string-join (cons
(chunked-string->string command)
(map (lambda (arg) (quote-arg (chunked-string->string arg)))
(vector->list
(chunked-list->vector arguments))))
" ")))

View File

@ -1,8 +1,10 @@
#lang racket/base
(require math/base)
(require math/base
(only-in unison/boot data-case define-unison))
(provide
builtin-Float.exp
(prefix-out unison-POp-
(combine-out
ABSF
@ -18,6 +20,7 @@
ATN2
ATNH
CEIL
EXPF
COSF
COSH
DIVF
@ -29,6 +32,8 @@
SINF
ITOF)))
(define-unison (builtin-Float.exp n) (exp n))
(define (EXPF n) (exp n))
(define ABSF abs)
(define ACOS acos)
(define ACSH acosh)

View File

@ -27,6 +27,7 @@
builtin-Float.*
builtin-Float.fromRepresentation
builtin-Float.toRepresentation
builtin-Float.exp
builtin-Int.+
builtin-Int.-
builtin-Int.increment
@ -36,6 +37,8 @@
builtin-Int.signum
builtin-Nat.increment
builtin-Nat.toFloat
builtin-Text.indexOf
builtin-IO.randomBytes
unison-FOp-internal.dataTag
unison-FOp-Char.toText
@ -48,8 +51,16 @@
unison-FOp-IO.getBytes.impl.v3
builtin-IO.seekHandle.impl.v3
builtin-IO.getLine.impl.v1
builtin-IO.getSomeBytes.impl.v1
builtin-IO.setBuffering.impl.v3
builtin-IO.getBuffering.impl.v3
builtin-IO.setEcho.impl.v1
builtin-IO.process.call
builtin-IO.getEcho.impl.v1
builtin-IO.getArgs.impl.v1
builtin-IO.getEnv.impl.v1
builtin-IO.getChar.impl.v1
builtin-IO.getCurrentDirectory.impl.v3
unison-FOp-IO.getFileSize.impl.v3
unison-FOp-IO.getFileTimestamp.impl.v3
unison-FOp-IO.fileExists.impl.v3
@ -221,12 +232,14 @@
unison-POp-CONS
unison-POp-DBTX
unison-POp-DECI
unison-POp-DECN
unison-POp-DIVN
unison-POp-DRPB
unison-POp-DRPS
unison-POp-DRPT
unison-POp-EQLN
unison-POp-EQLT
unison-POp-EXPF
unison-POp-LEQT
unison-POp-EQLU
unison-POp-EROR
@ -350,6 +363,8 @@
exn:fail:contract?
file-stream-buffer-mode
with-handlers
match
regexp-match-positions
sequence-ref
vector-copy!
bytes-copy!)
@ -357,6 +372,7 @@
(unison arithmetic)
(unison bytevector)
(unison core)
(only (unison boot) define-unison)
(unison data)
(unison math)
(unison chunked-seq)
@ -371,7 +387,21 @@
(unison tcp)
(unison gzip)
(unison zlib)
(unison concurrent))
(unison concurrent)
(racket random))
; NOTE: this is just a temporary stopgap until the real function is
; done. I accidentally pulled in too new a version of base in the
; project version of the unison compiler and it broke the jit tests.
(define-unison (builtin-Text.indexOf s t)
(let ([ss (chunked-string->string s)]
[tt (chunked-string->string t)])
(match (regexp-match-positions ss tt)
[#f (data 'Optional 1)] ; none
[(cons (cons i j) r) (data 'Optional 0 i)]))) ; some
(define-unison (builtin-IO.randomBytes n)
(bytes->chunked-bytes (crypto-random-bytes n)))
(define (unison-POp-UPKB bs)
(build-chunked-list
@ -403,6 +433,7 @@
(define (unison-POp-COMN n) (fxnot n))
(define (unison-POp-CONS x xs) (chunked-list-add-first xs x))
(define (unison-POp-DECI n) (fx1- n))
(define (unison-POp-DECN n) (- n 1))
(define (unison-POp-DIVN m n) (fxdiv m n))
(define (unison-POp-DRPB n bs) (chunked-bytes-drop bs n))
(define (unison-POp-DRPS n l) (chunked-list-drop l n))
@ -579,20 +610,6 @@
(define (unison-FOp-Char.toText c) (string->chunked-string (string (integer->char c))))
(define (with-buffer-mode port mode)
(file-stream-buffer-mode port mode)
port)
(define stdin (with-buffer-mode (standard-input-port) 'none))
(define stdout (with-buffer-mode (standard-output-port) 'line))
(define stderr (with-buffer-mode (standard-error-port) 'line))
(define (unison-FOp-IO.stdHandle n)
(case n
[(0) stdin]
[(1) stdout]
[(2) stderr]))
(define (unison-FOp-IO.getArgs.impl.v1)
(sum 1 (cdr (command-line))))

View File

@ -39,12 +39,9 @@ packages:
- unison-syntax
- yaks/easytest
resolver: lts-20.22
resolver: lts-20.26
extra-deps:
# version in snapshot is too new
- network-3.1.2.7 # 3.1.3.0 doesn't seem to build in Windows
# broken version in snapshot
- github: unisonweb/configurator
commit: e47e9e9fe1f576f8c835183b9def52d73c01327a

View File

@ -4,13 +4,6 @@
# https://docs.haskellstack.org/en/stable/lock_files
packages:
- completed:
hackage: network-3.1.2.7@sha256:9752628bc626e0cad8c53039bea0dc0417f39ca6663232e4d9ac4e35a8925f7d,4911
pantry-tree:
sha256: aa95093a413ed8306699098159047580e0dc0bda4a862a0264a370b993319b24
size: 3971
original:
hackage: network-3.1.2.7
- completed:
name: configurator
pantry-tree:
@ -70,7 +63,7 @@ packages:
hackage: recover-rtti-0.4.2@sha256:c179a303921126d8d782264e14f386c96e54a270df74be002e4c4ec3c8c7aebd,4529
snapshots:
- completed:
sha256: dcf4fc28f12d805480ddbe8eb8c370e11db12f0461d0110a4240af27ac88d725
size: 650255
url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/20/22.yaml
original: lts-20.22
sha256: 5a59b2a405b3aba3c00188453be172b85893cab8ebc352b1ef58b0eae5d248a2
size: 650475
url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/20/26.yaml
original: lts-20.26

View File

@ -56,11 +56,11 @@ test = do
expectExitCode ExitSuccess ucm ["transcript.fork", transcriptFile, "--codebase-create", tempCodebase] "",
-- , expectExitCode ExitSuccess ucm ["headless"] "" -- ?
-- options
expectExitCode ExitSuccess ucm ["--port", "8000", "--codebase-create", tempCodebase, "--no-base"] "exit",
expectExitCode ExitSuccess ucm ["--host", "localhost", "--codebase-create", tempCodebase, "--no-base"] "exit",
expectExitCode ExitSuccess ucm ["--token", "MY_TOKEN", "--codebase-create", tempCodebase, "--no-base"] "exit", -- ?
expectExitCode ExitSuccess ucm ["--codebase-create", tempCodebase, "--no-base"] "exit",
expectExitCode ExitSuccess ucm ["--ui", tempCodebase, "--codebase-create", tempCodebase, "--no-base"] "exit",
expectExitCode ExitSuccess ucm ["--port", "8000", "--codebase-create", tempCodebase] "exit",
expectExitCode ExitSuccess ucm ["--host", "localhost", "--codebase-create", tempCodebase] "exit",
expectExitCode ExitSuccess ucm ["--token", "MY_TOKEN", "--codebase-create", tempCodebase] "exit", -- ?
expectExitCode ExitSuccess ucm ["--codebase-create", tempCodebase] "exit",
expectExitCode ExitSuccess ucm ["--ui", tempCodebase, "--codebase-create", tempCodebase] "exit",
scope "can compile, then run compiled artifact" $
tests
[ expectExitCode ExitSuccess ucm ["transcript", transcriptFile] "",
@ -78,7 +78,7 @@ expectExitCode expected cmd args stdin = scope (intercalate " " (cmd : args)) do
expectEqual code expected
defaultArgs :: [String]
defaultArgs = ["--codebase-create", tempCodebase, "--no-base"]
defaultArgs = ["--codebase-create", tempCodebase]
clearTempCodebase :: () -> IO ()
clearTempCodebase _ =

View File

@ -30,6 +30,7 @@ dependencies:
- extra
- filepath
- free
- fsnotify
- fuzzyfind
- friendly-time
- generic-lens
@ -147,6 +148,7 @@ executables:
- process
- shellmet
- unison-cli
- silently
cli-integration-tests:
when:

View File

@ -88,10 +88,10 @@ typecheckTerm tm = do
let v = Symbol 0 (Var.Inference Var.Other)
liftIO $
fmap extract
<$> Codebase.runTransaction codebase (typecheckFile codebase [] (UF.UnisonFileId mempty mempty [(v, tm)] mempty))
<$> Codebase.runTransaction codebase (typecheckFile codebase [] (UF.UnisonFileId mempty mempty [(v, External, tm)] mempty))
where
extract tuf
| [[(_, _, ty)]] <- UF.topLevelComponents' tuf = ty
| [[(_, _, _, ty)]] <- UF.topLevelComponents' tuf = ty
| otherwise = error "internal error: typecheckTerm"
typecheckFile ::

View File

@ -64,8 +64,8 @@ createAuthorInfo a t = createAuthorInfo' . unpack <$> liftIO (getRandomBytes 32)
Term v a ->
(Reference.Id, Term v a)
hashAndWrangle v typ tm =
case Foldable.toList $ H.hashTermComponents (Map.singleton (Var.named v) (tm, typ)) of
[(id, tm, _tp)] -> (id, tm)
case Foldable.toList $ H.hashTermComponents (Map.singleton (Var.named v) (tm, typ, ())) of
[(id, tm, _tp, ())] -> (id, tm)
_ -> error "hashAndWrangle: Expected a single definition."
(chType, chTypeRef) = (Type.ref a chTypeRef, IOSource.copyrightHolderRef)
(authorType, authorTypeRef) = (Type.ref a authorTypeRef, IOSource.authorRef)

View File

@ -105,7 +105,7 @@ import Unison.Codebase.Editor.Input qualified as Input
import Unison.Codebase.Editor.Output
import Unison.Codebase.Editor.Output qualified as Output
import Unison.Codebase.Editor.Output.DumpNamespace qualified as Output.DN
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace (..), ReadShareLooseCode (..), ShareUserHandle (..))
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace (..))
import Unison.Codebase.Editor.RemoteRepo qualified as RemoteRepo
import Unison.Codebase.Editor.Slurp qualified as Slurp
import Unison.Codebase.Editor.SlurpResult qualified as SlurpResult
@ -143,6 +143,7 @@ import Unison.HashQualified qualified as HQ
import Unison.HashQualified' qualified as HQ'
import Unison.HashQualified' qualified as HashQualified
import Unison.Hashing.V2.Convert qualified as Hashing
import Unison.JitInfo qualified as JitInfo
import Unison.LabeledDependency (LabeledDependency)
import Unison.LabeledDependency qualified as LD
import Unison.LabeledDependency qualified as LabeledDependency
@ -214,6 +215,7 @@ import Unison.Var (Var)
import Unison.Var qualified as Var
import Unison.WatchKind qualified as WK
import Web.Browser (openBrowser)
import Witch (unsafeFrom)
------------------------------------------------------------------------------------------------------------------------
-- Main loop
@ -1235,7 +1237,8 @@ loop e = do
CompileSchemeI output main -> doCompileScheme output main
ExecuteSchemeI main args -> doRunAsScheme main args
GenSchemeLibsI -> doGenerateSchemeBoot True Nothing
FetchSchemeCompilerI name -> doFetchCompiler name
FetchSchemeCompilerI name branch ->
doFetchCompiler name branch
IOTestI main -> handleIOTest main
-- UpdateBuiltinsI -> do
-- stepAt updateBuiltins
@ -1311,7 +1314,7 @@ loop e = do
let datas, effects, terms :: [(Name, Reference.Id)]
datas = [(Name.unsafeFromVar v, r) | (v, (r, _d)) <- Map.toList $ UF.dataDeclarationsId' uf]
effects = [(Name.unsafeFromVar v, r) | (v, (r, _e)) <- Map.toList $ UF.effectDeclarationsId' uf]
terms = [(Name.unsafeFromVar v, r) | (v, (r, _wk, _tm, _tp)) <- Map.toList $ UF.hashTermsId uf]
terms = [(Name.unsafeFromVar v, r) | (v, (_, r, _wk, _tm, _tp)) <- Map.toList $ UF.hashTermsId uf]
Cli.respond $ DumpUnisonFileHashes hqLength datas effects terms
DebugTabCompletionI inputs -> do
Cli.Env {authHTTPClient, codebase} <- ask
@ -1627,7 +1630,8 @@ inputDescription input =
<> Text.unwords (fmap Text.pack args)
CompileSchemeI fi nm -> pure ("compile.native " <> HQ.toText nm <> " " <> Text.pack fi)
GenSchemeLibsI -> pure "compile.native.genlibs"
FetchSchemeCompilerI name -> pure ("compile.native.fetch" <> Text.pack name)
FetchSchemeCompilerI name branch ->
pure ("compile.native.fetch" <> Text.pack name <> " " <> Text.pack branch)
CreateAuthorI (NameSegment id) name -> pure ("create.author " <> id <> " " <> name)
RemoveTermReplacementI src p0 -> do
p <- opatch p0
@ -1970,7 +1974,7 @@ handleIOTest main = do
-- First, look at the terms in the latest typechecked file for a name-match.
whenJustM Cli.getLatestTypecheckedFile \typecheckedFile -> do
whenJust (HQ.toName main) \mainName ->
whenJust (Map.lookup (Name.toVar mainName) (UF.hashTermsId typecheckedFile)) \(ref, _wk, _term, typ) ->
whenJust (Map.lookup (Name.toVar mainName) (UF.hashTermsId typecheckedFile)) \(_, ref, _wk, _term, typ) ->
returnMatches [(Reference.fromId ref, typ)]
-- Then, if we get here (because nothing in the scratch file matched), look at the terms in the codebase.
@ -2205,14 +2209,14 @@ doDisplay outputLoc names tm = do
evalUnisonTermE True (PPE.suffixifiedPPE ppe) useCache (Term.amap (const External) tm)
loadTerm (Reference.DerivedId r) = case Map.lookup r tms of
Nothing -> fmap (fmap Term.unannotate) $ Cli.runTransaction (Codebase.getTerm codebase r)
Just (tm, _) -> pure (Just $ Term.unannotate tm)
Just (_, tm, _) -> pure (Just $ Term.unannotate tm)
loadTerm _ = pure Nothing
loadDecl (Reference.DerivedId r) = case Map.lookup r typs of
Nothing -> fmap (fmap $ DD.amap (const ())) $ Cli.runTransaction $ Codebase.getTypeDeclaration codebase r
Just decl -> pure (Just $ DD.amap (const ()) decl)
loadDecl _ = pure Nothing
loadTypeOfTerm' (Referent.Ref (Reference.DerivedId r))
| Just (_, ty) <- Map.lookup r tms = pure $ Just (void ty)
| Just (_, _, ty) <- Map.lookup r tms = pure $ Just (void ty)
loadTypeOfTerm' r = fmap (fmap void) . Cli.runTransaction . loadTypeOfTerm codebase $ r
rendered <-
DisplayValues.displayTerm
@ -2439,24 +2443,25 @@ compilerPath = Path.Path' {Path.unPath' = Left abs}
rootPath = Path.Path {Path.toSeq = Seq.fromList segs}
abs = Path.Absolute {Path.unabsolute = rootPath}
doFetchCompiler :: String -> Cli ()
doFetchCompiler username =
doFetchCompiler :: String -> String -> Cli ()
doFetchCompiler username branch =
doPullRemoteBranch sourceTarget SyncMode.Complete Input.PullWithoutHistory Verbosity.Silent
where
-- fetching info
ns =
ReadShareLooseCode
{ server = RemoteRepo.DefaultCodeserver,
repo = ShareUserHandle (Text.pack username),
path =
Path.fromList $ NameSegment <$> ["public", "internal", "trunk"]
}
sourceTarget = PullSourceTarget2 (ReadShare'LooseCode ns) (This compilerPath)
prj =
These
(unsafeFrom $ "@" <> Text.pack username <> "/internal")
(unsafeFrom $ Text.pack branch)
sourceTarget =
PullSourceTarget2
(ReadShare'ProjectBranch prj)
(This compilerPath)
ensureCompilerExists :: Cli ()
ensureCompilerExists =
Cli.branchExistsAtPath' compilerPath
>>= flip unless (doFetchCompiler "unison")
>>= flip unless (doFetchCompiler "unison" JitInfo.currentRelease)
getCacheDir :: Cli String
getCacheDir = liftIO $ getXdgDirectory XdgCache "unisonlanguage"
@ -2935,7 +2940,7 @@ addWatch watchName (Just uf) = do
let components = join $ UF.topLevelComponents uf
let mainComponent = filter ((\v -> Var.nameStr v == watchName) . view _1) components
case mainComponent of
[(v, tm, ty)] ->
[(v, ann, tm, ty)] ->
Just $
let v2 = Var.freshIn (Set.fromList [v]) v
a = ABT.annotation tm
@ -2944,7 +2949,7 @@ addWatch watchName (Just uf) = do
(UF.dataDeclarationsId' uf)
(UF.effectDeclarationsId' uf)
(UF.topLevelComponents' uf)
(UF.watchComponents uf <> [(WK.RegularWatch, [(v2, Term.var a v, ty)])])
(UF.watchComponents uf <> [(WK.RegularWatch, [(v2, ann, Term.var a v, ty)])])
)
_ -> addWatch watchName Nothing
@ -2962,7 +2967,7 @@ addSavedTermToUnisonFile resultName = do
UF.typecheckedUnisonFile
(UF.dataDeclarationsId' uf)
(UF.effectDeclarationsId' uf)
([(resultSymbol, trm, typ)] : UF.topLevelComponents' uf)
([(resultSymbol, External, trm, typ)] : UF.topLevelComponents' uf)
(UF.watchComponents uf)
-- | Look up runnable term with the given name in the codebase or
@ -3004,7 +3009,7 @@ getTerm' mainName =
let components = join $ UF.topLevelComponents uf
let mainComponent = filter ((\v -> Var.nameStr v == mainName) . view _1) components
case mainComponent of
[(v, tm, ty)] ->
[(v, _, tm, ty)] ->
checkType ty \otyp ->
let runMain = DD.forceTerm a a (Term.var a v)
v2 = Var.freshIn (Set.fromList [v]) v
@ -3026,7 +3031,7 @@ getTerm' mainName =
createWatcherFile :: Symbol -> Term Symbol Ann -> Type Symbol Ann -> Cli (TypecheckedUnisonFile Symbol Ann)
createWatcherFile v tm typ =
Cli.getLatestTypecheckedFile >>= \case
Nothing -> pure (UF.typecheckedUnisonFile mempty mempty mempty [(magicMainWatcherString, [(v, tm, typ)])])
Nothing -> pure (UF.typecheckedUnisonFile mempty mempty mempty [(magicMainWatcherString, [(v, External, tm, typ)])])
Just uf ->
let v2 = Var.freshIn (Set.fromList [v]) v
in pure $
@ -3035,7 +3040,7 @@ createWatcherFile v tm typ =
(UF.effectDeclarationsId' uf)
(UF.topLevelComponents' uf)
-- what about main's component? we have dropped them if they existed.
[(magicMainWatcherString, [(v2, tm, typ)])]
[(magicMainWatcherString, [(v2, External, tm, typ)])]
executePPE ::
(Var v) =>
@ -3229,7 +3234,7 @@ evalUnisonTerm sandbox ppe useCache tm =
stripUnisonFileReferences :: TypecheckedUnisonFile Symbol a -> Term Symbol () -> Term Symbol ()
stripUnisonFileReferences unisonFile term =
let refMap :: Map Reference.Id Symbol
refMap = Map.fromList . map (\(sym, (refId, _, _, _)) -> (refId, sym)) . Map.toList . UF.hashTermsId $ unisonFile
refMap = Map.fromList . map (\(sym, (_, refId, _, _, _)) -> (refId, sym)) . Map.toList . UF.hashTermsId $ unisonFile
alg () = \case
ABT.Var x -> ABT.var x
ABT.Cycle x -> ABT.cycle x

View File

@ -15,16 +15,14 @@ import U.Codebase.Sqlite.ProjectBranch qualified as Sqlite
import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli (stepAt)
import Unison.Cli.MonadUtils qualified as Cli (updateAt)
import Unison.Cli.ProjectUtils (projectBranchPath)
import Unison.Cli.ProjectUtils qualified as ProjectUtils
import Unison.Cli.Share.Projects qualified as Share
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Editor.HandleInput.Pull qualified as HandleInput.Pull
import Unison.Codebase.Editor.Output qualified as Output
import Unison.Codebase.Path (Path)
import Unison.Codebase.Path qualified as Path
import Unison.Prelude
import Unison.Project (ProjectAndBranch (..), ProjectAndBranchNames (..), ProjectBranchName, ProjectName, projectNameUserSlug)
import Unison.Share.API.Hash qualified as Share.API
@ -302,9 +300,7 @@ cloneInto localProjectBranch remoteProjectBranch = do
let branchHead = hash32ToCausalHash (Share.API.hashJWTHash remoteBranchHeadJwt)
theBranch <- liftIO (Codebase.expectBranchForHash codebase branchHead)
let path = projectBranchPath (over #project fst localProjectAndBranch)
Cli.stepAt
("clone " <> into @Text remoteProjectBranchNames)
(Path.unabsolute path, const (Branch.head theBranch))
Cli.updateAt ("clone " <> into @Text remoteProjectBranchNames) path (const theBranch)
Cli.cd path
-- Return the remote project id associated with the given project branch

View File

@ -183,53 +183,151 @@ generateRandomProjectNames = do
baseNames <-
RandomShuffle.shuffleM do
adjective <-
[ "adorable",
[ "adept",
"adorable",
"ambitious",
"beautiful",
"brave",
"brilliant",
"courageous",
"charming",
"dazzling",
"delightful",
"determined",
"devoted",
"elegant",
"enchanting",
"energetic",
"engaging",
"excited",
"fantastic",
"fortuitous",
"friendly",
"gentle",
"helpful",
"heartwarming",
"hilarious",
"humorous",
"incredible",
"imaginative",
"innocent",
"insightful",
"jolly",
"joyous",
"kind",
"lucky",
"magnificent",
"marvelous",
"nice",
"outstanding",
"patient",
"philosophical",
"pleasant",
"proficient",
"quiet",
"relaxed",
"resourceful",
"responsible",
"silly",
"sincere",
"sensible",
"sparkling",
"spectacular",
"spellbinding",
"stellar",
"thoughtful",
"useful",
"witty"
"vibrant",
"warm-hearted",
"witty",
"wondrous",
"zestful"
]
noun <-
[ "alpaca",
"armadillo",
"axolotl",
"badger",
"blobfish",
"bobcat",
"camel",
"capybara",
"caracal",
"cheetah",
"chameleon",
"chinchilla",
"chipmunk",
"donkey",
"dormouse",
"earwig",
"egret",
"elk",
"ferret",
"fennec",
"fox",
"frog",
"gecko",
"gerbil",
"gibbon",
"giraffe",
"hamster",
"hedgehog",
"herron",
"hippo",
"ibis",
"jaguar",
"kangaroo",
"kiwi",
"koala",
"ladybug",
"lemur",
"leopard",
"lizard",
"llama",
"mallard",
"marmot",
"mole",
"moonrat",
"moose",
"mouse",
"narwhal",
"ocelot",
"ostrich",
"otter",
"owl",
"panda",
"pangolin",
"penguin",
"platypus",
"polecat",
"porcupine",
"possum",
"puffin",
"quahog",
"racoon",
"reindeer",
"rhino",
"seahorse",
"seal",
"serval",
"shrew",
"sloth",
"starling",
"tapir",
"tiger",
"toad",
"toucan",
"turkey",
"turtle",
"urchin",
"vole",
"walrus",
"wallaby",
"wallaroo",
"weasel",
"woodchuck",
"wolverine",
"wombat",
"yak",
"zebra"
]

View File

@ -105,7 +105,7 @@ handleUpdate input optionalPatch requestedNames = do
hashTerms :: Map Reference (Type Symbol Ann)
hashTerms = Map.fromList (toList hashTerms0)
where
hashTerms0 = (\(r, _wk, _tm, typ) -> (r, typ)) <$> UF.hashTerms (Slurp.originalFile sr)
hashTerms0 = (\(_ann, r, _wk, _tm, typ) -> (r, typ)) <$> UF.hashTerms (Slurp.originalFile sr)
termEdits :: [(Name, Reference, Reference)]
termEdits = do
v <- Set.toList (SC.terms (updates sr))
@ -253,7 +253,7 @@ getSlurpResultForUpdate requestedNames slurpCheckNames = do
-- Running example:
--
-- "ping" => (#newping, Nothing, <#wham + 4>, <Nat>)
let nameToInterimInfo :: Map Symbol (TermReferenceId, Maybe WatchKind, Term Symbol Ann, Type Symbol Ann)
let nameToInterimInfo :: Map Symbol (Ann, TermReferenceId, Maybe WatchKind, Term Symbol Ann, Type Symbol Ann)
nameToInterimInfo =
UF.hashTermsId (Slurp.originalFile slurp0)
@ -278,7 +278,7 @@ getSlurpResultForUpdate requestedNames slurpCheckNames = do
( \name ->
case Map.lookup name nameToInterimInfo of
Nothing -> error (reportBug "E798907" "no interim ref for name")
Just (interimRef, _, _, _) -> (nameToTermRefs name, interimRef)
Just (_, interimRef, _, _, _) -> (nameToTermRefs name, interimRef)
)
namesBeingUpdated
@ -409,7 +409,7 @@ getSlurpResultForUpdate requestedNames slurpCheckNames = do
interimTermComponents =
nameToInterimInfo
& Map.elems
& map (\(ref, _wk, term, typ) -> (ref, (term, typ)))
& map (\(_ann, ref, _wk, term, typ) -> (ref, (term, typ)))
& componentize
& uncomponentize
@ -479,7 +479,7 @@ getSlurpResultForUpdate requestedNames slurpCheckNames = do
-- #newping => <#wham + 4>
interimRefToTerm :: Map TermReferenceId (Term Symbol Ann)
interimRefToTerm =
Map.remap (\(_var, (ref, _wk, term, _typ)) -> (ref, term)) nameToInterimInfo
Map.remap (\(_var, (_ann, ref, _wk, term, _typ)) -> (ref, term)) nameToInterimInfo
-- Running example: apply the following reference mapping everywhere in a term:
--
-- #pingpong.ping -> #newping
@ -504,7 +504,9 @@ getSlurpResultForUpdate requestedNames slurpCheckNames = do
-- fresh1 = fresh3 + 4
-- fresh2 = fresh1 + 2
-- fresh3 = fresh2 + 3
terms = Map.elems refToGeneratedNameAndTerm,
terms =
Map.elems refToGeneratedNameAndTerm <&> \(v, term) ->
(v, External, term),
-- In the context of this update, whatever watches were in the latest typechecked Unison file are
-- irrelevant, so we don't need to copy them over.
watches = Map.empty
@ -539,15 +541,16 @@ getSlurpResultForUpdate requestedNames slurpCheckNames = do
-- #newping => "ping"
interimRefToName :: Map TermReferenceId Symbol
interimRefToName =
Map.remap (\(name, (ref, _wk, _term, _typ)) -> (ref, name)) nameToInterimInfo
Map.remap (\(name, (_ann, ref, _wk, _term, _typ)) -> (ref, name)) nameToInterimInfo
let renameTerm ::
(Symbol, Term Symbol Ann, Type Symbol Ann) ->
(Symbol, Term Symbol Ann, Type Symbol Ann)
renameTerm (generatedName, term, typ) =
(Symbol, Ann, Term Symbol Ann, Type Symbol Ann) ->
(Symbol, Ann, Term Symbol Ann, Type Symbol Ann)
renameTerm (generatedName, ann, term, typ) =
( case Map.lookup generatedName generatedNameToName of
Just name -> name
Nothing -> error (reportBug "E440546" "no name for generated name"),
ann,
ABT.renames generatedNameToName term,
typ
)
@ -589,7 +592,7 @@ doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions)
map doTerm . toList $
SC.terms slurp <> UF.constructorsForDecls (SC.types slurp) uf
names = UF.typecheckedToNames uf
tests = Set.fromList $ fst <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf)
tests = Set.fromList $ view _1 <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf)
(isTestType, isTestValue) = IOSource.isTest
md v =
if Set.member v tests

View File

@ -178,8 +178,8 @@ data Input
CompileSchemeI String (HQ.HashQualified Name)
| -- generate scheme libraries
GenSchemeLibsI
| -- fetch scheme compiler from a given username
FetchSchemeCompilerI String
| -- fetch scheme compiler from a given username and branch
FetchSchemeCompilerI String String
| TestI TestInput
| -- metadata
-- `link metadata definitions` (adds metadata to all of `definitions`)

View File

@ -545,10 +545,19 @@ propagate patch b = case validatePatch patch of
UnisonFileId
mempty
mempty
(Map.toList $ (\(_, tm, _) -> tm) <$> componentMap)
( componentMap
& Map.toList
& fmap
( \(v, (_ref, tm, _)) ->
(v, External, tm)
)
)
mempty
typecheckResult <- Cli.typecheckFile codebase [] file
pure . fmap UF.hashTerms $ Result.result typecheckResult
runIdentity (Result.toMaybe typecheckResult)
& fmap UF.hashTerms
& (fmap . fmap) (\(_ann, ref, wk, tm, tp) -> (ref, wk, tm, tp))
& pure
-- TypecheckFile file ambient -> liftIO $ typecheck' ambient codebase file
unhashTypeComponent :: Reference -> Sqlite.Transaction (Map Symbol (Reference, Decl Symbol Ann))

View File

@ -280,13 +280,13 @@ buildVarReferences uf =
-- Filter out non-test watch expressions
& Map.filter
( \case
(_, w, _, _)
(_, _, w, _, _)
| w == Just TestWatch || w == Nothing -> True
| otherwise -> False
)
& Map.bimap
TermVar
(\(refId, _, _, _) -> LD.derivedTerm refId)
(\(_, refId, _, _, _) -> LD.derivedTerm refId)
decls :: Map TaggedVar LD.LabeledDependency
decls =
UF.dataDeclarationsId' uf

View File

@ -193,7 +193,7 @@ pretty isPast ppe sr =
okTerm v = case Map.lookup v tms of
Nothing ->
[(P.bold (prettyVar v), Just $ P.red "(Unison bug, unknown term)")]
Just (_, _, _, ty) ->
Just (_, _, _, _, ty) ->
( plus <> P.bold (prettyVar v),
Just $ ": " <> P.indentNAfterNewline 2 (TP.pretty ppe ty)
)
@ -243,7 +243,7 @@ pretty isPast ppe sr =
(typeLineFor Collision <$> toList (types (collisions sr)))
++ (typeLineFor BlockedDependency <$> toList (types (defsWithBlockedDependencies sr)))
termLineFor status v = case Map.lookup v tms of
Just (_ref, _wk, _tm, ty) ->
Just (_, _ref, _wk, _tm, ty) ->
( prettyStatus status,
P.bold (P.text $ Var.name v),
": " <> P.indentNAfterNewline 6 (TP.pretty ppe ty)
@ -348,4 +348,4 @@ filterUnisonFile
effects = Map.restrictKeys effectDeclarations' keepTypes
tlcs = filter (not . null) $ fmap (List.filter filterTLC) topLevelComponents'
watches = filter (not . null . snd) $ fmap (second (List.filter filterTLC)) watchComponents
filterTLC (v, _, _) = Set.member v keepTerms
filterTLC (v, _, _, _) = Set.member v keepTerms

View File

@ -1,47 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Unison.Codebase.Editor.VersionParser where
import Data.Text (Text)
import Data.Text qualified as Text
import Data.Void (Void)
import Text.Megaparsec
import Text.Megaparsec.Char
import Unison.Codebase.Editor.RemoteRepo
import Unison.Codebase.Path qualified as Path
-- | Parse git version strings into valid unison namespaces.
--
-- >>> parseMaybe defaultBaseLib "release/M4"
-- >>> parseMaybe defaultBaseLib "release/M4b"
-- >>> parseMaybe defaultBaseLib "release/M4c.2"
-- Just (ReadShareLooseCode {server = DefaultCodeserver, repo = "unison", path = public.base.releases.M4})
-- Just (ReadShareLooseCode {server = DefaultCodeserver, repo = "unison", path = public.base.releases.M4b})
-- Just (ReadShareLooseCode {server = DefaultCodeserver, repo = "unison", path = public.base.releases.M4c_2})
-- >>> parseMaybe defaultBaseLib "dev/M4-1-g22ccb0b3b"
-- Just (ReadShareLooseCode {server = DefaultCodeserver, repo = "unison", path = public.base.main})
-- A version with the 'dirty' flag
-- >>> parseMaybe defaultBaseLib "dev/M3-409-gbcdf68db3'"
-- Just (ReadShareLooseCode {server = DefaultCodeserver, repo = "unison", path = public.base.main})
defaultBaseLib :: Parsec Void Text ReadShareLooseCode
defaultBaseLib = fmap makeNS $ release <|> unknown
where
unknown, release, milestoneVersion :: Parsec Void Text Text
unknown = pure "main" <* takeWhileP Nothing (const True) <* eof
release = fmap ("releases." <>) $ "release/" *> milestoneVersion <* eof
-- Parses the milestone of the current version; e.g. M4a -> M4
milestoneVersion = do
m <- char 'M'
milestoneVersion <- some digitChar
_minor <- many (alphaNumChar <|> ('_' <$ oneOf ['.', '_', '-']))
_dirty <- optional (char '\'')
pure . Text.pack $ m : milestoneVersion
makeNS :: Text -> ReadShareLooseCode
makeNS t =
ReadShareLooseCode
{ server = DefaultCodeserver,
repo = ShareUserHandle "unison",
path = "public" Path.:< "base" Path.:< Path.fromText t
}

View File

@ -57,6 +57,8 @@ import Unison.Codebase.Editor.UCMVersion (UCMVersion)
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Path.Parse qualified as Path
import Unison.Codebase.Runtime qualified as Runtime
import Unison.Codebase.Verbosity (Verbosity, isSilent)
import Unison.Codebase.Verbosity qualified as Verbosity
import Unison.CommandLine
import Unison.CommandLine.InputPattern (InputPattern (aliases, patternName))
import Unison.CommandLine.InputPatterns (validInputs)
@ -193,17 +195,18 @@ type TranscriptRunner =
withTranscriptRunner ::
forall m r.
(UnliftIO.MonadUnliftIO m) =>
Verbosity ->
UCMVersion ->
Maybe FilePath ->
(TranscriptRunner -> m r) ->
m r
withTranscriptRunner ucmVersion configFile action = do
withTranscriptRunner verbosity ucmVersion configFile action = do
withRuntimes \runtime sbRuntime -> withConfig $ \config -> do
action \transcriptName transcriptSrc (codebaseDir, codebase) -> do
Server.startServer (Backend.BackendEnv {Backend.useNamesIndex = False}) Server.defaultCodebaseServerOpts runtime codebase $ \baseUrl -> do
let parsed = parse transcriptName transcriptSrc
result <- for parsed \stanzas -> do
liftIO $ run codebaseDir stanzas codebase runtime sbRuntime config ucmVersion (tShow baseUrl)
liftIO $ run verbosity codebaseDir stanzas codebase runtime sbRuntime config ucmVersion (tShow baseUrl)
pure $ join @(Either TranscriptError) result
where
withRuntimes :: ((Runtime.Runtime Symbol -> Runtime.Runtime Symbol -> m a) -> m a)
@ -226,6 +229,7 @@ withTranscriptRunner ucmVersion configFile action = do
(\(config, _cancelConfig) -> action (Just config))
run ::
Verbosity ->
FilePath ->
[Stanza] ->
Codebase IO Symbol Ann ->
@ -235,10 +239,10 @@ run ::
UCMVersion ->
Text ->
IO (Either TranscriptError Text)
run dir stanzas codebase runtime sbRuntime config ucmVersion baseURL = UnliftIO.try $ Ki.scoped \scope -> do
run verbosity dir stanzas codebase runtime sbRuntime config ucmVersion baseURL = UnliftIO.try $ Ki.scoped \scope -> do
httpManager <- HTTP.newManager HTTP.defaultManagerSettings
let initialPath = Path.absoluteEmpty
putPrettyLn $
unless (isSilent verbosity) . putPrettyLn $
Pretty.lines
[ asciiartUnison,
"",
@ -368,13 +372,14 @@ run dir stanzas codebase runtime sbRuntime config ucmVersion baseURL = UnliftIO.
liftIO (putStrLn "")
pure $ Right QuitI
Just (s, idx) -> do
liftIO . putStr $
"\r⚙️ Processing stanza "
++ show idx
++ " of "
++ show (length stanzas)
++ "."
liftIO (IO.hFlush IO.stdout)
unless (Verbosity.isSilent verbosity) . liftIO $ do
putStr $
"\r⚙️ Processing stanza "
++ show idx
++ " of "
++ show (length stanzas)
++ "."
IO.hFlush IO.stdout
case s of
Unfenced _ -> do
liftIO (output $ show s)

View File

@ -8,17 +8,19 @@ import Control.Lens.Cons qualified as Cons
import Data.List (intercalate)
import Data.List.NonEmpty qualified as NE
import Data.Map qualified as Map
import Data.Maybe (fromJust)
import Data.Proxy (Proxy (..))
import Data.Set qualified as Set
import Data.Text qualified as Text
import Data.These (These (..))
import Network.URI qualified as URI
import System.Console.Haskeline.Completion (Completion (Completion))
import System.Console.Haskeline.Completion qualified as Haskeline
import Text.Megaparsec qualified as P
import U.Codebase.Sqlite.DbId (ProjectBranchId, ProjectId)
import U.Codebase.Sqlite.Project qualified as Sqlite
import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.Cli.Pretty (prettyProjectNameSlash, prettySlashProjectBranchName)
import Unison.Cli.Pretty (prettyProjectNameSlash, prettySlashProjectBranchName, prettyURI)
import Unison.Cli.ProjectUtils qualified as ProjectUtils
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
@ -44,6 +46,7 @@ import Unison.CommandLine.Globbing qualified as Globbing
import Unison.CommandLine.InputPattern (ArgumentType (..), InputPattern (InputPattern), IsOptional (..))
import Unison.CommandLine.InputPattern qualified as I
import Unison.HashQualified qualified as HQ
import Unison.JitInfo qualified as JitInfo
import Unison.Name (Name)
import Unison.NameSegment qualified as NameSegment
import Unison.Prelude
@ -741,7 +744,7 @@ deleteBranch =
{ patternName = "delete.branch",
aliases = ["branch.delete"],
visibility = I.Visible,
argTypes = [(Required, projectBranchNameWithOptionalProjectNameArg)],
argTypes = [(Required, projectAndBranchNamesArg True)],
help =
P.wrapColumn2
[ ("`delete.branch foo/bar`", "deletes the branch `bar` in the project `foo`"),
@ -1666,7 +1669,8 @@ helpTopicsMap =
("filestatus", fileStatusMsg),
("messages.disallowedAbsolute", disallowedAbsoluteMsg),
("remotes", remotesMsg),
("namespaces", pathnamesMsg)
("namespaces", pathnamesMsg),
("projects", projectsMsg)
]
where
blankline = ("", "")
@ -1790,6 +1794,30 @@ helpTopicsMap =
<> "If the project was created locally then the relationship will be established on"
<> "the first `push`."
]
projectsMsg =
P.lines $
[ P.wrap $
"A project is a versioned collection of code that can be edited, published, and depended on other projects."
<> "Unison projects are analogous to Git repositories.",
"",
P.column2
[ (patternName projectCreate, "create a new project"),
(patternName projectsInputPattern, "list all your projects"),
(patternName branchInputPattern, "create a new workstream"),
(patternName branchesInputPattern, "list all your branches"),
(patternName mergeLocal, "merge one branch into another"),
(patternName projectSwitch, "switch to a project or branch"),
(patternName push, "upload your changes to Unison Share"),
(patternName pull, "download code(/changes/updates) from Unison Share"),
(patternName clone, "download a Unison Share project or branch for contribution")
],
"",
tip ("Use" <> makeExample help [patternName projectCreate] <> "to learn more."),
"",
P.wrap $
"For full documentation, see"
<> prettyURI (fromJust (URI.parseURI "https://unison-lang.org/learn/projects"))
]
help :: InputPattern
help =
@ -1806,10 +1834,24 @@ help =
"\n\n"
showPatternHelp
visibleInputs
[isHelp -> Just msg] -> Left msg
[cmd] -> case Map.lookup cmd commandsByName of
Nothing -> Left . warn $ "I don't know of that command. Try `help`."
Just pat -> Left $ showPatternHelp pat
[cmd] ->
case (Map.lookup cmd commandsByName, isHelp cmd) of
(Nothing, Just msg) -> Left msg
(Nothing, Nothing) -> Left . warn $ "I don't know of that command. Try `help`."
(Just pat, Nothing) -> Left $ showPatternHelp pat
-- If we have a command and a help topic with the same name (like "projects"), then append a tip to the
-- command's help that suggests running `help-topic command`
(Just pat, Just _) ->
Left $
showPatternHelp pat
<> P.newline
<> P.newline
<> ( tip $
"To read more about"
<> P.group (P.string cmd <> ",")
<> "use"
<> makeExample helpTopics [P.string cmd]
)
_ -> Left $ warn "Use `help <cmd>` or `help`."
)
where
@ -2300,22 +2342,36 @@ fetchScheme =
[]
( P.wrapColumn2
[ ( makeExample fetchScheme [],
"Fetches the unison library for compiling to scheme.\n\n\
\This is done automatically when"
<> P.group (makeExample compileScheme [])
<> "is run\
\ if the library is not already in the standard location\
\ (unison.internal). However, this command will force\
\ a pull even if the library already exists. You can also specify\
\ a username to pull from (the default is `unison`) to use an alternate\
\ implementation of the scheme compiler. It will attempt to fetch\
\ [username].public.internal.trunk for use."
P.lines . fmap P.wrap $
[ "Fetches the unison library for compiling to scheme.",
"This is done automatically when"
<> P.group (makeExample compileScheme [])
<> "is run if the library is not already in the\
\ standard location (unison.internal). However,\
\ this command will force a pull even if the\
\ library already exists.",
"You can also specify a user and branch name to pull\
\ from in order to use an alternate version of the\
\ unison compiler (for development purposes, for\
\ example).",
"The default user is `unison`. The default branch\
\ for the `unison` user is a specified latest\
\ version of the compiler for stability. The\
\ default branch for other users is `main`. The\
\ command fetches code from a project:",
P.indentN 2 ("@user/internal/branch")
]
)
]
)
( \case
[] -> pure (Input.FetchSchemeCompilerI "unison")
[name] -> pure (Input.FetchSchemeCompilerI name)
[] -> pure (Input.FetchSchemeCompilerI "unison" JitInfo.currentRelease)
[name] -> pure (Input.FetchSchemeCompilerI name branch)
where
branch
| name == "unison" = JitInfo.currentRelease
| otherwise = "main"
[name, branch] -> pure (Input.FetchSchemeCompilerI name branch)
_ -> Left $ showPatternHelp fetchScheme
)

View File

@ -156,8 +156,7 @@ main dir welcome initialPath config initialInputs runtime sbRuntime codebase ser
loop currentRoot
loop Nothing
eventQueue <- Q.newIO
welcomeEvents <- Welcome.run codebase welcome
initialInputsRef <- newIORef $ welcomeEvents ++ initialInputs
initialInputsRef <- newIORef $ Welcome.run welcome ++ initialInputs
pageOutput <- newIORef True
cancelFileSystemWatch <- case shouldWatchFiles of
ShouldNotWatchFiles -> pure (pure ())

View File

@ -29,11 +29,7 @@ import Network.HTTP.Types qualified as Http
import Servant.Client qualified as Servant
import System.Console.ANSI qualified as ANSI
import System.Console.Haskeline.Completion qualified as Completion
import System.Directory
( canonicalizePath,
doesFileExist,
getHomeDirectory,
)
import System.Directory (canonicalizePath, doesFileExist, getHomeDirectory)
import U.Codebase.Branch (NamespaceStats (..))
import U.Codebase.Branch.Diff (NameChanges (..))
import U.Codebase.HashTags (CausalHash (..))
@ -2124,11 +2120,11 @@ notifyUser dir = \case
"I'll now fetch the latest version of the base Unison library..."
FailedToFetchLatestReleaseOfBase ->
pure . P.wrap $ "Sorry something went wrong while fetching the library."
HappyCoding ->
HappyCoding -> do
pure $
P.wrap "🎨 Type `ui` to explore this project's code in your browser."
<> P.newline
<> P.wrap "🌏 Discover libraries at https://share.unison-lang.org"
<> P.wrap ("🔭 Discover libraries at https://share.unison-lang.org")
<> P.newline
<> P.wrap "📖 Use `help-topic projects` to learn more about projects."
<> P.newline

View File

@ -1,35 +1,22 @@
module Unison.CommandLine.Welcome where
module Unison.CommandLine.Welcome
( CodebaseInitStatus (..),
Welcome (..),
asciiartUnison,
run,
welcome,
)
where
import Data.Sequence (singleton)
import Data.These (These (..))
import System.Random (randomRIO)
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Editor.Input
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace (..), ReadShareLooseCode (..))
import Unison.Codebase.Path (Path)
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.SyncMode qualified as SyncMode
import Unison.Codebase.Verbosity qualified as Verbosity
import Unison.CommandLine.Types (ShouldWatchFiles (..))
import Unison.NameSegment (NameSegment (NameSegment))
import Unison.Prelude
import Unison.Util.Pretty qualified as P
import Prelude hiding (readFile, writeFile)
data Welcome = Welcome
{ onboarding :: Onboarding, -- Onboarding States
downloadBase :: DownloadBase,
watchDir :: FilePath,
unisonVersion :: Text,
shouldWatchFiles :: ShouldWatchFiles
unisonVersion :: Text
}
data DownloadBase
= DownloadBase ReadShareLooseCode
| DontDownloadBase
deriving (Show, Eq)
-- Previously Created is different from Previously Onboarded because a user can
-- 1.) create a new codebase
-- 2.) decide not to go through the onboarding flow until later and exit
@ -40,80 +27,44 @@ data CodebaseInitStatus
deriving (Show, Eq)
data Onboarding
= Init CodebaseInitStatus -- Can transition to [DownloadingBase, Author, Finished, PreviouslyOnboarded]
| DownloadingBase ReadShareLooseCode -- Can transition to [Author, Finished]
= Init CodebaseInitStatus -- Can transition to [Author, Finished, PreviouslyOnboarded]
| Author -- Can transition to [Finished]
-- End States
| Finished
| PreviouslyOnboarded
deriving (Show, Eq)
welcome :: CodebaseInitStatus -> DownloadBase -> FilePath -> Text -> ShouldWatchFiles -> Welcome
welcome initStatus downloadBase filePath unisonVersion shouldWatchFiles =
Welcome (Init initStatus) downloadBase filePath unisonVersion shouldWatchFiles
welcome :: CodebaseInitStatus -> Text -> Welcome
welcome initStatus unisonVersion =
Welcome (Init initStatus) unisonVersion
pullBase :: ReadShareLooseCode -> Either Event Input
pullBase ns =
let seg = NameSegment "base"
rootPath = Path.Path {Path.toSeq = singleton seg}
abs = Path.Absolute {Path.unabsolute = rootPath}
pullRemote =
PullRemoteBranchI
( PullSourceTarget2
(ReadShare'LooseCode ns)
(This (Path.Path' {Path.unPath' = Left abs}))
)
SyncMode.Complete
PullWithHistory
Verbosity.Silent
in Right pullRemote
run :: Codebase IO v a -> Welcome -> IO [Either Event Input]
run codebase Welcome {onboarding = onboarding, downloadBase = downloadBase, watchDir = dir, unisonVersion = version, shouldWatchFiles} = do
run :: Welcome -> [Either Event Input]
run Welcome {onboarding = onboarding, unisonVersion = version} = do
go onboarding []
where
go :: Onboarding -> [Either Event Input] -> IO [Either Event Input]
go :: Onboarding -> [Either Event Input] -> [Either Event Input]
go onboarding acc =
case onboarding of
Init NewlyCreatedCodebase -> do
determineFirstStep downloadBase codebase >>= \step -> go step (headerMsg : acc)
go PreviouslyOnboarded (headerMsg : acc)
where
headerMsg = toInput (header version)
Init PreviouslyCreatedCodebase -> do
go PreviouslyOnboarded (headerMsg : acc)
where
headerMsg = toInput (header version)
DownloadingBase ns@(ReadShareLooseCode {path}) ->
go Author ([pullBaseInput, downloadMsg] ++ acc)
where
downloadMsg = Right $ CreateMessage (downloading path)
pullBaseInput = pullBase ns
Author ->
go Finished (authorMsg : acc)
where
authorMsg = toInput authorSuggestion
-- These are our two terminal Welcome conditions; at the end we reverse the accumulated input commands, since otherwise they would come out backwards
Finished -> do
startMsg <- getStarted shouldWatchFiles dir
pure $ reverse (toInput startMsg : acc)
PreviouslyOnboarded -> do
startMsg <- getStarted shouldWatchFiles dir
pure $ reverse (toInput startMsg : acc)
Finished -> reverse (toInput getStarted : acc)
PreviouslyOnboarded -> reverse (toInput getStarted : acc)
toInput :: P.Pretty P.ColorText -> Either Event Input
toInput pretty =
Right $ CreateMessage pretty
determineFirstStep :: DownloadBase -> Codebase IO v a -> IO Onboarding
determineFirstStep downloadBase codebase = do
isEmptyCodebase <- Codebase.runTransaction codebase Codebase.getRootBranchExists
case downloadBase of
DownloadBase ns
| isEmptyCodebase ->
pure $ DownloadingBase ns
_ ->
pure PreviouslyOnboarded
asciiartUnison :: P.Pretty P.ColorText
asciiartUnison =
P.red " _____"
@ -139,20 +90,6 @@ asciiartUnison =
<> P.cyan "|___|"
<> P.purple "_|_|"
downloading :: Path -> P.Pretty P.ColorText
downloading path =
P.lines
[ P.group (P.wrap "🐣 Since this is a fresh codebase, let me download the base library for you." <> P.newline),
P.wrap
( "🕐 Downloading"
<> P.blue (P.string (show path))
<> "of the"
<> P.bold "base library"
<> "into"
<> P.group (P.blue ".base" <> ", this may take a minute...")
)
]
header :: Text -> P.Pretty P.ColorText
header version =
asciiartUnison
@ -173,22 +110,9 @@ authorSuggestion =
P.wrap $ P.blue "https://www.unison-lang.org/learn/tooling/configuration/"
]
getStarted :: ShouldWatchFiles -> FilePath -> IO (P.Pretty P.ColorText)
getStarted shouldWatchFiles dir = do
earth <- (["🌎", "🌍", "🌏"] !!) <$> randomRIO (0, 2)
pure $
P.linesSpaced
[ P.wrap "Get started:",
P.indentN 2 $
P.column2
( [ ("📖", "Type " <> P.hiBlue "help" <> " to list all commands, or " <> P.hiBlue "help <cmd>" <> " to view help for one command"),
("🎨", "Type " <> P.hiBlue "ui" <> " to open the Codebase UI in your default browser"),
("📚", "Read the official docs at " <> P.blue "https://www.unison-lang.org/learn/"),
(earth, "Visit Unison Share at " <> P.blue "https://share.unison-lang.org" <> " to discover libraries")
]
<> case shouldWatchFiles of
ShouldWatchFiles -> [("👀", "I'm watching for changes to " <> P.bold ".u" <> " files under " <> (P.group . P.blue $ P.string dir))]
ShouldNotWatchFiles -> [("📝", "File watching is disabled, use the 'load' command to parse and typecheck unison files.")]
)
]
getStarted :: P.Pretty P.ColorText
getStarted =
P.wrap "📚 Read the official docs at https://www.unison-lang.org/learn/"
<> P.newline
<> P.newline
<> P.wrap "Type 'project.create' to get started."

View File

@ -0,0 +1,4 @@
module Unison.JitInfo (currentRelease) where
currentRelease :: String
currentRelease = "releases/0.0.1"

View File

@ -115,7 +115,7 @@ mkFileSummary parsed typechecked = case (parsed, typechecked) of
(Nothing, Nothing) -> Nothing
(_, Just tf@(UF.TypecheckedUnisonFileId {dataDeclarationsId', effectDeclarationsId', hashTermsId})) ->
let (trms, testWatches, exprWatches) =
hashTermsId & ifoldMap \sym (ref, wk, trm, typ) ->
hashTermsId & ifoldMap \sym (_ann, ref, wk, trm, typ) ->
case wk of
Nothing -> (Map.singleton sym (Just ref, trm, getUserTypeAnnotation sym <|> Just typ), mempty, mempty)
Just TestWatch -> (mempty, [(assertUserSym sym, Just ref, trm, getUserTypeAnnotation sym <|> Just typ)], mempty)
@ -134,11 +134,11 @@ mkFileSummary parsed typechecked = case (parsed, typechecked) of
}
(Just uf@(UF.UnisonFileId {dataDeclarationsId, effectDeclarationsId, terms, watches}), _) ->
let trms =
terms & foldMap \(sym, trm) ->
terms & foldMap \(sym, _ann, trm) ->
(Map.singleton sym (Nothing, trm, Nothing))
(testWatches, exprWatches) =
watches & ifoldMap \wk tms ->
tms & foldMap \(v, trm) ->
tms & foldMap \(v, _ann, trm) ->
case wk of
TestWatch -> ([(assertUserSym v, Nothing, trm, Nothing)], mempty)
_ -> (mempty, [(assertUserSym v, Nothing, trm, Nothing)])
@ -172,7 +172,7 @@ mkFileSummary parsed typechecked = case (parsed, typechecked) of
getUserTypeAnnotation :: Symbol -> Maybe (Type Symbol Ann)
getUserTypeAnnotation v = do
UF.UnisonFileId {terms, watches} <- parsed
trm <- Prelude.lookup v (terms <> fold watches)
trm <- (terms <> fold watches) ^? folded . filteredBy (_1 . only v) . _3
typ <- Term.getTypeAnnotation trm
pure typ
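Since `terms` and `watches` now carry an annotation in the middle position, the old `Prelude.lookup` over pairs is replaced by a fold that filters on the first component and projects the third. A self-contained sketch of that lens idiom, using a plain triple as a stand-in for the real `UnisonFile` entries:

```haskell
import Control.Lens (filteredBy, folded, only, (^?), _1, _3)

-- Mirrors `(terms <> fold watches) ^? folded . filteredBy (_1 . only v) . _3`:
-- find the first triple whose key is v and return its third component.
lookupTerm :: Eq k => k -> [(k, ann, term)] -> Maybe term
lookupTerm v xs = xs ^? folded . filteredBy (_1 . only v) . _3

example :: Maybe String
example = lookupTerm "foo" [("bar", 1 :: Int, "x"), ("foo", 2, "y")]
-- Just "y"
```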

View File

@ -29,7 +29,7 @@ foldingRangesForFile fileUri =
UnisonFileId {dataDeclarationsId, effectDeclarationsId, terms} <- MaybeT $ pure parsedFile
let dataFolds = dataDeclarationsId ^.. folded . _2 . to dataDeclSpan
let abilityFolds = effectDeclarationsId ^.. folded . _2 . to DD.toDataDecl . to dataDeclSpan
let termFolds = terms ^.. folded . _2 . to ABT.annotation
let termFolds = terms ^.. folded . _3 . to ABT.annotation
let folds = dataFolds <> abilityFolds <> termFolds
let ranges = mapMaybe annToRange folds
pure $ ranges <&> \r -> FoldingRange {_startLine = r ^. start . line, _startCharacter = Just (r ^. start . character), _endLine = r ^. end . line, _endCharacter = Just (r ^. end . character), _kind = Just FoldingRangeRegion}

View File

@ -9,7 +9,6 @@ import Unison.Test.Cli.Monad qualified as Cli.Monad
import Unison.Test.GitSync qualified as GitSync
import Unison.Test.LSP qualified as LSP
import Unison.Test.UriParser qualified as UriParser
import Unison.Test.VersionParser qualified as VersionParser
test :: Test ()
test =
@ -18,8 +17,7 @@ test =
ClearCache.test,
Cli.Monad.test,
GitSync.test,
UriParser.test,
VersionParser.test
UriParser.test
]
main :: IO ()

View File

@ -254,7 +254,7 @@ makeNodeSelectionTest (name, testSrc, testTypechecked, expected) = scope name $
pf <- maybe (crash (show ("Failed to parse" :: String, notes))) pure mayParsedFile
let pfResult =
UF.terms pf
& firstJust \(_v, trm) ->
& firstJust \(_v, _fileAnn, trm) ->
LSPQ.findSmallestEnclosingNode pos trm
expectEqual (Just expected) (void <$> pfResult)
@ -264,7 +264,7 @@ makeNodeSelectionTest (name, testSrc, testTypechecked, expected) = scope name $
let tfResult =
UF.hashTermsId tf
& toList
& firstJust \(_refId, _wk, trm, _typ) ->
& firstJust \(_fileAnn, _refId, _wk, trm, _typ) ->
LSPQ.findSmallestEnclosingNode pos trm
expectEqual (Just expected) (void <$> tfResult)
@ -302,7 +302,7 @@ annotationNestingTest (name, src) = scope name do
tf <- maybe (crash "Failed to typecheck") pure maytf
UF.hashTermsId tf
& toList
& traverse_ \(_refId, _wk, trm, _typ) ->
& traverse_ \(_fileAnn, _refId, _wk, trm, _typ) ->
assertAnnotationsAreNested trm
-- | Asserts that for all nodes in the provided ABT, the annotations of all child nodes are

View File

@ -25,6 +25,7 @@ import Unison.Codebase.Init qualified as Codebase.Init
import Unison.Codebase.Init.CreateCodebaseError (CreateCodebaseError (..))
import Unison.Codebase.SqliteCodebase qualified as SC
import Unison.Codebase.TranscriptParser qualified as TR
import Unison.Codebase.Verbosity qualified as Verbosity
import Unison.Parser.Ann (Ann)
import Unison.Prelude (traceM)
import Unison.PrettyTerminal qualified as PT
@ -65,7 +66,7 @@ runTranscript :: Codebase -> Transcript -> IO TranscriptOutput
runTranscript (Codebase codebasePath fmt) transcript = do
let err e = fail $ "Parse error: \n" <> show e
cbInit = case fmt of CodebaseFormat2 -> SC.init
TR.withTranscriptRunner "Unison.Test.Ucm.runTranscript Invalid Version String" configFile $ \runner -> do
TR.withTranscriptRunner Verbosity.Silent "Unison.Test.Ucm.runTranscript Invalid Version String" configFile $ \runner -> do
result <- Codebase.Init.withOpenCodebase cbInit "transcript" codebasePath SC.DoLock SC.DontMigrate \codebase -> do
Codebase.runTransaction codebase (Codebase.installUcmDependencies codebase)
let transcriptSrc = stripMargin . Text.pack $ unTranscript transcript

View File

@ -1,37 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Unison.Test.VersionParser where
import Control.Error.Safe (rightMay)
import Data.Text
import EasyTest
import Text.Megaparsec
import Unison.Codebase.Editor.RemoteRepo
import Unison.Codebase.Editor.VersionParser
import Unison.Codebase.Path qualified as Path
test :: Test ()
test =
scope "versionparser" . tests . fmap makeTest $
[ ("latest-abc", "main"),
("dev/M4", "main"), -- or should this be "releases.M4"?
("dev/M4-1-g22ccb0b3b", "main"), -- and should this also be "releases.m4"?
-- All non-dev releases should pull from the most recent major milestone
("release/M4", "releases.M4"),
("release/M2i_3", "releases.M2"),
("release/M2i-HOTFIX", "releases.M2")
]
makeTest :: (Text, Text) -> Test ()
makeTest (version, path) =
scope (unpack version) $
expectEqual
(rightMay $ runParser defaultBaseLib "versionparser" version)
( Just
( ReadShareLooseCode
{ server = DefaultCodeserver,
repo = ShareUserHandle "unison",
path = Path.fromList ["public", "base"] <> Path.fromText path
}
)
)

View File

@ -18,10 +18,13 @@ import System.FilePath
(</>),
)
import System.IO.CodePage (withCP65001)
import System.IO.Silently (silence)
import Unison.Codebase.Init (withTemporaryUcmCodebase)
import Unison.Codebase.SqliteCodebase qualified as SC
import Unison.Codebase.TranscriptParser (TranscriptError (..), withTranscriptRunner)
import Unison.Codebase.Verbosity qualified as Verbosity
import Unison.Prelude
import UnliftIO.STM qualified as STM
data TestConfig = TestConfig
{ matchPrefix :: Maybe String
@ -31,28 +34,36 @@ data TestConfig = TestConfig
type TestBuilder = FilePath -> [String] -> String -> Test ()
testBuilder ::
Bool -> FilePath -> [String] -> String -> Test ()
testBuilder expectFailure dir prelude transcript = scope transcript $ do
outputs <- io . withTemporaryUcmCodebase SC.init "transcript" SC.DoLock $ \(codebasePath, codebase) -> do
withTranscriptRunner "TODO: pass version here" Nothing $ \runTranscript -> do
Bool -> ((FilePath, Text) -> IO ()) -> FilePath -> [String] -> String -> Test ()
testBuilder expectFailure recordFailure dir prelude transcript = scope transcript $ do
outputs <- io . withTemporaryUcmCodebase SC.init Verbosity.Silent "transcript" SC.DoLock $ \(codebasePath, codebase) -> do
withTranscriptRunner Verbosity.Silent "TODO: pass version here" Nothing $ \runTranscript -> do
for files $ \filePath -> do
transcriptSrc <- readUtf8 filePath
out <- runTranscript filePath transcriptSrc (codebasePath, codebase)
out <- silence $ runTranscript filePath transcriptSrc (codebasePath, codebase)
pure (filePath, out)
for_ outputs $ \case
(filePath, Left err) -> do
let outputFile = outputFileForTranscript filePath
case err of
TranscriptParseError msg -> do
when (not expectFailure) . crash $ "Error parsing " <> filePath <> ": " <> Text.unpack msg
when (not expectFailure) $ do
let errMsg = "Error parsing " <> filePath <> ": " <> Text.unpack msg
io $ recordFailure (filePath, Text.pack errMsg)
crash errMsg
TranscriptRunFailure errOutput -> do
io $ writeUtf8 outputFile errOutput
io $ Text.putStrLn errOutput
when (not expectFailure) . crash $ "Failure in " <> filePath
when (not expectFailure) $ do
io $ Text.putStrLn errOutput
io $ recordFailure (filePath, errOutput)
crash $ "Failure in " <> filePath
(filePath, Right out) -> do
let outputFile = outputFileForTranscript filePath
io $ writeUtf8 outputFile out
when expectFailure $ crash "Expected a failure, but transcript was successful."
when expectFailure $ do
let errMsg = "Expected a failure, but transcript was successful."
io $ recordFailure (filePath, Text.pack errMsg)
crash errMsg
ok
where
files = fmap (dir </>) (prelude ++ [transcript])
@ -109,12 +120,23 @@ cleanup = do
test :: TestConfig -> Test ()
test config = do
buildTests config (testBuilder False) $
-- We manually aggregate and display failures at the end to make it much easier to see
-- what went wrong in CI
failuresVar <- io $ STM.newTVarIO []
let recordFailure failure = STM.atomically $ STM.modifyTVar' failuresVar (failure :)
buildTests config (testBuilder False recordFailure) $
"unison-src" </> "transcripts"
buildTests config (testBuilder False) $
buildTests config (testBuilder False recordFailure) $
"unison-src" </> "transcripts-using-base"
buildTests config (testBuilder True) $
buildTests config (testBuilder True recordFailure) $
"unison-src" </> "transcripts" </> "errors"
failures <- io $ STM.readTVarIO failuresVar
-- Print all aggregated failures
when (not $ null failures) . io $ Text.putStrLn $ "Failures:"
for failures $ \(filepath, msg) -> io $ do
Text.putStrLn $ Text.replicate 80 "="
Text.putStrLn $ "🚨 " <> Text.pack filepath <> ": "
Text.putStrLn msg
cleanup
handleArgs :: [String] -> TestConfig
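The failure handling above is a record-then-report pattern: each transcript test pushes its failure into a shared `TVar`, and only after all suites have run is the list printed in one block. A minimal sketch of that pattern using just the `stm` package (the test-runner plumbing is elided; the file name and message here are made up):

```haskell
import Control.Concurrent.STM
import Control.Monad (unless)
import Data.Foldable (for_)

main :: IO ()
main = do
  failuresVar <- newTVarIO ([] :: [(FilePath, String)])
  let recordFailure f = atomically (modifyTVar' failuresVar (f :))

  -- ... the individual transcript tests would call recordFailure as they fail ...
  recordFailure ("unison-src/transcripts/example.md", "Failure in transcript")

  failures <- readTVarIO failuresVar
  unless (null failures) $ do
    putStrLn "Failures:"
    for_ failures $ \(path, msg) -> do
      putStrLn (replicate 80 '=')
      putStrLn ("🚨 " <> path <> ": ")
      putStrLn msg
```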

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
-- This file has been generated from package.yaml by hpack version 0.35.1.
--
-- see: https://github.com/sol/hpack
@ -76,8 +76,8 @@ library
Unison.Codebase.Editor.TodoOutput
Unison.Codebase.Editor.UCMVersion
Unison.Codebase.Editor.UriParser
Unison.Codebase.Editor.VersionParser
Unison.Codebase.TranscriptParser
Unison.Codebase.Watch
Unison.CommandLine
Unison.CommandLine.Completion
Unison.CommandLine.DisplayValues
@ -89,6 +89,7 @@ library
Unison.CommandLine.OutputMessages
Unison.CommandLine.Types
Unison.CommandLine.Welcome
Unison.JitInfo
Unison.LSP
Unison.LSP.CancelRequest
Unison.LSP.CodeAction
@ -168,6 +169,7 @@ library
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
@ -302,6 +304,7 @@ executable cli-integration-tests
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
@ -430,6 +433,7 @@ executable transcripts
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
@ -461,6 +465,7 @@ executable transcripts
, servant
, servant-client
, shellmet
, silently
, stm
, text
, text-builder
@ -563,6 +568,7 @@ executable unison
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline
@ -643,7 +649,6 @@ test-suite cli-tests
Unison.Test.LSP
Unison.Test.Ucm
Unison.Test.UriParser
Unison.Test.VersionParser
hs-source-dirs:
tests
default-extensions:
@ -704,6 +709,7 @@ test-suite cli-tests
, filepath
, free
, friendly-time
, fsnotify
, fuzzyfind
, generic-lens
, haskeline

View File

@ -80,11 +80,6 @@ data ShouldForkCodebase
| DontFork
deriving (Show, Eq)
data ShouldDownloadBase
= ShouldDownloadBase
| ShouldNotDownloadBase
deriving (Show, Eq)
data ShouldSaveCodebase
= SaveCodebase (Maybe FilePath)
| DontSaveCodebase
@ -109,7 +104,6 @@ data Command
= Launch
IsHeadless
CodebaseServerOpts
ShouldDownloadBase
-- Starting path
(Maybe Path.Absolute)
ShouldWatchFiles
@ -359,10 +353,9 @@ launchParser :: CodebaseServerOpts -> IsHeadless -> Parser Command
launchParser envOpts isHeadless = do
-- ApplicativeDo
codebaseServerOpts <- codebaseServerOptsParser envOpts
downloadBase <- downloadBaseFlag
startingPath <- startingPathOption
shouldWatchFiles <- noFileWatchFlag
pure (Launch isHeadless codebaseServerOpts downloadBase startingPath shouldWatchFiles)
pure (Launch isHeadless codebaseServerOpts startingPath shouldWatchFiles)
initParser :: Parser Command
initParser = pure Init
@ -421,18 +414,6 @@ saveCodebaseToFlag = do
_ -> DontSaveCodebase
)
downloadBaseFlag :: Parser ShouldDownloadBase
downloadBaseFlag =
flag
ShouldDownloadBase
ShouldNotDownloadBase
( long "no-base"
<> help downloadBaseHelp
<> noGlobal
)
where
downloadBaseHelp = "if set, a new codebase will be created without downloading the base library, otherwise the new codebase will download base"
startingPathOption :: Parser (Maybe Path.Absolute)
startingPathOption =
let meta =

View File

@ -7,7 +7,10 @@
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-partial-type-signatures #-}
module Main where
module Main
( main,
)
where
import ArgParse
( CodebasePathOption (..),
@ -15,7 +18,6 @@ import ArgParse
GlobalOptions (GlobalOptions, codebasePathOption, exitOption),
IsHeadless (Headless, WithCLI),
RunSource (..),
ShouldDownloadBase (..),
ShouldExit (DoNotExit, Exit),
ShouldForkCodebase (..),
ShouldSaveCodebase (..),
@ -25,7 +27,6 @@ import ArgParse
import Compat (defaultInterruptHandler, withInterruptHandler)
import Control.Concurrent (newEmptyMVar, runInUnboundThread, takeMVar)
import Control.Concurrent.STM
import Control.Error.Safe (rightMay)
import Control.Exception (evaluate)
import Data.ByteString.Lazy qualified as BL
import Data.Configurator.Types (Config)
@ -37,13 +38,11 @@ import Data.Text.IO qualified as Text
import GHC.Conc (setUncaughtExceptionHandler)
import GHC.Conc qualified
import Ki qualified
import Language.Haskell.TH qualified as TH
import Language.Haskell.TH.Syntax qualified as TH
import Network.HTTP.Client qualified as HTTP
import Network.HTTP.Client.TLS qualified as HTTP
import Stats (recordRtsStats)
import System.Directory (canonicalizePath, getCurrentDirectory, removeDirectoryRecursive)
import System.Environment (getProgName, lookupEnv, withArgs)
import System.Environment (getProgName, withArgs)
import System.Exit qualified as Exit
import System.FilePath qualified as FP
import System.IO (stderr)
@ -51,16 +50,12 @@ import System.IO.CodePage (withCP65001)
import System.IO.Error (catchIOError)
import System.IO.Temp qualified as Temp
import System.Path qualified as Path
import Text.Megaparsec (runParser)
import Text.Pretty.Simple (pHPrint)
import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.Codebase (Codebase, CodebasePath)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch)
import Unison.Codebase.Editor.Input qualified as Input
import Unison.Codebase.Editor.RemoteRepo (ReadShareLooseCode)
import Unison.Codebase.Editor.UriParser (parseReadShareLooseCode)
import Unison.Codebase.Editor.VersionParser qualified as VP
import Unison.Codebase.Execute (execute)
import Unison.Codebase.Init (CodebaseInitOptions (..), InitError (..), InitResult (..), SpecifiedCodebase (..))
import Unison.Codebase.Init qualified as CodebaseInit
@ -69,6 +64,7 @@ import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Runtime qualified as Rt
import Unison.Codebase.SqliteCodebase qualified as SC
import Unison.Codebase.TranscriptParser qualified as TR
import Unison.Codebase.Verbosity qualified as Verbosity
import Unison.CommandLine (plural', watchConfig)
import Unison.CommandLine.Main qualified as CommandLine
import Unison.CommandLine.Types qualified as CommandLine
@ -147,7 +143,6 @@ main = withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do
[Left fileEvent, Right $ Input.ExecuteI mainName args, Right Input.QuitI]
serverUrl
startPath
ShouldNotDownloadBase
initRes
noOpRootNotifier
noOpPathNotifier
@ -173,7 +168,6 @@ main = withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do
[Left fileEvent, Right $ Input.ExecuteI mainName args, Right Input.QuitI]
serverUrl
startPath
ShouldNotDownloadBase
initRes
noOpRootNotifier
noOpPathNotifier
@ -243,11 +237,11 @@ main = withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do
\that matches your version of Unison."
]
Transcript shouldFork shouldSaveCodebase mrtsStatsFp transcriptFiles -> do
let action = runTranscripts renderUsageInfo shouldFork shouldSaveCodebase mCodePathOption transcriptFiles
let action = runTranscripts Verbosity.Verbose renderUsageInfo shouldFork shouldSaveCodebase mCodePathOption transcriptFiles
case mrtsStatsFp of
Nothing -> action
Just fp -> recordRtsStats fp action
Launch isHeadless codebaseServerOpts downloadBase mayStartingPath shouldWatchFiles -> do
Launch isHeadless codebaseServerOpts mayStartingPath shouldWatchFiles -> do
getCodebaseOrExit mCodePathOption (SC.MigrateAfterPrompt SC.Backup SC.Vacuum) \(initRes, _, theCodebase) -> do
withRuntimes RTI.Persistent \(runtime, sbRuntime) -> do
startingPath <- case isHeadless of
@ -308,7 +302,6 @@ main = withCP65001 . runInUnboundThread . Ki.scoped $ \scope -> do
[]
(Just baseUrl)
(Just startingPath)
downloadBase
initRes
notifyOnRootChanges
notifyOnPathChanges
@ -345,8 +338,8 @@ initHTTPClient = do
manager <- HTTP.newTlsManagerWith managerSettings
HTTP.setGlobalManager manager
prepareTranscriptDir :: ShouldForkCodebase -> Maybe CodebasePathOption -> ShouldSaveCodebase -> IO FilePath
prepareTranscriptDir shouldFork mCodePathOption shouldSaveCodebase = do
prepareTranscriptDir :: Verbosity.Verbosity -> ShouldForkCodebase -> Maybe CodebasePathOption -> ShouldSaveCodebase -> IO FilePath
prepareTranscriptDir verbosity shouldFork mCodePathOption shouldSaveCodebase = do
tmp <- case shouldSaveCodebase of
SaveCodebase (Just path) -> pure path
_ -> Temp.getCanonicalTemporaryDirectory >>= (`Temp.createTempDirectory` "transcript")
@ -356,7 +349,7 @@ prepareTranscriptDir shouldFork mCodePathOption shouldSaveCodebase = do
-- A forked codebase does not need to Create a codebase, because it already exists
getCodebaseOrExit mCodePathOption (SC.MigrateAutomatically SC.Backup SC.Vacuum) $ const (pure ())
path <- Codebase.getCodebaseDir (fmap codebasePathOptionToPath mCodePathOption)
PT.putPrettyLn $
unless (Verbosity.isSilent verbosity) . PT.putPrettyLn $
P.lines
[ P.wrap "Transcript will be run on a copy of the codebase at: ",
"",
@ -365,7 +358,7 @@ prepareTranscriptDir shouldFork mCodePathOption shouldSaveCodebase = do
Path.copyDir (CodebaseInit.codebasePath cbInit path) (CodebaseInit.codebasePath cbInit tmp)
DontFork -> do
PT.putPrettyLn . P.wrap $ "Transcript will be run on a new, empty codebase."
CodebaseInit.withNewUcmCodebaseOrExit cbInit "main.transcript" tmp SC.DoLock (const $ pure ())
CodebaseInit.withNewUcmCodebaseOrExit cbInit verbosity "main.transcript" tmp SC.DoLock (const $ pure ())
pure tmp
runTranscripts' ::
@ -379,7 +372,7 @@ runTranscripts' progName mcodepath transcriptDir markdownFiles = do
configFilePath <- getConfigFilePath mcodepath
-- We don't need to create a codebase through `getCodebaseOrExit` as we've already done so previously.
and <$> getCodebaseOrExit (Just (DontCreateCodebaseWhenMissing transcriptDir)) (SC.MigrateAutomatically SC.Backup SC.Vacuum) \(_, codebasePath, theCodebase) -> do
TR.withTranscriptRunner Version.gitDescribeWithDate (Just configFilePath) $ \runTranscript -> do
TR.withTranscriptRunner Verbosity.Verbose Version.gitDescribeWithDate (Just configFilePath) $ \runTranscript -> do
for markdownFiles $ \(MarkdownFile fileName) -> do
transcriptSrc <- readUtf8 fileName
result <- runTranscript fileName transcriptSrc (codebasePath, theCodebase)
@ -422,13 +415,14 @@ runTranscripts' progName mcodepath transcriptDir markdownFiles = do
pure succeeded
runTranscripts ::
Verbosity.Verbosity ->
UsageRenderer ->
ShouldForkCodebase ->
ShouldSaveCodebase ->
Maybe CodebasePathOption ->
NonEmpty String ->
IO ()
runTranscripts renderUsageInfo shouldFork shouldSaveTempCodebase mCodePathOption args = do
runTranscripts verbosity renderUsageInfo shouldFork shouldSaveTempCodebase mCodePathOption args = do
markdownFiles <- case traverse (first (pure @[]) . markdownFile) args of
Failure invalidArgs -> do
PT.putPrettyLn $
@ -444,7 +438,7 @@ runTranscripts renderUsageInfo shouldFork shouldSaveTempCodebase mCodePathOption
Exit.exitWith (Exit.ExitFailure 1)
Success markdownFiles -> pure markdownFiles
progName <- getProgName
transcriptDir <- prepareTranscriptDir shouldFork mCodePathOption shouldSaveTempCodebase
transcriptDir <- prepareTranscriptDir verbosity shouldFork mCodePathOption shouldSaveTempCodebase
completed <-
runTranscripts' progName (Just transcriptDir) transcriptDir markdownFiles
case shouldSaveTempCodebase of
@ -479,22 +473,18 @@ launch ::
[Either Input.Event Input.Input] ->
Maybe Server.BaseUrl ->
Maybe Path.Absolute ->
ShouldDownloadBase ->
InitResult ->
(Branch IO -> STM ()) ->
(Path.Absolute -> STM ()) ->
CommandLine.ShouldWatchFiles ->
IO ()
launch dir config runtime sbRuntime codebase inputs serverBaseUrl mayStartingPath shouldDownloadBase initResult notifyRootChange notifyPathChange shouldWatchFiles =
let downloadBase = case defaultBaseLib of
Just remoteNS | shouldDownloadBase == ShouldDownloadBase -> Welcome.DownloadBase remoteNS
_ -> Welcome.DontDownloadBase
isNewCodebase = case initResult of
CreatedCodebase {} -> NewlyCreatedCodebase
_ -> PreviouslyCreatedCodebase
launch dir config runtime sbRuntime codebase inputs serverBaseUrl mayStartingPath initResult notifyRootChange notifyPathChange shouldWatchFiles =
let isNewCodebase = case initResult of
CreatedCodebase -> NewlyCreatedCodebase
OpenedCodebase -> PreviouslyCreatedCodebase
(ucmVersion, _date) = Version.gitDescribe
welcome = Welcome.welcome isNewCodebase downloadBase dir ucmVersion shouldWatchFiles
welcome = Welcome.welcome isNewCodebase ucmVersion
in CommandLine.main
dir
welcome
@ -521,27 +511,9 @@ markdownFile md = case FP.takeExtension md of
isDotU :: String -> Bool
isDotU file = FP.takeExtension file == ".u"
-- so we can do `ucm --help`, `ucm -help` or `ucm help` (I hate
-- having to remember which one is supported)
isFlag :: String -> String -> Bool
isFlag f arg = arg == f || arg == "-" ++ f || arg == "--" ++ f
getConfigFilePath :: Maybe FilePath -> IO FilePath
getConfigFilePath mcodepath = (FP.</> ".unisonConfig") <$> Codebase.getCodebaseDir mcodepath
defaultBaseLib :: Maybe ReadShareLooseCode
defaultBaseLib =
let mayBaseSharePath =
$( do
mayPath <- TH.runIO (lookupEnv "UNISON_BASE_PATH")
TH.lift mayPath
)
in mayBaseSharePath & \case
Just s -> eitherToMaybe $ parseReadShareLooseCode "UNISON_BASE_PATH" s
Nothing -> rightMay $ runParser VP.defaultBaseLib "version" gitRef
where
(gitRef, _date) = Version.gitDescribe
getCodebaseOrExit :: Maybe CodebasePathOption -> SC.MigrationStrategy -> ((InitResult, CodebasePath, Codebase IO Symbol Ann) -> IO r) -> IO r
getCodebaseOrExit codebasePathOption migrationStrategy action = do
initOptions <- argsToCodebaseInitOptions codebasePathOption

View File

@ -134,15 +134,15 @@ generateRecordAccessors ::
[(v, a)] ->
v ->
Reference ->
[(v, Term v a)]
[(v, a, Term v a)]
generateRecordAccessors fields typename typ =
join [tm t i | (t, i) <- fields `zip` [(0 :: Int) ..]]
where
argname = Var.uncapitalize typename
tm (fname, ann) i =
[ (Var.namespaced [typename, fname], get),
(Var.namespaced [typename, fname, Var.named "set"], set),
(Var.namespaced [typename, fname, Var.named "modify"], modify)
[ (Var.namespaced [typename, fname], ann, get),
(Var.namespaced [typename, fname, Var.named "set"], ann, set),
(Var.namespaced [typename, fname, Var.named "modify"], ann, modify)
]
where
-- example: `point -> case point of Point x _ -> x`

View File

@ -6,7 +6,7 @@ debugNoteSummary = False
debugRevealForalls = False
renderTermMaxLength :: Int
renderTermMaxLength = 20
renderTermMaxLength = 30
demoHideVarNumber :: Bool
demoHideVarNumber = False

View File

@ -3,7 +3,7 @@
module Unison.Term where
import Control.Lens (Lens', Prism', lens)
import Control.Lens (Lens', Prism', lens, view, _2)
import Control.Monad.State (evalState)
import Control.Monad.Writer.Strict qualified as Writer
import Data.Generics.Sum (_Ctor)
@ -893,14 +893,14 @@ unLetRecNamedAnnotated _ = Nothing
letRec' ::
(Ord v, Monoid a) =>
Bool ->
[(v, Term' vt v a)] ->
[(v, a, Term' vt v a)] ->
Term' vt v a ->
Term' vt v a
letRec' isTop bindings body =
letRec
isTop
(foldMap (ABT.annotation . snd) bindings <> ABT.annotation body)
[((ABT.annotation b, v), b) | (v, b) <- bindings]
(foldMap (view _2) bindings <> ABT.annotation body)
[((a, v), b) | (v, a, b) <- bindings]
body
-- Prepend a binding to form a (bigger) let rec. Useful when

View File

@ -90,22 +90,22 @@ refId :: (Ord v) => a -> ReferenceId -> Term2 vt at ap v a
refId a = ref a . ReferenceDerivedId
hashTermComponents ::
forall v a.
forall v a extra.
(Var v) =>
Map v (Term v a, Type v a) ->
Map v (ReferenceId, Term v a, Type v a)
Map v (Term v a, Type v a, extra) ->
Map v (ReferenceId, Term v a, Type v a, extra)
hashTermComponents terms =
Zip.zipWith keepType terms (ReferenceUtil.hashComponents (refId ()) terms')
Zip.zipWith keepExtra terms (ReferenceUtil.hashComponents (refId ()) terms')
where
terms' :: Map v (Term v a)
terms' = uncurry incorporateType <$> terms
terms' = incorporateType <$> terms
keepType :: ((Term v a, Type v a) -> (ReferenceId, Term v a) -> (ReferenceId, Term v a, Type v a))
keepType (_oldTrm, typ) (refId, trm) = (refId, trm, typ)
keepExtra :: ((Term v a, Type v a, extra) -> (ReferenceId, Term v a) -> (ReferenceId, Term v a, Type v a, extra))
keepExtra (_oldTrm, typ, extra) (refId, trm) = (refId, trm, typ, extra)
incorporateType :: Term v a -> Type v a -> Term v a
incorporateType a@(ABT.out -> ABT.Tm (TermAnn e _tp)) typ = ABT.tm' (ABT.annotation a) (TermAnn e typ)
incorporateType e typ = ABT.tm' (ABT.annotation e) (TermAnn e typ)
incorporateType :: (Term v a, Type v a, extra) -> Term v a
incorporateType (a@(ABT.out -> ABT.Tm (TermAnn e _tp)), typ, _extra) = ABT.tm' (ABT.annotation a) (TermAnn e typ)
incorporateType (e, typ, _extra) = ABT.tm' (ABT.annotation e) (TermAnn e typ)
-- keep these until we decide if we want to add the appropriate smart constructors back into this module
-- incorporateType (Term.Ann' e _) typ = Term.ann () e typ

View File

@ -96,3 +96,4 @@ default-extensions:
- TypeApplications
- TypeOperators
- ViewPatterns
- ImportQualifiedPost

View File

@ -31,7 +31,7 @@ module Unison.Server.Backend
displayType,
docsInBranchToHtmlFiles,
expandShortCausalHash,
findDocInBranchAndRender,
findDocInBranch,
formatSuffixedType,
getCurrentParseNames,
getCurrentPrettyNames,
@ -454,25 +454,16 @@ lsAtPath codebase mayRootBranch absPath = do
b <- Codebase.runTransaction codebase (Codebase.getShallowBranchAtPath (Path.unabsolute absPath) mayRootBranch)
lsBranch codebase b
findDocInBranchAndRender ::
findDocInBranch ::
Set NameSegment ->
Width ->
Rt.Runtime Symbol ->
Codebase IO Symbol Ann ->
PPED.PrettyPrintEnvDecl ->
V2Branch.Branch m ->
Backend IO (Maybe Doc.Doc)
findDocInBranchAndRender names _width runtime codebase ppe namespaceBranch =
let renderReadme :: PPED.PrettyPrintEnvDecl -> Reference -> IO Doc.Doc
renderReadme ppe docReference = do
doc <- evalDocRef runtime codebase docReference <&> Doc.renderDoc ppe
pure doc
-- choose the first term (among conflicted terms) matching any of these names, in this order.
Maybe TermReference
findDocInBranch names namespaceBranch =
let -- choose the first term (among conflicted terms) matching any of these names, in this order.
-- we might later want to return all of them to let the front end decide
toCheck = Set.toList names
readme :: Maybe Reference
readme = listToMaybe $ do
readmeRef :: Maybe Reference
readmeRef = listToMaybe $ do
name <- toCheck
term <- toList $ Map.lookup name termsMap
k <- Map.keys term
@ -482,8 +473,7 @@ findDocInBranchAndRender names _width runtime codebase ppe namespaceBranch =
V2Referent.Ref ref -> pure $ Cv.reference2to1 ref
where
termsMap = V2Branch.terms namespaceBranch
in liftIO $ do
traverse (renderReadme ppe) readme
in readmeRef
isDoc :: Codebase m Symbol Ann -> Referent.Referent -> Sqlite.Transaction Bool
isDoc codebase ref = do
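`findDocInBranch` is now pure: it only picks a reference, and evaluation plus rendering happen at the call sites (as in the `NamespaceDetails` handlers below). The selection itself is "first candidate name, in priority order, that has an entry", which a small standalone sketch can make explicit (the names and map here are illustrative):

```haskell
{-# LANGUAGE ImportQualifiedPost #-}

import Data.Map (Map)
import Data.Map qualified as Map
import Data.Maybe (listToMaybe)

-- Pick the value for the first candidate key, in the given priority order,
-- that is present in the map.
firstMatch :: Ord k => [k] -> Map k v -> Maybe v
firstMatch candidates m =
  listToMaybe [v | k <- candidates, Just v <- [Map.lookup k m]]

example :: Maybe String
example =
  firstMatch ["README", "Readme", "ReadMe", "readme"] (Map.fromList [("readme", "#docRef")])
-- Just "#docRef"
```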

View File

@ -11,11 +11,9 @@
module Unison.Server.Local.Endpoints.NamespaceDetails where
import Control.Monad.Except
import Data.Aeson
import Data.OpenApi (ToSchema)
import Data.Set qualified as Set
import Servant (Capture, QueryParam, (:>))
import Servant.Docs (DocCapture (..), ToCapture (..), ToSample (..))
import Servant.Docs (DocCapture (..), ToCapture (..))
import Servant.OpenApi ()
import U.Codebase.Causal qualified as V2Causal
import U.Codebase.HashTags (CausalHash)
@ -28,11 +26,10 @@ import Unison.Parser.Ann (Ann)
import Unison.Prelude
import Unison.Server.Backend
import Unison.Server.Backend qualified as Backend
import Unison.Server.Doc (Doc)
import Unison.Server.Doc qualified as Doc
import Unison.Server.Types
( APIGet,
UnisonHash,
mayDefaultWidth,
NamespaceDetails (..),
v2CausalBranchToUnisonHash,
)
import Unison.Symbol (Symbol)
@ -51,34 +48,6 @@ instance ToCapture (Capture "namespace" Text) where
"namespace"
"The fully qualified name of a namespace. The leading `.` is optional."
instance ToSample NamespaceDetails where
toSamples _ =
[ ( "When no value is provided for `namespace`, the root namespace `.` is "
<> "listed by default",
NamespaceDetails
Path.empty
"#gjlk0dna8dongct6lsd19d1o9hi5n642t8jttga5e81e91fviqjdffem0tlddj7ahodjo5"
Nothing
)
]
data NamespaceDetails = NamespaceDetails
{ fqn :: Path.Path,
hash :: UnisonHash,
readme :: Maybe Doc
}
deriving (Generic, Show)
instance ToJSON NamespaceDetails where
toJSON NamespaceDetails {..} =
object
[ "fqn" .= fqn,
"hash" .= hash,
"readme" .= readme
]
deriving instance ToSchema NamespaceDetails
namespaceDetails ::
Rt.Runtime Symbol ->
Codebase IO Symbol Ann ->
@ -86,33 +55,26 @@ namespaceDetails ::
Maybe (Either ShortCausalHash CausalHash) ->
Maybe Width ->
Backend IO NamespaceDetails
namespaceDetails runtime codebase namespacePath mayRoot mayWidth =
let width = mayDefaultWidth mayWidth
in do
(rootCausal, namespaceCausal, shallowBranch) <-
Backend.hoistBackend (Codebase.runTransaction codebase) do
rootCausalHash <-
case mayRoot of
Nothing -> Backend.resolveRootBranchHashV2 Nothing
Just (Left sch) -> Backend.resolveRootBranchHashV2 (Just sch)
Just (Right ch) -> lift $ Backend.resolveCausalHashV2 (Just ch)
-- lift (Backend.resolveCausalHashV2 rootCausalHash)
namespaceCausal <- lift $ Codebase.getShallowCausalAtPath namespacePath (Just rootCausalHash)
shallowBranch <- lift $ V2Causal.value namespaceCausal
pure (rootCausalHash, namespaceCausal, shallowBranch)
namespaceDetails <- do
(_localNamesOnly, ppe) <- Backend.scopedNamesForBranchHash codebase (Just rootCausal) namespacePath
readme <-
Backend.findDocInBranchAndRender
readmeNames
width
runtime
codebase
ppe
shallowBranch
let causalHash = v2CausalBranchToUnisonHash namespaceCausal
pure $ NamespaceDetails namespacePath causalHash readme
pure $ namespaceDetails
namespaceDetails runtime codebase namespacePath mayRoot _mayWidth = do
(rootCausal, namespaceCausal, shallowBranch) <-
Backend.hoistBackend (Codebase.runTransaction codebase) do
rootCausalHash <-
case mayRoot of
Nothing -> Backend.resolveRootBranchHashV2 Nothing
Just (Left sch) -> Backend.resolveRootBranchHashV2 (Just sch)
Just (Right ch) -> lift $ Backend.resolveCausalHashV2 (Just ch)
-- lift (Backend.resolveCausalHashV2 rootCausalHash)
namespaceCausal <- lift $ Codebase.getShallowCausalAtPath namespacePath (Just rootCausalHash)
shallowBranch <- lift $ V2Causal.value namespaceCausal
pure (rootCausalHash, namespaceCausal, shallowBranch)
namespaceDetails <- do
(_localNamesOnly, ppe) <- Backend.scopedNamesForBranchHash codebase (Just rootCausal) namespacePath
let mayReadmeRef = Backend.findDocInBranch readmeNames shallowBranch
renderedReadme <- for mayReadmeRef \readmeRef -> do
eDoc <- liftIO $ evalDocRef runtime codebase readmeRef
pure $ Doc.renderDoc ppe eDoc
let causalHash = v2CausalBranchToUnisonHash namespaceCausal
pure $ NamespaceDetails namespacePath causalHash renderedReadme
pure $ namespaceDetails
where
readmeNames = Set.fromList ["README", "Readme", "ReadMe", "readme"]

View File

@ -0,0 +1,38 @@
module Unison.Server.Share.NamespaceDetails (namespaceDetails) where
import Control.Monad.Except
import Data.Set qualified as Set
import Servant.OpenApi ()
import U.Codebase.HashTags (CausalHash)
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Runtime qualified as Rt
import Unison.Parser.Ann (Ann)
import Unison.Server.Backend
import Unison.Server.Backend qualified as Backend
import Unison.Server.Share.RenderDoc qualified as RenderDoc
import Unison.Server.Types
( NamespaceDetails (..),
v2CausalBranchToUnisonHash,
)
import Unison.Symbol (Symbol)
import Unison.Util.Pretty (Width)
namespaceDetails ::
Rt.Runtime Symbol ->
Codebase IO Symbol Ann ->
Path.Path ->
CausalHash ->
Maybe Width ->
Backend IO NamespaceDetails
namespaceDetails runtime codebase namespacePath rootCausalHash mayWidth = do
causalHashAtPath <- liftIO $ Codebase.runTransaction codebase do
causalBranch <- Backend.resolveCausalHashV2 (Just rootCausalHash)
namespaceCausal <- Codebase.getShallowCausalAtPath namespacePath (Just causalBranch)
let causalHashAtPath = v2CausalBranchToUnisonHash namespaceCausal
pure causalHashAtPath
mayReadme <- RenderDoc.findAndRenderDoc readmeNames runtime codebase namespacePath rootCausalHash mayWidth
pure $ NamespaceDetails namespacePath causalHashAtPath mayReadme
where
readmeNames = Set.fromList ["README", "Readme", "ReadMe", "readme"]

View File

@ -11,55 +11,52 @@
module Unison.Server.Share.RenderDoc where
import Control.Monad.Except
import Data.Set qualified as Set
import Servant.OpenApi ()
import U.Codebase.Causal qualified as V2Causal
import U.Codebase.HashTags (CausalHash)
import U.Codebase.Sqlite.NameLookups (PathSegments (..))
import U.Codebase.Sqlite.Operations qualified as Ops
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Runtime qualified as Rt
import Unison.Codebase.ShortCausalHash (ShortCausalHash)
import Unison.NameSegment (NameSegment)
import Unison.LabeledDependency qualified as LD
import Unison.NameSegment (NameSegment (..))
import Unison.Parser.Ann (Ann)
import Unison.Prelude
import Unison.PrettyPrintEnvDecl.Sqlite qualified as PPESqlite
import Unison.Server.Backend
import Unison.Server.Backend qualified as Backend
import Unison.Server.Doc (Doc)
import Unison.Server.Types
( mayDefaultWidth,
)
import Unison.Server.Doc qualified as Doc
import Unison.Symbol (Symbol)
import Unison.Util.Pretty (Width)
renderDoc ::
-- | Find, eval, and render the first doc we find with any of the provided names within the given namespace
-- If no doc is found, return Nothing
--
-- Requires Name Lookups, currently only usable on Share.
findAndRenderDoc ::
Set NameSegment ->
Rt.Runtime Symbol ->
Codebase IO Symbol Ann ->
Path.Path ->
Maybe (Either ShortCausalHash CausalHash) ->
CausalHash ->
Maybe Width ->
Backend IO (Maybe Doc)
renderDoc docNames runtime codebase namespacePath mayRoot mayWidth =
let width = mayDefaultWidth mayWidth
in do
(rootCausal, shallowBranch) <-
Backend.hoistBackend (Codebase.runTransaction codebase) do
rootCausalHash <-
case mayRoot of
Nothing -> Backend.resolveRootBranchHashV2 Nothing
Just (Left sch) -> Backend.resolveRootBranchHashV2 (Just sch)
Just (Right ch) -> lift $ Backend.resolveCausalHashV2 (Just ch)
-- lift (Backend.resolveCausalHashV2 rootCausalHash)
namespaceCausal <- lift $ Codebase.getShallowCausalAtPath namespacePath (Just rootCausalHash)
shallowBranch <- lift $ V2Causal.value namespaceCausal
pure (rootCausalHash, shallowBranch)
(_localNamesOnly, ppe) <- Backend.scopedNamesForBranchHash codebase (Just rootCausal) namespacePath
renderedDoc <-
Backend.findDocInBranchAndRender
docNames
width
runtime
codebase
ppe
shallowBranch
pure renderedDoc
findAndRenderDoc docNames runtime codebase namespacePath rootCausalHash _mayWidth = do
(shallowBranchAtNamespace, namesPerspective) <-
liftIO . (Codebase.runTransaction codebase) $ do
rootCausal <- Backend.resolveCausalHashV2 (Just rootCausalHash)
let rootBranchHash = V2Causal.valueHash rootCausal
namespaceCausal <- Codebase.getShallowCausalAtPath namespacePath (Just rootCausal)
shallowBranchAtNamespace <- V2Causal.value namespaceCausal
namesPerspective <- Ops.namesPerspectiveForRootAndPath rootBranchHash (coerce . Path.toList $ namespacePath)
pure (shallowBranchAtNamespace, namesPerspective)
let mayDocRef = Backend.findDocInBranch docNames shallowBranchAtNamespace
for mayDocRef \docRef -> do
eDoc <- liftIO $ evalDocRef runtime codebase docRef
let docDeps = Doc.dependencies eDoc <> Set.singleton (LD.TermReference docRef)
docPPE <- liftIO $ Codebase.runTransaction codebase $ PPESqlite.ppedForReferences namesPerspective docDeps
pure $ Doc.renderDoc docPPE eDoc

View File

@ -39,6 +39,7 @@ import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Editor.DisplayObject
( DisplayObject,
)
import Unison.Codebase.Path qualified as Path
import Unison.Hash qualified as Hash
import Unison.HashQualified qualified as HQ
import Unison.HashQualified' qualified as HQ'
@ -71,6 +72,34 @@ type UnisonName = Text
type UnisonHash = Text
data NamespaceDetails = NamespaceDetails
{ fqn :: Path.Path,
hash :: UnisonHash,
readme :: Maybe Doc
}
deriving (Generic, Show)
instance Docs.ToSample NamespaceDetails where
toSamples _ =
[ ( "When no value is provided for `namespace`, the root namespace `.` is "
<> "listed by default",
NamespaceDetails
Path.empty
"#gjlk0dna8dongct6lsd19d1o9hi5n642t8jttga5e81e91fviqjdffem0tlddj7ahodjo5"
Nothing
)
]
instance ToJSON NamespaceDetails where
toJSON NamespaceDetails {..} =
object
[ "fqn" .= fqn,
"hash" .= hash,
"readme" .= readme
]
deriving instance ToSchema NamespaceDetails
-- | A hash qualified name, unlike HashQualified, the hash is required
data ExactName name ref = ExactName
{ name :: name,

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.1.
-- This file has been generated from package.yaml by hpack version 0.35.2.
--
-- see: https://github.com/sol/hpack
@ -42,6 +42,7 @@ library
Unison.Server.Share
Unison.Server.Share.Definitions
Unison.Server.Share.FuzzyFind
Unison.Server.Share.NamespaceDetails
Unison.Server.Share.RenderDoc
Unison.Server.Syntax
Unison.Server.Types
@ -80,6 +81,7 @@ library
TypeApplications
TypeOperators
ViewPatterns
ImportQualifiedPost
ghc-options: -Wall
build-depends:
NanoID

View File

@ -5,6 +5,6 @@ Thus, make sure the contents of this file define the contents of the cache
(e.g. don't pull `latest`.)
```ucm
.> pull @unison/base/releases/2.0.0 .base
.> pull @unison/base/releases/2.2.0 .base
.> compile.native.fetch
```

View File

@ -5,9 +5,9 @@ Thus, make sure the contents of this file define the contents of the cache
(e.g. don't pull `latest`.)
```ucm
.> pull @unison/base/releases/2.0.0 .base
.> pull @unison/base/releases/2.2.0 .base
Downloaded 11939 entities.
Downloaded 12209 entities.
@ -15,11 +15,11 @@ Thus, make sure the contents of this file define the contents of the cache
.> compile.native.fetch
Downloaded 65927 entities.
Downloaded 1255 entities.
Successfully updated .unison.internal from
unison.public.internal.trunk.
@unison/internal/releases/0.0.1.
```

View File

@ -64,6 +64,7 @@ to `Tests.check` and `Tests.checkEqual`).
```
```ucm:hide
.> alias.term ##IO.randomBytes IO.randomBytes
.> load unison-src/builtin-tests/io-tests.u
.> add
```

View File

@ -1,13 +1,20 @@
io.tests = Tests.main do
!test_getFileSize
!test_getFileSize_err
!test_getFileSize_err
!test_getFileTimestamp
!test_getFileTimestamp_err
!io.test.seek.absolute
!io.test.seek.relative
!io.test.getLine
!io.test.getsetBuffering
!io.test_getEcho
!io.test_getArgs
!io.test_getEnv
!io.test.getRandomBytes
!io.test_getSomeBytes
!io.test_getChar
!io.test_getCurrentDirectory
testFile = do
fp = FilePath ((FilePath.toText !getTempDirectory) ++ "/unison-test")
@ -47,10 +54,17 @@ test_getFileTimestamp = do
Tests.fail "File timestamp is too late" ((Int.toText ts) ++ " vs " ++ (Int.toText after))
else
Tests.pass "File timestamp is reasonable"
test_getFileTimestamp_err = do
expectError' "File timestamp of missing file" ["does not exist", "error getting"] '(FilePath.getTimestamp !testFile)
io.test_getCurrentDirectory = do
match !getCurrentDirectory with
FilePath text -> if Text.startsWith "/" text then
Tests.pass "Current directory starts with /"
else
Tests.fail "Current directory doesn't start with /" text
seekFile = do
fp = !testFile
_ = writeFile fp "0123456789"
@ -104,3 +118,41 @@ io.test.getsetBuffering = do
checkEqual "Line" b2 LineBuffering
checkEqual "No" b3 NoBuffering
is_a_tty = do
exitCode = call "test" ["-t", "0"]
exitCode == 0
io.test_getEcho = do
if is_a_tty () then
prev = getEcho stdIn
setEcho stdIn false
checkEqual "echo turned off" (getEcho stdIn) false
setEcho stdIn true
checkEqual "echo turned back on" (getEcho stdIn) true
setEcho stdIn prev
else
()
io.test_getArgs = do
checkEqual "cli args" !getArgs []
io.test_getEnv = do
checkEqual "HOME env variable" (startsWith "/" (getEnv "HOME")) true
io.test.getRandomBytes = do
bs = IO.randomBytes 10
checkEqual "get 10 random bytes" 10 (base.Bytes.size bs)
io.test_getChar = do
fp = !testFile
_ = writeFile fp "oón"
h = open fp Read
checkEqual "get char" (getChar h) ?o
checkEqual "get a complicated char" (getChar h) ?ó
io.test_getSomeBytes = do
fp = !testFile
_ = writeFile fp "one\ntwo\nthree"
h = open fp Read
one = getSomeBytes h 3
checkEqual "get some bytes" one (toUtf8 "one")

View File

@ -66,6 +66,7 @@ to `Tests.check` and `Tests.checkEqual`).
```
```ucm:hide
.> alias.term ##IO.randomBytes IO.randomBytes
.> load unison-src/builtin-tests/io-tests.u
.> add
```

View File

@ -28,3 +28,6 @@ math.tests = do
checkEqual "divi" (10 / 4) 2
checkEqual "eqlf" (1.1 == 1.1) true
checkEqual "eqlf" (1.1 == 1.2) false
checkEqual "decn" (Nat.decrement 10) 9
checkEqual "deci" (Int.decrement +10) +9
checkCloseEnough "expf" (Float.exp 2.0) 7.3890560989306

View File

@ -26,6 +26,7 @@ Tests.main suite = do
Tests.run : '{IO,Exception,Tests} () ->{IO,Exception} Boolean
Tests.run suite =
use Nat +
h passed failed = cases
{ _ } -> (passed, failed)
{ pass msg -> k } ->

View File

@ -41,7 +41,7 @@ chainClient portPromise toSend =
tlsock = Tls.handshake tls
TlsSocket.send tlsock (toUtf8 toSend)
-- res = fromUtf8 (TlsSocket.receive tlsock)
TlsSocket.close tlsock
TlsSocket.terminate tlsock
-- res
-- server receives then sends
@ -61,7 +61,7 @@ chainServer portPromise toSend =
tlsock = net.Tls.handshake tls
res = fromUtf8 (TlsSocket.receive tlsock)
-- TlsSocket.send tlsock (toUtf8 toSend)
TlsSocket.close tlsock
TlsSocket.terminate tlsock
res
tlsChainTest = do

View File

@ -66,7 +66,7 @@ tls.example.com = do
conn = base.IO.net.Tls.handshake tls
TlsSocket.send conn (toUtf8 "GET /index.html HTTP/1.0\r\nHost: example.com\r\n\r\n")
response = TlsSocket.receive conn
TlsSocket.close conn
TlsSocket.terminate conn
contains "HTTP/1.0 200 OK" (fromUtf8 response)
testConnectSelfSigned = do
@ -123,7 +123,7 @@ serverThread portPromise toSend =
tls = Tls.newServer tlsconfig sock'
tlsock = net.Tls.handshake tls
TlsSocket.send tlsock (toUtf8 toSend)
TlsSocket.close tlsock
TlsSocket.terminate tlsock
testClient : Optional SignedCert -> Text -> Promise Nat -> '{IO} Either Failure Text
testClient cert hostname portVar _ = catch do

File diff suppressed because it is too large

View File

@ -337,364 +337,365 @@ Let's try it!
251. io2.IO.putBytes.impl : Handle
-> Bytes
->{IO} Either Failure ()
252. io2.IO.ready.impl : Handle ->{IO} Either Failure Boolean
253. io2.IO.ref : a ->{IO} Ref {IO} a
254. io2.IO.removeDirectory.impl : Text
252. io2.IO.randomBytes : Nat ->{IO} Bytes
253. io2.IO.ready.impl : Handle ->{IO} Either Failure Boolean
254. io2.IO.ref : a ->{IO} Ref {IO} a
255. io2.IO.removeDirectory.impl : Text
->{IO} Either Failure ()
255. io2.IO.removeFile.impl : Text ->{IO} Either Failure ()
256. io2.IO.renameDirectory.impl : Text
256. io2.IO.removeFile.impl : Text ->{IO} Either Failure ()
257. io2.IO.renameDirectory.impl : Text
-> Text
->{IO} Either Failure ()
257. io2.IO.renameFile.impl : Text
258. io2.IO.renameFile.impl : Text
-> Text
->{IO} Either Failure ()
258. io2.IO.seekHandle.impl : Handle
259. io2.IO.seekHandle.impl : Handle
-> SeekMode
-> Int
->{IO} Either Failure ()
259. io2.IO.serverSocket.impl : Optional Text
260. io2.IO.serverSocket.impl : Optional Text
-> Text
->{IO} Either Failure Socket
260. io2.IO.setBuffering.impl : Handle
261. io2.IO.setBuffering.impl : Handle
-> BufferMode
->{IO} Either Failure ()
261. io2.IO.setCurrentDirectory.impl : Text
262. io2.IO.setCurrentDirectory.impl : Text
->{IO} Either
Failure ()
262. io2.IO.setEcho.impl : Handle
263. io2.IO.setEcho.impl : Handle
-> Boolean
->{IO} Either Failure ()
263. io2.IO.socketAccept.impl : Socket
264. io2.IO.socketAccept.impl : Socket
->{IO} Either Failure Socket
264. io2.IO.socketPort.impl : Socket ->{IO} Either Failure Nat
265. io2.IO.socketReceive.impl : Socket
265. io2.IO.socketPort.impl : Socket ->{IO} Either Failure Nat
266. io2.IO.socketReceive.impl : Socket
-> Nat
->{IO} Either Failure Bytes
266. io2.IO.socketSend.impl : Socket
267. io2.IO.socketSend.impl : Socket
-> Bytes
->{IO} Either Failure ()
267. io2.IO.stdHandle : StdHandle -> Handle
268. io2.IO.systemTime.impl : '{IO} Either Failure Nat
269. io2.IO.systemTimeMicroseconds : '{IO} Int
270. io2.IO.tryEval : '{IO} a ->{IO, Exception} a
271. unique type io2.IOError
272. io2.IOError.AlreadyExists : IOError
273. io2.IOError.EOF : IOError
274. io2.IOError.IllegalOperation : IOError
275. io2.IOError.NoSuchThing : IOError
276. io2.IOError.PermissionDenied : IOError
277. io2.IOError.ResourceBusy : IOError
278. io2.IOError.ResourceExhausted : IOError
279. io2.IOError.UserError : IOError
280. unique type io2.IOFailure
281. unique type io2.MiscFailure
282. builtin type io2.MVar
283. io2.MVar.isEmpty : MVar a ->{IO} Boolean
284. io2.MVar.new : a ->{IO} MVar a
285. io2.MVar.newEmpty : '{IO} MVar a
286. io2.MVar.put.impl : MVar a -> a ->{IO} Either Failure ()
287. io2.MVar.read.impl : MVar a ->{IO} Either Failure a
288. io2.MVar.swap.impl : MVar a -> a ->{IO} Either Failure a
289. io2.MVar.take.impl : MVar a ->{IO} Either Failure a
290. io2.MVar.tryPut.impl : MVar a
268. io2.IO.stdHandle : StdHandle -> Handle
269. io2.IO.systemTime.impl : '{IO} Either Failure Nat
270. io2.IO.systemTimeMicroseconds : '{IO} Int
271. io2.IO.tryEval : '{IO} a ->{IO, Exception} a
272. unique type io2.IOError
273. io2.IOError.AlreadyExists : IOError
274. io2.IOError.EOF : IOError
275. io2.IOError.IllegalOperation : IOError
276. io2.IOError.NoSuchThing : IOError
277. io2.IOError.PermissionDenied : IOError
278. io2.IOError.ResourceBusy : IOError
279. io2.IOError.ResourceExhausted : IOError
280. io2.IOError.UserError : IOError
281. unique type io2.IOFailure
282. unique type io2.MiscFailure
283. builtin type io2.MVar
284. io2.MVar.isEmpty : MVar a ->{IO} Boolean
285. io2.MVar.new : a ->{IO} MVar a
286. io2.MVar.newEmpty : '{IO} MVar a
287. io2.MVar.put.impl : MVar a -> a ->{IO} Either Failure ()
288. io2.MVar.read.impl : MVar a ->{IO} Either Failure a
289. io2.MVar.swap.impl : MVar a -> a ->{IO} Either Failure a
290. io2.MVar.take.impl : MVar a ->{IO} Either Failure a
291. io2.MVar.tryPut.impl : MVar a
-> a
->{IO} Either Failure Boolean
291. io2.MVar.tryRead.impl : MVar a
292. io2.MVar.tryRead.impl : MVar a
->{IO} Either
Failure (Optional a)
292. io2.MVar.tryTake : MVar a ->{IO} Optional a
293. builtin type io2.ProcessHandle
294. builtin type io2.Promise
295. io2.Promise.new : '{IO} Promise a
296. io2.Promise.read : Promise a ->{IO} a
297. io2.Promise.tryRead : Promise a ->{IO} Optional a
298. io2.Promise.write : Promise a -> a ->{IO} Boolean
299. io2.Ref.cas : Ref {IO} a -> Ticket a -> a ->{IO} Boolean
300. io2.Ref.readForCas : Ref {IO} a ->{IO} Ticket a
301. builtin type io2.Ref.Ticket
302. io2.Ref.Ticket.read : Ticket a -> a
303. unique type io2.RuntimeFailure
304. unique type io2.SeekMode
305. io2.SeekMode.AbsoluteSeek : SeekMode
306. io2.SeekMode.RelativeSeek : SeekMode
307. io2.SeekMode.SeekFromEnd : SeekMode
308. builtin type io2.Socket
309. unique type io2.StdHandle
310. io2.StdHandle.StdErr : StdHandle
311. io2.StdHandle.StdIn : StdHandle
312. io2.StdHandle.StdOut : StdHandle
313. builtin type io2.STM
314. io2.STM.atomically : '{STM} a ->{IO} a
315. io2.STM.retry : '{STM} a
316. unique type io2.STMFailure
317. builtin type io2.ThreadId
318. unique type io2.ThreadKilledFailure
319. builtin type io2.Tls
320. builtin type io2.Tls.Cipher
321. builtin type io2.Tls.ClientConfig
322. io2.Tls.ClientConfig.certificates.set : [SignedCert]
293. io2.MVar.tryTake : MVar a ->{IO} Optional a
294. builtin type io2.ProcessHandle
295. builtin type io2.Promise
296. io2.Promise.new : '{IO} Promise a
297. io2.Promise.read : Promise a ->{IO} a
298. io2.Promise.tryRead : Promise a ->{IO} Optional a
299. io2.Promise.write : Promise a -> a ->{IO} Boolean
300. io2.Ref.cas : Ref {IO} a -> Ticket a -> a ->{IO} Boolean
301. io2.Ref.readForCas : Ref {IO} a ->{IO} Ticket a
302. builtin type io2.Ref.Ticket
303. io2.Ref.Ticket.read : Ticket a -> a
304. unique type io2.RuntimeFailure
305. unique type io2.SeekMode
306. io2.SeekMode.AbsoluteSeek : SeekMode
307. io2.SeekMode.RelativeSeek : SeekMode
308. io2.SeekMode.SeekFromEnd : SeekMode
309. builtin type io2.Socket
310. unique type io2.StdHandle
311. io2.StdHandle.StdErr : StdHandle
312. io2.StdHandle.StdIn : StdHandle
313. io2.StdHandle.StdOut : StdHandle
314. builtin type io2.STM
315. io2.STM.atomically : '{STM} a ->{IO} a
316. io2.STM.retry : '{STM} a
317. unique type io2.STMFailure
318. builtin type io2.ThreadId
319. unique type io2.ThreadKilledFailure
320. builtin type io2.Tls
321. builtin type io2.Tls.Cipher
322. builtin type io2.Tls.ClientConfig
323. io2.Tls.ClientConfig.certificates.set : [SignedCert]
-> ClientConfig
-> ClientConfig
323. io2.TLS.ClientConfig.ciphers.set : [Cipher]
324. io2.TLS.ClientConfig.ciphers.set : [Cipher]
-> ClientConfig
-> ClientConfig
324. io2.Tls.ClientConfig.default : Text
325. io2.Tls.ClientConfig.default : Text
-> Bytes
-> ClientConfig
325. io2.Tls.ClientConfig.versions.set : [Version]
326. io2.Tls.ClientConfig.versions.set : [Version]
-> ClientConfig
-> ClientConfig
326. io2.Tls.decodeCert.impl : Bytes
327. io2.Tls.decodeCert.impl : Bytes
-> Either Failure SignedCert
327. io2.Tls.decodePrivateKey : Bytes -> [PrivateKey]
328. io2.Tls.encodeCert : SignedCert -> Bytes
329. io2.Tls.encodePrivateKey : PrivateKey -> Bytes
330. io2.Tls.handshake.impl : Tls ->{IO} Either Failure ()
331. io2.Tls.newClient.impl : ClientConfig
328. io2.Tls.decodePrivateKey : Bytes -> [PrivateKey]
329. io2.Tls.encodeCert : SignedCert -> Bytes
330. io2.Tls.encodePrivateKey : PrivateKey -> Bytes
331. io2.Tls.handshake.impl : Tls ->{IO} Either Failure ()
332. io2.Tls.newClient.impl : ClientConfig
-> Socket
->{IO} Either Failure Tls
332. io2.Tls.newServer.impl : ServerConfig
333. io2.Tls.newServer.impl : ServerConfig
-> Socket
->{IO} Either Failure Tls
333. builtin type io2.Tls.PrivateKey
334. io2.Tls.receive.impl : Tls ->{IO} Either Failure Bytes
335. io2.Tls.send.impl : Tls -> Bytes ->{IO} Either Failure ()
336. builtin type io2.Tls.ServerConfig
337. io2.Tls.ServerConfig.certificates.set : [SignedCert]
334. builtin type io2.Tls.PrivateKey
335. io2.Tls.receive.impl : Tls ->{IO} Either Failure Bytes
336. io2.Tls.send.impl : Tls -> Bytes ->{IO} Either Failure ()
337. builtin type io2.Tls.ServerConfig
338. io2.Tls.ServerConfig.certificates.set : [SignedCert]
-> ServerConfig
-> ServerConfig
338. io2.Tls.ServerConfig.ciphers.set : [Cipher]
339. io2.Tls.ServerConfig.ciphers.set : [Cipher]
-> ServerConfig
-> ServerConfig
339. io2.Tls.ServerConfig.default : [SignedCert]
340. io2.Tls.ServerConfig.default : [SignedCert]
-> PrivateKey
-> ServerConfig
340. io2.Tls.ServerConfig.versions.set : [Version]
341. io2.Tls.ServerConfig.versions.set : [Version]
-> ServerConfig
-> ServerConfig
341. builtin type io2.Tls.SignedCert
342. io2.Tls.terminate.impl : Tls ->{IO} Either Failure ()
343. builtin type io2.Tls.Version
344. unique type io2.TlsFailure
345. builtin type io2.TVar
346. io2.TVar.new : a ->{STM} TVar a
347. io2.TVar.newIO : a ->{IO} TVar a
348. io2.TVar.read : TVar a ->{STM} a
349. io2.TVar.readIO : TVar a ->{IO} a
350. io2.TVar.swap : TVar a -> a ->{STM} a
351. io2.TVar.write : TVar a -> a ->{STM} ()
352. io2.validateSandboxed : [Term] -> a -> Boolean
353. unique type IsPropagated
354. IsPropagated.IsPropagated : IsPropagated
355. unique type IsTest
356. IsTest.IsTest : IsTest
357. unique type Link
358. builtin type Link.Term
359. Link.Term : Term -> Link
360. Link.Term.toText : Term -> Text
361. builtin type Link.Type
362. Link.Type : Type -> Link
363. builtin type List
364. List.++ : [a] -> [a] -> [a]
365. List.+: : a -> [a] -> [a]
366. List.:+ : [a] -> a -> [a]
367. List.at : Nat -> [a] -> Optional a
368. List.cons : a -> [a] -> [a]
369. List.drop : Nat -> [a] -> [a]
370. List.empty : [a]
371. List.size : [a] -> Nat
372. List.snoc : [a] -> a -> [a]
373. List.take : Nat -> [a] -> [a]
374. metadata.isPropagated : IsPropagated
375. metadata.isTest : IsTest
376. builtin type MutableArray
377. MutableArray.copyTo! : MutableArray g a
342. builtin type io2.Tls.SignedCert
343. io2.Tls.terminate.impl : Tls ->{IO} Either Failure ()
344. builtin type io2.Tls.Version
345. unique type io2.TlsFailure
346. builtin type io2.TVar
347. io2.TVar.new : a ->{STM} TVar a
348. io2.TVar.newIO : a ->{IO} TVar a
349. io2.TVar.read : TVar a ->{STM} a
350. io2.TVar.readIO : TVar a ->{IO} a
351. io2.TVar.swap : TVar a -> a ->{STM} a
352. io2.TVar.write : TVar a -> a ->{STM} ()
353. io2.validateSandboxed : [Term] -> a -> Boolean
354. unique type IsPropagated
355. IsPropagated.IsPropagated : IsPropagated
356. unique type IsTest
357. IsTest.IsTest : IsTest
358. unique type Link
359. builtin type Link.Term
360. Link.Term : Term -> Link
361. Link.Term.toText : Term -> Text
362. builtin type Link.Type
363. Link.Type : Type -> Link
364. builtin type List
365. List.++ : [a] -> [a] -> [a]
366. List.+: : a -> [a] -> [a]
367. List.:+ : [a] -> a -> [a]
368. List.at : Nat -> [a] -> Optional a
369. List.cons : a -> [a] -> [a]
370. List.drop : Nat -> [a] -> [a]
371. List.empty : [a]
372. List.size : [a] -> Nat
373. List.snoc : [a] -> a -> [a]
374. List.take : Nat -> [a] -> [a]
375. metadata.isPropagated : IsPropagated
376. metadata.isTest : IsTest
377. builtin type MutableArray
378. MutableArray.copyTo! : MutableArray g a
-> Nat
-> MutableArray g a
-> Nat
-> Nat
->{g, Exception} ()
378. MutableArray.freeze : MutableArray g a
379. MutableArray.freeze : MutableArray g a
-> Nat
-> Nat
->{g} ImmutableArray a
379. MutableArray.freeze! : MutableArray g a
380. MutableArray.freeze! : MutableArray g a
->{g} ImmutableArray a
380. MutableArray.read : MutableArray g a
381. MutableArray.read : MutableArray g a
-> Nat
->{g, Exception} a
381. MutableArray.size : MutableArray g a -> Nat
382. MutableArray.write : MutableArray g a
382. MutableArray.size : MutableArray g a -> Nat
383. MutableArray.write : MutableArray g a
-> Nat
-> a
->{g, Exception} ()
383. builtin type MutableByteArray
384. MutableByteArray.copyTo! : MutableByteArray g
384. builtin type MutableByteArray
385. MutableByteArray.copyTo! : MutableByteArray g
-> Nat
-> MutableByteArray g
-> Nat
-> Nat
->{g, Exception} ()
385. MutableByteArray.freeze : MutableByteArray g
386. MutableByteArray.freeze : MutableByteArray g
-> Nat
-> Nat
->{g} ImmutableByteArray
386. MutableByteArray.freeze! : MutableByteArray g
387. MutableByteArray.freeze! : MutableByteArray g
->{g} ImmutableByteArray
387. MutableByteArray.read16be : MutableByteArray g
388. MutableByteArray.read16be : MutableByteArray g
-> Nat
->{g, Exception} Nat
388. MutableByteArray.read24be : MutableByteArray g
389. MutableByteArray.read24be : MutableByteArray g
-> Nat
->{g, Exception} Nat
389. MutableByteArray.read32be : MutableByteArray g
390. MutableByteArray.read32be : MutableByteArray g
-> Nat
->{g, Exception} Nat
390. MutableByteArray.read40be : MutableByteArray g
391. MutableByteArray.read40be : MutableByteArray g
-> Nat
->{g, Exception} Nat
391. MutableByteArray.read64be : MutableByteArray g
392. MutableByteArray.read64be : MutableByteArray g
-> Nat
->{g, Exception} Nat
392. MutableByteArray.read8 : MutableByteArray g
393. MutableByteArray.read8 : MutableByteArray g
-> Nat
->{g, Exception} Nat
393. MutableByteArray.size : MutableByteArray g -> Nat
394. MutableByteArray.write16be : MutableByteArray g
394. MutableByteArray.size : MutableByteArray g -> Nat
395. MutableByteArray.write16be : MutableByteArray g
-> Nat
-> Nat
->{g, Exception} ()
395. MutableByteArray.write32be : MutableByteArray g
396. MutableByteArray.write32be : MutableByteArray g
-> Nat
-> Nat
->{g, Exception} ()
396. MutableByteArray.write64be : MutableByteArray g
397. MutableByteArray.write64be : MutableByteArray g
-> Nat
-> Nat
->{g, Exception} ()
397. MutableByteArray.write8 : MutableByteArray g
398. MutableByteArray.write8 : MutableByteArray g
-> Nat
-> Nat
->{g, Exception} ()
398. builtin type Nat
399. Nat.* : Nat -> Nat -> Nat
400. Nat.+ : Nat -> Nat -> Nat
401. Nat./ : Nat -> Nat -> Nat
402. Nat.and : Nat -> Nat -> Nat
403. Nat.complement : Nat -> Nat
404. Nat.drop : Nat -> Nat -> Nat
405. Nat.eq : Nat -> Nat -> Boolean
406. Nat.fromText : Text -> Optional Nat
407. Nat.gt : Nat -> Nat -> Boolean
408. Nat.gteq : Nat -> Nat -> Boolean
409. Nat.increment : Nat -> Nat
410. Nat.isEven : Nat -> Boolean
411. Nat.isOdd : Nat -> Boolean
412. Nat.leadingZeros : Nat -> Nat
413. Nat.lt : Nat -> Nat -> Boolean
414. Nat.lteq : Nat -> Nat -> Boolean
415. Nat.mod : Nat -> Nat -> Nat
416. Nat.or : Nat -> Nat -> Nat
417. Nat.popCount : Nat -> Nat
418. Nat.pow : Nat -> Nat -> Nat
419. Nat.shiftLeft : Nat -> Nat -> Nat
420. Nat.shiftRight : Nat -> Nat -> Nat
421. Nat.sub : Nat -> Nat -> Int
422. Nat.toFloat : Nat -> Float
423. Nat.toInt : Nat -> Int
424. Nat.toText : Nat -> Text
425. Nat.trailingZeros : Nat -> Nat
426. Nat.xor : Nat -> Nat -> Nat
427. structural type Optional a
428. Optional.None : Optional a
429. Optional.Some : a -> Optional a
430. builtin type Pattern
431. Pattern.capture : Pattern a -> Pattern a
432. Pattern.isMatch : Pattern a -> a -> Boolean
433. Pattern.join : [Pattern a] -> Pattern a
434. Pattern.many : Pattern a -> Pattern a
435. Pattern.or : Pattern a -> Pattern a -> Pattern a
436. Pattern.replicate : Nat -> Nat -> Pattern a -> Pattern a
437. Pattern.run : Pattern a -> a -> Optional ([a], a)
438. builtin type Ref
439. Ref.read : Ref g a ->{g} a
440. Ref.write : Ref g a -> a ->{g} ()
441. builtin type Request
442. builtin type Scope
443. Scope.array : Nat ->{Scope s} MutableArray (Scope s) a
444. Scope.arrayOf : a
399. builtin type Nat
400. Nat.* : Nat -> Nat -> Nat
401. Nat.+ : Nat -> Nat -> Nat
402. Nat./ : Nat -> Nat -> Nat
403. Nat.and : Nat -> Nat -> Nat
404. Nat.complement : Nat -> Nat
405. Nat.drop : Nat -> Nat -> Nat
406. Nat.eq : Nat -> Nat -> Boolean
407. Nat.fromText : Text -> Optional Nat
408. Nat.gt : Nat -> Nat -> Boolean
409. Nat.gteq : Nat -> Nat -> Boolean
410. Nat.increment : Nat -> Nat
411. Nat.isEven : Nat -> Boolean
412. Nat.isOdd : Nat -> Boolean
413. Nat.leadingZeros : Nat -> Nat
414. Nat.lt : Nat -> Nat -> Boolean
415. Nat.lteq : Nat -> Nat -> Boolean
416. Nat.mod : Nat -> Nat -> Nat
417. Nat.or : Nat -> Nat -> Nat
418. Nat.popCount : Nat -> Nat
419. Nat.pow : Nat -> Nat -> Nat
420. Nat.shiftLeft : Nat -> Nat -> Nat
421. Nat.shiftRight : Nat -> Nat -> Nat
422. Nat.sub : Nat -> Nat -> Int
423. Nat.toFloat : Nat -> Float
424. Nat.toInt : Nat -> Int
425. Nat.toText : Nat -> Text
426. Nat.trailingZeros : Nat -> Nat
427. Nat.xor : Nat -> Nat -> Nat
428. structural type Optional a
429. Optional.None : Optional a
430. Optional.Some : a -> Optional a
431. builtin type Pattern
432. Pattern.capture : Pattern a -> Pattern a
433. Pattern.isMatch : Pattern a -> a -> Boolean
434. Pattern.join : [Pattern a] -> Pattern a
435. Pattern.many : Pattern a -> Pattern a
436. Pattern.or : Pattern a -> Pattern a -> Pattern a
437. Pattern.replicate : Nat -> Nat -> Pattern a -> Pattern a
438. Pattern.run : Pattern a -> a -> Optional ([a], a)
439. builtin type Ref
440. Ref.read : Ref g a ->{g} a
441. Ref.write : Ref g a -> a ->{g} ()
442. builtin type Request
443. builtin type Scope
444. Scope.array : Nat ->{Scope s} MutableArray (Scope s) a
445. Scope.arrayOf : a
-> Nat
->{Scope s} MutableArray (Scope s) a
445. Scope.bytearray : Nat
446. Scope.bytearray : Nat
->{Scope s} MutableByteArray (Scope s)
446. Scope.bytearrayOf : Nat
447. Scope.bytearrayOf : Nat
-> Nat
->{Scope s} MutableByteArray
(Scope s)
447. Scope.ref : a ->{Scope s} Ref {Scope s} a
448. Scope.run : (∀ s. '{g, Scope s} r) ->{g} r
449. structural type SeqView a b
450. SeqView.VElem : a -> b -> SeqView a b
451. SeqView.VEmpty : SeqView a b
452. Socket.toText : Socket -> Text
453. unique type Test.Result
454. Test.Result.Fail : Text -> Result
455. Test.Result.Ok : Text -> Result
456. builtin type Text
457. Text.!= : Text -> Text -> Boolean
458. Text.++ : Text -> Text -> Text
459. Text.drop : Nat -> Text -> Text
460. Text.empty : Text
461. Text.eq : Text -> Text -> Boolean
462. Text.fromCharList : [Char] -> Text
463. Text.fromUtf8.impl : Bytes -> Either Failure Text
464. Text.gt : Text -> Text -> Boolean
465. Text.gteq : Text -> Text -> Boolean
466. Text.indexOf : Text -> Text -> Optional Nat
467. Text.lt : Text -> Text -> Boolean
468. Text.lteq : Text -> Text -> Boolean
469. Text.patterns.anyChar : Pattern Text
470. Text.patterns.char : Class -> Pattern Text
471. Text.patterns.charIn : [Char] -> Pattern Text
472. Text.patterns.charRange : Char -> Char -> Pattern Text
473. Text.patterns.digit : Pattern Text
474. Text.patterns.eof : Pattern Text
475. Text.patterns.letter : Pattern Text
476. Text.patterns.literal : Text -> Pattern Text
477. Text.patterns.notCharIn : [Char] -> Pattern Text
478. Text.patterns.notCharRange : Char -> Char -> Pattern Text
479. Text.patterns.punctuation : Pattern Text
480. Text.patterns.space : Pattern Text
481. Text.repeat : Nat -> Text -> Text
482. Text.reverse : Text -> Text
483. Text.size : Text -> Nat
484. Text.take : Nat -> Text -> Text
485. Text.toCharList : Text -> [Char]
486. Text.toLowercase : Text -> Text
487. Text.toUppercase : Text -> Text
488. Text.toUtf8 : Text -> Bytes
489. Text.uncons : Text -> Optional (Char, Text)
490. Text.unsnoc : Text -> Optional (Text, Char)
491. ThreadId.toText : ThreadId -> Text
492. todo : a -> b
493. structural type Tuple a b
494. Tuple.Cons : a -> b -> Tuple a b
495. structural type Unit
496. Unit.Unit : ()
497. Universal.< : a -> a -> Boolean
498. Universal.<= : a -> a -> Boolean
499. Universal.== : a -> a -> Boolean
500. Universal.> : a -> a -> Boolean
501. Universal.>= : a -> a -> Boolean
502. Universal.compare : a -> a -> Int
503. Universal.murmurHash : a -> Nat
504. unsafe.coerceAbilities : (a ->{e1} b) -> a ->{e2} b
505. builtin type Value
506. Value.dependencies : Value -> [Term]
507. Value.deserialize : Bytes -> Either Text Value
508. Value.load : Value ->{IO} Either [Term] a
509. Value.serialize : Value -> Bytes
510. Value.value : a -> Value
448. Scope.ref : a ->{Scope s} Ref {Scope s} a
449. Scope.run : (∀ s. '{g, Scope s} r) ->{g} r
450. structural type SeqView a b
451. SeqView.VElem : a -> b -> SeqView a b
452. SeqView.VEmpty : SeqView a b
453. Socket.toText : Socket -> Text
454. unique type Test.Result
455. Test.Result.Fail : Text -> Result
456. Test.Result.Ok : Text -> Result
457. builtin type Text
458. Text.!= : Text -> Text -> Boolean
459. Text.++ : Text -> Text -> Text
460. Text.drop : Nat -> Text -> Text
461. Text.empty : Text
462. Text.eq : Text -> Text -> Boolean
463. Text.fromCharList : [Char] -> Text
464. Text.fromUtf8.impl : Bytes -> Either Failure Text
465. Text.gt : Text -> Text -> Boolean
466. Text.gteq : Text -> Text -> Boolean
467. Text.indexOf : Text -> Text -> Optional Nat
468. Text.lt : Text -> Text -> Boolean
469. Text.lteq : Text -> Text -> Boolean
470. Text.patterns.anyChar : Pattern Text
471. Text.patterns.char : Class -> Pattern Text
472. Text.patterns.charIn : [Char] -> Pattern Text
473. Text.patterns.charRange : Char -> Char -> Pattern Text
474. Text.patterns.digit : Pattern Text
475. Text.patterns.eof : Pattern Text
476. Text.patterns.letter : Pattern Text
477. Text.patterns.literal : Text -> Pattern Text
478. Text.patterns.notCharIn : [Char] -> Pattern Text
479. Text.patterns.notCharRange : Char -> Char -> Pattern Text
480. Text.patterns.punctuation : Pattern Text
481. Text.patterns.space : Pattern Text
482. Text.repeat : Nat -> Text -> Text
483. Text.reverse : Text -> Text
484. Text.size : Text -> Nat
485. Text.take : Nat -> Text -> Text
486. Text.toCharList : Text -> [Char]
487. Text.toLowercase : Text -> Text
488. Text.toUppercase : Text -> Text
489. Text.toUtf8 : Text -> Bytes
490. Text.uncons : Text -> Optional (Char, Text)
491. Text.unsnoc : Text -> Optional (Text, Char)
492. ThreadId.toText : ThreadId -> Text
493. todo : a -> b
494. structural type Tuple a b
495. Tuple.Cons : a -> b -> Tuple a b
496. structural type Unit
497. Unit.Unit : ()
498. Universal.< : a -> a -> Boolean
499. Universal.<= : a -> a -> Boolean
500. Universal.== : a -> a -> Boolean
501. Universal.> : a -> a -> Boolean
502. Universal.>= : a -> a -> Boolean
503. Universal.compare : a -> a -> Int
504. Universal.murmurHash : a -> Nat
505. unsafe.coerceAbilities : (a ->{e1} b) -> a ->{e2} b
506. builtin type Value
507. Value.dependencies : Value -> [Term]
508. Value.deserialize : Bytes -> Either Text Value
509. Value.load : Value ->{IO} Either [Term] a
510. Value.serialize : Value -> Bytes
511. Value.value : a -> Value
.builtin> alias.many 94-104 .mylib
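The `io2.Promise` entries in the listing above describe a small write-once cell API: `Promise.new` allocates an empty promise, `Promise.write` fills it and returns a `Boolean` (presumably `false` once a value is already present), `Promise.read` blocks until a value exists, and `Promise.tryRead` polls. A minimal usage sketch, assuming only the signatures as listed and Unison's usual suffix name resolution (illustrative code, not part of this diff):

```unison
-- Illustrative sketch only; assumes the io2.Promise signatures exactly as listed.
promiseDemo : '{IO} Nat
promiseDemo _ =
  p = !Promise.new              -- allocate an empty Promise Nat
  first = Promise.write p 42    -- assumed to return true: the promise was empty
  second = Promise.write p 43   -- assumed to return false: a value is already present
  Promise.read p                -- returns 42 without blocking, since p is now full
```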
View File
@ -74,7 +74,7 @@ The `builtins.merge` command adds the known builtins to a `builtin` subnamespace
63. Value/ (5 terms)
64. bug (a -> b)
65. crypto/ (13 terms, 1 type)
66. io2/ (132 terms, 32 types)
66. io2/ (133 terms, 32 types)
67. metadata/ (2 terms)
68. todo (a -> b)
69. unsafe/ (1 term)
View File
@ -7,7 +7,7 @@ your working directory with each command).
🎉 I've created the project foo.
🎨 Type `ui` to explore this project's code in your browser.
🌏 Discover libraries at https://share.unison-lang.org
🔭 Discover libraries at https://share.unison-lang.org
📖 Use `help-topic projects` to learn more about projects.
Write your first Unison code with UCM:
View File
@ -6,7 +6,7 @@
🎉 I've created the project foo.
🎨 Type `ui` to explore this project's code in your browser.
🌏 Discover libraries at https://share.unison-lang.org
🔭 Discover libraries at https://share.unison-lang.org
📖 Use `help-topic projects` to learn more about projects.
Write your first Unison code with UCM:
@ -24,7 +24,7 @@
🎉 I've created the project bar.
🎨 Type `ui` to explore this project's code in your browser.
🌏 Discover libraries at https://share.unison-lang.org
🔭 Discover libraries at https://share.unison-lang.org
📖 Use `help-topic projects` to learn more about projects.
Write your first Unison code with UCM:
View File
@ -23,7 +23,7 @@ Technically, the definitions all exist, but they have no names. `builtins.merge`
.foo> ls
1. builtin/ (444 terms, 66 types)
1. builtin/ (445 terms, 66 types)
```
And for a limited time, you can get even more builtin goodies:
@ -35,7 +35,7 @@ And for a limited time, you can get even more builtin goodies:
.foo> ls
1. builtin/ (616 terms, 84 types)
1. builtin/ (617 terms, 84 types)
```
More typically, you'd start out by pulling `base`.
Some files were not shown because too many files have changed in this diff
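The `io2.Ref.cas`, `io2.Ref.readForCas`, and `io2.Ref.Ticket.read` signatures in the listing together suggest an optimistic compare-and-swap protocol for `Ref {IO}`: take a ticket snapshot, compute a new value from it, and `cas` succeeds only if the ref has not been written since the snapshot was taken. A hedged sketch of the usual retry loop built on those three signatures (the `atomicBump` name is hypothetical and the code is illustrative only, not part of this diff):

```unison
-- Illustrative sketch; relies only on the cas/Ticket signatures listed above.
atomicBump : Ref {IO} Nat ->{IO} Nat
atomicBump r =
  t = Ref.readForCas r        -- Ticket Nat: a snapshot of the current value
  new = Ticket.read t + 1     -- compute the update from the snapshot
  if Ref.cas r t new          -- succeeds only if r is unchanged since the snapshot
  then new
  else atomicBump r           -- lost a race with another writer; retry
```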