Merge remote-tracking branch 'origin/trunk' into pg/causals

This commit is contained in:
Chris Penner 2024-01-10 15:18:22 -08:00
commit e8a222c320
546 changed files with 19377 additions and 8699 deletions

View File

@ -15,6 +15,8 @@ on:
- trunk - trunk
tags: tags:
- release/* - release/*
workflow_dispatch:
jobs: jobs:
@ -28,7 +30,7 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: Get changed files - name: Get changed files
id: changed-files id: changed-files
uses: tj-actions/changed-files@v37 uses: tj-actions/changed-files@v41
with: with:
# globs copied from default settings for run-ormolu # globs copied from default settings for run-ormolu
files: | files: |
@ -229,17 +231,13 @@ jobs:
- name: unison-util-relation tests - name: unison-util-relation tests
run: stack --no-terminal build --fast --test unison-util-relation run: stack --no-terminal build --fast --test unison-util-relation
- name: round-trip-tests - name: round-trip-tests
if: runner.os == 'macOS'
run: | run: |
mkdir -p /private/tmp
touch /private/tmp/roundtrip.u
touch /private/tmp/rewrite-tmp.u
stack --no-terminal exec unison transcript unison-src/transcripts-round-trip/main.md stack --no-terminal exec unison transcript unison-src/transcripts-round-trip/main.md
git add unison-src/transcripts-round-trip/main.output.md git add unison-src/transcripts-round-trip/main.output.md
# Fail if any transcripts cause git diffs. # Fail if any transcripts cause git diffs.
git diff --cached --ignore-cr-at-eol --exit-code git diff --cached --ignore-cr-at-eol --exit-code
stack --no-terminal exec unison transcript unison-src/transcripts-manual/rewrites.md stack --no-terminal exec unison transcript unison-src/transcripts-manual/rewrites.md
git add unison-src/transcripts-manual/rewrites.output.md git add unison-src/transcripts-manual/rewrites.output.md
# Fail if any transcripts cause git diffs. # Fail if any transcripts cause git diffs.
git diff --cached --ignore-cr-at-eol --exit-code git diff --cached --ignore-cr-at-eol --exit-code
- name: transcripts - name: transcripts
@ -314,6 +312,9 @@ jobs:
exit 1 exit 1
fi fi
- name: verify stack ghci startup
if: runner.os == 'macOS'
run: echo | stack ghci
- name: check final stackage cache size - name: check final stackage cache size
run: | run: |
echo global .stack echo global .stack

View File

@ -90,11 +90,9 @@ jobs:
# Erase any stale files # Erase any stale files
cd "$GITHUB_WORKSPACE"/haddocks cd "$GITHUB_WORKSPACE"/haddocks
rm -rf ./* rm -rf ./*
git checkout --orphan fresh-haddocks-branch
cp -r "${docs_root}"/* "$GITHUB_WORKSPACE"/haddocks cp -r "${docs_root}"/* "$GITHUB_WORKSPACE"/haddocks
if [[ -z "$(git status --porcelain)" ]]; then git add .
echo No changes. git commit -m "Regenerated haddocks based on ${GITHUB_SHA}"
else # Push the branch with only a single commit over the remote
git add . git push --force origin fresh-haddocks-branch:haddocks
git commit -m "Regenerated haddocks based on ${GITHUB_SHA}"
git push
fi

View File

@ -20,6 +20,7 @@ jobs:
matrix: matrix:
os: os:
- ubuntu-20.04 - ubuntu-20.04
- macOS-12
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- uses: cachix/install-nix-action@v22 - uses: cachix/install-nix-action@v22
@ -32,5 +33,5 @@ jobs:
name: unison name: unison
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- name: build all packages and development shells - name: build all packages and development shells
run: nix -L build --accept-flake-config --no-link --keep-going '.#all' run: nix -L build --accept-flake-config --no-link --keep-going '.#build-tools'

View File

@ -162,7 +162,7 @@ jobs:
- uses: "marvinpinto/action-automatic-releases@latest" - uses: "marvinpinto/action-automatic-releases@latest"
with: with:
repo_token: "${{ secrets.GITHUB_TOKEN }}" repo_token: "${{ secrets.GITHUB_TOKEN }}"
automatic_release_tag: "latest" automatic_release_tag: "pre-release"
prerelease: true prerelease: true
title: "Development Build" title: "Development Build"
files: | files: |

View File

@ -10,7 +10,7 @@ on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
version: version:
description: 'Release Version (E.g. M4 or M4a)' description: 'Release Version (E.g. M4 or M4a or 0.4.1)'
required: true required: true
type: string type: string
target: target:

View File

@ -0,0 +1,89 @@
# Manually-triggered workflow (workflow_dispatch only; no push/PR triggers) that
# re-runs all Unison transcripts and commits the regenerated output files back
# to the triggering branch.
name: update-transcripts
on:
  workflow_dispatch:
jobs:
  update_transcripts:
    runs-on: ${{ matrix.os }}
    defaults:
      run:
        shell: bash
    strategy:
      matrix:
        os:
          # NOTE(review): only macOS is in the matrix, but the stack install
          # step below is Linux-only — this presumably relies on the macOS-12
          # runner image (or the restored ~/.stack cache) already providing
          # stack. Confirm before adding other OSes.
          - macOS-12
    steps:
      - uses: actions/checkout@v4
      - id: stackage-resolver
        name: record stackage resolver
        # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files
        # looks for `resolver: nightly-yyyy-mm-dd` or `resolver: lts-xx.yy` in `stack.yaml` and splits it into
        # `nightly` or `lts-xx`. the whole resolver string is put into resolver_long as a backup cache key
        # ${{ steps.stackage-resolver.outputs.resolver_short }}
        # ${{ steps.stackage-resolver.outputs.resolver_long }}
        run: |
          grep resolver stack.yaml | awk '{ x="resolver_short="; if (split($2,a,"-") > 2) print x a[1]; else {split($2,b,"."); print x b[1]}}' >> "$GITHUB_OUTPUT"
          grep resolver stack.yaml | awk '{print "resolver_long="$2}' >> "$GITHUB_OUTPUT"
      # Cache ~/.stack, keyed by the contents of 'stack.yaml'.
      - uses: actions/cache@v3
        name: cache ~/.stack (unix)
        if: runner.os != 'Windows'
        with:
          path: ~/.stack
          key: stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-${{github.sha}}
          # Fall-back to use the most recent cache for the stack.yaml, or failing that the OS.
          # Note the two resolver_short entries: the one ending in `-` matches nightly-style
          # keys (long = `nightly-yyyy-mm-dd`, short = `nightly`), while the one ending in `.`
          # matches other minor versions of the same LTS (long = `lts-xx.yy`, short = `lts-xx`).
          restore-keys: |
            stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-
            stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-
            stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-
            stack-1_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}.
            stack-1_${{matrix.os}}-
      # Cache each local package's ~/.stack-work for fast incremental builds in CI.
      - uses: actions/cache@v3
        name: cache .stack-work
        with:
          path: |
            **/.stack-work
          # Main cache key: commit hash. This should always result in a cache miss...
          # So when loading a cache we'll always fall back to the restore-keys,
          # which should load the most recent cache via a prefix search on the most
          # recent branch cache.
          # Then it will save a new cache at this commit sha, which should be used by
          # the next build on this branch.
          key: stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-${{hashFiles('**/stack.yaml')}}-${{github.sha}}
          restore-keys: |
            stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-${{hashFiles('**/stack.yaml')}}-
            stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_long }}-
            stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}-
            stack-work-4_${{matrix.os}}-${{ steps.stackage-resolver.outputs.resolver_short }}.
            stack-work-4_${{matrix.os}}-
      # Install stack by downloading the binary from GitHub.
      # The installation process differs by OS.
      - name: install stack (Linux)
        if: runner.os == 'Linux'
        working-directory: ${{ runner.temp }}
        run: |
          mkdir stack && cd stack
          curl -L https://github.com/commercialhaskell/stack/releases/download/v2.9.1/stack-2.9.1-linux-x86_64.tar.gz | tar -xz
          echo "$PWD/stack-"* >> $GITHUB_PATH
      # One of the transcripts fails if the user's git name hasn't been set.
      - name: set git user info
        run: |
          git config --global user.name "GitHub Actions"
          git config --global user.email "actions@github.com"
      - name: build
        run: stack --no-terminal build --fast --no-run-tests --test
      # Regenerate transcript outputs; any resulting file changes are committed
      # by the final step rather than failing the job (contrast with CI proper).
      - name: round-trip-tests
        run: |
          stack --no-terminal exec unison transcript unison-src/transcripts-round-trip/main.md
          stack --no-terminal exec unison transcript unison-src/transcripts-manual/rewrites.md
      - name: transcripts
        run: stack --no-terminal exec transcripts
      # Commit any regenerated transcript output back to the branch. The pushed
      # commit does not re-trigger CI — hence the reminder in the message.
      - name: save transcript changes
        uses: stefanzweifel/git-auto-commit-action@v4
        with:
          commit_message: rerun transcripts (reminder to rerun CI!)

View File

@ -73,8 +73,11 @@ The format for this list: name, GitHub handle
* Jesse Looney (@jesselooney) * Jesse Looney (@jesselooney)
* Vlad Posmangiu Luchian (@cstml) * Vlad Posmangiu Luchian (@cstml)
* Andrii Uvarov (@unorsk) * Andrii Uvarov (@unorsk)
* Fabio Labella (@SystemFw)
* Alexis King (@lexi-lambda)
* Mario Bašić (@mabasic) * Mario Bašić (@mabasic)
* Chris Krycho (@chriskrycho) * Chris Krycho (@chriskrycho)
* Hatim Khambati (@hatimkhambati26) * Hatim Khambati (@hatimkhambati26)
* Kyle Goetz (@kylegoetz) * Kyle Goetz (@kylegoetz)
* Ethan Morgan (@sixfourtwelve) * Ethan Morgan (@sixfourtwelve)
* Johan Winther (@JohanWinther)

View File

@ -6,6 +6,7 @@ module U.Codebase.Sqlite.Decode
decodeBranchFormat, decodeBranchFormat,
decodeComponentLengthOnly, decodeComponentLengthOnly,
decodeDeclElement, decodeDeclElement,
decodeDeclElementNumConstructors,
decodeDeclFormat, decodeDeclFormat,
decodePatchFormat, decodePatchFormat,
decodeSyncDeclFormat, decodeSyncDeclFormat,
@ -80,6 +81,10 @@ decodeDeclElement :: Word64 -> ByteString -> Either DecodeError (LocalIds, DeclF
decodeDeclElement i = decodeDeclElement i =
getFromBytesOr ("lookupDeclElement " <> tShow i) (Serialization.lookupDeclElement i) getFromBytesOr ("lookupDeclElement " <> tShow i) (Serialization.lookupDeclElement i)
decodeDeclElementNumConstructors :: Word64 -> ByteString -> Either DecodeError Int
decodeDeclElementNumConstructors i =
getFromBytesOr ("lookupDeclElementNumConstructors " <> tShow i) (Serialization.lookupDeclElementNumConstructors i)
decodeDeclFormat :: ByteString -> Either DecodeError DeclFormat.DeclFormat decodeDeclFormat :: ByteString -> Either DecodeError DeclFormat.DeclFormat
decodeDeclFormat = decodeDeclFormat =
getFromBytesOr "getDeclFormat" Serialization.getDeclFormat getFromBytesOr "getDeclFormat" Serialization.getDeclFormat

View File

@ -31,6 +31,7 @@ module U.Codebase.Sqlite.Operations
loadDeclComponent, loadDeclComponent,
loadDeclByReference, loadDeclByReference,
expectDeclByReference, expectDeclByReference,
expectDeclNumConstructors,
expectDeclTypeById, expectDeclTypeById,
-- * terms/decls -- * terms/decls
@ -64,10 +65,13 @@ module U.Codebase.Sqlite.Operations
-- ** dependents index -- ** dependents index
dependents, dependents,
dependentsOfComponent, dependentsOfComponent,
dependentsWithinScope,
-- ** type index -- ** type index
Q.addTypeToIndexForTerm, Q.addTypeToIndexForTerm,
termsHavingType, termsHavingType,
filterTermsByReferenceHavingType,
filterTermsByReferentHavingType,
-- ** type mentions index -- ** type mentions index
Q.addTypeMentionsToIndexForTerm, Q.addTypeMentionsToIndexForTerm,
@ -539,6 +543,11 @@ expectDeclByReference r@(C.Reference.Id h i) = do
>>= (\oid -> Q.expectDeclObject oid (decodeDeclElement i)) >>= (\oid -> Q.expectDeclObject oid (decodeDeclElement i))
>>= uncurry Q.s2cDecl >>= uncurry Q.s2cDecl
-- | Fetch the constructor count of the decl at the given reference id,
-- decoding only the count rather than the full decl
-- (see 'decodeDeclElementNumConstructors').
expectDeclNumConstructors :: C.Reference.Id -> Transaction Int
expectDeclNumConstructors (C.Reference.Id h i) = do
  oid <- Q.expectObjectIdForPrimaryHash h
  Q.expectDeclObject oid (decodeDeclElementNumConstructors i)
-- * Branch transformation -- * Branch transformation
s2cBranch :: S.DbBranch -> Transaction (C.Branch.Branch Transaction) s2cBranch :: S.DbBranch -> Transaction (C.Branch.Branch Transaction)
@ -1041,6 +1050,24 @@ termsHavingType cTypeRef =
set <- traverse s2cReferentId sIds set <- traverse s2cReferentId sIds
pure (Set.fromList set) pure (Set.fromList set)
-- | Narrow a list of term reference ids to those whose indexed type is the
-- given type. Conversion layer over 'Q.filterTermsByReferenceHavingType'.
filterTermsByReferenceHavingType :: C.TypeReference -> [C.Reference.Id] -> Transaction [C.Reference.Id]
filterTermsByReferenceHavingType cTypeRef cTermRefIds =
  runMaybeT (c2hReference cTypeRef) >>= \case
    -- The type reference couldn't be converted to a db-level reference
    -- (presumably its hash is unknown to this codebase), so nothing can match.
    Nothing -> pure []
    Just sTypeRef -> do
      -- Convert candidates to db-level ids, filter in SQL, convert matches back.
      sTermRefIds <- traverse c2sReferenceId cTermRefIds
      matches <- Q.filterTermsByReferenceHavingType sTypeRef sTermRefIds
      traverse s2cReferenceId matches
-- | Narrow a list of term referent ids (which may include constructors) to
-- those whose indexed type is the given type. Conversion layer over
-- 'Q.filterTermsByReferentHavingType'.
filterTermsByReferentHavingType :: C.TypeReference -> [C.Referent.Id] -> Transaction [C.Referent.Id]
filterTermsByReferentHavingType cTypeRef cTermRefIds =
  runMaybeT (c2hReference cTypeRef) >>= \case
    -- The type reference couldn't be converted to a db-level reference
    -- (presumably its hash is unknown to this codebase), so nothing can match.
    Nothing -> pure []
    Just sTypeRef -> do
      -- Convert candidates to db-level ids, filter in SQL, convert matches back.
      sTermRefIds <- traverse c2sReferentId cTermRefIds
      matches <- Q.filterTermsByReferentHavingType sTypeRef sTermRefIds
      traverse s2cReferentId matches
typeReferenceForTerm :: S.Reference.Id -> Transaction S.ReferenceH typeReferenceForTerm :: S.Reference.Id -> Transaction S.ReferenceH
typeReferenceForTerm = Q.getTypeReferenceForReferent . C.Referent.RefId typeReferenceForTerm = Q.getTypeReferenceForReferent . C.Referent.RefId
@ -1135,6 +1162,20 @@ dependents selector r = do
sIds <- Q.getDependentsForDependency selector r' sIds <- Q.getDependentsForDependency selector r'
Set.traverse s2cReferenceId sIds Set.traverse s2cReferenceId sIds
-- | `dependentsWithinScope scope query` returns all of transitive dependents of `query` that are in `scope` (not
-- including `query` itself). Each dependent is also tagged with whether it is a term or decl.
dependentsWithinScope :: Set C.Reference.Id -> Set C.Reference -> Transaction (Map C.Reference.Id C.ReferenceType)
dependentsWithinScope scope query = do
  -- Convert both sets to db-level references, run the SQL-side transitive
  -- search, then convert the resulting keys back and map the db object type
  -- onto the codebase-level 'C.ReferenceType'.
  scope' <- Set.traverse c2sReferenceId scope
  query' <- Set.traverse c2sReference query
  Q.getDependentsWithinScope scope' query'
    >>= Map.bitraverse s2cReferenceId (pure . objectTypeToReferenceType)
  where
    objectTypeToReferenceType = \case
      ObjectType.TermComponent -> C.RtTerm
      ObjectType.DeclComponent -> C.RtType
      -- Dependents can only be terms or decls; anything else indicates a bug
      -- in the underlying query.
      _ -> error "Q.getDependentsWithinScope shouldn't return any other types"
-- | returns a list of known definitions referencing `h` -- | returns a list of known definitions referencing `h`
dependentsOfComponent :: H.Hash -> Transaction (Set C.Reference.Id) dependentsOfComponent :: H.Hash -> Transaction (Set C.Reference.Id)
dependentsOfComponent h = do dependentsOfComponent h = do

View File

@ -160,12 +160,15 @@ module U.Codebase.Sqlite.Queries
getDependenciesForDependent, getDependenciesForDependent,
getDependencyIdsForDependent, getDependencyIdsForDependent,
getDependenciesBetweenTerms, getDependenciesBetweenTerms,
getDependentsWithinScope,
-- ** type index -- ** type index
addToTypeIndex, addToTypeIndex,
getReferentsByType, getReferentsByType,
getTypeReferenceForReferent, getTypeReferenceForReferent,
getTypeReferencesForComponent, getTypeReferencesForComponent,
filterTermsByReferenceHavingType,
filterTermsByReferentHavingType,
-- ** type mentions index -- ** type mentions index
addToTypeMentionsIndex, addToTypeMentionsIndex,
@ -1459,6 +1462,76 @@ getTypeReferencesForComponent oId =
WHERE term_referent_object_id = :oId WHERE term_referent_object_id = :oId
|] |]
-- | @filterTermsByReferentHavingType typ terms@ returns the subset of @terms@
-- whose entry in the @find_type_index@ table has exactly the type @typ@.
--
-- Implementation: stage the candidate referents in a temporary @filter_query@
-- table, join it against the type index, then drop the table.
-- NOTE(review): the drop runs via @<* drop@, so it is skipped if @select@
-- throws — presumably the enclosing 'Transaction' rollback undoes the
-- temporary-table DDL in that case; confirm.
filterTermsByReferentHavingType :: S.ReferenceH -> [S.Referent.Id] -> Transaction [S.Referent.Id]
filterTermsByReferentHavingType typ terms = create *> for_ terms insert *> select <* drop
  where
    -- `IS` (rather than `=`) on the type columns and the constructor index
    -- makes NULLs compare equal (builtin vs derived types; plain refs vs
    -- constructors).
    select = queryListRow [sql|
      SELECT
        q.term_referent_object_id,
        q.term_referent_component_index,
        q.term_referent_constructor_index
      FROM filter_query q, find_type_index t
      WHERE t.type_reference_builtin IS :typeBuiltin
        AND t.type_reference_hash_id IS :typeHashId
        AND t.type_reference_component_index IS :typeComponentIndex
        AND t.term_referent_object_id = q.term_referent_object_id
        AND t.term_referent_component_index = q.term_referent_component_index
        AND t.term_referent_constructor_index IS q.term_referent_constructor_index
      |]
    insert r = execute [sql|
      INSERT INTO filter_query (
        term_referent_object_id,
        term_referent_component_index,
        term_referent_constructor_index
      ) VALUES (@r, @, @)
      |]
    -- Decompose the (builtin | derived) type reference into the three
    -- nullable columns stored by the type index.
    typeBuiltin :: Maybe TextId = Lens.preview C.Reference.t_ typ
    typeHashId :: Maybe HashId = Lens.preview (C.Reference._ReferenceDerived . C.Reference.idH) typ
    typeComponentIndex :: Maybe C.Reference.Pos = Lens.preview (C.Reference._ReferenceDerived . C.Reference.idPos) typ
    create = execute
      [sql|
        CREATE TEMPORARY TABLE filter_query (
          term_referent_object_id INTEGER NOT NULL,
          term_referent_component_index INTEGER NOT NULL,
          term_referent_constructor_index INTEGER NULL
        )
      |]
    -- Local name shadows Prelude.drop; scoped to this where-block only.
    drop = execute [sql|DROP TABLE filter_query|]
-- | @filterTermsByReferenceHavingType typ terms@ returns the subset of @terms@
-- whose entry in the @find_type_index@ table has exactly the type @typ@.
-- Like 'filterTermsByReferentHavingType', but for plain term references:
-- the constructor-index column is pinned to NULL in the join.
--
-- Implementation: stage the candidates in a temporary @filter_query@ table,
-- join it against the type index, then drop the table.
filterTermsByReferenceHavingType :: S.ReferenceH -> [S.Reference.Id] -> Transaction [S.Reference.Id]
filterTermsByReferenceHavingType typ terms = create *> for_ terms insert *> select <* drop
  where
    -- `IS` (rather than `=`) on the type columns makes NULLs compare equal
    -- (builtin vs derived types).
    select = queryListRow [sql|
      SELECT
        q.term_reference_object_id,
        q.term_reference_component_index
      FROM filter_query q, find_type_index t
      WHERE t.type_reference_builtin IS :typeBuiltin
        AND t.type_reference_hash_id IS :typeHashId
        AND t.type_reference_component_index IS :typeComponentIndex
        AND t.term_referent_object_id = q.term_reference_object_id
        AND t.term_referent_component_index = q.term_reference_component_index
        AND t.term_referent_constructor_index IS NULL
      |]
    insert r = execute [sql|
      INSERT INTO filter_query (
        term_reference_object_id,
        term_reference_component_index
      ) VALUES (@r, @)
      |]
    -- Decompose the (builtin | derived) type reference into the three
    -- nullable columns stored by the type index.
    typeBuiltin :: Maybe TextId = Lens.preview C.Reference.t_ typ
    typeHashId :: Maybe HashId = Lens.preview (C.Reference._ReferenceDerived . C.Reference.idH) typ
    typeComponentIndex :: Maybe C.Reference.Pos = Lens.preview (C.Reference._ReferenceDerived . C.Reference.idPos) typ
    create = execute
      [sql|
        CREATE TEMPORARY TABLE filter_query (
          term_reference_object_id INTEGER NOT NULL,
          term_reference_component_index INTEGER NOT NULL
        )
      |]
    -- Local name shadows Prelude.drop; scoped to this where-block only.
    drop = execute [sql|DROP TABLE filter_query|]
addToTypeMentionsIndex :: Reference' TextId HashId -> Referent.Id -> Transaction () addToTypeMentionsIndex :: Reference' TextId HashId -> Referent.Id -> Transaction ()
addToTypeMentionsIndex tp tm = addToTypeMentionsIndex tp tm =
execute execute
@ -1775,6 +1848,83 @@ getDependenciesBetweenTerms oid1 oid2 =
WHERE path_elem IS NOT null WHERE path_elem IS NOT null
|] |]
-- | `getDependentsWithinScope scope query` returns all of transitive dependents of `query` that are in `scope` (not
-- including `query` itself). Each dependent is also tagged with whether it is a term or decl.
--
-- NOTE(review): the two temporary tables are dropped at the end with plain
-- `execute` calls (no bracketing), so an exception mid-way leaves them behind
-- within this connection — presumably the enclosing 'Transaction' rollback
-- undoes the DDL in that case; confirm.
getDependentsWithinScope :: Set Reference.Id -> Set S.Reference -> Transaction (Map Reference.Id ObjectType)
getDependentsWithinScope scope query = do
  -- Populate a temporary table with all of the references in `scope`
  execute
    [sql|
      CREATE TEMPORARY TABLE dependents_search_scope (
        dependent_object_id INTEGER NOT NULL,
        dependent_component_index INTEGER NOT NULL,
        PRIMARY KEY (dependent_object_id, dependent_component_index)
      )
    |]
  for_ scope \r ->
    execute [sql|INSERT INTO dependents_search_scope VALUES (@r, @)|]
  -- Populate a temporary table with all of the references in `query`.
  -- The CHECK constraints encode the builtin-xor-derived shape of a reference:
  -- either the builtin column is set, or the (object id, component index) pair is.
  execute
    [sql|
      CREATE TEMPORARY TABLE dependencies_query (
        dependency_builtin INTEGER NULL,
        dependency_object_id INTEGER NULL,
        dependency_component_index INTEGER NULL,
        CHECK ((dependency_builtin IS NULL) = (dependency_object_id IS NOT NULL)),
        CHECK ((dependency_object_id IS NULL) = (dependency_component_index IS NULL))
      )
    |]
  for_ query \r ->
    execute [sql|INSERT INTO dependencies_query VALUES (@r, @, @)|]
  -- Say the query set is { #foo, #bar }, and the scope set is { #foo, #bar, #baz, #qux, #honk }.
  --
  -- Furthermore, say the dependencies are as follows, where `x -> y` means "x depends on y".
  --
  --   #honk -> #baz -> #foo
  --   #qux -> #bar
  --
  -- The recursive query below is seeded with direct dependents of the `query` set that are in `scope`, namely:
  --
  --   #honk -> #baz -> #foo
  --   #qux -> #bar
  --   ^^^^
  --   direct deps of { #foo, #bar } are: { #baz, #qux }
  --
  -- Then, every iteration of the query expands to that set's dependents (#honk and onwards), until there are no more.
  -- We use `UNION` rather than `UNION ALL` so as to not track down the transitive dependents of any particular
  -- reference more than once.
  result :: [Reference.Id :. Only ObjectType] <- queryListRow [sql|
    WITH RECURSIVE transitive_dependents (dependent_object_id, dependent_component_index, type_id) AS (
      SELECT d.dependent_object_id, d.dependent_component_index, object.type_id
      FROM dependents_index d
      JOIN object ON d.dependent_object_id = object.id
      JOIN dependencies_query q
        ON q.dependency_builtin IS d.dependency_builtin
        AND q.dependency_object_id IS d.dependency_object_id
        AND q.dependency_component_index IS d.dependency_component_index
      JOIN dependents_search_scope s
        ON s.dependent_object_id = d.dependent_object_id
        AND s.dependent_component_index = d.dependent_component_index

      UNION SELECT d.dependent_object_id, d.dependent_component_index, object.type_id
      FROM dependents_index d
      JOIN object ON d.dependent_object_id = object.id
      JOIN transitive_dependents t
        ON t.dependent_object_id = d.dependency_object_id
        AND t.dependent_component_index = d.dependency_component_index
      JOIN dependents_search_scope s
        ON s.dependent_object_id = d.dependent_object_id
        AND s.dependent_component_index = d.dependent_component_index
    )
    SELECT * FROM transitive_dependents
  |]
  execute [sql|DROP TABLE dependents_search_scope|]
  execute [sql|DROP TABLE dependencies_query|]
  pure . Map.fromList $ [(r, t) | r :. Only t <- result]
objectIdByBase32Prefix :: ObjectType -> Text -> Transaction [ObjectId] objectIdByBase32Prefix :: ObjectType -> Text -> Transaction [ObjectId]
objectIdByBase32Prefix objType prefix = objectIdByBase32Prefix objType prefix =
queryListCol queryListCol

View File

@ -9,6 +9,7 @@ module U.Codebase.Sqlite.Serialization
getBranchFormat, getBranchFormat,
getLocalBranch, getLocalBranch,
getDeclElement, getDeclElement,
getDeclElementNumConstructors,
getDeclFormat, getDeclFormat,
getPatchFormat, getPatchFormat,
getTempCausalFormat, getTempCausalFormat,
@ -20,6 +21,7 @@ module U.Codebase.Sqlite.Serialization
getTermFormat, getTermFormat,
getWatchResultFormat, getWatchResultFormat,
lookupDeclElement, lookupDeclElement,
lookupDeclElementNumConstructors,
lookupTermElement, lookupTermElement,
lookupTermElementDiscardingTerm, lookupTermElementDiscardingTerm,
lookupTermElementDiscardingType, lookupTermElementDiscardingType,
@ -63,6 +65,7 @@ import U.Codebase.Sqlite.Branch.Diff qualified as BranchDiff
import U.Codebase.Sqlite.Branch.Format qualified as BranchFormat import U.Codebase.Sqlite.Branch.Format qualified as BranchFormat
import U.Codebase.Sqlite.Branch.Full qualified as BranchFull import U.Codebase.Sqlite.Branch.Full qualified as BranchFull
import U.Codebase.Sqlite.Causal qualified as Causal import U.Codebase.Sqlite.Causal qualified as Causal
import U.Codebase.Sqlite.DbId (ObjectId, TextId)
import U.Codebase.Sqlite.Decl.Format qualified as DeclFormat import U.Codebase.Sqlite.Decl.Format qualified as DeclFormat
import U.Codebase.Sqlite.Entity qualified as Entity import U.Codebase.Sqlite.Entity qualified as Entity
import U.Codebase.Sqlite.LocalIds (LocalIds, LocalIds' (..), LocalTextId, WatchLocalIds) import U.Codebase.Sqlite.LocalIds (LocalIds, LocalIds' (..), LocalTextId, WatchLocalIds)
@ -159,6 +162,9 @@ putLocalIdsWith putText putDefn LocalIds {textLookup, defnLookup} = do
getLocalIds :: (MonadGet m) => m LocalIds getLocalIds :: (MonadGet m) => m LocalIds
getLocalIds = getLocalIdsWith getVarInt getVarInt getLocalIds = getLocalIdsWith getVarInt getVarInt
skipLocalIds :: (MonadGet m) => m ()
skipLocalIds = skipLocalIdsWith @TextId @ObjectId getVarInt getVarInt
getWatchLocalIds :: (MonadGet m) => m WatchLocalIds getWatchLocalIds :: (MonadGet m) => m WatchLocalIds
getWatchLocalIds = getLocalIdsWith getVarInt getVarInt getWatchLocalIds = getLocalIdsWith getVarInt getVarInt
@ -166,6 +172,11 @@ getLocalIdsWith :: (MonadGet m) => m t -> m d -> m (LocalIds' t d)
getLocalIdsWith getText getDefn = getLocalIdsWith getText getDefn =
LocalIds <$> getVector getText <*> getVector getDefn LocalIds <$> getVector getText <*> getVector getDefn
skipLocalIdsWith :: forall t d m. (MonadGet m) => m t -> m d -> m ()
skipLocalIdsWith skipText skipDefn = do
skipVector skipText
skipVector skipDefn
putUnit :: (Applicative m) => () -> m () putUnit :: (Applicative m) => () -> m ()
putUnit _ = pure () putUnit _ = pure ()
@ -479,12 +490,34 @@ getDeclElement =
1 -> Decl.Unique <$> getText 1 -> Decl.Unique <$> getText
other -> unknownTag "DeclModifier" other other -> unknownTag "DeclModifier" other
-- | Get the number of constructors in a decl element.
--
-- Skips over the serialized prefix (decl-type tag byte, modifier tag byte,
-- bound type variables) and reads only the length of the constructor list,
-- avoiding deserialization of the constructor types themselves.
getDeclElementNumConstructors :: (MonadGet m) => m Int
getDeclElementNumConstructors = do
  skipDeclType
  skipDeclModifier
  skipDeclTypeVariables
  getListLength
  where
    skipDeclType = void getWord8
    skipDeclModifier = void getWord8
    skipDeclTypeVariables = void (getList skipSymbol)
lookupDeclElement :: lookupDeclElement ::
(MonadGet m) => Reference.Pos -> m (LocalIds, DeclFormat.Decl Symbol) (MonadGet m) => Reference.Pos -> m (LocalIds, DeclFormat.Decl Symbol)
lookupDeclElement i = lookupDeclElement i =
lookupDeclElementWith i (getPair getLocalIds getDeclElement)
lookupDeclElementNumConstructors :: (MonadGet m) => Reference.Pos -> m Int
lookupDeclElementNumConstructors i =
lookupDeclElementWith i (skipLocalIds *> getDeclElementNumConstructors)
-- Note: the caller is responsible for either consuming the whole decl, or not
-- parsing anything after a partially-parsed decl
lookupDeclElementWith :: (MonadGet m) => Reference.Pos -> m a -> m a
lookupDeclElementWith i get =
getWord8 >>= \case getWord8 >>= \case
0 -> unsafeFramedArrayLookup (getPair getLocalIds getDeclElement) $ fromIntegral i 0 -> unsafeFramedArrayLookup get $ fromIntegral @Reference.Pos @Int i
other -> unknownTag "lookupDeclElement" other other -> unknownTag "lookupDeclElementWith" other
putBranchFormat :: (MonadPut m) => BranchFormat.BranchFormat -> m () putBranchFormat :: (MonadPut m) => BranchFormat.BranchFormat -> m ()
putBranchFormat b | debug && trace ("putBranchFormat " ++ show b) False = undefined putBranchFormat b | debug && trace ("putBranchFormat " ++ show b) False = undefined
@ -919,6 +952,11 @@ getTempCausalFormat =
getSymbol :: (MonadGet m) => m Symbol getSymbol :: (MonadGet m) => m Symbol
getSymbol = Symbol <$> getVarInt <*> getText getSymbol = Symbol <$> getVarInt <*> getText
skipSymbol :: (MonadGet m) => m ()
skipSymbol = do
_ :: Word64 <- getVarInt
skipText
putSymbol :: (MonadPut m) => Symbol -> m () putSymbol :: (MonadPut m) => Symbol -> m ()
putSymbol (Symbol n t) = putVarInt n >> putText t putSymbol (Symbol n t) = putVarInt n >> putText t

View File

@ -1,5 +1,3 @@
{-# LANGUAGE RecordWildCards #-}
module U.Codebase.Branch.Type module U.Codebase.Branch.Type
( Branch (..), ( Branch (..),
CausalBranch, CausalBranch,
@ -12,8 +10,6 @@ module U.Codebase.Branch.Type
childAt, childAt,
hoist, hoist,
hoistCausalBranch, hoistCausalBranch,
termMetadata,
typeMetadata,
U.Codebase.Branch.Type.empty, U.Codebase.Branch.Type.empty,
) )
where where
@ -105,26 +101,3 @@ hoistCausalBranch f cb =
cb cb
& Causal.hoist f & Causal.hoist f
& Causal.emap (hoist f) (hoist f) & Causal.emap (hoist f) (hoist f)
-- | Returns all the metadata value references that are attached to a term with the provided name in the
-- provided branch.
--
-- If only name is specified, metadata will be returned for all terms at that name.
termMetadata :: (Monad m) => Branch m -> NameSegment -> Maybe Referent -> m [Map MetadataValue MetadataType]
termMetadata Branch {terms} = metadataHelper terms
-- | Returns all the metadata value references that are attached to a type with the provided name in the
-- provided branch.
--
-- If only name is specified, metadata will be returned for all types at that name.
typeMetadata :: (Monad m) => Branch m -> NameSegment -> Maybe Reference -> m [Map MetadataValue MetadataType]
typeMetadata Branch {types} = metadataHelper types
metadataHelper :: (Monad m, Ord ref) => Map NameSegment (Map ref (m MdValues)) -> NameSegment -> Maybe ref -> m [Map MetadataValue MetadataType]
metadataHelper t ns mayQualifier = do
case Map.lookup ns t of
Nothing -> pure []
Just allRefsAtName -> do
case mayQualifier of
Nothing -> (fmap . fmap) unMdValues . sequenceA $ Map.elems allRefsAtName
Just qualifier -> (fmap . fmap) unMdValues . sequenceA . maybeToList $ Map.lookup qualifier allRefsAtName

View File

@ -10,6 +10,7 @@ module U.Codebase.Reference
Reference' (..), Reference' (..),
TermReference', TermReference',
TypeReference', TypeReference',
ReferenceType (..),
pattern Derived, pattern Derived,
Id, Id,
Id' (..), Id' (..),
@ -19,6 +20,7 @@ module U.Codebase.Reference
t_, t_,
h_, h_,
idH, idH,
idPos,
idToHash, idToHash,
idToShortHash, idToShortHash,
isBuiltin, isBuiltin,
@ -29,16 +31,16 @@ module U.Codebase.Reference
) )
where where
import Control.Lens (Lens, Prism, Prism', Traversal, lens, preview, prism) import Control.Lens (Lens, Lens', Prism, Prism', Traversal, lens, preview, prism)
import Data.Bifoldable (Bifoldable (..)) import Data.Bifoldable (Bifoldable (..))
import Data.Bitraversable (Bitraversable (..)) import Data.Bitraversable (Bitraversable (..))
import Data.Text qualified as Text import Data.Text qualified as Text
import Unison.Hash (Hash) import Unison.Hash (Hash)
import Unison.Hash qualified as H
import Unison.Hash qualified as Hash import Unison.Hash qualified as Hash
import Unison.Prelude import Unison.Prelude
import Unison.ShortHash (ShortHash) import Unison.ShortHash (ShortHash)
import Unison.ShortHash qualified as SH import Unison.ShortHash qualified as SH
import Unison.Hash qualified as H
-- | This is the canonical representation of Reference -- | This is the canonical representation of Reference
type Reference = Reference' Text Hash type Reference = Reference' Text Hash
@ -66,6 +68,8 @@ type TermReferenceId = Id
-- | A type declaration reference id. -- | A type declaration reference id.
type TypeReferenceId = Id type TypeReferenceId = Id
data ReferenceType = RtTerm | RtType deriving (Eq, Ord, Show)
-- | Either a builtin or a user defined (hashed) top-level declaration. Used for both terms and types. -- | Either a builtin or a user defined (hashed) top-level declaration. Used for both terms and types.
data Reference' t h data Reference' t h
= ReferenceBuiltin t = ReferenceBuiltin t
@ -109,16 +113,19 @@ type Pos = Word64
data Id' h = Id h Pos data Id' h = Id h Pos
deriving stock (Eq, Ord, Show, Functor, Foldable, Traversable) deriving stock (Eq, Ord, Show, Functor, Foldable, Traversable)
t_ :: Traversal (Reference' t h) (Reference' t' h) t t' t_ :: Prism (Reference' t h) (Reference' t' h) t t'
t_ f = \case t_ = prism ReferenceBuiltin \case
ReferenceBuiltin t -> ReferenceBuiltin <$> f t ReferenceBuiltin t -> Right t
ReferenceDerived id -> pure (ReferenceDerived id) ReferenceDerived id -> Left (ReferenceDerived id)
h_ :: Traversal (Reference' t h) (Reference' t h') h h' h_ :: Traversal (Reference' t h) (Reference' t h') h h'
h_ f = \case h_ f = \case
ReferenceBuiltin t -> pure (ReferenceBuiltin t) ReferenceBuiltin t -> pure (ReferenceBuiltin t)
Derived h i -> Derived <$> f h <*> pure i Derived h i -> Derived <$> f h <*> pure i
idPos :: Lens' (Id' h) Pos
idPos = lens (\(Id _h w) -> w) (\(Id h _w) w -> Id h w)
idH :: Lens (Id' h) (Id' h') h h' idH :: Lens (Id' h) (Id' h') h h'
idH = lens (\(Id h _w) -> h) (\(Id _h w) h -> Id h w) idH = lens (\(Id h _w) -> h) (\(Id _h w) h -> Id h w)
@ -163,4 +170,3 @@ component :: H.Hash -> [k] -> [(k, Id)]
component h ks = component h ks =
let let
in [(k, (Id h i)) | (k, i) <- ks `zip` [0 ..]] in [(k, (Id h i)) | (k, i) <- ks `zip` [0 ..]]

View File

@ -8,7 +8,7 @@ import Unison.Util.Alphabetical (Alphabetical, compareAlphabetical)
-- Represents the parts of a name between the `.`s -- Represents the parts of a name between the `.`s
newtype NameSegment = NameSegment {toText :: Text} newtype NameSegment = NameSegment {toText :: Text}
deriving stock (Eq, Ord, Generic, Show) deriving stock (Eq, Ord, Generic)
instance Alphabetical NameSegment where instance Alphabetical NameSegment where
compareAlphabetical n1 n2 = compareAlphabetical (toText n1) (toText n2) compareAlphabetical n1 n2 = compareAlphabetical (toText n1) (toText n2)
@ -58,3 +58,6 @@ toTextBuilder =
instance IsString NameSegment where instance IsString NameSegment where
fromString = NameSegment . Text.pack fromString = NameSegment . Text.pack
instance Show NameSegment where
show = show . toText

View File

@ -9,7 +9,7 @@
module U.Util.Serialization where module U.Util.Serialization where
import Control.Applicative (Applicative (liftA2), liftA3) import Control.Applicative (Applicative (liftA2), liftA3)
import Control.Monad (foldM, replicateM, when) import Control.Monad (foldM, replicateM, when, replicateM_)
import Data.Bits (Bits, clearBit, setBit, shiftL, shiftR, testBit, (.|.)) import Data.Bits (Bits, clearBit, setBit, shiftL, shiftR, testBit, (.|.))
import Data.ByteString (ByteString, readFile, writeFile) import Data.ByteString (ByteString, readFile, writeFile)
import qualified Data.ByteString as BS import qualified Data.ByteString as BS
@ -142,14 +142,23 @@ putFoldable putA as = do
getList :: (MonadGet m) => m a -> m [a] getList :: (MonadGet m) => m a -> m [a]
getList getA = do getList getA = do
length <- getVarInt length <- getListLength
replicateM length getA replicateM length getA
getListLength :: (MonadGet m) => m Int
getListLength =
getVarInt
getVector :: (MonadGet m) => m a -> m (Vector a) getVector :: (MonadGet m) => m a -> m (Vector a)
getVector getA = do getVector getA = do
length <- getVarInt length <- getVarInt
Vector.replicateM length getA Vector.replicateM length getA
skipVector :: MonadGet m => m a -> m ()
skipVector getA = do
length <- getVarInt
replicateM_ length getA
getSequence :: (MonadGet m) => m a -> m (Seq a) getSequence :: (MonadGet m) => m a -> m (Seq a)
getSequence getA = do getSequence getA = do
length <- getVarInt length <- getVarInt

View File

@ -8,6 +8,8 @@
* [`UNISON_LSP_ENABLED`](#unison_lsp_enabled) * [`UNISON_LSP_ENABLED`](#unison_lsp_enabled)
* [`UNISON_SHARE_HOST`](#unison_share_host) * [`UNISON_SHARE_HOST`](#unison_share_host)
* [`UNISON_SHARE_ACCESS_TOKEN`](#unison_share_access_token) * [`UNISON_SHARE_ACCESS_TOKEN`](#unison_share_access_token)
* [`UNISON_READONLY`](#unison_readonly)
* [`UNISON_ENTITY_VALIDATION`](#unison_entity_validation)
* [Local Codebase Server](#local-codebase-server) * [Local Codebase Server](#local-codebase-server)
* [Codebase Configuration](#codebase-configuration) * [Codebase Configuration](#codebase-configuration)
@ -104,6 +106,14 @@ Force unison to use readonly connections to codebases.
$ UNISON_READONLY="true" ucm $ UNISON_READONLY="true" ucm
``` ```
### `UNISON_ENTITY_VALIDATION`
Enable validation of entities pulled from a codebase server.
```sh
$ UNISON_ENTITY_VALIDATION="true" ucm
```
### Local Codebase Server ### Local Codebase Server
The port, host and token to be used for the local codebase server can all be configured by providing environment The port, host and token to be used for the local codebase server can all be configured by providing environment

View File

@ -82,3 +82,8 @@ curl -L https://github.com/unisonweb/unison/releases/download/release%2FM2h/ucm-
tar -xzf unisonlanguage/ucm.tar.gz -C unisonlanguage tar -xzf unisonlanguage/ucm.tar.gz -C unisonlanguage
./unisonlanguage/ucm ./unisonlanguage/ucm
``` ```
_Windows manual install:_
* Recommended: [Set your default Terminal application](https://devblogs.microsoft.com/commandline/windows-terminal-as-your-default-command-line-experience/) to “Windows Terminal”.
* Download [the release](https://github.com/unisonweb/unison/releases/download/release%2FM5h/ucm-windows.zip) and extract it to a location of your choosing.
* Run `ucm.exe`

View File

@ -86,11 +86,11 @@
"flake-compat": { "flake-compat": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1673956053, "lastModified": 1696426674,
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra", "owner": "edolstra",
"repo": "flake-compat", "repo": "flake-compat",
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -121,11 +121,11 @@
"systems": "systems" "systems": "systems"
}, },
"locked": { "locked": {
"lastModified": 1681202837, "lastModified": 1694529238,
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=", "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
"owner": "numtide", "owner": "numtide",
"repo": "flake-utils", "repo": "flake-utils",
"rev": "cfacdce06f30d2b68473a46042957675eebb3401", "rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -134,22 +134,6 @@
"type": "github" "type": "github"
} }
}, },
"flake-utils_2": {
"locked": {
"lastModified": 1679360468,
"narHash": "sha256-LGnza3cfXF10Biw3ZTg0u9o9t7s680Ww200t5KkHTh8=",
"owner": "hamishmack",
"repo": "flake-utils",
"rev": "e1ea268ff47ad475443dbabcd54744b4e5b9d4f5",
"type": "github"
},
"original": {
"owner": "hamishmack",
"ref": "hkm/nested-hydraJobs",
"repo": "flake-utils",
"type": "github"
}
},
"ghc-8.6.5-iohk": { "ghc-8.6.5-iohk": {
"flake": false, "flake": false,
"locked": { "locked": {
@ -167,14 +151,51 @@
"type": "github" "type": "github"
} }
}, },
"ghc98X": {
"flake": false,
"locked": {
"lastModified": 1696643148,
"narHash": "sha256-E02DfgISH7EvvNAu0BHiPvl1E5FGMDi0pWdNZtIBC9I=",
"ref": "ghc-9.8",
"rev": "443e870d977b1ab6fc05f47a9a17bc49296adbd6",
"revCount": 61642,
"submodules": true,
"type": "git",
"url": "https://gitlab.haskell.org/ghc/ghc"
},
"original": {
"ref": "ghc-9.8",
"submodules": true,
"type": "git",
"url": "https://gitlab.haskell.org/ghc/ghc"
}
},
"ghc99": {
"flake": false,
"locked": {
"lastModified": 1697054644,
"narHash": "sha256-kKarOuXUaAH3QWv7ASx+gGFMHaHKe0pK5Zu37ky2AL4=",
"ref": "refs/heads/master",
"rev": "f383a242c76f90bcca8a4d7ee001dcb49c172a9a",
"revCount": 62040,
"submodules": true,
"type": "git",
"url": "https://gitlab.haskell.org/ghc/ghc"
},
"original": {
"submodules": true,
"type": "git",
"url": "https://gitlab.haskell.org/ghc/ghc"
}
},
"hackage": { "hackage": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1692577366, "lastModified": 1699402991,
"narHash": "sha256-PkMJxz0AOgsmTGUppr9obJaGLHxSJbeNxa8C0t8RUio=", "narHash": "sha256-2nQBlA3ygBiIqVPh2J1JwP51rEO0xMjyoOaoJk5PboY=",
"owner": "input-output-hk", "owner": "input-output-hk",
"repo": "hackage.nix", "repo": "hackage.nix",
"rev": "4bb79ccf9e2e80990cf06c96cdf3c61ca1dfa684", "rev": "9e963602a5b3259dac9cf5e994f0a338fb352b7e",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -191,11 +212,15 @@
"cabal-36": "cabal-36", "cabal-36": "cabal-36",
"cardano-shell": "cardano-shell", "cardano-shell": "cardano-shell",
"flake-compat": "flake-compat_2", "flake-compat": "flake-compat_2",
"flake-utils": "flake-utils_2",
"ghc-8.6.5-iohk": "ghc-8.6.5-iohk", "ghc-8.6.5-iohk": "ghc-8.6.5-iohk",
"ghc98X": "ghc98X",
"ghc99": "ghc99",
"hackage": "hackage", "hackage": "hackage",
"hls-1.10": "hls-1.10", "hls-1.10": "hls-1.10",
"hls-2.0": "hls-2.0", "hls-2.0": "hls-2.0",
"hls-2.2": "hls-2.2",
"hls-2.3": "hls-2.3",
"hls-2.4": "hls-2.4",
"hpc-coveralls": "hpc-coveralls", "hpc-coveralls": "hpc-coveralls",
"hydra": "hydra", "hydra": "hydra",
"iserv-proxy": "iserv-proxy", "iserv-proxy": "iserv-proxy",
@ -214,11 +239,11 @@
"stackage": "stackage" "stackage": "stackage"
}, },
"locked": { "locked": {
"lastModified": 1692579024, "lastModified": 1699404571,
"narHash": "sha256-alHUQAAmeyKm/aZ8q8/AQSpxv+Uo6P2E9eXJJTjyC2M=", "narHash": "sha256-EwI7vKBxCHvIKPWbvGlOF9IZlSFqPODgT/BQy8Z2s/w=",
"owner": "input-output-hk", "owner": "input-output-hk",
"repo": "haskell.nix", "repo": "haskell.nix",
"rev": "884be454d5087a37ecc6f3665de7333e3c2e72a8", "rev": "cec253ca482301509e9e90cb5c15299dd3550cce",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -261,6 +286,57 @@
"type": "github" "type": "github"
} }
}, },
"hls-2.2": {
"flake": false,
"locked": {
"lastModified": 1693064058,
"narHash": "sha256-8DGIyz5GjuCFmohY6Fa79hHA/p1iIqubfJUTGQElbNk=",
"owner": "haskell",
"repo": "haskell-language-server",
"rev": "b30f4b6cf5822f3112c35d14a0cba51f3fe23b85",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "2.2.0.0",
"repo": "haskell-language-server",
"type": "github"
}
},
"hls-2.3": {
"flake": false,
"locked": {
"lastModified": 1695910642,
"narHash": "sha256-tR58doOs3DncFehHwCLczJgntyG/zlsSd7DgDgMPOkI=",
"owner": "haskell",
"repo": "haskell-language-server",
"rev": "458ccdb55c9ea22cd5d13ec3051aaefb295321be",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "2.3.0.0",
"repo": "haskell-language-server",
"type": "github"
}
},
"hls-2.4": {
"flake": false,
"locked": {
"lastModified": 1696939266,
"narHash": "sha256-VOMf5+kyOeOmfXTHlv4LNFJuDGa7G3pDnOxtzYR40IU=",
"owner": "haskell",
"repo": "haskell-language-server",
"rev": "362fdd1293efb4b82410b676ab1273479f6d17ee",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "2.4.0.0",
"repo": "haskell-language-server",
"type": "github"
}
},
"hpc-coveralls": { "hpc-coveralls": {
"flake": false, "flake": false,
"locked": { "locked": {
@ -303,11 +379,11 @@
"iserv-proxy": { "iserv-proxy": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1688517130, "lastModified": 1691634696,
"narHash": "sha256-hUqfxSlo+ffqVdkSZ1EDoB7/ILCL25eYkcCXW9/P3Wc=", "narHash": "sha256-MZH2NznKC/gbgBu8NgIibtSUZeJ00HTLJ0PlWKCBHb0=",
"ref": "hkm/remote-iserv", "ref": "hkm/remote-iserv",
"rev": "9151db2a9a61d7f5fe52ff8836f18bbd0fd8933c", "rev": "43a979272d9addc29fbffc2e8542c5d96e993d73",
"revCount": 13, "revCount": 14,
"type": "git", "type": "git",
"url": "https://gitlab.haskell.org/hamishmack/iserv-proxy.git" "url": "https://gitlab.haskell.org/hamishmack/iserv-proxy.git"
}, },
@ -452,11 +528,11 @@
}, },
"nixpkgs-2305": { "nixpkgs-2305": {
"locked": { "locked": {
"lastModified": 1690680713, "lastModified": 1695416179,
"narHash": "sha256-NXCWA8N+GfSQyoN7ZNiOgq/nDJKOp5/BHEpiZP8sUZw=", "narHash": "sha256-610o1+pwbSu+QuF3GE0NU5xQdTHM3t9wyYhB9l94Cd8=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "b81af66deb21f73a70c67e5ea189568af53b1e8c", "rev": "715d72e967ec1dd5ecc71290ee072bcaf5181ed6",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -484,11 +560,11 @@
}, },
"nixpkgs-unstable": { "nixpkgs-unstable": {
"locked": { "locked": {
"lastModified": 1690720142, "lastModified": 1695318763,
"narHash": "sha256-GywuiZjBKfFkntQwpNQfL+Ksa2iGjPprBGL0/psgRZM=", "narHash": "sha256-FHVPDRP2AfvsxAdc+AsgFJevMz5VBmnZglFUMlxBkcY=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "3acb5c4264c490e7714d503c7166a3fde0c51324", "rev": "e12483116b3b51a185a33a272bf351e357ba9a99",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -498,6 +574,22 @@
"type": "github" "type": "github"
} }
}, },
"nixpkgs-unstable_2": {
"locked": {
"lastModified": 1699781429,
"narHash": "sha256-UYefjidASiLORAjIvVsUHG6WBtRhM67kTjEY4XfZOFs=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "e44462d6021bfe23dfb24b775cc7c390844f773d",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"old-ghc-nix": { "old-ghc-nix": {
"flake": false, "flake": false,
"locked": { "locked": {
@ -523,17 +615,18 @@
"nixpkgs": [ "nixpkgs": [
"haskellNix", "haskellNix",
"nixpkgs-unstable" "nixpkgs-unstable"
] ],
"nixpkgs-unstable": "nixpkgs-unstable_2"
} }
}, },
"stackage": { "stackage": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1692576558, "lastModified": 1699402155,
"narHash": "sha256-cFQs/lSEhKD6oIBPX1SRVvU81sxviB81CF+bwGwGHP0=", "narHash": "sha256-fOywUFLuAuZAkIrv1JdjGzfY53uEiMRlu8UpdJtCjh0=",
"owner": "input-output-hk", "owner": "input-output-hk",
"repo": "stackage.nix", "repo": "stackage.nix",
"rev": "ae06057930b59a55b17aee2303ce604ae79b4db6", "rev": "7c7bfe8cca23c96b850e16f3c0b159aca1850314",
"type": "github" "type": "github"
}, },
"original": { "original": {

213
flake.nix
View File

@ -9,13 +9,14 @@
inputs = { inputs = {
haskellNix.url = "github:input-output-hk/haskell.nix"; haskellNix.url = "github:input-output-hk/haskell.nix";
nixpkgs.follows = "haskellNix/nixpkgs-unstable"; nixpkgs.follows = "haskellNix/nixpkgs-unstable";
nixpkgs-unstable.url = "github:NixOS/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils"; flake-utils.url = "github:numtide/flake-utils";
flake-compat = { flake-compat = {
url = "github:edolstra/flake-compat"; url = "github:edolstra/flake-compat";
flake = false; flake = false;
}; };
}; };
outputs = { self, nixpkgs, flake-utils, haskellNix, flake-compat }: outputs = { self, nixpkgs, flake-utils, haskellNix, flake-compat, nixpkgs-unstable }:
flake-utils.lib.eachSystem [ flake-utils.lib.eachSystem [
"x86_64-linux" "x86_64-linux"
"x86_64-darwin" "x86_64-darwin"
@ -23,162 +24,104 @@
] ]
(system: (system:
let let
versions = {
ghc = "928";
ormolu = "0.5.2.0";
hls = "2.4.0.0";
stack = "2.13.1";
hpack = "0.35.2";
};
overlays = [ overlays = [
haskellNix.overlay haskellNix.overlay
(final: prev: { (import ./nix/haskell-nix-overlay.nix)
unison-project = with prev.lib.strings; (import ./nix/unison-overlay.nix)
let
cleanSource = pth:
let
src' = prev.lib.cleanSourceWith {
filter = filt;
src = pth;
};
filt = path: type:
let
bn = baseNameOf path;
isHiddenFile = hasPrefix "." bn;
isFlakeLock = bn == "flake.lock";
isNix = hasSuffix ".nix" bn;
in
!isHiddenFile && !isFlakeLock && !isNix;
in
src';
in
final.haskell-nix.project' {
src = cleanSource ./.;
projectFileName = "stack.yaml";
modules = [
# enable profiling
{
enableLibraryProfiling = true;
profilingDetail = "none";
}
# remove buggy build tool dependencies
({ lib, ... }: {
# this component has the build tool
# `unison-cli:unison` and somehow haskell.nix
# decides to add some file sharing package
# `unison` as a build-tool dependency.
packages.unison-cli.components.exes.cli-integration-tests.build-tools =
lib.mkForce [ ];
})
];
branchMap = {
"https://github.com/unisonweb/configurator.git"."e47e9e9fe1f576f8c835183b9def52d73c01327a" =
"unison";
"https://github.com/unisonweb/shellmet.git"."2fd348592c8f51bb4c0ca6ba4bc8e38668913746" =
"topic/avoid-callCommand";
};
};
})
(final: prev: {
unison-stack = prev.symlinkJoin {
name = "stack";
paths = [ final.stack ];
buildInputs = [ final.makeWrapper ];
postBuild =
let
flags = [ "--no-nix" "--system-ghc" "--no-install-ghc" ];
add-flags =
"--add-flags '${prev.lib.concatStringsSep " " flags}'";
in
''
wrapProgram "$out/bin/stack" ${add-flags}
'';
};
})
]; ];
pkgs = import nixpkgs { pkgs = import nixpkgs {
inherit system overlays; inherit system overlays;
inherit (haskellNix) config; inherit (haskellNix) config;
}; };
flake = pkgs.unison-project.flake { }; haskell-nix-flake = import ./nix/haskell-nix-flake.nix {
inherit pkgs versions;
commonShellArgs = args: inherit (nixpkgs-packages) stack hpack;
args // { };
# workaround: unstable = import nixpkgs-unstable {
# https://github.com/input-output-hk/haskell.nix/issues/1793 inherit system;
# https://github.com/input-output-hk/haskell.nix/issues/1885 overlays = [
allToolDeps = false; (import ./nix/unison-overlay.nix)
additional = hpkgs: with hpkgs; [ Cabal stm exceptions ghc ghc-heap ]; (import ./nix/nixpkgs-overlay.nix { inherit versions; })
];
};
nixpkgs-packages =
let
hpkgs = unstable.haskell.packages.ghcunison;
exe = unstable.haskell.lib.justStaticExecutables;
in
{
ghc = unstable.haskell.compiler."ghc${versions.ghc}";
ormolu = exe hpkgs.ormolu;
hls = unstable.unison-hls;
stack = unstable.unison-stack;
unwrapped-stack = unstable.stack;
hpack = unstable.hpack;
};
nixpkgs-devShells = {
only-tools-nixpkgs = unstable.mkShell {
name = "only-tools-nixpkgs";
buildInputs = buildInputs =
let let
build-tools = with nixpkgs-packages; [
ghc
ormolu
hls
stack
hpack
];
native-packages = pkgs.lib.optionals pkgs.stdenv.isDarwin native-packages = pkgs.lib.optionals pkgs.stdenv.isDarwin
(with pkgs.darwin.apple_sdk.frameworks; [ Cocoa ]); (with unstable.darwin.apple_sdk.frameworks;
[ Cocoa ]);
c-deps = with unstable;
[ pkg-config zlib glibcLocales ];
in in
(args.buildInputs or [ ]) ++ (with pkgs; [ unison-stack pkg-config zlib glibcLocales ]) ++ native-packages; build-tools ++ c-deps ++ native-packages;
# workaround for https://gitlab.haskell.org/ghc/ghc/-/issues/11042
shellHook = '' shellHook = ''
export LD_LIBRARY_PATH=${pkgs.zlib}/lib:$LD_LIBRARY_PATH export LD_LIBRARY_PATH=${pkgs.zlib}/lib:$LD_LIBRARY_PATH
''; '';
tools =
let ormolu-ver = "0.5.2.0";
in (args.tools or { }) // {
cabal = { };
ormolu = { version = ormolu-ver; };
haskell-language-server = {
version = "latest";
modules = [
{
packages.haskell-language-server.components.exes.haskell-language-server.postInstall = ''
ln -sr "$out/bin/haskell-language-server" "$out/bin/haskell-language-server-wrapper"
'';
}
];
# specify flags via project file rather than a module override
# https://github.com/input-output-hk/haskell.nix/issues/1509
cabalProject = ''
packages: .
package haskell-language-server
flags: -brittany -fourmolu -stylishhaskell -hlint
constraints: ormolu == ${ormolu-ver}
'';
};
};
}; };
};
shellFor = args: pkgs.unison-project.shellFor (commonShellArgs args);
localPackages = with pkgs.lib;
filterAttrs (k: v: v.isLocal or false) pkgs.unison-project.hsPkgs;
localPackageNames = builtins.attrNames localPackages;
devShells =
let
mkDevShell = pkgName:
shellFor {
packages = hpkgs: [ hpkgs."${pkgName}" ];
withHoogle = true;
};
localPackageDevShells =
pkgs.lib.genAttrs localPackageNames mkDevShell;
in
{
default = devShells.only-tools;
only-tools = shellFor {
packages = _: [ ];
withHoogle = false;
};
local = shellFor {
packages = hpkgs: (map (p: hpkgs."${p}") localPackageNames);
withHoogle = true;
};
} // localPackageDevShells;
in in
flake // { assert nixpkgs-packages.ormolu.version == versions.ormolu;
defaultPackage = flake.packages."unison-cli:exe:unison"; assert nixpkgs-packages.hls.version == versions.hls;
inherit (pkgs) unison-project; assert nixpkgs-packages.unwrapped-stack.version == versions.stack;
inherit devShells localPackageNames; assert nixpkgs-packages.hpack.version == versions.hpack;
packages = flake.packages // { {
packages = nixpkgs-packages // {
haskell-nix = haskell-nix-flake.packages;
build-tools = pkgs.symlinkJoin {
name = "build-tools";
paths = self.devShells."${system}".only-tools-nixpkgs.buildInputs;
};
all = pkgs.symlinkJoin { all = pkgs.symlinkJoin {
name = "all-packages"; name = "all";
paths = paths =
let let
all-other-packages = builtins.attrValues (builtins.removeAttrs self.packages."${system}" [ "all" ]); all-other-packages = builtins.attrValues (builtins.removeAttrs self.packages."${system}" [ "all" "build-tools" ]);
devshell-inputs = builtins.concatMap (devShell: devShell.buildInputs ++ devShell.nativeBuildInputs) [ devShells.only-tools ]; devshell-inputs = builtins.concatMap
(devShell: devShell.buildInputs ++ devShell.nativeBuildInputs)
[
self.devShells."${system}".only-tools-nixpkgs
];
in in
all-other-packages ++ devshell-inputs; all-other-packages ++ devshell-inputs;
}; };
}; };
apps = haskell-nix-flake.apps // {
default = self.apps."${system}"."unison-cli:exe:unison";
};
devShells = nixpkgs-devShells // {
default = self.devShells."${system}".only-tools-nixpkgs;
haskell-nix = haskell-nix-flake.devShells;
};
}); });
} }

View File

@ -15,6 +15,7 @@ dependencies:
- generic-lens - generic-lens
- either - either
- extra - extra
- filepath
- generic-lens - generic-lens
- lens - lens
- mtl - mtl
@ -36,15 +37,28 @@ default-extensions:
- BangPatterns - BangPatterns
- BlockArguments - BlockArguments
- ConstraintKinds - ConstraintKinds
- DeriveAnyClass
- DeriveFunctor - DeriveFunctor
- DeriveGeneric
- DeriveTraversable
- DerivingStrategies - DerivingStrategies
- DerivingVia
- DoAndIfThenElse - DoAndIfThenElse
- DuplicateRecordFields
- FlexibleContexts - FlexibleContexts
- FlexibleInstances - FlexibleInstances
- GeneralizedNewtypeDeriving
- ImportQualifiedPost - ImportQualifiedPost
- LambdaCase - LambdaCase
- MultiParamTypeClasses - MultiParamTypeClasses
- ScopedTypeVariables - NamedFieldPuns
- OverloadedLabels
- OverloadedStrings
- PatternSynonyms
- RankNTypes - RankNTypes
- ScopedTypeVariables
- StandaloneDeriving
- TupleSections - TupleSections
- TypeApplications - TypeApplications
- TypeFamilies
- ViewPatterns

View File

@ -43,6 +43,8 @@ data DebugFlag
Server Server
| PatternCoverage | PatternCoverage
| PatternCoverageConstraintSolver | PatternCoverageConstraintSolver
| KindInference
| Update
deriving (Eq, Ord, Show, Bounded, Enum) deriving (Eq, Ord, Show, Bounded, Enum)
debugFlags :: Set DebugFlag debugFlags :: Set DebugFlag
@ -68,6 +70,8 @@ debugFlags = case (unsafePerformIO (lookupEnv "UNISON_DEBUG")) of
"SERVER" -> pure Server "SERVER" -> pure Server
"PATTERN_COVERAGE" -> pure PatternCoverage "PATTERN_COVERAGE" -> pure PatternCoverage
"PATTERN_COVERAGE_CONSTRAINT_SOLVER" -> pure PatternCoverageConstraintSolver "PATTERN_COVERAGE_CONSTRAINT_SOLVER" -> pure PatternCoverageConstraintSolver
"KIND_INFERENCE" -> pure KindInference
"UPDATE" -> pure Update
_ -> empty _ -> empty
{-# NOINLINE debugFlags #-} {-# NOINLINE debugFlags #-}
@ -119,6 +123,14 @@ debugServer :: Bool
debugServer = Server `Set.member` debugFlags debugServer = Server `Set.member` debugFlags
{-# NOINLINE debugServer #-} {-# NOINLINE debugServer #-}
debugKindInference :: Bool
debugKindInference = KindInference `Set.member` debugFlags
{-# NOINLINE debugKindInference #-}
debugUpdate :: Bool
debugUpdate = Update `Set.member` debugFlags
{-# NOINLINE debugUpdate #-}
debugPatternCoverage :: Bool debugPatternCoverage :: Bool
debugPatternCoverage = PatternCoverage `Set.member` debugFlags debugPatternCoverage = PatternCoverage `Set.member` debugFlags
{-# NOINLINE debugPatternCoverage #-} {-# NOINLINE debugPatternCoverage #-}
@ -181,3 +193,5 @@ shouldDebug = \case
Server -> debugServer Server -> debugServer
PatternCoverage -> debugPatternCoverage PatternCoverage -> debugPatternCoverage
PatternCoverageConstraintSolver -> debugPatternCoverageConstraintSolver PatternCoverageConstraintSolver -> debugPatternCoverageConstraintSolver
KindInference -> debugKindInference
Update -> debugUpdate

View File

@ -4,11 +4,18 @@ module Unison.Prelude
safeReadUtf8, safeReadUtf8,
safeReadUtf8StdIn, safeReadUtf8StdIn,
writeUtf8, writeUtf8,
prependUtf8,
uncurry4, uncurry4,
reportBug, reportBug,
tShow, tShow,
wundefined, wundefined,
-- * @Bool@ control flow
onFalse,
onFalseM,
onTrue,
onTrueM,
-- * @Maybe@ control flow -- * @Maybe@ control flow
onNothing, onNothing,
onNothingM, onNothingM,
@ -53,7 +60,8 @@ import Data.Foldable as X (fold, foldl', for_, toList, traverse_)
import Data.Function as X ((&)) import Data.Function as X ((&))
import Data.Functor as X import Data.Functor as X
import Data.Functor.Identity as X import Data.Functor.Identity as X
import Data.Generics.Labels () -- #labelSyntax for generics-derived lenses -- #labelSyntax for generics-derived lenses
import Data.Generics.Labels ()
import Data.Int as X import Data.Int as X
import Data.List as X (foldl1', sortOn) import Data.List as X (foldl1', sortOn)
import Data.Map as X (Map) import Data.Map as X (Map)
@ -74,10 +82,12 @@ import GHC.Generics as X (Generic, Generic1)
import GHC.IO.Handle qualified as Handle import GHC.IO.Handle qualified as Handle
import GHC.Stack as X (HasCallStack) import GHC.Stack as X (HasCallStack)
import Safe as X (atMay, headMay, lastMay, readMay) import Safe as X (atMay, headMay, lastMay, readMay)
import System.FilePath qualified as FilePath
import System.IO qualified as IO import System.IO qualified as IO
import Text.Read as X (readMaybe) import Text.Read as X (readMaybe)
import UnliftIO as X (MonadUnliftIO (..), askRunInIO, askUnliftIO, try, withUnliftIO) import UnliftIO as X (MonadUnliftIO (..), askRunInIO, askUnliftIO, try, withUnliftIO)
import UnliftIO qualified import UnliftIO qualified
import UnliftIO.Directory qualified as UnliftIO
import Witch as X (From (from), TryFrom (tryFrom), TryFromException (TryFromException), into, tryInto) import Witch as X (From (from), TryFrom (tryFrom), TryFromException (TryFromException), into, tryInto)
import Witherable as X (filterA, forMaybe, mapMaybe, wither, witherMap) import Witherable as X (filterA, forMaybe, mapMaybe, wither, witherMap)
@ -93,6 +103,36 @@ altSum = foldl' (<|>) empty
altMap :: (Alternative f, Foldable t) => (a -> f b) -> t a -> f b altMap :: (Alternative f, Foldable t) => (a -> f b) -> t a -> f b
altMap f = altSum . fmap f . toList altMap f = altSum . fmap f . toList
-- |
-- > condition & onFalse do
-- > shortCircuit
onFalse :: (Applicative m) => m () -> Bool -> m ()
onFalse action = \case
False -> action
True -> pure ()
-- |
-- > action & onFalseM do
-- > shortCircuit
onFalseM :: (Monad m) => m () -> m Bool -> m ()
onFalseM x y =
y >>= onFalse x
-- |
-- > condition & onTrue do
-- > shortCircuit
onTrue :: (Applicative m) => m () -> Bool -> m ()
onTrue action = \case
True -> action
False -> pure ()
-- |
-- > action & onTrueM do
-- > shortCircuit
onTrueM :: (Monad m) => m () -> m Bool -> m ()
onTrueM x y =
y >>= onTrue x
-- | E.g. -- | E.g.
-- --
-- @@ -- @@
@ -196,6 +236,24 @@ writeUtf8 fileName txt = do
Handle.hSetEncoding handle IO.utf8 Handle.hSetEncoding handle IO.utf8
Text.hPutStr handle txt Text.hPutStr handle txt
-- | Atomically prepend some text to a file
prependUtf8 :: FilePath -> Text -> IO ()
prependUtf8 path txt = do
let withTempFile tmpFilePath tmpHandle = do
Text.hPutStrLn tmpHandle txt
IO.withFile path IO.ReadMode \currentScratchFile -> do
let copyLoop = do
chunk <- Text.hGetChunk currentScratchFile
case Text.length chunk == 0 of
True -> pure ()
False -> do
Text.hPutStr tmpHandle chunk
copyLoop
copyLoop
IO.hClose tmpHandle
UnliftIO.renameFile tmpFilePath path
UnliftIO.withTempFile (FilePath.takeDirectory path) ".unison-scratch" withTempFile
reportBug :: String -> String -> String reportBug :: String -> String -> String
reportBug bugId msg = reportBug bugId msg =
unlines unlines

View File

@ -11,6 +11,7 @@ module Unison.Util.Map
traverseKeysWith, traverseKeysWith,
swap, swap,
upsert, upsert,
upsertF,
valuesVector, valuesVector,
) )
where where
@ -45,6 +46,11 @@ upsert :: (Ord k) => (Maybe v -> v) -> k -> Map k v -> Map k v
upsert f = upsert f =
Map.alter (Just . f) Map.alter (Just . f)
-- | Upsert an element into a map.
upsertF :: (Functor f, Ord k) => (Maybe v -> f v) -> k -> Map k v -> f (Map k v)
upsertF f =
Map.alterF (fmap Just . f)
valuesVector :: Map k v -> Vector v valuesVector :: Map k v -> Vector v
valuesVector = valuesVector =
Vector.fromList . Map.elems Vector.fromList . Map.elems

View File

@ -34,18 +34,31 @@ library
BangPatterns BangPatterns
BlockArguments BlockArguments
ConstraintKinds ConstraintKinds
DeriveAnyClass
DeriveFunctor DeriveFunctor
DeriveGeneric
DeriveTraversable
DerivingStrategies DerivingStrategies
DerivingVia
DoAndIfThenElse DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts FlexibleContexts
FlexibleInstances FlexibleInstances
GeneralizedNewtypeDeriving
ImportQualifiedPost ImportQualifiedPost
LambdaCase LambdaCase
MultiParamTypeClasses MultiParamTypeClasses
ScopedTypeVariables NamedFieldPuns
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes RankNTypes
ScopedTypeVariables
StandaloneDeriving
TupleSections TupleSections
TypeApplications TypeApplications
TypeFamilies
ViewPatterns
ghc-options: -Wall ghc-options: -Wall
build-depends: build-depends:
base base
@ -53,6 +66,7 @@ library
, containers , containers
, either , either
, extra , extra
, filepath
, generic-lens , generic-lens
, lens , lens
, mtl , mtl

View File

@ -370,13 +370,7 @@ arrayToChunk bs = case BA.convert bs :: Block Word8 of
chunkFromArray = arrayToChunk chunkFromArray = arrayToChunk
fromBase16 :: Bytes -> Either Text.Text Bytes fromBase16 :: Bytes -> Either Text.Text Bytes
fromBase16 bs = case traverse convert (chunks bs) of fromBase16 = fromBase BE.Base16
Left e -> Left (Text.pack e)
Right bs -> Right (fromChunks bs)
where
convert b =
BE.convertFromBase BE.Base16 (chunkToArray @BA.Bytes b)
<&> arrayToChunk @BA.Bytes
toBase32, toBase64, toBase64UrlUnpadded :: Bytes -> Bytes toBase32, toBase64, toBase64UrlUnpadded :: Bytes -> Bytes
toBase32 = toBase BE.Base32 toBase32 = toBase BE.Base32

View File

@ -0,0 +1,55 @@
name: unison-util-nametree
github: unisonweb/unison
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
ghc-options: -Wall
dependencies:
- base
- containers
- lens
- semialign
- semigroups
- these
- unison-core
- unison-core1
- unison-prelude
- unison-util-relation
library:
source-dirs: src
when:
- condition: false
other-modules: Paths_unison_util_nametree
default-extensions:
- BangPatterns
- BlockArguments
- DeriveAnyClass
- DeriveFoldable
- DeriveFunctor
- DeriveGeneric
- DeriveTraversable
- DerivingStrategies
- DerivingVia
- DoAndIfThenElse
- DuplicateRecordFields
- FlexibleContexts
- FlexibleInstances
- GADTs
- GeneralizedNewtypeDeriving
- ImportQualifiedPost
- InstanceSigs
- LambdaCase
- MultiParamTypeClasses
- MultiWayIf
- NamedFieldPuns
- NumericUnderscores
- OverloadedLabels
- OverloadedStrings
- PatternSynonyms
- RankNTypes
- ScopedTypeVariables
- TupleSections
- TypeApplications
- ViewPatterns

View File

@ -0,0 +1,167 @@
module Unison.Util.Nametree
( -- * Nametree
Nametree (..),
traverseNametreeWithName,
unfoldNametree,
-- ** Flattening and unflattening
flattenNametree,
unflattenNametree,
-- * Definitions
Defns (..),
mapDefns,
bimapDefns,
)
where
import Data.List.NonEmpty (NonEmpty, pattern (:|))
import Data.List.NonEmpty qualified as List.NonEmpty
import Data.Map.Strict qualified as Map
import Data.Semialign (Semialign (alignWith), Unzip (unzipWith), Zip (zipWith))
import Data.Semigroup.Generic (GenericSemigroupMonoid (..))
import Data.These (These (..), these)
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment
import Unison.Prelude
import Unison.Util.BiMultimap (BiMultimap)
import Unison.Util.BiMultimap qualified as BiMultimap
import Prelude hiding (zipWith)
-- | A nametree has a value, and a collection of children nametrees keyed by name segment.
data Nametree a = Nametree
{ value :: !a,
children :: !(Map NameSegment (Nametree a))
}
deriving stock (Functor, Generic, Show)
instance Semialign Nametree where
alignWith :: (These a b -> c) -> Nametree a -> Nametree b -> Nametree c
alignWith f (Nametree x xs) (Nametree y ys) =
Nametree (f (These x y)) (alignWith (these (fmap (f . This)) (fmap (f . That)) (alignWith f)) xs ys)
instance Zip Nametree where
zipWith :: (a -> b -> c) -> Nametree a -> Nametree b -> Nametree c
zipWith f (Nametree x xs) (Nametree y ys) =
Nametree (f x y) (zipWith (zipWith f) xs ys)
instance Unzip Nametree where
unzipWith :: (c -> (a, b)) -> Nametree c -> (Nametree a, Nametree b)
unzipWith f (Nametree x xs) =
(Nametree y ys, Nametree z zs)
where
(y, z) = f x
(ys, zs) = unzipWith (unzipWith f) xs
-- | Traverse over a nametree, with access to the list of name segments (in reverse order) leading to each value.
traverseNametreeWithName :: Applicative f => ([NameSegment] -> a -> f b) -> Nametree a -> f (Nametree b)
traverseNametreeWithName f =
go []
where
go names (Nametree x xs) =
Nametree <$> f names x <*> Map.traverseWithKey (\name -> go (name : names)) xs
-- | Build a nametree from a seed value: the seed yields this node's value
-- together with the seeds of its children, which are unfolded recursively.
unfoldNametree :: (a -> (b, Map NameSegment a)) -> a -> Nametree b
unfoldNametree step seed =
  case step seed of
    (root, childSeeds) -> Nametree root (fmap (unfoldNametree step) childSeeds)
-- | 'flattenNametree' organizes a nametree like
--
-- > "foo" = #foo
-- > "foo": {
-- >   "bar" = #bar
-- >   "bar": {
-- >     "baz" = #baz
-- >   }
-- > }
--
-- into an equivalent-but-flatter association between names and definitions, like
--
-- > {
-- >   "foo" = #foo,
-- >   "foo.bar" = #bar,
-- >   "foo.bar.baz" = #baz
-- > }
flattenNametree ::
  forall a b.
  Ord b =>
  (a -> Map NameSegment b) ->
  Nametree a ->
  BiMultimap b Name
flattenNametree f =
  go []
  where
    -- @prefix@ is the (reversed) path of segments from the root to this node.
    go :: [NameSegment] -> Nametree a -> BiMultimap b Name
    go prefix (Nametree node children) =
      foldr
        ( \(name, child) ->
            -- This union is safe because the keys are disjoint
            BiMultimap.unsafeUnion (go (name : prefix) child)
        )
        -- The relation for this node alone: each local definition keyed by
        -- its full (prefixed) name.
        ( BiMultimap.fromRange
            ( Map.mapKeysMonotonic
                (\name -> Name.fromReverseSegments (name :| prefix))
                (f node)
            )
        )
        (Map.toList children)
-- | 'unflattenNametree' organizes an association between names and definitions like
--
-- > {
-- >   "foo" = #foo,
-- >   "foo.bar" = #bar,
-- >   "foo.bar.baz" = #baz
-- > }
--
-- into an equivalent-but-less-flat nametree, like
--
-- > "foo" = #foo
-- > "foo": {
-- >   "bar" = #bar
-- >   "bar": {
-- >     "baz" = #baz
-- >   }
-- > }
unflattenNametree :: Ord a => BiMultimap a Name -> Nametree (Map NameSegment a)
unflattenNametree =
  unfoldNametree unflattenLevel . map (first Name.segments) . Map.toList . BiMultimap.range
  where
    -- Split one level of names: single-segment names land in this node's value
    -- map; longer names are bucketed by their first segment to be unfolded
    -- recursively as children.
    unflattenLevel :: [(NonEmpty NameSegment, a)] -> (Map NameSegment a, Map NameSegment [(NonEmpty NameSegment, a)])
    unflattenLevel =
      foldl' phi (Map.empty, Map.empty)
      where
        phi (!accValue, !accChildren) = \case
          (NameHere n, v) -> (Map.insert n v accValue, accChildren)
          (NameThere n ns, v) -> (accValue, Map.insertWith (++) n [(ns, v)] accChildren)
-- Helper patterns for switching on "name here" (1 name segment) or "name there" (2+ name segments)

-- | Matches a name consisting of exactly one segment.
pattern NameHere :: a -> NonEmpty a
pattern NameHere x <- x :| (List.NonEmpty.nonEmpty -> Nothing)

-- | Matches a name of two or more segments, splitting off the first.
pattern NameThere :: a -> NonEmpty a -> NonEmpty a
pattern NameThere x xs <- x :| (List.NonEmpty.nonEmpty -> Just xs)

{-# COMPLETE NameHere, NameThere #-}
-- | Definitions (terms and types) in a namespace.
--
-- FIXME this doesn't belong in this module
data Defns terms types = Defns
  { -- | The term definitions.
    terms :: !terms,
    -- | The type definitions.
    types :: !types
  }
  deriving stock (Generic, Show)
  deriving (Monoid, Semigroup) via GenericSemigroupMonoid (Defns terms types)
-- | Apply a single function to both the terms and the types of a 'Defns'.
mapDefns :: (a -> b) -> Defns a a -> Defns b b
mapDefns f = \(Defns tms tys) -> Defns (f tms) (f tys)
-- | Apply one function to the terms and another to the types of a 'Defns'.
bimapDefns :: (terms -> terms') -> (types -> types') -> Defns terms types -> Defns terms' types'
bimapDefns onTerms onTypes (Defns tms tys) =
  Defns (onTerms tms) (onTypes tys)

View File

@ -0,0 +1,66 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
--
-- see: https://github.com/sol/hpack
--
-- NOTE: do not edit this file by hand; change package.yaml and re-run hpack.
name: unison-util-nametree
version: 0.0.0
homepage: https://github.com/unisonweb/unison#readme
bug-reports: https://github.com/unisonweb/unison/issues
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
build-type: Simple
source-repository head
type: git
location: https://github.com/unisonweb/unison
library
exposed-modules:
Unison.Util.Nametree
hs-source-dirs:
src
default-extensions:
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFoldable
DeriveFunctor
DeriveGeneric
DeriveTraversable
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall
build-depends:
base
, containers
, lens
, semialign
, semigroups
, these
, unison-core
, unison-core1
, unison-prelude
, unison-util-relation
default-language: Haskell2010

View File

@ -39,15 +39,16 @@ benchmarks:
dependencies: dependencies:
- base - base
- containers - containers
- extra
- unison-prelude
- deepseq - deepseq
- extra
- nonempty-containers
- unison-prelude
ghc-options: ghc-options:
-Wall -Wall
default-extensions: default-extensions:
- ApplicativeDo - BangPatterns
- BlockArguments - BlockArguments
- DeriveFunctor - DeriveFunctor
- DerivingStrategies - DerivingStrategies

View File

@ -0,0 +1,246 @@
-- | A left-unique relation.
module Unison.Util.BiMultimap
( BiMultimap,
Unison.Util.BiMultimap.empty,
-- ** Lookup
memberDom,
lookupDom,
lookupRan,
lookupPreimage,
-- ** Mapping / traversing
unsafeTraverseDom,
-- ** Filtering
filter,
filterDom,
filterDomain,
restrictDom,
restrictRan,
withoutDom,
withoutRan,
-- ** Maps
domain,
range,
unsafeFromDomain,
fromRange,
-- ** Sets
dom,
ran,
-- ** Insert
insert,
unsafeInsert,
-- ** Union
unsafeUnion,
)
where
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Data.Set.NonEmpty (NESet)
import Data.Set.NonEmpty qualified as Set.NonEmpty
import Unison.Prelude
import Unison.Util.Map qualified as Map
import Prelude hiding (filter)
-- | A left-unique relation.
--
-- "Left-unique" means that for all @(x, y)@ in the relation, @y@ is related only to @x@.
--
-- Invariant: the two maps are mutual inverses — @y@ is in @toMultimap ! x@
-- exactly when @toMapR ! y == x@.
data BiMultimap a b = BiMultimap
  { -- | Forward direction: each left-element to its (non-empty) set of right-elements.
    toMultimap :: !(Map a (NESet b)),
    -- | Reverse direction: each right-element to its unique left-element.
    toMapR :: !(Map b a)
  }
  deriving (Eq, Ord, Show)
-- | An empty left-unique relation.
empty :: (Ord a, Ord b) => BiMultimap a b
empty = BiMultimap Map.empty Map.empty
-- | Is the given element present in the domain of the relation?
memberDom :: Ord a => a -> BiMultimap a b -> Bool
memberDom x m = Map.member x (domain m)
-- | Look up the set of @b@ related to an @a@.
--
-- /O(log a)/.
lookupDom :: Ord a => a -> BiMultimap a b -> Set b
lookupDom x m = lookupDom_ x (domain m)
-- Internal: look up a key in a domain map, collapsing a missing entry to the
-- empty set.
lookupDom_ :: Ord a => a -> Map a (NESet b) -> Set b
lookupDom_ x xs =
  case Map.lookup x xs of
    Nothing -> Set.empty
    Just ys -> Set.NonEmpty.toSet ys
-- | Look up the @a@ related to a @b@.
--
-- /O(log b)/.
lookupRan :: Ord b => b -> BiMultimap a b -> Maybe a
lookupRan y m = Map.lookup y (range m)
-- | Look up the preimage of a @b@, that is, the set of @b@ that are related to the same @a@ as the input @b@.
--
-- /O(log a + log b)/.
lookupPreimage :: (Ord a, Ord b) => b -> BiMultimap a b -> Set b
lookupPreimage y (BiMultimap fwd bwd) =
  case Map.lookup y bwd of
    Nothing -> Set.empty
    Just x -> lookupDom_ x fwd
-- | Traverse over the domain of a left-unique relation.
--
-- The caller is responsible for maintaining left-uniqueness.
unsafeTraverseDom :: forall a b m x. (Monad m, Ord b, Ord x) => (a -> m b) -> BiMultimap a x -> m (BiMultimap b x)
unsafeTraverseDom f m =
  -- Build a continuation chain over the domain entries with foldr, then run it
  -- starting from the empty relation.
  foldr g pure (Map.toList (domain m)) Unison.Util.BiMultimap.empty
  where
    -- Map one domain element through @f@, then thread the accumulated relation
    -- strictly (note the bang on @b@ and the strict application @$!@).
    g :: (a, NESet x) -> (BiMultimap b x -> m (BiMultimap b x)) -> (BiMultimap b x -> m (BiMultimap b x))
    g (a, xs) acc (BiMultimap domain0 range0) = do
      !b <- f a
      acc $! BiMultimap (Map.insert b xs domain0) (deriveRangeFromDomain b xs range0)
-- | Filter a left-unique relation, keeping only members @(a, b)@ that satisfy a predicate.
filter :: (Ord a, Ord b) => (a -> b -> Bool) -> BiMultimap a b -> BiMultimap a b
filter p (BiMultimap domain range) =
  BiMultimap
    -- Forward direction: filter each key's set, dropping keys whose set empties.
    ( Map.mapMaybeWithKey
        ( \x ys ->
            ys
              & Set.NonEmpty.filter (p x)
              & Set.NonEmpty.nonEmptySet
        )
        domain
    )
    -- Reverse direction: keys here are @b@s, so the predicate is flipped.
    (Map.filterWithKey (flip p) range)
-- | Filter a left-unique relation, keeping only members @(a, b)@ whose @a@ satisfies a predicate.
filterDom :: (Ord a, Ord b) => (a -> Bool) -> BiMultimap a b -> BiMultimap a b
filterDom p m =
  -- Safe: dropping whole domain keys cannot break left-uniqueness.
  unsafeFromDomain (Map.filterWithKey (\k _ -> p k) (domain m))
-- | Filter a left-unique relation, keeping only members @(a, b)@ whose @a@ and set of @b@ satisfies a predicate.
filterDomain :: (Ord a, Ord b) => (a -> NESet b -> Bool) -> BiMultimap a b -> BiMultimap a b
filterDomain p = unsafeFromDomain . Map.filterWithKey p . domain
-- | Restrict a left-unique relation to only those @(a, b)@ members whose @a@ is in the given set.
restrictDom :: (Ord a, Ord b) => Set a -> BiMultimap a b -> BiMultimap a b
restrictDom keep =
  unsafeFromDomain . flip Map.restrictKeys keep . domain
-- | Restrict a left-unique relation to only those @(a, b)@ members whose @b@ is in the given set.
restrictRan :: (Ord a, Ord b) => Set b -> BiMultimap a b -> BiMultimap a b
restrictRan keep =
  fromRange . flip Map.restrictKeys keep . range
-- | Restrict a left-unique relation to only those @(a, b)@ members whose @a@ is not in the given set.
withoutDom :: (Ord a, Ord b) => Set a -> BiMultimap a b -> BiMultimap a b
withoutDom drop_ =
  unsafeFromDomain . flip Map.withoutKeys drop_ . domain
-- | Restrict a left-unique relation to only those @(a, b)@ members whose @b@ is not in the given set.
withoutRan :: (Ord a, Ord b) => Set b -> BiMultimap a b -> BiMultimap a b
withoutRan drop_ =
  fromRange . flip Map.withoutKeys drop_ . range
-- | The forward (left-to-right) map underlying the relation.
domain :: BiMultimap a b -> Map a (NESet b)
domain = toMultimap

-- | The reverse (right-to-left) map underlying the relation.
range :: BiMultimap a b -> Map b a
range = toMapR
-- | Construct a left-unique relation from a mapping from its left-elements to set-of-right-elements. The caller is
-- responsible for ensuring that no right-element is mapped to by two different left-elements.
unsafeFromDomain :: Ord b => Map a (NESet b) -> BiMultimap a b
unsafeFromDomain fwd = BiMultimap fwd (invertDomain fwd)
-- Invert a domain map into a range map: every @y@ in @x@'s set maps back to @x@.
-- Assumes no @y@ appears under two different @x@s (otherwise the last one wins).
invertDomain :: forall a b. Ord b => Map a (NESet b) -> Map b a
invertDomain =
  Map.foldlWithKey' f Map.empty
  where
    -- Fold one (key, set) entry into the accumulator.
    f :: Map b a -> a -> NESet b -> Map b a
    f acc x ys =
      Set.NonEmpty.foldl' (g x) acc ys

    -- Insert a single reverse mapping y -> x.
    g :: a -> Map b a -> b -> Map b a
    g x acc y =
      Map.insert y x acc
-- | Construct a left-unique relation from a reverse map (right-element to
-- left-element); the forward map is derived by grouping on the left-elements.
fromRange :: (Ord a, Ord b) => Map b a -> BiMultimap a b
fromRange bwd =
  BiMultimap (Map.foldlWithKey' addPair Map.empty bwd) bwd
  where
    -- Fold a single (y, x) pair into the forward map.
    addPair acc y x = Map.insertWith Set.NonEmpty.union x (Set.NonEmpty.singleton y) acc
-- | Returns the domain of the relation, as a Set, in its entirety.
--
-- /O(a)/.
dom :: BiMultimap a b -> Set a
dom m = Map.keysSet (domain m)
-- | Returns the range of the relation, as a Set, in its entirety.
--
-- /O(b)/ — the reverse map has one key per range element (the original comment
-- said /O(a)/, but 'toMapR' is @Map b a@).
ran :: BiMultimap a b -> Set b
ran =
  Map.keysSet . toMapR
-- | Insert a pair into a left-unique relation, maintaining left-uniqueness, preferring the latest inserted element.
--
-- That is, if a left-unique relation already contains the pair @(x, y)@, then inserting the pair @(z, y)@ will cause
-- the @(x, y)@ pair to be deleted.
insert :: (Ord a, Ord b) => a -> b -> BiMultimap a b -> BiMultimap a b
insert a b m@(BiMultimap l r) =
  case Map.alterF (upsertFunc a) b r of
    -- (a, b) was already present: nothing to do.
    (Ignored, _) -> m
    -- b was not related to anything yet: add (a, b) in both directions.
    (Inserted, r') -> BiMultimap l' r'
    -- b was related to some other @old@: add (a, b), then remove b from old's
    -- set (dropping old's entry entirely if b was its only member).
    (Replaced old, r') ->
      let l'' = Map.update (Set.NonEmpty.nonEmptySet . Set.NonEmpty.delete b) old l'
       in BiMultimap l'' r'
  where
    -- Forward map with b added to a's set; lazy, so never built in the Ignored case.
    l' = Map.upsert (maybe (Set.NonEmpty.singleton b) (Set.NonEmpty.insert b)) a l
-- @upsertFunc x@ returns a function that upserts @x@, suitable for passing to @Map.alterF@.
-- The paired 'UpsertResult' reports what happened so 'insert' can patch the forward map.
upsertFunc :: Eq a => a -> Maybe a -> (UpsertResult a, Maybe a)
upsertFunc new existing =
  case existing of
    Nothing -> (Inserted, Just new)
    Just old
      | old == new -> (Ignored, existing)
      | otherwise -> (Replaced old, Just new)
-- | The outcome of upserting into the reverse map (see 'upsertFunc').
data UpsertResult old
  = Ignored -- Ignored because an equivalent thing was already there
  | Inserted -- Inserted something new
  | Replaced old -- Replaced what was there, here's the old thing
-- | Like @insert x y@, but the caller is responsible maintaining left-uniqueness.
unsafeInsert :: (Ord a, Ord b) => a -> b -> BiMultimap a b -> BiMultimap a b
unsafeInsert x y (BiMultimap fwd bwd) =
  BiMultimap fwd' bwd'
  where
    fwd' = Map.upsert addY x fwd
    bwd' = Map.insert y x bwd
    -- Add y to x's set, creating a singleton if x had no entry.
    addY = maybe (Set.NonEmpty.singleton y) (Set.NonEmpty.insert y)
-- | Union two left-unique relations together.
--
-- The caller is responsible for maintaining left-uniqueness.
unsafeUnion :: (Ord a, Ord b) => BiMultimap a b -> BiMultimap a b -> BiMultimap a b
unsafeUnion lhs rhs =
  BiMultimap fwd bwd
  where
    fwd = Map.unionWith Set.NonEmpty.union (toMultimap lhs) (toMultimap rhs)
    bwd = Map.union (toMapR lhs) (toMapR rhs)
------------------------------------------------------------------------------------------------------------------------
-- @deriveRangeFromDomain x ys range@ is a helper that inserts @(x, y1)@, @(x, y2)@, ... into range @r@.
deriveRangeFromDomain :: Ord b => a -> NESet b -> Map b a -> Map b a
deriveRangeFromDomain x ys acc =
  foldr (\y m -> Map.insert y x m) acc ys
{-# INLINE deriveRangeFromDomain #-}

View File

@ -28,6 +28,7 @@ module Unison.Util.Relation
-- ** Searches -- ** Searches
searchDom, searchDom,
searchDomG,
searchRan, searchRan,
-- ** Filters -- ** Filters
@ -367,13 +368,13 @@ lookupDom' x r = M.lookup x (domain r)
lookupRan' :: (Ord b) => b -> Relation a b -> Maybe (Set a) lookupRan' :: (Ord b) => b -> Relation a b -> Maybe (Set a)
lookupRan' y r = M.lookup y (range r) lookupRan' y r = M.lookup y (range r)
-- | True if the element @ x @ exists in the domain of @ r @. -- | True if the element exists in the domain.
memberDom :: (Ord a) => a -> Relation a b -> Bool memberDom :: (Ord a) => a -> Relation a b -> Bool
memberDom x r = isJust $ lookupDom' x r memberDom x r = M.member x (domain r)
-- | True if the element exists in the range. -- | True if the element exists in the range.
memberRan :: (Ord b) => b -> Relation a b -> Bool memberRan :: (Ord b) => b -> Relation a b -> Bool
memberRan y r = isJust $ lookupRan' y r memberRan y r = M.member y (range r)
filterDom :: (Ord a, Ord b) => (a -> Bool) -> Relation a b -> Relation a b filterDom :: (Ord a, Ord b) => (a -> Bool) -> Relation a b -> Relation a b
filterDom f r = S.filter f (dom r) <| r filterDom f r = S.filter f (dom r) <| r
@ -588,21 +589,24 @@ lookupDom a r = fromMaybe S.empty $ lookupDom' a r
-- or empty, this function takes time logarithmic in the number of unique keys -- or empty, this function takes time logarithmic in the number of unique keys
-- of the domain, `a`. -- of the domain, `a`.
searchDom :: (Ord a, Ord b) => (a -> Ordering) -> Relation a b -> Set b searchDom :: (Ord a, Ord b) => (a -> Ordering) -> Relation a b -> Set b
searchDom f r = go (domain r) searchDom = searchDomG (\_ set -> set)
searchDomG :: (Ord a, Monoid c) => (a -> Set b -> c) -> (a -> Ordering) -> Relation a b -> c
searchDomG g f r = go (domain r)
where where
go Map.Tip = mempty go Map.Tip = mempty
go (Map.Bin _ amid bs l r) = case f amid of go (Map.Bin _ amid bs l r) = case f amid of
EQ -> bs <> goL l <> goR r EQ -> goL l <> g amid bs <> goR r
LT -> go r LT -> go r
GT -> go l GT -> go l
goL Map.Tip = mempty goL Map.Tip = mempty
goL (Map.Bin _ amid bs l r) = case f amid of goL (Map.Bin _ amid bs l r) = case f amid of
EQ -> bs <> goL l <> S.unions (Map.elems r) EQ -> goL l <> g amid bs <> Map.foldrWithKey (\k v acc -> g k v <> acc) mempty r
LT -> goL r LT -> goL r
GT -> error "predicate not monotone with respect to ordering" GT -> error "predicate not monotone with respect to ordering"
goR Map.Tip = mempty goR Map.Tip = mempty
goR (Map.Bin _ amid bs l r) = case f amid of goR (Map.Bin _ amid bs l r) = case f amid of
EQ -> bs <> goR r <> S.unions (Map.elems l) EQ -> Map.foldrWithKey (\k v acc -> g k v <> acc) mempty l <> g amid bs <> goR r
GT -> goR l GT -> goR l
LT -> error "predicate not monotone with respect to ordering" LT -> error "predicate not monotone with respect to ordering"

View File

@ -17,13 +17,14 @@ source-repository head
library library
exposed-modules: exposed-modules:
Unison.Util.BiMultimap
Unison.Util.Relation Unison.Util.Relation
Unison.Util.Relation3 Unison.Util.Relation3
Unison.Util.Relation4 Unison.Util.Relation4
hs-source-dirs: hs-source-dirs:
src src
default-extensions: default-extensions:
ApplicativeDo BangPatterns
BlockArguments BlockArguments
DeriveFunctor DeriveFunctor
DerivingStrategies DerivingStrategies
@ -44,6 +45,7 @@ library
, containers , containers
, deepseq , deepseq
, extra , extra
, nonempty-containers
, unison-prelude , unison-prelude
default-language: Haskell2010 default-language: Haskell2010
@ -53,7 +55,7 @@ test-suite util-relation-tests
hs-source-dirs: hs-source-dirs:
test test
default-extensions: default-extensions:
ApplicativeDo BangPatterns
BlockArguments BlockArguments
DeriveFunctor DeriveFunctor
DerivingStrategies DerivingStrategies
@ -76,6 +78,7 @@ test-suite util-relation-tests
, deepseq , deepseq
, easytest , easytest
, extra , extra
, nonempty-containers
, random , random
, unison-prelude , unison-prelude
, unison-util-relation , unison-util-relation
@ -87,7 +90,7 @@ benchmark relation
hs-source-dirs: hs-source-dirs:
benchmarks/relation benchmarks/relation
default-extensions: default-extensions:
ApplicativeDo BangPatterns
BlockArguments BlockArguments
DeriveFunctor DeriveFunctor
DerivingStrategies DerivingStrategies
@ -109,6 +112,7 @@ benchmark relation
, containers , containers
, deepseq , deepseq
, extra , extra
, nonempty-containers
, random , random
, tasty-bench , tasty-bench
, unison-prelude , unison-prelude

76
nix/haskell-nix-flake.nix Normal file
View File

@ -0,0 +1,76 @@
# Builds the haskell.nix flake for the Unison project and wires up dev shells.
#
# Arguments:
#   stack, hpack - build tools injected by the caller
#   pkgs         - nixpkgs with the unison-project overlay applied
#   versions     - pinned tool versions (ormolu, hls, ...)
{ stack, hpack, pkgs, versions }:
let
  haskell-nix-flake = pkgs.unison-project.flake { };
  # Shared shell configuration merged into every shellFor invocation below.
  commonShellArgs = args:
    args // {
      # workaround:
      # https://github.com/input-output-hk/haskell.nix/issues/1793
      # https://github.com/input-output-hk/haskell.nix/issues/1885
      allToolDeps = false;
      additional = hpkgs: with hpkgs; [ Cabal stm exceptions ghc ghc-heap ];
      buildInputs =
        let
          native-packages = pkgs.lib.optionals pkgs.stdenv.isDarwin
            (with pkgs.darwin.apple_sdk.frameworks; [ Cocoa ]);
        in
        (args.buildInputs or [ ]) ++ [ stack hpack pkgs.pkg-config pkgs.zlib pkgs.glibcLocales ] ++ native-packages;
      # workaround for https://gitlab.haskell.org/ghc/ghc/-/issues/11042
      shellHook = ''
        export LD_LIBRARY_PATH=${pkgs.zlib}/lib:$LD_LIBRARY_PATH
      '';
      tools =
        (args.tools or { }) // {
          cabal = { };
          ormolu = { version = versions.ormolu; };
          haskell-language-server = {
            version = versions.hls;
            modules = [
              {
                packages.haskell-language-server.components.exes.haskell-language-server.postInstall = ''
                  ln -sr "$out/bin/haskell-language-server" "$out/bin/haskell-language-server-wrapper"
                '';
              }
            ];
            # specify flags via project file rather than a module override
            # https://github.com/input-output-hk/haskell.nix/issues/1509
            cabalProject = ''
              packages: .
              package haskell-language-server
                flags: -brittany -fourmolu -stylishhaskell -hlint
              constraints: ormolu == ${versions.ormolu}
            '';
          };
        };
    };
  shellFor = args: pkgs.unison-project.shellFor (commonShellArgs args);
  # Only the packages defined in this repository (isLocal), not dependencies.
  localPackages = with pkgs.lib;
    filterAttrs (k: v: v.isLocal or false) pkgs.unison-project.hsPkgs;
  localPackageNames = builtins.attrNames localPackages;
  devShells =
    let
      # One dev shell per local package, plus the aggregates below.
      mkDevShell = pkgName:
        shellFor {
          packages = hpkgs: [ hpkgs."${pkgName}" ];
          withHoogle = true;
        };
      localPackageDevShells =
        pkgs.lib.genAttrs localPackageNames mkDevShell;
    in
    {
      # Tooling only, no project packages (fast to enter).
      only-tools = shellFor {
        packages = _: [ ];
        withHoogle = false;
      };
      # All local packages at once.
      local = shellFor {
        packages = hpkgs: (map (p: hpkgs."${p}") localPackageNames);
        withHoogle = true;
      };
    } // localPackageDevShells;
in
haskell-nix-flake // {
  defaultPackage = haskell-nix-flake.packages."unison-cli:exe:unison";
  inherit (pkgs) unison-project;
  inherit devShells localPackageNames;
}

View File

@ -0,0 +1,47 @@
# Overlay defining `unison-project`: a haskell.nix project built from the
# repository root, with a source filter so nix-only changes don't force
# Haskell rebuilds.
final: prev: {
  unison-project = with prev.lib.strings;
    let
      # Filter hidden files, flake.lock, and *.nix out of the source tree.
      cleanSource = pth:
        let
          src' = prev.lib.cleanSourceWith {
            filter = filt;
            src = pth;
          };
          filt = path: type:
            let
              bn = baseNameOf path;
              isHiddenFile = hasPrefix "." bn;
              isFlakeLock = bn == "flake.lock";
              isNix = hasSuffix ".nix" bn;
            in
            !isHiddenFile && !isFlakeLock && !isNix;
        in
        src';
    in
    final.haskell-nix.project' {
      src = cleanSource ./..;
      projectFileName = "stack.yaml";
      modules = [
        # enable profiling
        {
          enableLibraryProfiling = true;
          profilingDetail = "none";
        }
        # remove buggy build tool dependencies
        ({ lib, ... }: {
          # this component has the build tool
          # `unison-cli:unison` and somehow haskell.nix
          # decides to add some file sharing package
          # `unison` as a build-tool dependency.
          packages.unison-cli.components.exes.cli-integration-tests.build-tools =
            lib.mkForce [ ];
        })
      ];
      # Pin forked git dependencies to specific branches by commit.
      branchMap = {
        "https://github.com/unisonweb/configurator.git"."e47e9e9fe1f576f8c835183b9def52d73c01327a" =
          "unison";
        "https://github.com/unisonweb/shellmet.git"."2fd348592c8f51bb4c0ca6ba4bc8e38668913746" =
          "topic/avoid-callCommand";
      };
    };
}

43
nix/nixpkgs-overlay.nix Normal file
View File

@ -0,0 +1,43 @@
# Overlay providing `unison-hls` (haskell-language-server pinned to the
# project's ormolu/hls versions) and a `ghcunison` package set carrying the
# required dependency overrides.
{ versions }:
final: prev: {
  unison-hls = final.haskell-language-server.override {
    # build with our overridden haskellPackages that have our pinned
    # version of ormolu and hls
    haskellPackages = final.haskell.packages."ghc${versions.ghc}";
    dynamic = true;
    supportedGhcVersions = [ versions.ghc ];
  };
  haskell = prev.haskell // {
    packages = prev.haskell.packages // {
      ghcunison = prev.haskell.packages."ghc${versions.ghc}".extend (hfinal: hprev:
        let inherit (prev.haskell.lib) overrideCabal; in {
          # dependency overrides for ormolu 0.5.2.0
          haskell-language-server =
            let
              # Fetch hls straight from Hackage at the pinned version, with the
              # unused formatter/lint plugins nulled out.
              p = hfinal.callHackageDirect
                {
                  pkg = "haskell-language-server";
                  ver = versions.hls;
                  sha256 = "0kp586yc162raljyd5arsxm5ndcx5zfw9v94v27bkjg7x0hp1s8b";
                }
                {
                  hls-fourmolu-plugin = null;
                  hls-stylish-haskell-plugin = null;
                  hls-hlint-plugin = null;
                  hls-floskell-plugin = null;
                };
              # Disable tests and the corresponding cabal flags for the
              # plugins removed above.
              override = drv: {
                doCheck = false;
                configureFlags = (drv.configureFlags or [ ]) ++ [
                  "-f-fourmolu"
                  "-f-stylishhaskell"
                  "-f-hlint"
                  "-f-floskell"
                ];
              };
            in
            overrideCabal p override;
        });
    };
  };
}

18
nix/unison-overlay.nix Normal file
View File

@ -0,0 +1,18 @@
# Overlay providing `unison-stack`: stack wrapped to always use the
# nix-provided GHC instead of downloading its own.
final: prev: {
  # a wrapped version of stack that passes the necessary flags to use
  # the nix provided ghc.
  unison-stack = prev.symlinkJoin {
    name = "stack";
    paths = [ final.stack ];
    buildInputs = [ final.makeWrapper ];
    postBuild =
      let
        flags = [ "--no-nix" "--system-ghc" "--no-install-ghc" ];
        add-flags =
          "--add-flags '${prev.lib.concatStringsSep " " flags}'";
      in
      ''
        wrapProgram "$out/bin/stack" ${add-flags}
      '';
  };
}

View File

@ -53,7 +53,6 @@ dependencies:
- fuzzyfind - fuzzyfind
- free - free
- generic-lens - generic-lens
- generic-monoid
- hashable - hashable
- hashtables - hashtables
- haskeline - haskeline
@ -90,6 +89,7 @@ dependencies:
- safe - safe
- safe-exceptions - safe-exceptions
- semialign - semialign
- semigroups
- servant - servant
- servant-client - servant-client
- servant-docs - servant-docs
@ -122,6 +122,7 @@ dependencies:
- unison-util-base32hex - unison-util-base32hex
- unison-util-bytes - unison-util-bytes
- unison-util-cache - unison-util-cache
- unison-util-nametree
- unison-util-relation - unison-util-relation
- unison-util-rope - unison-util-rope
- unison-util-serialization - unison-util-serialization
@ -132,6 +133,7 @@ dependencies:
- vector - vector
- wai - wai
- warp - warp
- witch
- witherable - witherable
- x509 - x509
- x509-store - x509-store
@ -179,6 +181,7 @@ default-extensions:
- LambdaCase - LambdaCase
- MultiParamTypeClasses - MultiParamTypeClasses
- NamedFieldPuns - NamedFieldPuns
- OverloadedLabels
- OverloadedStrings - OverloadedStrings
- PatternSynonyms - PatternSynonyms
- RankNTypes - RankNTypes

View File

@ -45,12 +45,10 @@ data Diff a = Diff
-- | Represents the changes to definitions at a given path, not including child paths. -- | Represents the changes to definitions at a given path, not including child paths.
-- --
-- Note: doesn't yet include any info on metadata or patch diffs. Feel free to add it. -- Note: doesn't yet include any info on patch diffs. Feel free to add it.
data DefinitionDiffs = DefinitionDiffs data DefinitionDiffs = DefinitionDiffs
{ termDiffs :: Map NameSegment (Diff Referent), { termDiffs :: Map NameSegment (Diff Referent),
typeDiffs :: Map NameSegment (Diff Reference) typeDiffs :: Map NameSegment (Diff Reference)
-- termMetadataDiffs :: Map (NameSegment, Referent) (Diff Reference),
-- typeMetadataDiffs :: Map (NameSegment, Reference) (Diff Reference)
-- patchDiffs :: Map NameSegment (Diff ()) -- patchDiffs :: Map NameSegment (Diff ())
} }
deriving stock (Show, Eq, Ord) deriving stock (Show, Eq, Ord)

View File

@ -1,11 +1,7 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
module Unison.Builtin module Unison.Builtin
( codeLookup, ( codeLookup,
constructorType, constructorType,
names, names,
names0,
builtinDataDecls, builtinDataDecls,
builtinEffectDecls, builtinEffectDecls,
builtinConstructorType, builtinConstructorType,
@ -37,7 +33,6 @@ import Unison.Hash (Hash)
import Unison.Hashing.V2.Convert qualified as H import Unison.Hashing.V2.Convert qualified as H
import Unison.Name (Name) import Unison.Name (Name)
import Unison.Names (Names (Names)) import Unison.Names (Names (Names))
import Unison.NamesWithHistory (NamesWithHistory (..))
import Unison.Parser.Ann (Ann (..)) import Unison.Parser.Ann (Ann (..))
import Unison.Prelude import Unison.Prelude
import Unison.Reference qualified as R import Unison.Reference qualified as R
@ -55,11 +50,8 @@ type EffectDeclaration = DD.EffectDeclaration Symbol Ann
type Type = Type.Type Symbol () type Type = Type.Type Symbol ()
names :: NamesWithHistory names :: Names
names = NamesWithHistory names0 mempty names = Names terms types
names0 :: Names
names0 = Names terms types
where where
terms = terms =
Rel.mapRan Referent.Ref (Rel.fromMap termNameRefs) Rel.mapRan Referent.Ref (Rel.fromMap termNameRefs)
@ -516,6 +508,7 @@ builtinsSrc =
B "Pattern.many" $ forall1 "a" (\a -> pat a --> pat a), B "Pattern.many" $ forall1 "a" (\a -> pat a --> pat a),
B "Pattern.replicate" $ forall1 "a" (\a -> nat --> nat --> pat a --> pat a), B "Pattern.replicate" $ forall1 "a" (\a -> nat --> nat --> pat a --> pat a),
B "Pattern.capture" $ forall1 "a" (\a -> pat a --> pat a), B "Pattern.capture" $ forall1 "a" (\a -> pat a --> pat a),
B "Pattern.captureAs" $ forall1 "a" (\a -> a --> pat a --> pat a),
B "Pattern.join" $ forall1 "a" (\a -> list (pat a) --> pat a), B "Pattern.join" $ forall1 "a" (\a -> list (pat a) --> pat a),
B "Pattern.or" $ forall1 "a" (\a -> pat a --> pat a --> pat a), B "Pattern.or" $ forall1 "a" (\a -> pat a --> pat a --> pat a),
-- Pattern.run : Pattern a -> a -> Optional ([a], a) -- Pattern.run : Pattern a -> a -> Optional ([a], a)
@ -833,6 +826,12 @@ ioBuiltins =
( "validateSandboxed", ( "validateSandboxed",
forall1 "a" $ \a -> list termLink --> a --> boolean forall1 "a" $ \a -> list termLink --> a --> boolean
), ),
("sandboxLinks", termLink --> io (list termLink)),
( "Value.validateSandboxed",
list termLink
--> value
--> io (eithert (list termLink) (list termLink))
),
("Tls.newClient.impl.v3", tlsClientConfig --> socket --> iof tls), ("Tls.newClient.impl.v3", tlsClientConfig --> socket --> iof tls),
("Tls.newServer.impl.v3", tlsServerConfig --> socket --> iof tls), ("Tls.newServer.impl.v3", tlsServerConfig --> socket --> iof tls),
("Tls.handshake.impl.v3", tls --> iof unit), ("Tls.handshake.impl.v3", tls --> iof unit),

View File

@ -1,6 +1,3 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
module Unison.Builtin.Terms module Unison.Builtin.Terms
( builtinTermsRef, ( builtinTermsRef,
builtinTermsSrc, builtinTermsSrc,

View File

@ -13,13 +13,14 @@ module Unison.Codebase
isTerm, isTerm,
putTerm, putTerm,
putTermComponent, putTermComponent,
termMetadata,
-- ** Referents (sorta-termlike) -- ** Referents (sorta-termlike)
getTypeOfReferent, getTypeOfReferent,
-- ** Search -- ** Search
termsOfType, termsOfType,
filterTermsByReferenceIdHavingType,
filterTermsByReferentHavingType,
termsMentioningType, termsMentioningType,
SqliteCodebase.Operations.termReferencesByPrefix, SqliteCodebase.Operations.termReferencesByPrefix,
termReferentsByPrefix, termReferentsByPrefix,
@ -121,7 +122,6 @@ import U.Codebase.Branch qualified as V2
import U.Codebase.Branch qualified as V2Branch import U.Codebase.Branch qualified as V2Branch
import U.Codebase.Causal qualified as V2Causal import U.Codebase.Causal qualified as V2Causal
import U.Codebase.HashTags (CausalHash) import U.Codebase.HashTags (CausalHash)
import U.Codebase.Referent qualified as V2
import U.Codebase.Sqlite.Operations qualified as Operations import U.Codebase.Sqlite.Operations qualified as Operations
import U.Codebase.Sqlite.Queries qualified as Queries import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.Builtin qualified as Builtin import Unison.Builtin qualified as Builtin
@ -151,11 +151,10 @@ import Unison.DataDeclaration (Decl)
import Unison.DataDeclaration qualified as DD import Unison.DataDeclaration qualified as DD
import Unison.Hash (Hash) import Unison.Hash (Hash)
import Unison.Hashing.V2.Convert qualified as Hashing import Unison.Hashing.V2.Convert qualified as Hashing
import Unison.NameSegment qualified as NameSegment
import Unison.Parser.Ann (Ann) import Unison.Parser.Ann (Ann)
import Unison.Parser.Ann qualified as Parser import Unison.Parser.Ann qualified as Parser
import Unison.Prelude import Unison.Prelude
import Unison.Reference (Reference) import Unison.Reference (Reference, TermReferenceId, TypeReference)
import Unison.Reference qualified as Reference import Unison.Reference qualified as Reference
import Unison.Referent qualified as Referent import Unison.Referent qualified as Referent
import Unison.Runtime.IOSource qualified as IOSource import Unison.Runtime.IOSource qualified as IOSource
@ -268,19 +267,6 @@ expectBranchForHash codebase hash =
Just branch -> pure branch Just branch -> pure branch
Nothing -> error $ reportBug "E412939" ("expectBranchForHash: " ++ show hash ++ " not found in codebase") Nothing -> error $ reportBug "E412939" ("expectBranchForHash: " ++ show hash ++ " not found in codebase")
-- | Get the metadata attached to the term at a given path and name relative to the given branch.
termMetadata ::
-- | The branch to search inside. Use the current root if 'Nothing'.
Maybe (V2Branch.Branch Sqlite.Transaction) ->
Split ->
-- | There may be multiple terms at the given name. You can specify a Referent to
-- disambiguate if desired.
Maybe V2.Referent ->
Sqlite.Transaction [Map V2Branch.MetadataValue V2Branch.MetadataType]
termMetadata mayBranch (path, nameSeg) ref = do
b <- getShallowBranchAtPath path mayBranch
V2Branch.termMetadata b (coerce @NameSegment.NameSegment nameSeg) ref
-- | Get the lowest common ancestor of two branches, i.e. the most recent branch that is an ancestor of both branches. -- | Get the lowest common ancestor of two branches, i.e. the most recent branch that is an ancestor of both branches.
lca :: (MonadIO m) => Codebase m v a -> Branch m -> Branch m -> m (Maybe (Branch m)) lca :: (MonadIO m) => Codebase m v a -> Branch m -> Branch m -> m (Maybe (Branch m))
lca code b1@(Branch.headHash -> h1) b2@(Branch.headHash -> h2) = do lca code b1@(Branch.headHash -> h1) b2@(Branch.headHash -> h2) = do
@ -326,9 +312,7 @@ addDefsToCodebase c uf = do
traverse_ goTerm (UF.hashTermsId uf) traverse_ goTerm (UF.hashTermsId uf)
where where
goTerm t | debug && trace ("Codebase.addDefsToCodebase.goTerm " ++ show t) False = undefined goTerm t | debug && trace ("Codebase.addDefsToCodebase.goTerm " ++ show t) False = undefined
goTerm (_, r, Nothing, tm, tp) = putTerm c r tm tp goTerm (_, r, wk, tm, tp) = when (WK.watchKindShouldBeStoredInDatabase wk) (putTerm c r tm tp)
goTerm (_, r, Just WK.TestWatch, tm, tp) = putTerm c r tm tp
goTerm _ = pure ()
goType :: (Show t) => (t -> Decl v a) -> (Reference.Id, t) -> Sqlite.Transaction () goType :: (Show t) => (t -> Decl v a) -> (Reference.Id, t) -> Sqlite.Transaction ()
goType _f pair | debug && trace ("Codebase.addDefsToCodebase.goType " ++ show pair) False = undefined goType _f pair | debug && trace ("Codebase.addDefsToCodebase.goType " ++ show pair) False = undefined
goType f (ref, decl) = putTypeDeclaration c ref (f decl) goType f (ref, decl) = putTypeDeclaration c ref (f decl)
@ -461,6 +445,28 @@ termsOfTypeByReference c r =
. Set.map (fmap Reference.DerivedId) . Set.map (fmap Reference.DerivedId)
<$> termsOfTypeImpl c r <$> termsOfTypeImpl c r
filterTermsByReferentHavingType :: (Var v) => Codebase m v a -> Type v a -> Set Referent.Referent -> Sqlite.Transaction (Set Referent.Referent)
filterTermsByReferentHavingType c ty = filterTermsByReferentHavingTypeByReference c $ Hashing.typeToReference ty
filterTermsByReferenceIdHavingType :: (Var v) => Codebase m v a -> Type v a -> Set TermReferenceId -> Sqlite.Transaction (Set TermReferenceId)
filterTermsByReferenceIdHavingType c ty = filterTermsByReferenceIdHavingTypeImpl c (Hashing.typeToReference ty)
-- | Find the subset of `tms` which match the exact type `r` points to.
filterTermsByReferentHavingTypeByReference :: Codebase m v a -> TypeReference -> Set Referent.Referent -> Sqlite.Transaction (Set Referent.Referent)
filterTermsByReferentHavingTypeByReference c r tms = do
let (builtins, derived) = partitionEithers . map p $ Set.toList tms
let builtins' =
Set.intersection
(Set.fromList builtins)
(Rel.lookupDom r Builtin.builtinTermsByType)
derived' <- filterTermsByReferentIdHavingTypeImpl c r (Set.fromList derived)
pure $ builtins' <> Set.mapMonotonic Referent.fromId derived'
where
p :: Referent.Referent -> Either Referent.Referent Referent.Id
p r = case Referent.toId r of
Just rId -> Right rId
Nothing -> Left r
-- | Get the set of terms-or-constructors mention the given type anywhere in their signature. -- | Get the set of terms-or-constructors mention the given type anywhere in their signature.
termsMentioningType :: (Var v) => Codebase m v a -> Type v a -> Sqlite.Transaction (Set Referent.Referent) termsMentioningType :: (Var v) => Codebase m v a -> Type v a -> Sqlite.Transaction (Set Referent.Referent)
termsMentioningType c ty = termsMentioningType c ty =

View File

@ -1,7 +1,4 @@
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-orphans #-} {-# OPTIONS_GHC -Wno-orphans #-}
module Unison.Codebase.Branch module Unison.Codebase.Branch
@ -33,6 +30,7 @@ module Unison.Codebase.Branch
-- * properties -- * properties
history, history,
head, head,
head_,
headHash, headHash,
children, children,
nonEmptyChildren, nonEmptyChildren,
@ -51,6 +49,8 @@ module Unison.Codebase.Branch
addTermName, addTermName,
addTypeName, addTypeName,
deleteTermName, deleteTermName,
annihilateTermName,
annihilateTypeName,
deleteTypeName, deleteTypeName,
setChildBranch, setChildBranch,
replacePatch, replacePatch,
@ -66,6 +66,8 @@ module Unison.Codebase.Branch
modifyAt, modifyAt,
modifyAtM, modifyAtM,
children0, children0,
withoutLib,
withoutTransitiveLibs,
-- * Branch terms/types/edits -- * Branch terms/types/edits
@ -76,6 +78,7 @@ module Unison.Codebase.Branch
-- ** Term/type queries -- ** Term/type queries
deepReferents, deepReferents,
deepTermReferences,
deepTypeReferences, deepTypeReferences,
consBranchSnapshot, consBranchSnapshot,
) )
@ -118,14 +121,16 @@ import Unison.Name (Name)
import Unison.Name qualified as Name import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment) import Unison.NameSegment (NameSegment)
import Unison.Prelude hiding (empty) import Unison.Prelude hiding (empty)
import Unison.Reference (TypeReference) import Unison.Reference (TermReference, TypeReference)
import Unison.Referent (Referent) import Unison.Referent (Referent)
import Unison.Referent qualified as Referent
import Unison.Util.List qualified as List import Unison.Util.List qualified as List
import Unison.Util.Monoid qualified as Monoid import Unison.Util.Monoid qualified as Monoid
import Unison.Util.Relation qualified as R import Unison.Util.Relation qualified as R
import Unison.Util.Relation qualified as Relation import Unison.Util.Relation qualified as Relation
import Unison.Util.Relation4 qualified as R4 import Unison.Util.Set qualified as Set
import Unison.Util.Star3 qualified as Star3 import Unison.Util.Star3 qualified as Star3
import Witherable (FilterableWithIndex (imapMaybe))
import Prelude hiding (head, read, subtract) import Prelude hiding (head, read, subtract)
instance AsEmpty (Branch m) where instance AsEmpty (Branch m) where
@ -138,9 +143,41 @@ instance AsEmpty (Branch m) where
instance Hashing.ContentAddressable (Branch0 m) where instance Hashing.ContentAddressable (Branch0 m) where
contentHash = H.hashBranch0 contentHash = H.hashBranch0
-- | Remove any lib subtrees reachable within the branch.
-- Note: This DOES affect the hash.
withoutLib :: Branch0 m -> Branch0 m
withoutLib Branch0 {..} =
let newChildren =
_children
& imapMaybe
( \nameSegment child ->
if nameSegment == Name.libSegment
then Nothing
else Just (child & head_ %~ withoutLib)
)
in branch0 _terms _types newChildren _edits
-- | Remove any transitive libs reachable within the branch.
-- Note: This DOES affect the hash.
withoutTransitiveLibs :: Branch0 m -> Branch0 m
withoutTransitiveLibs Branch0 {..} =
let newChildren =
_children
& imapMaybe
( \nameSegment child ->
if nameSegment == Name.libSegment
then Just (child & head_ %~ withoutLib)
else Just (child & head_ %~ withoutTransitiveLibs)
)
in branch0 _terms _types newChildren _edits
deepReferents :: Branch0 m -> Set Referent deepReferents :: Branch0 m -> Set Referent
deepReferents = R.dom . deepTerms deepReferents = R.dom . deepTerms
deepTermReferences :: Branch0 m -> Set TermReference
deepTermReferences =
Set.mapMaybe Referent.toTermReference . deepReferents
deepTypeReferences :: Branch0 m -> Set TypeReference deepTypeReferences :: Branch0 m -> Set TypeReference
deepTypeReferences = R.dom . deepTypes deepTypeReferences = R.dom . deepTypes
@ -151,7 +188,6 @@ terms =
\branch terms -> \branch terms ->
branch {_terms = terms} branch {_terms = terms}
& deriveDeepTerms & deriveDeepTerms
& deriveDeepTermMetadata
types :: Lens' (Branch0 m) (Star TypeReference NameSegment) types :: Lens' (Branch0 m) (Star TypeReference NameSegment)
types = types =
@ -160,7 +196,6 @@ types =
\branch types -> \branch types ->
branch {_types = types} branch {_types = types}
& deriveDeepTypes & deriveDeepTypes
& deriveDeepTypeMetadata
children :: Lens' (Branch0 m) (Map NameSegment (Branch m)) children :: Lens' (Branch0 m) (Map NameSegment (Branch m))
children = lens _children (\Branch0 {..} x -> branch0 _terms _types x _edits) children = lens _children (\Branch0 {..} x -> branch0 _terms _types x _edits)
@ -201,15 +236,11 @@ branch0 terms types children edits =
-- These are all overwritten immediately -- These are all overwritten immediately
deepTerms = R.empty, deepTerms = R.empty,
deepTypes = R.empty, deepTypes = R.empty,
deepTermMetadata = R4.empty,
deepTypeMetadata = R4.empty,
deepPaths = Set.empty, deepPaths = Set.empty,
deepEdits = Map.empty deepEdits = Map.empty
} }
& deriveDeepTerms & deriveDeepTerms
& deriveDeepTypes & deriveDeepTypes
& deriveDeepTermMetadata
& deriveDeepTypeMetadata
& deriveDeepPaths & deriveDeepPaths
& deriveDeepEdits & deriveDeepEdits
@ -258,50 +289,6 @@ deriveDeepTypes branch =
children <- deepChildrenHelper e children <- deepChildrenHelper e
go (work <> children) (types <> acc) go (work <> children) (types <> acc)
-- | Derive the 'deepTermMetadata' field of a branch.
deriveDeepTermMetadata :: forall m. Branch0 m -> Branch0 m
deriveDeepTermMetadata branch =
branch {deepTermMetadata = R4.fromList (makeDeepTermMetadata branch)}
where
makeDeepTermMetadata :: Branch0 m -> [(Referent, Name, Metadata.Type, Metadata.Value)]
makeDeepTermMetadata branch = State.evalState (go (Seq.singleton ([], 0, branch)) mempty) Set.empty
where
go ::
Seq (DeepChildAcc m) ->
[(Referent, Name, Metadata.Type, Metadata.Value)] ->
DeepState m [(Referent, Name, Metadata.Type, Metadata.Value)]
go Seq.Empty acc = pure acc
go (e@(reversePrefix, _, b0) Seq.:<| work) acc = do
let termMetadata :: [(Referent, Name, Metadata.Type, Metadata.Value)]
termMetadata =
map
(\(r, n, t, v) -> (r, Name.fromReverseSegments (n NonEmpty.:| reversePrefix), t, v))
(Metadata.starToR4List (_terms b0))
children <- deepChildrenHelper e
go (work <> children) (termMetadata <> acc)
-- | Derive the 'deepTypeMetadata' field of a branch.
deriveDeepTypeMetadata :: forall m. Branch0 m -> Branch0 m
deriveDeepTypeMetadata branch =
branch {deepTypeMetadata = R4.fromList (makeDeepTypeMetadata branch)}
where
makeDeepTypeMetadata :: Branch0 m -> [(TypeReference, Name, Metadata.Type, Metadata.Value)]
makeDeepTypeMetadata branch = State.evalState (go (Seq.singleton ([], 0, branch)) mempty) Set.empty
where
go ::
Seq (DeepChildAcc m) ->
[(TypeReference, Name, Metadata.Type, Metadata.Value)] ->
DeepState m [(TypeReference, Name, Metadata.Type, Metadata.Value)]
go Seq.Empty acc = pure acc
go (e@(reversePrefix, _, b0) Seq.:<| work) acc = do
let typeMetadata :: [(TypeReference, Name, Metadata.Type, Metadata.Value)]
typeMetadata =
map
(\(r, n, t, v) -> (r, Name.fromReverseSegments (n NonEmpty.:| reversePrefix), t, v))
(Metadata.starToR4List (_types b0))
children <- deepChildrenHelper e
go (work <> children) (typeMetadata <> acc)
-- | Derive the 'deepPaths' field of a branch. -- | Derive the 'deepPaths' field of a branch.
deriveDeepPaths :: forall m. Branch0 m -> Branch0 m deriveDeepPaths :: forall m. Branch0 m -> Branch0 m
deriveDeepPaths branch = deriveDeepPaths branch =
@ -442,7 +429,17 @@ one = Branch . Causal.one
empty0 :: Branch0 m empty0 :: Branch0 m
empty0 = empty0 =
Branch0 mempty mempty mempty mempty True mempty mempty mempty mempty mempty mempty Branch0
{ _terms = mempty,
_types = mempty,
_children = Map.empty,
_edits = Map.empty,
isEmpty0 = True,
deepTerms = Relation.empty,
deepTypes = Relation.empty,
deepPaths = Set.empty,
deepEdits = Map.empty
}
-- | Checks whether a branch is empty AND has no history. -- | Checks whether a branch is empty AND has no history.
isEmpty :: Branch m -> Bool isEmpty :: Branch m -> Bool
@ -677,15 +674,13 @@ batchUpdatesM (toList -> actions) curBranch = foldM execActions curBranch (group
pathLocation _ = ChildActions pathLocation _ = ChildActions
-- todo: consider inlining these into Actions2 -- todo: consider inlining these into Actions2
addTermName :: addTermName :: Referent -> NameSegment -> Branch0 m -> Branch0 m
Referent -> NameSegment -> Metadata.Metadata -> Branch0 m -> Branch0 m addTermName r new =
addTermName r new md = over terms (Star3.insertD1 (r, new))
over terms (Metadata.insertWithMetadata (r, md) . Star3.insertD1 (r, new))
addTypeName :: addTypeName :: TypeReference -> NameSegment -> Branch0 m -> Branch0 m
TypeReference -> NameSegment -> Metadata.Metadata -> Branch0 m -> Branch0 m addTypeName r new =
addTypeName r new md = over types (Star3.insertD1 (r, new))
over types (Metadata.insertWithMetadata (r, md) . Star3.insertD1 (r, new))
deleteTermName :: Referent -> NameSegment -> Branch0 m -> Branch0 m deleteTermName :: Referent -> NameSegment -> Branch0 m -> Branch0 m
deleteTermName r n b deleteTermName r n b
@ -693,6 +688,12 @@ deleteTermName r n b
over terms (Star3.deletePrimaryD1 (r, n)) b over terms (Star3.deletePrimaryD1 (r, n)) b
deleteTermName _ _ b = b deleteTermName _ _ b = b
annihilateTermName :: NameSegment -> Branch0 m -> Branch0 m
annihilateTermName = over terms . Star3.deleteD1
annihilateTypeName :: NameSegment -> Branch0 m -> Branch0 m
annihilateTypeName = over types . Star3.deleteD1
deleteTypeName :: TypeReference -> NameSegment -> Branch0 m -> Branch0 m deleteTypeName :: TypeReference -> NameSegment -> Branch0 m -> Branch0 m
deleteTypeName r n b deleteTypeName r n b
| Star3.memberD1 (r, n) (view types b) = | Star3.memberD1 (r, n) (view types b) =

View File

@ -0,0 +1,230 @@
-- | The "decl coherency check": a type declaration in a namespace is "coherent" if it satisfies both of the following
-- criteria.
--
-- 1. For each naming of the type decl (say "Foo"#foohash), there exists exactly one name for each of its constructors
-- arbitrarily deep in the corresponding namespace ("Foo" in this example).
--
-- This allows us to render the decl naturally, as in
--
-- structural type Foo
-- = Bar Nat Int
-- | internal.hello.Bonk Nat
--
-- which corresponds to the three names
--
-- "Foo" => #foohash
-- "Foo.Bar" => #foohash#0
-- "Foo.internal.hello.Bonk" => #foohash#1
--
-- We could not do if there was at least one constructor whose full name does not contain the full name of the type
-- decl itself as a prefix.
--
-- A notable consequence of this requirement is that a second naming of a decl (i.e. an alias) cannot be embedded
-- within the first naming, as in:
--
-- type Foo = ...
-- type Foo.some.inner.namespace = ... -- an alias of Foo
--
-- 2. No constructor has a "stray" name that does not have a prefix that equals the type declaration's name. For
-- example, in the namespace
--
-- "Foo" => #foohash
-- "Foo.Bar" => #foohash#0
-- "Deep.What.SomeAlias" => #foohash#0
--
-- the constructor "What.SomeAlias" is "stray", as the type decl #foohash has no name that matches any prefix
-- (i.e. "Deep.What" nor "Deep").
--
-- On to the implementation. We are going to traverse the namespace depth-first. As we go, we have a stateful mapping
-- between decl reference that we *have* seen a name for in one of our parent namespace, and its corresponding set of
-- constructors that we *haven't* yet seen names for, but expect to, before fully searching the corresponding
-- sub-namespace (e.g. the child namespace named "Foo" of the namepace that declares a decl "Foo").
--
-- When processing a namespace, we first process all terms. Each constructor will fall into one of three cases:
--
-- > +----------------------------------------------------------------------------------------------------------------+
-- > | Case | Mapping before | Encountered constructor | Mapping after |
-- > +----------------------------------------------------------------------------------------------------------------+
-- > | Happy path | { #foo : {0, 1, 2} } | #foo#1 | { #foo : {0, 2} } |
-- > | Already seen | { #foo : {0, 1, 2} } | #foo#5 | Error: duplicate naming for constructor #foo#5 |
-- > | Never seen | { #foo : {0, 1, 2} } | #bar#2 | Error: stray constructor #bar#2 |
-- > +----------------------------------------------------------------------------------------------------------------+
--
-- In "happy path", we see a naming of a constructor that we're expecting, and check it off.
-- In "already seen", we see a second naming of a constructor that we're no longer expecting, and fail.
-- In "never seen", we see a naming of a constructor before any naming of its decl, so we fail.
--
-- Next, we process all type decls. Each will again fall into one of three cases:
--
-- > +-----------------------------------------------------------------------------------------------------+
-- > | Case | Mapping before | Declaration | Num constructors | New mapping |
-- > +-----------------------------------------------------------------------------------------------------+
-- > | Uninhabited decl | | #foo | 0 | |
-- > | Inhabited decl | | #foo | 1 or more | { #foo : {0, ..., n-1} } |
-- > | Already seen | { foo : {0, 1, 2} } | #foo | Irrelevant | Error: nested decl alias |
-- > +-----------------------------------------------------------------------------------------------------+
--
-- In "uninhabited decl", we find a decl with no constructors, so we don't expect anything new.
-- In "already seen", we find a second naming of a decl, whose constructors will necessarily violate coherency condition
-- (1) above.
--
-- In "inhabited decl", we find a decl with N constructors, and handle it by:
-- 1. Adding to our state that we expect a name for each.
-- 2. Recursing into the child namespace whose name matches the decl.
-- 3. (If we return from the recursion without short-circuiting) remove the mapping added in step (1) and assert that
-- its value is the empty set (meaning we encountered a name for every constructor).
--
-- Note: This check could be moved into SQLite (with sufficient schema support) some day, but for now, we just do this
-- in memory.
--
-- Note: once upon a time, decls could be "incoherent". Then, we decided we want decls to be "coherent". Thus, this
-- machinery was invented.
module Unison.Codebase.Branch.DeclCoherencyCheck
( IncoherentDeclReason (..),
checkDeclCoherency,
)
where
import Control.Lens (view, (%=), (.=))
import Control.Monad.Except (ExceptT)
import Control.Monad.Except qualified as Except
import Control.Monad.State.Strict (StateT)
import Control.Monad.State.Strict qualified as State
import Control.Monad.Trans.Except qualified as Except (except)
import Data.Functor.Compose (Compose (..))
import Data.IntMap.Strict (IntMap)
import Data.IntMap.Strict qualified as IntMap
import Data.List.NonEmpty (pattern (:|))
import Data.Map.Strict qualified as Map
import Data.Maybe (fromJust)
import Data.Set qualified as Set
import U.Codebase.Reference (Reference' (..), TypeReference, TypeReferenceId)
import U.Codebase.Referent (Referent)
import U.Codebase.Referent qualified as Referent
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment)
import Unison.Prelude
import Unison.Sqlite (Transaction)
import Unison.Util.BiMultimap (BiMultimap)
import Unison.Util.BiMultimap qualified as BiMultimap
import Unison.Util.Map qualified as Map (deleteLookup, upsertF)
import Unison.Util.Nametree (Defns (..), Nametree (..))
import Witch (unsafeFrom)
data IncoherentDeclReason
= -- | A second naming of a constructor was discovered underneath a decl's name, e.g.
--
-- Foo#Foo
-- Foo.Bar#Foo#0
-- Foo.Some.Other.Name.For.Bar#Foo#0
IncoherentDeclReason'ConstructorAlias !Name !Name
| IncoherentDeclReason'MissingConstructorName !Name
| IncoherentDeclReason'NestedDeclAlias !Name
| IncoherentDeclReason'NoConstructorNames !Name
| IncoherentDeclReason'StrayConstructor !Name
checkDeclCoherency ::
(TypeReferenceId -> Transaction Int) ->
(Nametree (Defns (Map NameSegment Referent) (Map NameSegment TypeReference))) ->
Transaction (Either IncoherentDeclReason (BiMultimap Name Name))
checkDeclCoherency loadDeclNumConstructors =
Except.runExceptT
. fmap (view #declNames)
. (`State.execStateT` DeclCoherencyCheckState Map.empty BiMultimap.empty)
. go []
where
go ::
[NameSegment] ->
( Nametree
(Defns (Map NameSegment Referent) (Map NameSegment TypeReference))
) ->
StateT DeclCoherencyCheckState (ExceptT IncoherentDeclReason Transaction) ()
go prefix (Nametree Defns {terms, types} children) = do
for_ (Map.toList terms) \case
(_, Referent.Ref _) -> pure ()
(_, Referent.Con (ReferenceBuiltin _) _) -> pure ()
(name, Referent.Con (ReferenceDerived typeRef) conId) -> do
DeclCoherencyCheckState {expectedConstructors} <- State.get
expectedConstructors1 <- lift (Except.except (Map.upsertF f typeRef expectedConstructors))
#expectedConstructors .= expectedConstructors1
where
f :: Maybe (IntMap MaybeConstructorName) -> Either IncoherentDeclReason (IntMap MaybeConstructorName)
f = \case
Nothing -> Left (IncoherentDeclReason'StrayConstructor (fullName name))
Just expected -> IntMap.alterF g (unsafeFrom @Word64 conId) expected
where
g :: Maybe MaybeConstructorName -> Either IncoherentDeclReason (Maybe MaybeConstructorName)
g = \case
Nothing -> error "didnt put expected constructor id"
Just NoConstructorNameYet -> Right (Just (YesConstructorName (fullName name)))
Just (YesConstructorName firstName) -> Left (IncoherentDeclReason'ConstructorAlias firstName (fullName name))
childrenWeWentInto <-
forMaybe (Map.toList types) \case
(_, ReferenceBuiltin _) -> pure Nothing
(name, ReferenceDerived typeRef) -> do
DeclCoherencyCheckState {expectedConstructors} <- State.get
whatHappened <- do
let recordNewDecl ::
Maybe (IntMap MaybeConstructorName) ->
Compose (ExceptT IncoherentDeclReason Transaction) WhatHappened (IntMap MaybeConstructorName)
recordNewDecl =
Compose . \case
Just _ -> Except.throwError (IncoherentDeclReason'NestedDeclAlias typeName)
Nothing ->
lift (loadDeclNumConstructors typeRef) <&> \case
0 -> UninhabitedDecl
n -> InhabitedDecl (IntMap.fromAscList [(i, NoConstructorNameYet) | i <- [0 .. n - 1]])
lift (getCompose (Map.upsertF recordNewDecl typeRef expectedConstructors))
case whatHappened of
UninhabitedDecl -> pure Nothing
InhabitedDecl expectedConstructors1 -> do
child <-
Map.lookup name children & onNothing do
Except.throwError (IncoherentDeclReason'NoConstructorNames typeName)
#expectedConstructors .= expectedConstructors1
go (name : prefix) child
DeclCoherencyCheckState {expectedConstructors} <- State.get
-- fromJust is safe here because we upserted `typeRef` key above
let (fromJust -> maybeConstructorNames, expectedConstructors1) =
Map.deleteLookup typeRef expectedConstructors
constructorNames <-
unMaybeConstructorNames maybeConstructorNames & onNothing do
Except.throwError (IncoherentDeclReason'MissingConstructorName typeName)
#expectedConstructors .= expectedConstructors1
#declNames %= \declNames ->
foldr (BiMultimap.insert typeName) declNames constructorNames
pure (Just name)
where
typeName = fullName name
let childrenWeHaventGoneInto = children `Map.withoutKeys` Set.fromList childrenWeWentInto
for_ (Map.toList childrenWeHaventGoneInto) \(name, child) -> go (name : prefix) child
where
fullName name =
Name.fromReverseSegments (name :| prefix)
data DeclCoherencyCheckState = DeclCoherencyCheckState
{ expectedConstructors :: !(Map TypeReferenceId (IntMap MaybeConstructorName)),
declNames :: !(BiMultimap Name Name)
}
deriving stock (Generic)
data MaybeConstructorName
= NoConstructorNameYet
| YesConstructorName !Name
unMaybeConstructorNames :: IntMap MaybeConstructorName -> Maybe [Name]
unMaybeConstructorNames =
traverse f . IntMap.elems
where
f :: MaybeConstructorName -> Maybe Name
f = \case
NoConstructorNameYet -> Nothing
YesConstructorName name -> Just name
data WhatHappened a
= UninhabitedDecl
| InhabitedDecl !a
deriving stock (Functor, Show)

View File

@ -1,33 +1,12 @@
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
module Unison.Codebase.Branch.Names module Unison.Codebase.Branch.Names
( findHistoricalHQs, ( namesDiff,
findHistoricalRefs,
findHistoricalRefs',
namesDiff,
toNames, toNames,
) )
where where
import Data.Set qualified as Set
import Unison.Codebase.Branch import Unison.Codebase.Branch
import Unison.Codebase.Causal.FoldHistory qualified as Causal
import Unison.HashQualified (HashQualified)
import Unison.HashQualified qualified as HQ
import Unison.LabeledDependency (LabeledDependency)
import Unison.LabeledDependency qualified as LD
import Unison.Name (Name)
import Unison.Names (Names (..)) import Unison.Names (Names (..))
import Unison.Names qualified as Names
import Unison.NamesWithHistory qualified as Names import Unison.NamesWithHistory qualified as Names
import Unison.Prelude hiding (empty)
import Unison.Reference (Reference)
import Unison.Referent (Referent)
import Unison.Referent qualified as Referent
import Unison.Util.Relation qualified as R import Unison.Util.Relation qualified as R
import Prelude hiding (head, read, subtract) import Prelude hiding (head, read, subtract)
@ -37,79 +16,5 @@ toNames b =
(R.swap . deepTerms $ b) (R.swap . deepTerms $ b)
(R.swap . deepTypes $ b) (R.swap . deepTypes $ b)
-- This stops searching for a given HashQualified once it encounters
-- any term or type in any Branch0 that satisfies that HashQualified.
findHistoricalHQs ::
(Monad m) =>
Set (HashQualified Name) ->
Branch m ->
m (Set (HashQualified Name), Names)
findHistoricalHQs =
findInHistory
(\hq r n -> HQ.matchesNamedReferent n r hq)
(\hq r n -> HQ.matchesNamedReference n r hq)
findHistoricalRefs ::
(Monad m) =>
Set LabeledDependency ->
Branch m ->
m (Set LabeledDependency, Names)
findHistoricalRefs =
findInHistory
(\query r _n -> LD.fold (const False) (== r) query)
(\query r _n -> LD.fold (== r) (const False) query)
findHistoricalRefs' ::
(Monad m) =>
Set Reference ->
Branch m ->
m (Set Reference, Names)
findHistoricalRefs' =
findInHistory
(\queryRef r _n -> r == Referent.Ref queryRef)
(\queryRef r _n -> r == queryRef)
findInHistory ::
forall m q.
(Monad m, Ord q) =>
(q -> Referent -> Name -> Bool) ->
(q -> Reference -> Name -> Bool) ->
Set q ->
Branch m ->
m (Set q, Names)
findInHistory termMatches typeMatches queries b =
(Causal.foldHistoryUntil f (queries, mempty) . _history) b <&> \case
-- could do something more sophisticated here later to report that some SH
-- couldn't be found anywhere in the history. but for now, I assume that
-- the normal thing will happen when it doesn't show up in the namespace.
Causal.Satisfied (_, names) -> (mempty, names)
Causal.Unsatisfied (missing, names) -> (missing, names)
where
-- in order to not favor terms over types, we iterate through the ShortHashes,
-- for each `remainingQueries`, if we find a matching Referent or Reference,
-- we remove `q` from the accumulated `remainingQueries`, and add the Ref* to
-- the accumulated `names0`.
f acc@(remainingQueries, _) b0 = (acc', null remainingQueries')
where
acc'@(remainingQueries', _) = foldl' findQ acc remainingQueries
findQ :: (Set q, Names) -> q -> (Set q, Names)
findQ acc sh =
foldl'
(doType sh)
( foldl'
(doTerm sh)
acc
(R.toList $ deepTerms b0)
)
(R.toList $ deepTypes b0)
doTerm q acc@(remainingSHs, names0) (r, n) =
if termMatches q r n
then (Set.delete q remainingSHs, Names.addTerm n r names0)
else acc
doType q acc@(remainingSHs, names0) (r, n) =
if typeMatches q r n
then (Set.delete q remainingSHs, Names.addType n r names0)
else acc
namesDiff :: Branch m -> Branch m -> Names.Diff namesDiff :: Branch m -> Branch m -> Names.Diff
namesDiff b1 b2 = Names.diff (toNames (head b1)) (toNames (head b2)) namesDiff b1 b2 = Names.diff (toNames (head b1)) (toNames (head b2))

View File

@ -70,12 +70,9 @@ data Branch0 m = Branch0
-- | True if a branch and its children have no definitions or edits in them. -- | True if a branch and its children have no definitions or edits in them.
-- (Computed recursively, and small enough to justify storing here to avoid computing more than once.) -- (Computed recursively, and small enough to justify storing here to avoid computing more than once.)
isEmpty0 :: Bool, isEmpty0 :: Bool,
-- names and metadata for this branch and its children -- names for this branch and its children
-- (ref, (name, value)) iff ref has metadata `value` at name `name`
deepTerms :: Relation Referent Name, deepTerms :: Relation Referent Name,
deepTypes :: Relation Reference Name, deepTypes :: Relation Reference Name,
deepTermMetadata :: Metadata.R4 Referent Name,
deepTypeMetadata :: Metadata.R4 Reference Name,
deepPaths :: Set Path, deepPaths :: Set Path,
deepEdits :: Map Name PatchHash deepEdits :: Map Name PatchHash
} }

View File

@ -5,37 +5,23 @@ import Data.Set qualified as Set
import U.Codebase.HashTags (PatchHash) import U.Codebase.HashTags (PatchHash)
import Unison.Codebase.Branch (Branch0 (..)) import Unison.Codebase.Branch (Branch0 (..))
import Unison.Codebase.Branch qualified as Branch import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Metadata qualified as Metadata
import Unison.Codebase.Patch (Patch, PatchDiff) import Unison.Codebase.Patch (Patch, PatchDiff)
import Unison.Codebase.Patch qualified as Patch import Unison.Codebase.Patch qualified as Patch
import Unison.Name (Name) import Unison.Name (Name)
import Unison.Prelude import Unison.Prelude
import Unison.Reference (Reference) import Unison.Reference (Reference)
import Unison.Referent (Referent) import Unison.Referent (Referent)
import Unison.Runtime.IOSource (isPropagatedValue)
import Unison.Util.Relation (Relation) import Unison.Util.Relation (Relation)
import Unison.Util.Relation qualified as R import Unison.Util.Relation qualified as R
import Unison.Util.Relation3 (Relation3)
import Unison.Util.Relation3 qualified as R3
import Unison.Util.Relation4 qualified as R4
data DiffType a = Create a | Delete a | Modify a deriving (Show) data DiffType a = Create a | Delete a | Modify a deriving (Show)
-- todo: maybe simplify this file using Relation3?
data NamespaceSlice r = NamespaceSlice
{ names :: Relation r Name,
metadata :: Relation3 r Name Metadata.Value
}
deriving (Show)
data DiffSlice r = DiffSlice data DiffSlice r = DiffSlice
{ -- tpatchUpdates :: Relation r r, -- old new { -- tpatchUpdates :: Relation r r, -- old new
tallnamespaceUpdates :: Map Name (Set r, Set r), tallnamespaceUpdates :: Map Name (Set r, Set r),
talladds :: Relation r Name, talladds :: Relation r Name,
tallremoves :: Relation r Name, tallremoves :: Relation r Name,
trenames :: Map r (Set Name, Set Name), -- ref (old, new) trenames :: Map r (Set Name, Set Name)
taddedMetadata :: Relation3 r Name Metadata.Value,
tremovedMetadata :: Relation3 r Name Metadata.Value
} }
deriving stock (Generic, Show) deriving stock (Generic, Show)
@ -51,10 +37,10 @@ diff0 old new = BranchDiff terms types <$> patchDiff old new
where where
(terms, types) = (terms, types) =
computeSlices computeSlices
(deepr4ToSlice (Branch.deepTerms old) (Branch.deepTermMetadata old)) (Branch.deepTerms old)
(deepr4ToSlice (Branch.deepTerms new) (Branch.deepTermMetadata new)) (Branch.deepTerms new)
(deepr4ToSlice (Branch.deepTypes old) (Branch.deepTypeMetadata old)) (Branch.deepTypes old)
(deepr4ToSlice (Branch.deepTypes new) (Branch.deepTypeMetadata new)) (Branch.deepTypes new)
patchDiff :: forall m. (Monad m) => Branch0 m -> Branch0 m -> m (Map Name (DiffType PatchDiff)) patchDiff :: forall m. (Monad m) => Branch0 m -> Branch0 m -> m (Map Name (DiffType PatchDiff))
patchDiff old new = do patchDiff old new = do
@ -79,48 +65,33 @@ patchDiff old new = do
modified <- foldM f mempty (Set.intersection (Map.keysSet oldDeepEdits) (Map.keysSet newDeepEdits)) modified <- foldM f mempty (Set.intersection (Map.keysSet oldDeepEdits) (Map.keysSet newDeepEdits))
pure $ added <> removed <> modified pure $ added <> removed <> modified
deepr4ToSlice ::
(Ord r) =>
R.Relation r Name ->
Metadata.R4 r Name ->
NamespaceSlice r
deepr4ToSlice deepNames deepMetadata =
NamespaceSlice deepNames (R4.d124 deepMetadata)
computeSlices :: computeSlices ::
NamespaceSlice Referent -> Relation Referent Name ->
NamespaceSlice Referent -> Relation Referent Name ->
NamespaceSlice Reference -> Relation Reference Name ->
NamespaceSlice Reference -> Relation Reference Name ->
(DiffSlice Referent, DiffSlice Reference) (DiffSlice Referent, DiffSlice Reference)
computeSlices oldTerms newTerms oldTypes newTypes = (termsOut, typesOut) computeSlices oldTerms newTerms oldTypes newTypes = (termsOut, typesOut)
where where
termsOut = termsOut =
let nc = allNames oldTerms newTerms let nc = R.outerJoinDomMultimaps oldTerms newTerms
nu = allNamespaceUpdates oldTerms newTerms nu = allNamespaceUpdates oldTerms newTerms
in DiffSlice in DiffSlice
{ tallnamespaceUpdates = nu, { tallnamespaceUpdates = nu,
talladds = allAdds nc nu, talladds = allAdds nc nu,
tallremoves = allRemoves nc nu, tallremoves = allRemoves nc nu,
trenames = remainingNameChanges nc, trenames = remainingNameChanges nc
taddedMetadata = addedMetadata oldTerms newTerms,
tremovedMetadata = removedMetadata oldTerms newTerms
} }
typesOut = typesOut =
let nc = allNames oldTypes newTypes let nc = R.outerJoinDomMultimaps oldTypes newTypes
nu = allNamespaceUpdates oldTypes newTypes nu = allNamespaceUpdates oldTypes newTypes
in DiffSlice in DiffSlice
{ tallnamespaceUpdates = nu, { tallnamespaceUpdates = nu,
talladds = allAdds nc nu, talladds = allAdds nc nu,
tallremoves = allRemoves nc nu, tallremoves = allRemoves nc nu,
trenames = remainingNameChanges nc, trenames = remainingNameChanges nc
taddedMetadata = addedMetadata oldTypes newTypes,
tremovedMetadata = removedMetadata oldTypes newTypes
} }
allNames :: (Ord r) => NamespaceSlice r -> NamespaceSlice r -> Map r (Set Name, Set Name)
allNames old new = R.outerJoinDomMultimaps (names old) (names new)
allAdds, allAdds,
allRemoves :: allRemoves ::
forall r. forall r.
@ -153,33 +124,14 @@ computeSlices oldTerms newTerms oldTypes newTypes = (termsOut, typesOut)
remainingNameChanges = remainingNameChanges =
Map.filter (\(old, new) -> not (null old) && not (null new) && old /= new) Map.filter (\(old, new) -> not (null old) && not (null new) && old /= new)
allNamespaceUpdates :: (Ord r) => NamespaceSlice r -> NamespaceSlice r -> Map Name (Set r, Set r) allNamespaceUpdates :: (Ord r) => Relation r Name -> Relation r Name -> Map Name (Set r, Set r)
allNamespaceUpdates old new = allNamespaceUpdates old new =
Map.filter f $ R.innerJoinRanMultimaps (names old) (names new) Map.filter f $ R.innerJoinRanMultimaps old new
where where
f (old, new) = old /= new f (old, new) = old /= new
addedMetadata :: (Ord r) => NamespaceSlice r -> NamespaceSlice r -> Relation3 r Name Metadata.Value
addedMetadata old new = metadata new `R3.difference` metadata old
removedMetadata :: (Ord r) => NamespaceSlice r -> NamespaceSlice r -> Relation3 r Name Metadata.Value
removedMetadata old new = metadata old `R3.difference` metadata new
-- the namespace updates that aren't propagated
namespaceUpdates :: (Ord r) => DiffSlice r -> Map Name (Set r, Set r) namespaceUpdates :: (Ord r) => DiffSlice r -> Map Name (Set r, Set r)
namespaceUpdates s = Map.mapMaybeWithKey f (tallnamespaceUpdates s) namespaceUpdates s = Map.mapMaybe f (tallnamespaceUpdates s)
where where
f name (olds, news) = f (olds, news) =
let news' = Set.difference news (Map.findWithDefault mempty name propagated) if null news then Nothing else Just (olds, news)
in if null news' then Nothing else Just (olds, news')
propagated = propagatedUpdates s
propagatedUpdates :: (Ord r) => DiffSlice r -> Map Name (Set r)
propagatedUpdates s =
Map.fromList
[ (name, news)
| (name, (_olds0, news0)) <- Map.toList $ tallnamespaceUpdates s,
let news = Set.filter propagated news0
propagated rnew = R3.member rnew name isPropagatedValue (taddedMetadata s),
not (null news)
]

View File

@ -6,17 +6,15 @@ module Unison.Codebase.BranchUtil
getBranch, getBranch,
getTerm, getTerm,
getType, getType,
getTermMetadataAt,
getTypeMetadataAt,
getTermMetadataHQNamed,
getTypeMetadataHQNamed,
-- * Branch modifications -- * Branch modifications
makeSetBranch, makeSetBranch,
makeAddTypeName, makeAddTypeName,
makeDeleteTypeName, makeDeleteTypeName,
makeAnnihilateTypeName,
makeAddTermName, makeAddTermName,
makeDeleteTermName, makeDeleteTermName,
makeAnnihilateTermName,
makeDeletePatch, makeDeletePatch,
makeReplacePatch, makeReplacePatch,
) )
@ -26,14 +24,10 @@ import Data.Map qualified as Map
import Data.Set qualified as Set import Data.Set qualified as Set
import Unison.Codebase.Branch (Branch, Branch0) import Unison.Codebase.Branch (Branch, Branch0)
import Unison.Codebase.Branch qualified as Branch import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Metadata (Metadata)
import Unison.Codebase.Metadata qualified as Metadata
import Unison.Codebase.Patch (Patch) import Unison.Codebase.Patch (Patch)
import Unison.Codebase.Path (Path) import Unison.Codebase.Path (Path)
import Unison.Codebase.Path qualified as Path import Unison.Codebase.Path qualified as Path
import Unison.HashQualified' (HashQualified (HashQualified, NameOnly)) import Unison.HashQualified' (HashQualified (HashQualified, NameOnly))
import Unison.HashQualified' qualified as HQ'
import Unison.NameSegment (NameSegment)
import Unison.Names (Names) import Unison.Names (Names)
import Unison.Names qualified as Names import Unison.Names qualified as Names
import Unison.Prelude import Unison.Prelude
@ -42,9 +36,7 @@ import Unison.Reference qualified as Reference
import Unison.Referent (Referent) import Unison.Referent (Referent)
import Unison.Referent qualified as Referent import Unison.Referent qualified as Referent
import Unison.ShortHash qualified as SH import Unison.ShortHash qualified as SH
import Unison.Util.List qualified as List
import Unison.Util.Relation qualified as R import Unison.Util.Relation qualified as R
import Unison.Util.Relation4 qualified as R4
import Unison.Util.Star3 qualified as Star3 import Unison.Util.Star3 qualified as Star3
-- | Creates a branch containing all of the given names, with a single history node. -- | Creates a branch containing all of the given names, with a single history node.
@ -53,10 +45,8 @@ fromNames names0 = Branch.stepManyAt (typeActions <> termActions) Branch.empty
where where
typeActions = map doType . R.toList $ Names.types names0 typeActions = map doType . R.toList $ Names.types names0
termActions = map doTerm . R.toList $ Names.terms names0 termActions = map doTerm . R.toList $ Names.terms names0
-- doTerm :: (Name, Referent) -> (Path, Branch0 m -> Branch0 m) doTerm (n, r) = makeAddTermName (Path.splitFromName n) r
doTerm (n, r) = makeAddTermName (Path.splitFromName n) r mempty -- no metadata doType (n, r) = makeAddTypeName (Path.splitFromName n) r
-- doType :: (Name, Reference) -> (Path, Branch0 m -> Branch0 m)
doType (n, r) = makeAddTypeName (Path.splitFromName n) r mempty -- no metadata
getTerm :: Path.HQSplit -> Branch0 m -> Set Referent getTerm :: Path.HQSplit -> Branch0 m -> Set Referent
getTerm (p, hq) b = case hq of getTerm (p, hq) b = case hq of
@ -66,32 +56,7 @@ getTerm (p, hq) b = case hq of
filter sh = Set.filter (SH.isPrefixOf sh . Referent.toShortHash) filter sh = Set.filter (SH.isPrefixOf sh . Referent.toShortHash)
terms = Branch._terms (Branch.getAt0 p b) terms = Branch._terms (Branch.getAt0 p b)
getTermMetadataHQNamed :: getType :: Path.HQSplit -> Branch0 m -> Set Reference.TypeReference
(Path.Path, HQ'.HQSegment) -> Branch0 m -> Metadata.R4 Referent NameSegment
getTermMetadataHQNamed (path, hqseg) b =
R4.filter (\(r, n, _t, _v) -> HQ'.matchesNamedReferent n r hqseg) terms
where
terms = Metadata.starToR4 . Branch._terms $ Branch.getAt0 path b
getTypeMetadataHQNamed ::
(Path.Path, HQ'.HQSegment) ->
Branch0 m ->
Metadata.R4 Reference NameSegment
getTypeMetadataHQNamed (path, hqseg) b =
R4.filter (\(r, n, _t, _v) -> HQ'.matchesNamedReference n r hqseg) types
where
types = Metadata.starToR4 . Branch._types $ Branch.getAt0 path b
-- todo: audit usages and maybe eliminate!
-- Only returns metadata for the term at the exact level given
getTermMetadataAt :: (Path.Path, a) -> Referent -> Branch0 m -> Metadata
getTermMetadataAt (path, _) r b = Set.fromList <$> List.multimap mdList
where
mdList :: [(Metadata.Type, Metadata.Value)]
mdList = Set.toList . R.ran . Star3.d3 . Star3.selectFact (Set.singleton r) $ terms
terms = Branch._terms $ Branch.getAt0 path b
getType :: Path.HQSplit -> Branch0 m -> Set Reference
getType (p, hq) b = case hq of getType (p, hq) b = case hq of
NameOnly n -> Star3.lookupD1 n types NameOnly n -> Star3.lookupD1 n types
HashQualified n sh -> filter sh $ Star3.lookupD1 n types HashQualified n sh -> filter sh $ Star3.lookupD1 n types
@ -99,13 +64,6 @@ getType (p, hq) b = case hq of
filter sh = Set.filter (SH.isPrefixOf sh . Reference.toShortHash) filter sh = Set.filter (SH.isPrefixOf sh . Reference.toShortHash)
types = Branch._types (Branch.getAt0 p b) types = Branch._types (Branch.getAt0 p b)
getTypeMetadataAt :: (Path.Path, a) -> Reference -> Branch0 m -> Metadata
getTypeMetadataAt (path, _) r b = Set.fromList <$> List.multimap mdList
where
mdList :: [(Metadata.Type, Metadata.Value)]
mdList = Set.toList . R.ran . Star3.d3 . Star3.selectFact (Set.singleton r) $ types
types = Branch._types $ Branch.getAt0 path b
getBranch :: Path.Split -> Branch0 m -> Maybe (Branch m) getBranch :: Path.Split -> Branch0 m -> Maybe (Branch m)
getBranch (p, seg) b = case Path.toList p of getBranch (p, seg) b = case Path.toList p of
[] -> Map.lookup seg (Branch._children b) [] -> Map.lookup seg (Branch._children b)
@ -113,24 +71,29 @@ getBranch (p, seg) b = case Path.toList p of
(Branch.head <$> Map.lookup h (Branch._children b)) (Branch.head <$> Map.lookup h (Branch._children b))
>>= getBranch (Path.fromList p, seg) >>= getBranch (Path.fromList p, seg)
makeAddTermName :: Path.Split -> Referent -> Metadata -> (Path, Branch0 m -> Branch0 m) makeAddTermName :: Path.Split -> Referent -> (Path, Branch0 m -> Branch0 m)
makeAddTermName (p, name) r md = (p, Branch.addTermName r name md) makeAddTermName (p, name) r = (p, Branch.addTermName r name)
makeDeleteTermName :: Path.Split -> Referent -> (Path, Branch0 m -> Branch0 m) makeDeleteTermName :: Path.Split -> Referent -> (Path, Branch0 m -> Branch0 m)
makeDeleteTermName (p, name) r = (p, Branch.deleteTermName r name) makeDeleteTermName (p, name) r = (p, Branch.deleteTermName r name)
makeAnnihilateTermName :: Path.Split -> (Path, Branch0 m -> Branch0 m)
makeAnnihilateTermName (p, name) = (p, Branch.annihilateTermName name)
makeAnnihilateTypeName :: Path.Split -> (Path, Branch0 m -> Branch0 m)
makeAnnihilateTypeName (p, name) = (p, Branch.annihilateTypeName name)
makeReplacePatch :: (Applicative m) => Path.Split -> Patch -> (Path, Branch0 m -> Branch0 m) makeReplacePatch :: (Applicative m) => Path.Split -> Patch -> (Path, Branch0 m -> Branch0 m)
makeReplacePatch (p, name) patch = (p, Branch.replacePatch name patch) makeReplacePatch (p, name) patch = (p, Branch.replacePatch name patch)
makeDeletePatch :: Path.Split -> (Path, Branch0 m -> Branch0 m) makeDeletePatch :: Path.Split -> (Path, Branch0 m -> Branch0 m)
makeDeletePatch (p, name) = (p, Branch.deletePatch name) makeDeletePatch (p, name) = (p, Branch.deletePatch name)
makeAddTypeName :: Path.Split -> Reference -> Metadata -> (Path, Branch0 m -> Branch0 m) makeAddTypeName :: Path.Split -> Reference -> (Path, Branch0 m -> Branch0 m)
makeAddTypeName (p, name) r md = (p, Branch.addTypeName r name md) makeAddTypeName (p, name) r = (p, Branch.addTypeName r name)
makeDeleteTypeName :: Path.Split -> Reference -> (Path, Branch0 m -> Branch0 m) makeDeleteTypeName :: Path.Split -> Reference -> (Path, Branch0 m -> Branch0 m)
makeDeleteTypeName (p, name) r = (p, Branch.deleteTypeName r name) makeDeleteTypeName (p, name) r = (p, Branch.deleteTypeName r name)
makeSetBranch :: makeSetBranch :: Path.Split -> Branch m -> (Path, Branch0 m -> Branch0 m)
Path.Split -> Branch m -> (Path, Branch0 m -> Branch0 m)
makeSetBranch (p, name) b = (p, Branch.setChildBranch name b) makeSetBranch (p, name) b = (p, Branch.setChildBranch name b)

View File

@ -11,3 +11,6 @@ class BuiltinAnnotation a where
instance BuiltinAnnotation Ann where instance BuiltinAnnotation Ann where
builtinAnnotation = Ann.Intrinsic builtinAnnotation = Ann.Intrinsic
instance BuiltinAnnotation () where
builtinAnnotation = ()

View File

@ -1,53 +0,0 @@
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RankNTypes #-}
module Unison.Codebase.Causal.FoldHistory (FoldHistoryResult (..), foldHistoryUntil) where
import Data.Map qualified as Map
import Data.Sequence qualified as Seq
import Data.Set qualified as Set
import U.Codebase.HashTags (CausalHash)
import Unison.Codebase.Causal (Causal (..), pattern Cons, pattern Merge, pattern One)
import Unison.Prelude
import Prelude hiding (head, tail)
data FoldHistoryResult a = Satisfied a | Unsatisfied a deriving (Eq, Ord, Show)
-- foldHistoryUntil some condition on the accumulator is met,
-- attempting to work backwards fairly through merge nodes
-- (rather than following one back all the way to its root before working
-- through others). Returns Unsatisfied if the condition was never satisfied,
-- otherwise Satisfied.
--
-- NOTE by RÓB: this short-circuits immediately and only looks at the first
-- entry in the history, since this operation is far too slow to be practical.
foldHistoryUntil ::
forall m e a.
(Monad m) =>
(a -> e -> (a, Bool)) ->
a ->
Causal m e ->
m (FoldHistoryResult a)
foldHistoryUntil f a c = step a mempty (pure c)
where
step :: a -> Set CausalHash -> Seq (Causal m e) -> m (FoldHistoryResult a)
step a _seen Seq.Empty = pure (Unsatisfied a)
step a seen (c Seq.:<| rest)
| currentHash c `Set.member` seen =
step a seen rest
step a seen (c Seq.:<| rest) = case f a (head c) of
(a, True) -> pure (Satisfied a)
(a, False) -> do
tails <- case c of
One {} -> pure mempty
Cons {} ->
let (_, t) = tail c
in -- if h `Set.member` seen
if not (Set.null seen) then pure mempty else Seq.singleton <$> t
Merge {} ->
fmap Seq.fromList
. traverse snd
. filter (\(_, _) -> not (Set.null seen))
. Map.toList
$ tails c
step a (Set.insert (currentHash c) seen) (rest <> tails)

View File

@ -3,11 +3,14 @@
-- | Find a computation of type '{IO} () in the codebase. -- | Find a computation of type '{IO} () in the codebase.
module Unison.Codebase.MainTerm where module Unison.Codebase.MainTerm where
import Data.List.NonEmpty qualified as NEList
import Data.Set.NonEmpty (NESet)
import Data.Set.NonEmpty qualified as NESet
import Unison.Builtin.Decls qualified as DD import Unison.Builtin.Decls qualified as DD
import Unison.HashQualified qualified as HQ import Unison.HashQualified qualified as HQ
import Unison.Name (Name) import Unison.Name (Name)
import Unison.Names qualified as Names import Unison.Names qualified as Names
import Unison.NamesWithHistory qualified as NamesWithHistory import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann) import Unison.Parser.Ann (Ann)
import Unison.Parser.Ann qualified as Parser.Ann import Unison.Parser.Ann qualified as Parser.Ann
import Unison.Prelude import Unison.Prelude
@ -39,7 +42,7 @@ getMainTerm loadTypeOfTerm parseNames mainName mainType =
case HQ.fromString mainName of case HQ.fromString mainName of
Nothing -> pure (NotAFunctionName mainName) Nothing -> pure (NotAFunctionName mainName)
Just hq -> do Just hq -> do
let refs = NamesWithHistory.lookupHQTerm hq (NamesWithHistory.NamesWithHistory parseNames mempty) let refs = Names.lookupHQTerm Names.IncludeSuffixes hq parseNames
let a = Parser.Ann.External let a = Parser.Ann.External
case toList refs of case toList refs of
[] -> pure (NotFound mainName) [] -> pure (NotFound mainName)
@ -67,14 +70,15 @@ builtinMainWithResultType a res = Type.arrow a (Type.ref a DD.unitRef) io
where where
io = Type.effect a [Type.builtinIO a, DD.exceptionType a] res io = Type.effect a [Type.builtinIO a, DD.exceptionType a] res
-- [Result] -- | All possible IO'ish test types, e.g.
resultArr :: (Ord v) => a -> Type.Type v a -- '{IO, Exception} [Result]
resultArr a = Type.app a (Type.ref a Type.listRef) (Type.ref a DD.testResultRef) -- '{IO} [Result]
builtinIOTestTypes :: forall v a. (Ord v, Var v) => a -> NESet (Type.Type v a)
builtinResultArr :: (Ord v) => a -> Type.Type v a builtinIOTestTypes a =
builtinResultArr a = Type.effect a [Type.builtinIO a, DD.exceptionType a] (resultArr a) NESet.fromList
( delayedResultWithEffects ([Type.builtinIO a, DD.exceptionType a])
-- '{io2.IO} [Result] NEList.:| [delayedResultWithEffects ([Type.builtinIO a])]
builtinTest :: (Ord v) => a -> Type.Type v a )
builtinTest a = where
Type.arrow a (Type.ref a DD.unitRef) (builtinResultArr a) delayed = Type.arrow a (Type.ref a DD.unitRef)
delayedResultWithEffects es = delayed (Type.effect a es (DD.testResultType a))

View File

@ -52,11 +52,6 @@ hasMetadataWithType a t = Set.member t . R.lookupDom a . Star3.d2
inserts :: (Ord a, Ord n) => [(a, Type, Value)] -> Star a n -> Star a n inserts :: (Ord a, Ord n) => [(a, Type, Value)] -> Star a n -> Star a n
inserts tups s = foldl' (flip insert) s tups inserts tups s = foldl' (flip insert) s tups
insertWithMetadata ::
(Ord a, Ord n) => (a, Metadata) -> Star a n -> Star a n
insertWithMetadata (a, md) =
inserts [(a, ty, v) | (ty, vs) <- Map.toList md, v <- toList vs]
insert :: (Ord a, Ord n) => (a, Type, Value) -> Star a n -> Star a n insert :: (Ord a, Ord n) => (a, Type, Value) -> Star a n -> Star a n
insert (a, ty, v) = Star3.insertD23 (a, ty, (ty, v)) insert (a, ty, v) = Star3.insertD23 (a, ty, (ty, v))

View File

@ -138,11 +138,12 @@ isRoot :: Absolute -> Bool
isRoot = Seq.null . toSeq . unabsolute isRoot = Seq.null . toSeq . unabsolute
absoluteToPath' :: Absolute -> Path' absoluteToPath' :: Absolute -> Path'
absoluteToPath' abs = Path' (Left abs) absoluteToPath' = AbsolutePath'
instance Show Path' where instance Show Path' where
show (Path' (Left abs)) = show abs show = \case
show (Path' (Right rel)) = show rel AbsolutePath' abs -> show abs
RelativePath' rel -> show rel
instance Show Absolute where instance Show Absolute where
show s = "." ++ show (unabsolute s) show s = "." ++ show (unabsolute s)
@ -151,8 +152,9 @@ instance Show Relative where
show = show . unrelative show = show . unrelative
unsplit' :: Split' -> Path' unsplit' :: Split' -> Path'
unsplit' (Path' (Left (Absolute p)), seg) = Path' (Left (Absolute (unsplit (p, seg)))) unsplit' = \case
unsplit' (Path' (Right (Relative p)), seg) = Path' (Right (Relative (unsplit (p, seg)))) (AbsolutePath' (Absolute p), seg) -> AbsolutePath' (Absolute (unsplit (p, seg)))
(RelativePath' (Relative p), seg) -> RelativePath' (Relative (unsplit (p, seg)))
unsplit :: Split -> Path unsplit :: Split -> Path
unsplit (Path p, a) = Path (p :|> a) unsplit (Path p, a) = Path (p :|> a)
@ -182,15 +184,15 @@ type HQSplitAbsolute = (Absolute, HQ'.HQSegment)
-- unprefix .foo.bar id == id (relative paths starting w/ nonmatching prefix left alone) -- unprefix .foo.bar id == id (relative paths starting w/ nonmatching prefix left alone)
-- unprefix .foo.bar foo.bar.baz == baz (relative paths w/ common prefix get stripped) -- unprefix .foo.bar foo.bar.baz == baz (relative paths w/ common prefix get stripped)
unprefix :: Absolute -> Path' -> Path unprefix :: Absolute -> Path' -> Path
unprefix (Absolute prefix) (Path' p) = case p of unprefix (Absolute prefix) = \case
Left abs -> unabsolute abs AbsolutePath' abs -> unabsolute abs
Right (unrelative -> rel) -> fromList $ dropPrefix (toList prefix) (toList rel) RelativePath' rel -> fromList $ dropPrefix (toList prefix) (toList (unrelative rel))
-- too many types -- too many types
prefix :: Absolute -> Path' -> Path prefix :: Absolute -> Path' -> Path
prefix (Absolute (Path prefix)) (Path' p) = case p of prefix (Absolute (Path prefix)) = \case
Left (unabsolute -> abs) -> abs AbsolutePath' abs -> unabsolute abs
Right (unrelative -> rel) -> Path $ prefix <> toSeq rel RelativePath' rel -> Path $ prefix <> toSeq (unrelative rel)
-- | Finds the longest shared path prefix of two paths. -- | Finds the longest shared path prefix of two paths.
-- Returns (shared prefix, path to first location from shared prefix, path to second location from shared prefix) -- Returns (shared prefix, path to first location from shared prefix, path to second location from shared prefix)
@ -218,22 +220,22 @@ relativeEmpty :: Relative
relativeEmpty = Relative empty relativeEmpty = Relative empty
relativeEmpty' :: Path' relativeEmpty' :: Path'
relativeEmpty' = Path' (Right (Relative empty)) relativeEmpty' = RelativePath' (Relative empty)
absoluteEmpty' :: Path' absoluteEmpty' :: Path'
absoluteEmpty' = Path' (Left (Absolute empty)) absoluteEmpty' = AbsolutePath' (Absolute empty)
-- | Mitchell: this function is bogus, because an empty name segment is bogus -- | Mitchell: this function is bogus, because an empty name segment is bogus
toPath' :: Path -> Path' toPath' :: Path -> Path'
toPath' = \case toPath' = \case
Path (NameSegment "" :<| tail) -> Path' . Left . Absolute . Path $ tail Path (NameSegment "" :<| tail) -> AbsolutePath' . Absolute . Path $ tail
p -> Path' . Right . Relative $ p p -> Path' . Right . Relative $ p
-- Forget whether the path is absolute or relative -- Forget whether the path is absolute or relative
fromPath' :: Path' -> Path fromPath' :: Path' -> Path
fromPath' (Path' e) = case e of fromPath' = \case
Left (Absolute p) -> p AbsolutePath' (Absolute p) -> p
Right (Relative p) -> p RelativePath' (Relative p) -> p
toList :: Path -> [NameSegment] toList :: Path -> [NameSegment]
toList = Foldable.toList . toSeq toList = Foldable.toList . toSeq
@ -301,8 +303,8 @@ fromName = fromList . List.NonEmpty.toList . Name.segments
fromName' :: Name -> Path' fromName' :: Name -> Path'
fromName' n = case take 1 (Name.toString n) of fromName' n = case take 1 (Name.toString n) of
"." -> Path' . Left . Absolute $ Path seq "." -> AbsolutePath' . Absolute $ Path seq
_ -> Path' . Right $ Relative path _ -> RelativePath' $ Relative path
where where
path = fromName n path = fromName n
seq = toSeq path seq = toSeq path
@ -366,15 +368,13 @@ fromText' :: Text -> Path'
fromText' txt = fromText' txt =
case Text.uncons txt of case Text.uncons txt of
Nothing -> relativeEmpty' Nothing -> relativeEmpty'
Just ('.', p) -> Just ('.', p) -> AbsolutePath' . Absolute $ fromText p
Path' (Left . Absolute $ fromText p) Just _ -> RelativePath' . Relative $ fromText txt
Just _ ->
Path' (Right . Relative $ fromText txt)
toText' :: Path' -> Text toText' :: Path' -> Text
toText' = \case toText' = \case
Path' (Left (Absolute path)) -> Text.cons '.' (toText path) AbsolutePath' (Absolute path) -> Text.cons '.' (toText path)
Path' (Right (Relative path)) -> toText path RelativePath' (Relative path) -> toText path
{-# COMPLETE Empty, (:<) #-} {-# COMPLETE Empty, (:<) #-}
@ -451,18 +451,18 @@ instance Snoc Path Path NameSegment NameSegment where
snoc (Path p) ns = Path (p <> pure ns) snoc (Path p) ns = Path (p <> pure ns)
instance Snoc Path' Path' NameSegment NameSegment where instance Snoc Path' Path' NameSegment NameSegment where
_Snoc = prism (uncurry snoc') $ \case _Snoc = prism (uncurry snoc') \case
Path' (Left (Lens.unsnoc -> Just (s, a))) -> Right (Path' (Left s), a) AbsolutePath' (Lens.unsnoc -> Just (s, a)) -> Right (AbsolutePath' s, a)
Path' (Right (Lens.unsnoc -> Just (s, a))) -> Right (Path' (Right s), a) RelativePath' (Lens.unsnoc -> Just (s, a)) -> Right (RelativePath' s, a)
e -> Left e e -> Left e
where where
snoc' :: Path' -> NameSegment -> Path' snoc' :: Path' -> NameSegment -> Path'
snoc' (Path' e) n = case e of snoc' = \case
Left abs -> Path' (Left . Absolute $ Lens.snoc (unabsolute abs) n) AbsolutePath' abs -> AbsolutePath' . Absolute . Lens.snoc (unabsolute abs)
Right rel -> Path' (Right . Relative $ Lens.snoc (unrelative rel) n) RelativePath' rel -> RelativePath' . Relative . Lens.snoc (unrelative rel)
instance Snoc Split' Split' NameSegment NameSegment where instance Snoc Split' Split' NameSegment NameSegment where
_Snoc = prism (uncurry snoc') $ \case _Snoc = prism (uncurry snoc') \case
-- unsnoc -- unsnoc
(Lens.unsnoc -> Just (s, a), ns) -> Right ((s, a), ns) (Lens.unsnoc -> Just (s, a), ns) -> Right ((s, a), ns)
e -> Left e e -> Left e
@ -482,10 +482,13 @@ instance Resolve Relative Relative Relative where
instance Resolve Absolute Relative Absolute where instance Resolve Absolute Relative Absolute where
resolve (Absolute l) (Relative r) = Absolute (resolve l r) resolve (Absolute l) (Relative r) = Absolute (resolve l r)
instance Resolve Absolute Relative Path' where
resolve l r = AbsolutePath' (resolve l r)
instance Resolve Path' Path' Path' where instance Resolve Path' Path' Path' where
resolve _ a@(Path' Left {}) = a resolve _ a@(AbsolutePath' {}) = a
resolve (Path' (Left a)) (Path' (Right r)) = Path' (Left (resolve a r)) resolve (AbsolutePath' a) (RelativePath' r) = AbsolutePath' (resolve a r)
resolve (Path' (Right r1)) (Path' (Right r2)) = Path' (Right (resolve r1 r2)) resolve (RelativePath' r1) (RelativePath' r2) = RelativePath' (resolve r1 r2)
instance Resolve Path' Split' Path' where instance Resolve Path' Split' Path' where
resolve l r = resolve l (unsplit' r) resolve l r = resolve l (unsplit' r)
@ -497,8 +500,8 @@ instance Resolve Absolute HQSplit HQSplitAbsolute where
resolve l (r, hq) = (resolve l (Relative r), hq) resolve l (r, hq) = (resolve l (Relative r), hq)
instance Resolve Absolute Path' Absolute where instance Resolve Absolute Path' Absolute where
resolve _ (Path' (Left a)) = a resolve _ (AbsolutePath' a) = a
resolve a (Path' (Right r)) = resolve a r resolve a (RelativePath' r) = resolve a r
instance Convert Absolute Path where convert = unabsolute instance Convert Absolute Path where convert = unabsolute

View File

@ -4,6 +4,7 @@
module Unison.Codebase.Runtime where module Unison.Codebase.Runtime where
import Data.Map qualified as Map import Data.Map qualified as Map
import Data.Set.NonEmpty (NESet)
import Unison.ABT qualified as ABT import Unison.ABT qualified as ABT
import Unison.Builtin.Decls (tupleTerm, pattern TupleTerm') import Unison.Builtin.Decls (tupleTerm, pattern TupleTerm')
import Unison.Codebase.CodeLookup qualified as CL import Unison.Codebase.CodeLookup qualified as CL
@ -42,7 +43,7 @@ data Runtime v = Runtime
FilePath -> FilePath ->
IO (Maybe Error), IO (Maybe Error),
mainType :: Type v Ann, mainType :: Type v Ann,
ioTestType :: Type v Ann ioTestTypes :: NESet (Type v Ann)
} }
type IsCacheHit = Bool type IsCacheHit = Bool

View File

@ -71,7 +71,7 @@ import Unison.DataDeclaration (Decl)
import Unison.Hash (Hash) import Unison.Hash (Hash)
import Unison.Parser.Ann (Ann) import Unison.Parser.Ann (Ann)
import Unison.Prelude import Unison.Prelude
import Unison.Reference (Reference) import Unison.Reference (Reference, TermReferenceId)
import Unison.Reference qualified as Reference import Unison.Reference qualified as Reference
import Unison.Referent qualified as Referent import Unison.Referent qualified as Referent
import Unison.ShortHash (ShortHash) import Unison.ShortHash (ShortHash)
@ -352,6 +352,14 @@ sqliteCodebase debugName root localOrRemote lockOption migrationStrategy action
termsOfTypeImpl = termsOfTypeImpl =
CodebaseOps.termsOfTypeImpl getDeclType CodebaseOps.termsOfTypeImpl getDeclType
filterTermsByReferentIdHavingTypeImpl :: Reference -> Set Referent.Id -> Sqlite.Transaction (Set Referent.Id)
filterTermsByReferentIdHavingTypeImpl =
CodebaseOps.filterReferentsHavingTypeImpl getDeclType
filterTermsByReferenceIdHavingTypeImpl :: Reference -> Set TermReferenceId -> Sqlite.Transaction (Set TermReferenceId)
filterTermsByReferenceIdHavingTypeImpl =
CodebaseOps.filterReferencesHavingTypeImpl
termsMentioningTypeImpl :: Reference -> Sqlite.Transaction (Set Referent.Id) termsMentioningTypeImpl :: Reference -> Sqlite.Transaction (Set Referent.Id)
termsMentioningTypeImpl = termsMentioningTypeImpl =
CodebaseOps.termsMentioningTypeImpl getDeclType CodebaseOps.termsMentioningTypeImpl getDeclType
@ -382,6 +390,8 @@ sqliteCodebase debugName root localOrRemote lockOption migrationStrategy action
getWatch, getWatch,
termsOfTypeImpl, termsOfTypeImpl,
termsMentioningTypeImpl, termsMentioningTypeImpl,
filterTermsByReferenceIdHavingTypeImpl,
filterTermsByReferentIdHavingTypeImpl,
termReferentsByPrefix = referentsByPrefix, termReferentsByPrefix = referentsByPrefix,
withConnection = withConn, withConnection = withConn,
withConnectionIO = withConnection debugName root withConnectionIO = withConnection debugName root

View File

@ -8,7 +8,7 @@ module Unison.Codebase.SqliteCodebase.Branch.Dependencies where
import Data.Foldable (toList) import Data.Foldable (toList)
import Data.Map (Map) import Data.Map (Map)
import Data.Map qualified as Map import Data.Map qualified as Map
import Data.Monoid.Generic (GenericMonoid (..), GenericSemigroup (..)) import Data.Semigroup.Generic (GenericSemigroupMonoid (..))
import Data.Set (Set) import Data.Set (Set)
import Data.Set qualified as Set import Data.Set qualified as Set
import GHC.Generics (Generic) import GHC.Generics (Generic)
@ -34,8 +34,7 @@ data Dependencies = Dependencies
} }
deriving (Show) deriving (Show)
deriving (Generic) deriving (Generic)
deriving (Semigroup) via GenericSemigroup Dependencies deriving (Semigroup, Monoid) via GenericSemigroupMonoid Dependencies
deriving (Monoid) via GenericMonoid Dependencies
data Dependencies' = Dependencies' data Dependencies' = Dependencies'
{ patches' :: [PatchHash], { patches' :: [PatchHash],
@ -44,8 +43,7 @@ data Dependencies' = Dependencies'
} }
deriving (Eq, Show) deriving (Eq, Show)
deriving (Generic) deriving (Generic)
deriving (Semigroup) via GenericSemigroup Dependencies' deriving (Semigroup, Monoid) via GenericSemigroupMonoid Dependencies'
deriving (Monoid) via GenericMonoid Dependencies'
to' :: Dependencies -> Dependencies' to' :: Dependencies -> Dependencies'
to' Dependencies {..} = Dependencies' (toList patches) (toList terms) (toList decls) to' Dependencies {..} = Dependencies' (toList patches) (toList terms) (toList decls)

View File

@ -273,20 +273,16 @@ branchHash2to1 :: forall m. BranchHash -> V1.Branch.NamespaceHash m
branchHash2to1 = V1.HashFor . unBranchHash branchHash2to1 = V1.HashFor . unBranchHash
reference2to1 :: V2.Reference -> V1.Reference reference2to1 :: V2.Reference -> V1.Reference
reference2to1 = \case reference2to1 = id
V2.ReferenceBuiltin t -> V1.Reference.Builtin t
V2.ReferenceDerived i -> V1.Reference.DerivedId $ referenceid2to1 i
reference1to2 :: V1.Reference -> V2.Reference reference1to2 :: V1.Reference -> V2.Reference
reference1to2 = \case reference1to2 = id
V1.Reference.Builtin t -> V2.ReferenceBuiltin t
V1.Reference.DerivedId i -> V2.ReferenceDerived (referenceid1to2 i)
referenceid1to2 :: V1.Reference.Id -> V2.Reference.Id referenceid1to2 :: V1.Reference.Id -> V2.Reference.Id
referenceid1to2 (V1.Reference.Id h i) = V2.Reference.Id h i referenceid1to2 = id
referenceid2to1 :: V2.Reference.Id -> V1.Reference.Id referenceid2to1 :: V2.Reference.Id -> V1.Reference.Id
referenceid2to1 (V2.Reference.Id h i) = V1.Reference.Id h i referenceid2to1 = id
rreferent2to1 :: (Applicative m) => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.ReferentH -> m V1.Referent rreferent2to1 :: (Applicative m) => Hash -> (V2.Reference -> m CT.ConstructorType) -> V2.ReferentH -> m V1.Referent
rreferent2to1 h lookupCT = \case rreferent2to1 h lookupCT = \case
@ -314,6 +310,11 @@ referent1to2 = \case
V1.Ref r -> V2.Ref $ reference1to2 r V1.Ref r -> V2.Ref $ reference1to2 r
V1.Con (V1.ConstructorReference r i) _ct -> V2.Con (reference1to2 r) (fromIntegral i) V1.Con (V1.ConstructorReference r i) _ct -> V2.Con (reference1to2 r) (fromIntegral i)
referentid1to2 :: V1.Referent.Id -> V2.Referent.Id
referentid1to2 = \case
V1.RefId r -> V2.RefId (referenceid1to2 r)
V1.ConId (V1.ConstructorReference r i) _ct -> V2.ConId (referenceid1to2 r) i
referentid2to1 :: (Applicative m) => (V2.Reference -> m CT.ConstructorType) -> V2.Referent.Id -> m V1.Referent.Id referentid2to1 :: (Applicative m) => (V2.Reference -> m CT.ConstructorType) -> V2.Referent.Id -> m V1.Referent.Id
referentid2to1 lookupCT = \case referentid2to1 lookupCT = \case
V2.RefId r -> pure $ V1.RefId (referenceid2to1 r) V2.RefId r -> pure $ V1.RefId (referenceid2to1 r)

View File

@ -503,6 +503,23 @@ termsMentioningTypeImpl doGetDeclType r =
Ops.termsMentioningType (Cv.reference1to2 r) Ops.termsMentioningType (Cv.reference1to2 r)
>>= Set.traverse (Cv.referentid2to1 doGetDeclType) >>= Set.traverse (Cv.referentid2to1 doGetDeclType)
filterReferencesHavingTypeImpl :: Reference -> Set Reference.Id -> Transaction (Set Reference.Id)
filterReferencesHavingTypeImpl typRef termRefs =
Ops.filterTermsByReferenceHavingType (Cv.reference1to2 typRef) (Cv.referenceid1to2 <$> toList termRefs)
<&> fmap Cv.referenceid2to1
<&> Set.fromList
filterReferentsHavingTypeImpl ::
-- | A 'getDeclType'-like lookup, possibly backed by a cache.
(C.Reference.Reference -> Transaction CT.ConstructorType) ->
Reference ->
Set Referent.Id ->
Transaction (Set Referent.Id)
filterReferentsHavingTypeImpl doGetDeclType typRef termRefs =
Ops.filterTermsByReferentHavingType (Cv.reference1to2 typRef) (Cv.referentid1to2 <$> toList termRefs)
>>= traverse (Cv.referentid2to1 doGetDeclType)
<&> Set.fromList
-- | The number of base32 characters needed to distinguish any two references in the codebase. -- | The number of base32 characters needed to distinguish any two references in the codebase.
hashLength :: Transaction Int hashLength :: Transaction Int
hashLength = pure 10 hashLength = pure 10

View File

@ -27,7 +27,7 @@ import Unison.ConstructorType qualified as CT
import Unison.DataDeclaration (Decl) import Unison.DataDeclaration (Decl)
import Unison.Hash (Hash) import Unison.Hash (Hash)
import Unison.Prelude import Unison.Prelude
import Unison.Reference (Reference) import Unison.Reference (Reference, TypeReference)
import Unison.Reference qualified as Reference import Unison.Reference qualified as Reference
import Unison.Referent qualified as Referent import Unison.Referent qualified as Referent
import Unison.ShortHash (ShortHash) import Unison.ShortHash (ShortHash)
@ -98,6 +98,10 @@ data Codebase m v a = Codebase
termsOfTypeImpl :: Reference -> Sqlite.Transaction (Set Referent.Id), termsOfTypeImpl :: Reference -> Sqlite.Transaction (Set Referent.Id),
-- | Get the set of user-defined terms-or-constructors mention the given type anywhere in their signature. -- | Get the set of user-defined terms-or-constructors mention the given type anywhere in their signature.
termsMentioningTypeImpl :: Reference -> Sqlite.Transaction (Set Referent.Id), termsMentioningTypeImpl :: Reference -> Sqlite.Transaction (Set Referent.Id),
-- | Return the subset of the given set that has the given type.
filterTermsByReferenceIdHavingTypeImpl :: TypeReference -> Set Reference.Id -> Sqlite.Transaction (Set Reference.Id),
-- | Return the subset of the given set that has the given type.
filterTermsByReferentIdHavingTypeImpl :: TypeReference -> Set Referent.Id -> Sqlite.Transaction (Set Referent.Id),
-- | Get the set of user-defined terms-or-constructors whose hash matches the given prefix. -- | Get the set of user-defined terms-or-constructors whose hash matches the given prefix.
termReferentsByPrefix :: ShortHash -> Sqlite.Transaction (Set Referent.Id), termReferentsByPrefix :: ShortHash -> Sqlite.Transaction (Set Referent.Id),
-- | Acquire a new connection to the same underlying database file this codebase object connects to. -- | Acquire a new connection to the same underlying database file this codebase object connects to.

View File

@ -100,7 +100,7 @@ hashFieldAccessors ppe declName vars declRef dd = do
} }
accessorsWithTypes :: [(v, Term.Term v (), Type.Type v ())] <- accessorsWithTypes :: [(v, Term.Term v (), Type.Type v ())] <-
for accessors \(v, _a, trm) -> for accessors \(v, _a, trm) ->
case Result.result (Typechecker.synthesize ppe Typechecker.PatternMatchCoverageCheckSwitch'Disabled typecheckingEnv trm) of case Result.result (Typechecker.synthesize ppe Typechecker.PatternMatchCoverageCheckAndKindInferenceSwitch'Disabled typecheckingEnv trm) of
Nothing -> Nothing Nothing -> Nothing
-- Note: Typechecker.synthesize doesn't normalize the output -- Note: Typechecker.synthesize doesn't normalize the output
-- type. We do so here using `Type.cleanup`, mirroring what's -- type. We do so here using `Type.cleanup`, mirroring what's

View File

@ -18,7 +18,7 @@ import Unison.Blank qualified as Blank
import Unison.Builtin qualified as Builtin import Unison.Builtin qualified as Builtin
import Unison.Name qualified as Name import Unison.Name qualified as Name
import Unison.Names qualified as Names import Unison.Names qualified as Names
import Unison.NamesWithHistory qualified as NamesWithHistory import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann) import Unison.Parser.Ann (Ann)
import Unison.Prelude import Unison.Prelude
import Unison.PrettyPrintEnv.Names qualified as PPE import Unison.PrettyPrintEnv.Names qualified as PPE
@ -90,7 +90,7 @@ computeTypecheckingEnvironment shouldUseTndr ambientAbilities typeLookupf uf =
_termsByShortname = Map.empty _termsByShortname = Map.empty
} }
ShouldUseTndr'Yes parsingEnv -> do ShouldUseTndr'Yes parsingEnv -> do
let preexistingNames = NamesWithHistory.currentNames (Parser.names parsingEnv) let preexistingNames = Parser.names parsingEnv
tm = UF.typecheckingTerm uf tm = UF.typecheckingTerm uf
possibleDeps = possibleDeps =
[ (Name.toText name, Var.name v, r) [ (Name.toText name, Var.name v, r)
@ -143,11 +143,7 @@ synthesizeFile env0 uf = do
let term = UF.typecheckingTerm uf let term = UF.typecheckingTerm uf
-- substitute Blanks for any remaining free vars in UF body -- substitute Blanks for any remaining free vars in UF body
tdnrTerm = Term.prepareTDNR term tdnrTerm = Term.prepareTDNR term
unisonFilePPE = unisonFilePPE = PPE.makePPE (PPE.hqNamer 10 (Names.shadowing (UF.toNames uf) Builtin.names)) PPE.dontSuffixify
( PPE.fromNames
10
(NamesWithHistory.shadowing (UF.toNames uf) Builtin.names)
)
Result notes mayType = Result notes mayType =
evalStateT (Typechecker.synthesizeAndResolve unisonFilePPE env0) tdnrTerm evalStateT (Typechecker.synthesizeAndResolve unisonFilePPE env0) tdnrTerm
-- If typechecking succeeded, reapply the TDNR decisions to user's term: -- If typechecking succeeded, reapply the TDNR decisions to user's term:

View File

@ -0,0 +1,90 @@
-- | Kind inference for Unison
--
-- Unison has Type, ->, and Ability kinds
--
-- An algorithm sketch: First break all decls into strongly connected
-- components in reverse topological order. Then, for each component,
-- generate kind constraints that arise from the constructors in the
-- decl to discover constraints on the decl vars. These constraints
-- are then given to a constraint solver that determines a unique kind
-- for each type variable. Unconstrained variables are defaulted to
-- kind * (just like Haskell 98). This is done by 'inferDecls'.
--
-- Afterwards, the 'SolveState' holds the kinds of all decls and we
-- can check that type annotations in terms that may mention the
-- decls are well-kinded with 'kindCheckAnnotations'.
module Unison.KindInference
( inferDecls,
kindCheckAnnotations,
KindError,
)
where
import Data.Foldable (foldlM)
import Data.Graph (flattenSCC, stronglyConnCompR)
import Data.List.NonEmpty (NonEmpty (..))
import Data.Map.Strict qualified as Map
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation)
import Unison.DataDeclaration
import Unison.KindInference.Generate (declComponentConstraints, termConstraints)
import Unison.KindInference.Solve (KindError, verify, initialState, step, defaultUnconstrainedVars)
import Unison.KindInference.Solve.Monad (Env (..), SolveState, run, runGen)
import Unison.Prelude
import Unison.PrettyPrintEnv qualified as PrettyPrintEnv
import Unison.Reference
import Unison.Term qualified as Term
import Unison.Var qualified as Var
-- | Check that every type annotation occurring in a term is
-- well-kinded with respect to the solved kinds held in the given
-- 'SolveState'.
kindCheckAnnotations ::
  forall v loc.
  (Var.Var v, Ord loc, Show loc, BuiltinAnnotation loc) =>
  PrettyPrintEnv.PrettyPrintEnv ->
  SolveState v loc ->
  Term.Term v loc ->
  Either (NonEmpty (KindError v loc)) ()
kindCheckAnnotations ppe initialState term =
  let solveEnv = Env ppe
      -- generate constraints from the term's type annotations
      (generatedCs, st') = run solveEnv initialState (runGen (termConstraints term))
   in -- solving may fail with kind errors; the final state is discarded
      step solveEnv st' generatedCs $> ()
-- | Infer the kinds of all decl vars
inferDecls ::
  forall v loc.
  (Var.Var v, BuiltinAnnotation loc, Ord loc, Show loc) =>
  PrettyPrintEnv.PrettyPrintEnv ->
  Map Reference (Decl v loc) ->
  Either (NonEmpty (KindError v loc)) (SolveState v loc)
inferDecls ppe declMap =
  defaultUnconstrainedVars <$> solveAll (intoComponents declMap)
  where
    env = Env ppe
    -- Solve one strongly connected component, threading the solver state.
    solveComponent ::
      SolveState v loc ->
      [(Reference, Decl v loc)] ->
      Either (NonEmpty (KindError v loc)) (SolveState v loc)
    solveComponent st component =
      let (cs, st') = run env st (runGen (declComponentConstraints component))
       in step env st' cs
    -- Fold the components in reverse topological order, then check the
    -- final state for conflicts.
    solveAll ::
      [[(Reference, Decl v loc)]] ->
      Either (NonEmpty (KindError v loc)) (SolveState v loc)
    solveAll = verify <=< foldlM solveComponent (initialState env)
-- | Break the decls into strongly connected components in reverse
-- topological order
intoComponents :: forall v a. Ord v => Map Reference (Decl v a) -> [[(Reference, Decl v a)]]
intoComponents declMap =
  [ [(ref, decl) | (decl, ref, _deps) <- flattenSCC scc]
    | scc <- stronglyConnCompR graphInput
  ]
  where
    -- One graph node per decl: (payload, key, outgoing edges).
    graphInput :: [(Decl v a, Reference, [Reference])]
    graphInput = [(decl, ref, declReferences decl) | (ref, decl) <- Map.toList declMap]
    declReferences :: Decl v a -> [Reference]
    declReferences = toList . typeDependencies . asDataDecl

View File

@ -0,0 +1,21 @@
module Unison.KindInference.Constraint.Context
  ( ConstraintContext (..),
  )
where

import Unison.KindInference.UVar (UVar)
import Unison.Type (Type)

-- | The context in which the constraint was generated. This is useful
-- when generating user-facing error messages.
data ConstraintContext v loc
  = -- | An application's function position: (abs var, arg var)
    AppAbs !(UVar v loc) !(UVar v loc)
  | -- | An application's argument position: (abs var, expected arg var, actual arg var)
    AppArg !(UVar v loc) !(UVar v loc) !(UVar v loc)
  | -- | An arrow type: (span, domain, codomain)
    AppArrow loc !(Type v loc) !(Type v loc)
  | Annotation
  | EffectsList
  | ScopeReference
  | TypeAnnotation
  | DeclDefinition
  | Builtin
  | ContextLookup
  deriving stock (Show, Eq, Ord)

View File

@ -0,0 +1,159 @@
{-# LANGUAGE RecursiveDo #-}
-- | Description: Pretty printers for kind inference constraints
module Unison.KindInference.Constraint.Pretty
( prettyUVarKind,
prettySolvedConstraint,
prettyCyclicUVarKind,
)
where
import Data.Map qualified as Map
import Data.Set qualified as Set
import Unison.KindInference.Constraint.Solved qualified as Solved
import Unison.KindInference.Solve.Monad
( ConstraintMap,
Env (..),
Solve (..),
SolveState (..),
find,
run,
)
import Unison.KindInference.UVar (UVar (..))
import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.Util.Pretty qualified as P
import Unison.Var (Var)
-- | Precedence of the kind arrow; renderings at tighter precedence
-- are parenthesized.
arrPrec :: Int
arrPrec = 1

-- | Render the Ability kind (precedence is irrelevant for an atom).
prettyAbility :: Int -> P.Pretty P.ColorText
prettyAbility _prec = "Ability"

-- | Render the Type kind.
prettyType :: Int -> P.Pretty P.ColorText
prettyType _prec = "Type"

-- | Render an unconstrained (unknown) kind.
prettyUnknown :: Int -> P.Pretty P.ColorText
prettyUnknown _prec = "_"

-- | Render an arrow kind, parenthesizing when the surrounding
-- precedence binds tighter than 'arrPrec'.
prettyArrow :: Int -> P.Pretty P.ColorText -> P.Pretty P.ColorText -> P.Pretty P.ColorText
prettyArrow prec lhs rhs
  | prec > arrPrec = P.parenthesize rendered
  | otherwise = rendered
  where
    rendered = lhs <> " -> " <> rhs
-- | Render one solved constraint while unwinding a cyclic kind.
-- @nameMap@ assigns display names to uvars participating in the
-- cycle; @visitingSet@ holds the uvars currently being expanded so
-- recursion stops at a back-edge. Returns the rendering together
-- with the set of cyclic uvars encountered.
prettyCyclicSolvedConstraint ::
  Var v =>
  Solved.Constraint (UVar v loc) v loc ->
  Int ->
  Map (UVar v loc) (P.Pretty P.ColorText) ->
  Set (UVar v loc) ->
  Solve v loc (P.Pretty P.ColorText, Set (UVar v loc))
prettyCyclicSolvedConstraint constraint prec nameMap visitingSet = case constraint of
  Solved.IsAbility _ -> pure (prettyAbility prec, Set.empty)
  Solved.IsType _ -> pure (prettyType prec, Set.empty)
  Solved.IsArr _ a b -> do
    (pa, cyclicLhs) <- case Set.member a visitingSet of
      -- back-edge: use the assigned name instead of recursing forever
      True -> pure (nameMap Map.! a, Set.singleton a)
      False -> prettyCyclicUVarKindWorker (arrPrec + 1) a nameMap visitingSet
    (pb, cyclicRhs) <- case Set.member b visitingSet of
      True -> pure (nameMap Map.! b, Set.singleton b)
      False -> prettyCyclicUVarKindWorker arrPrec b nameMap visitingSet
    pure (prettyArrow prec pa pb, cyclicLhs <> cyclicRhs)

-- | Look up the solved constraint on @u@ and render it, first adding
-- @u@ to the visiting set for cycle detection.
prettyCyclicUVarKindWorker ::
  Var v =>
  Int ->
  UVar v loc ->
  Map (UVar v loc) (P.Pretty P.ColorText) ->
  Set (UVar v loc) ->
  Solve v loc (P.Pretty P.ColorText, Set (UVar v loc))
prettyCyclicUVarKindWorker prec u nameMap visitingSet =
  find u >>= \case
    Nothing -> pure (prettyUnknown prec, Set.empty)
    Just c -> do
      let visitingSet1 = Set.insert u visitingSet
      prettyCyclicSolvedConstraint c prec nameMap visitingSet1
-- | Pretty print the kind constraint on the given @UVar@.
--
-- __Precondition:__ The @ConstraintMap@ is acyclic.
prettyUVarKind :: Var v => PrettyPrintEnv -> ConstraintMap v loc -> UVar v loc -> P.Pretty P.ColorText
prettyUVarKind ppe constraints uvar =
  ppRunner ppe constraints (prettyUVarKind' arrPrec uvar)

prettyUVarKind' :: Var v => Int -> UVar v loc -> Solve v loc (P.Pretty P.ColorText)
prettyUVarKind' prec u =
  -- an unsolved uvar renders as "_"
  find u >>= maybe (pure (prettyUnknown prec)) (prettySolvedConstraint' prec)
-- | Pretty print a 'Solved.Constraint'
--
-- __Precondition:__ The @ConstraintMap@ is acyclic.
prettySolvedConstraint ::
  Var v =>
  PrettyPrintEnv ->
  ConstraintMap v loc ->
  Solved.Constraint (UVar v loc) v loc ->
  P.Pretty P.ColorText
prettySolvedConstraint ppe constraints c =
  ppRunner ppe constraints (prettySolvedConstraint' arrPrec c)

prettySolvedConstraint' :: Var v => Int -> Solved.Constraint (UVar v loc) v loc -> Solve v loc (P.Pretty P.ColorText)
prettySolvedConstraint' prec = \case
  Solved.IsAbility _ -> pure (prettyAbility prec)
  Solved.IsType _ -> pure (prettyType prec)
  Solved.IsArr _ dom cod -> do
    -- the domain binds tighter than the arrow itself
    prettyDom <- prettyUVarKind' (arrPrec + 1) dom
    prettyCod <- prettyUVarKind' arrPrec cod
    pure (prettyArrow prec prettyDom prettyCod)
-- | Pretty printers for constraints need to look them up in the
-- constraint map, but no constraints are added. This runner just
-- allows running pretty printers outside of the @Solve@ monad by
-- discarding the resulting state.
ppRunner :: Var v => PrettyPrintEnv -> ConstraintMap v loc -> (forall r. Solve v loc r -> r)
ppRunner ppe constraints =
  let st =
        -- a minimal state: only the constraint map matters for lookups
        SolveState
          { unifVars = Set.empty,
            newUnifVars = [],
            constraints = constraints,
            typeMap = mempty
          }
      env = Env ppe
   in \solve -> fst (run env st solve)
-- | A pretty printer for cyclic kind constraints on a
-- @UVar@. Expresses the infinite kind by a generating equation.
--
-- __Precondition:__ The @UVar@ has a cyclic constraint.
prettyCyclicUVarKind ::
  Var v =>
  PrettyPrintEnv ->
  ConstraintMap v loc ->
  UVar v loc ->
  -- | A function to style the cyclic @UVar@'s variable name
  (P.Pretty P.ColorText -> P.Pretty P.ColorText) ->
  -- | (the pretty @UVar@ variable, the generating equation)
  (P.Pretty P.ColorText, P.Pretty P.ColorText)
prettyCyclicUVarKind ppe constraints uvar theUVarStyle = ppRunner ppe constraints do
  find uvar >>= \case
    Nothing -> explode
    Just c -> do
      -- RecursiveDo: the rendering consults @nameMap@, which is built
      -- from the cyclic uvars the rendering itself discovers.
      rec (pp, cyclicUVars) <- prettyCyclicSolvedConstraint c arrPrec nameMap (Set.singleton uvar)
          -- Name the cycle participants k, k1, k2, ...; style the one
          -- the caller asked about.
          let nameMap = snd $ foldl' phi (0 :: Int, Map.empty) cyclicUVars
              phi (n, m) a =
                let name = P.string (if n == 0 then "k" else "k" <> show n)
                    !newN = n + 1
                    prettyVar = case a == uvar of
                      True -> theUVarStyle name
                      False -> name
                    !newMap = Map.insert a prettyVar m
                 in (newN, newMap)
      case Map.lookup uvar nameMap of
        Nothing -> explode
        Just n -> pure (n, P.wrap (n <> "=" <> pp))
  where
    explode = error ("[prettyCyclicUVarKind] called with non-cyclic uvar: " <> show uvar)

View File

@ -0,0 +1,21 @@
module Unison.KindInference.Constraint.Provenance
( Provenance (..),
loc,
)
where
import Control.Lens (Lens')
import Unison.KindInference.Constraint.Context (ConstraintContext (..))
-- | A tag for a source span and context indicating where the
-- generated constraint came from. This is helpful to propagate the
-- constraint context through the solver for user-facing error
-- messages.
data Provenance v loc
  = -- | the context that produced the constraint, and its source span
    Provenance !(ConstraintContext v loc) !loc
  deriving stock (Show, Eq, Ord)
-- | Lens onto the source span carried by a 'Provenance'.
loc :: Lens' (Provenance v loc) loc
loc f (Provenance ctx x) = Provenance ctx <$> f x
{-# INLINE loc #-}

View File

@ -0,0 +1,38 @@
module Unison.KindInference.Constraint.Solved
( Constraint (..),
prov,
loc,
)
where
import Control.Lens (Traversal, Traversal')
import Unison.KindInference.Constraint.Provenance (Provenance)
import Unison.KindInference.Constraint.Provenance qualified as Provenance
import Unison.KindInference.Constraint.StarProvenance (StarProvenance)
import Unison.KindInference.Constraint.StarProvenance qualified as SP
-- | Solved constraints
--
-- These constraints are associated with unification variables during
-- kind inference.
data Constraint uv v loc
  = -- | the variable is of kind Type (may be a solver default; see 'StarProvenance')
    IsType (StarProvenance v loc)
  | -- | the variable is of kind Ability
    IsAbility (Provenance v loc)
  | -- | the variable is an arrow kind between the two given variables
    IsArr (Provenance v loc) uv uv
  deriving stock (Show, Eq, Ord)
-- | Traversal over the 'Provenance' stored in a solved constraint;
-- may change the location type.
prov ::
  Traversal
    (Constraint uv v loc)
    (Constraint uv v loc')
    (Provenance v loc)
    (Provenance v loc')
prov f constraint = case constraint of
  IsType sp -> IsType <$> SP.prov f sp
  IsAbility p -> IsAbility <$> f p
  IsArr p a b -> (\p' -> IsArr p' a b) <$> f p
{-# INLINE prov #-}
-- | Traversal over the source span of a solved constraint. Skips
-- defaulted 'IsType' constraints, which carry no provenance.
loc :: Traversal' (Constraint uv v loc) loc
loc = prov . Provenance.loc
{-# INLINE loc #-}

View File

@ -0,0 +1,28 @@
module Unison.KindInference.Constraint.StarProvenance
( StarProvenance (..),
prov,
)
where
import Control.Lens (Traversal)
import Unison.KindInference.Constraint.Provenance (Provenance)
-- | Provenance of an @IsType@ constraint. @IsType@ constraints arise
-- in constraint generation (in which case it will have a
-- @Provenance@) and also in the solver through kind-defaulting on
-- unconstrained unification variables.
data StarProvenance v loc
  = -- | generated from source, with a provenance
    NotDefault (Provenance v loc)
  | -- | introduced by the solver's kind defaulting
    Default
  deriving stock (Show, Eq, Ord)
-- | Traversal over the 'Provenance', if any; 'Default' constraints
-- have none and are left untouched.
prov ::
  Traversal
    (StarProvenance v loc)
    (StarProvenance v loc')
    (Provenance v loc)
    (Provenance v loc')
prov f sp = case sp of
  Default -> pure Default
  NotDefault p -> NotDefault <$> f p
{-# INLINE prov #-}

View File

@ -0,0 +1,56 @@
module Unison.KindInference.Constraint.Unsolved
( Constraint (..),
starProv,
prov,
loc,
)
where
import Control.Lens (Traversal, Lens, Lens')
import Unison.KindInference.Constraint.Provenance (Provenance)
import Unison.KindInference.Constraint.Provenance qualified as Provenance
-- | Unsolved constraints
--
-- These are produced during constraint generation and given as input
-- to the constraint solver.
data Constraint uv v loc starProv
  = -- | An IsType constraint may arise from generation or from the
    -- solver. During generation the provenance is always a real
    -- source code location, but the solver defaults unconstrained
    -- kind vars to Star.
    IsType uv (starProv v loc)
  | -- | the first variable is an arrow kind between the last two
    IsArr uv (Provenance v loc) uv uv
  | -- | the variable has kind Ability
    IsAbility uv (Provenance v loc)
  | -- | the two variables must have equal kinds
    Unify (Provenance v loc) uv uv
  deriving stock (Show, Eq, Ord)
-- | Traversal over the star-provenance of an 'IsType' constraint;
-- every other constructor is left untouched.
starProv ::
  Traversal
    (Constraint uv v loc prov)
    (Constraint uv v loc prov')
    (prov v loc)
    (prov' v loc)
starProv f constraint = case constraint of
  IsType x l -> IsType x <$> f l
  IsAbility x l -> pure (IsAbility x l)
  IsArr s l a b -> pure (IsArr s l a b)
  Unify l a b -> pure (Unify l a b)
{-# INLINE starProv #-}
-- | Lens onto the 'Provenance' of an unsolved constraint whose
-- star-provenance slot is 'Provenance' itself.
prov ::
  Lens
    (Constraint uv v loc Provenance)
    (Constraint uv v loc' Provenance)
    (Provenance v loc)
    (Provenance v loc')
prov f constraint = case constraint of
  IsType x l -> IsType x <$> f l
  IsAbility x l -> IsAbility x <$> f l
  IsArr s l a b -> (\l' -> IsArr s l' a b) <$> f l
  Unify l a b -> (\l' -> Unify l' a b) <$> f l
{-# INLINE prov #-}
-- | Lens onto the source span of an unsolved constraint.
loc :: Lens' (Constraint uv v loc Provenance) loc
loc = prov . Provenance.loc
{-# INLINE loc #-}

View File

@ -0,0 +1,110 @@
module Unison.KindInference.Error
( KindError (..),
lspLoc,
ConstraintConflict (..),
improveError,
)
where
import Control.Lens ((^.))
import Unison.ABT qualified as ABT
import Unison.KindInference.Constraint.Context (ConstraintContext (..))
import Unison.KindInference.Constraint.Provenance (Provenance (..))
import Unison.KindInference.Constraint.Solved qualified as Solved
import Unison.KindInference.Constraint.Unsolved qualified as Unsolved
import Unison.KindInference.Generate.Monad (GeneratedConstraint)
import Unison.KindInference.Solve.Monad
( ConstraintMap,
Solve (..),
)
import Unison.KindInference.UVar (UVar (..))
import Unison.Type (Type)
import Unison.Var (Var)
-- | Two incompatible constraints on a @UVar@.
data ConstraintConflict v loc = ConstraintConflict'
  { -- | the variable the constraints disagree about
    conflictedVar :: UVar v loc,
    -- | the expected kind constraint
    impliedConstraint :: Solved.Constraint (UVar v loc) v loc,
    -- | the given kind constraint that could not be reconciled with it
    conflictedConstraint :: Solved.Constraint (UVar v loc) v loc
  }
-- | A single source span summarizing a 'KindError' (named for LSP
-- diagnostics use — TODO confirm the consumer). Spans of the two
-- offending types are merged with @(<>)@ where applicable.
lspLoc :: Semigroup loc => KindError v loc -> loc
lspLoc = \case
  CycleDetected loc _ _ -> loc
  UnexpectedArgument _ abs arg _ -> varLoc abs <> varLoc arg
  ArgumentMismatch abs _ actual _ -> varLoc abs <> varLoc actual
  ArgumentMismatchArrow _ ConstraintConflict' {conflictedVar} _ -> varLoc conflictedVar
  EffectListMismatch ConstraintConflict' {conflictedVar} _ -> varLoc conflictedVar
  ConstraintConflict gen _ _ -> gen ^. Unsolved.loc
  where
    varLoc var = ABT.annotation $ uvarType var
-- | Errors that may arise during kind inference
--
-- A generic 'ConstraintConflict' is refined into one of the more
-- specific constructors by 'improveError', using the failed
-- constraint's 'ConstraintContext'.
data KindError v loc
  = -- | A variable is constrained to have an infinite kind
    CycleDetected loc (UVar v loc) (ConstraintMap v loc)
  | -- | Something of kind * or Effect is applied to an argument
    UnexpectedArgument
      loc
      -- ^ src span of abs
      (UVar v loc)
      -- ^ abs var
      (UVar v loc)
      -- ^ arg var
      (ConstraintMap v loc)
      -- ^ context
  | -- | An arrow kind is applied to a type, but its kind doesn't match
    -- the expected argument kind
    ArgumentMismatch
      (UVar v loc)
      -- ^ abs var
      (UVar v loc)
      -- ^ expected var
      (UVar v loc)
      -- ^ given var
      (ConstraintMap v loc)
      -- ^ context
  | -- | Same as @ArgumentMismatch@, but for applications to the builtin
    -- @Arrow@ type.
    ArgumentMismatchArrow
      (loc, Type v loc, Type v loc)
      -- ^ (The applied arrow range, lhs, rhs)
      (ConstraintConflict v loc)
      (ConstraintMap v loc)
  | -- | Something appeared in an effect list that isn't of kind Effect
    EffectListMismatch
      (ConstraintConflict v loc)
      (ConstraintMap v loc)
  | -- | Generic constraint conflict
    ConstraintConflict
      (GeneratedConstraint v loc)
      -- ^ Failed to add this constraint
      (ConstraintConflict v loc)
      -- ^ Due to this conflict
      (ConstraintMap v loc)
      -- ^ in this context
-- | Transform generic constraint conflicts into more specific error
-- by examining its @ConstraintContext@.
improveError :: Var v => KindError v loc -> Solve v loc (KindError v loc)
improveError kindError = case kindError of
  ConstraintConflict gen conflict ctx -> improveError' gen conflict ctx
  other -> pure other
-- | Dispatch on the 'ConstraintContext' of the failed constraint to
-- produce a targeted error, falling back to the generic conflict.
improveError' ::
  Var v =>
  GeneratedConstraint v loc ->
  ConstraintConflict v loc ->
  ConstraintMap v loc ->
  Solve v loc (KindError v loc)
improveError' generatedConstraint constraintConflict constraintMap =
  let Provenance ctx loc = generatedConstraint ^. Unsolved.prov
   in pure $ case ctx of
        AppAbs abs arg -> UnexpectedArgument loc abs arg constraintMap
        AppArg abs expected actual -> ArgumentMismatch abs expected actual constraintMap
        AppArrow loc dom cod -> ArgumentMismatchArrow (loc, dom, cod) constraintConflict constraintMap
        EffectsList -> EffectListMismatch constraintConflict constraintMap
        _ -> ConstraintConflict generatedConstraint constraintConflict constraintMap
-- NOTE(review): placeholder instance — every error renders as the
-- same string; user-facing rendering presumably lives in the pretty
-- printer (see Unison.KindInference.Error.Pretty) — confirm.
instance Show (KindError v loc) where
  show _ = "kind error"

View File

@ -0,0 +1,189 @@
module Unison.KindInference.Error.Pretty
( prettyKindError,
)
where
import Unison.ABT qualified as ABT
import Unison.KindInference.Constraint.Pretty
import Unison.KindInference.Error (ConstraintConflict (..), KindError (..))
import Unison.KindInference.UVar (UVar (..))
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.Type (Type)
import Unison.Util.AnnotatedText qualified as AT
import Unison.Util.ColorText (Color)
import Unison.Util.Pretty (ColorText, Pretty)
import Unison.Util.Pretty qualified as Pr
import Unison.Var (Var)
-- | Pretty print a user-facing @KindError@. One case per error
-- constructor; each case builds an annotated source excerpt plus a
-- wrapped explanatory message.
prettyKindError ::
  Var v =>
  -- | How to print types
  (Type v loc -> Pretty ColorText) ->
  -- | How to print source spans
  ([(loc, Color)] -> Pretty ColorText) ->
  -- | Contrasting colors used in error message output
  Color ->
  Color ->
  PrettyPrintEnv ->
  KindError v loc ->
  Pretty ColorText
prettyKindError prettyType showSource color1 color2 env = \case
  -- Infinite kind: show the generating equation for the cyclic kind.
  CycleDetected loc conflictedVar constraints ->
    let annotatedSrc =
          showSource
            [ (loc, color2)
            ]
        (prettyVarKind, prettyVarKindConstraint) = prettyCyclicUVarKind env constraints conflictedVar (stylePretty color1)
        theErrMsg =
          Pr.hang
            (Pr.bold "Cannot construct infinite kind")
            ( Pr.sep
                "\n"
                [ annotatedSrc,
                  Pr.wrap
                    ( "The above application constrains the kind of"
                        <> stylePretty color2 (prettyTyp conflictedVar)
                        <> "to be infinite, generated by the constraint"
                        <> prettyVarKindConstraint
                        <> "where"
                        <> stylePretty color1 prettyVarKind
                        <> "is the kind of"
                        <> Pr.group (stylePretty color2 (prettyTyp conflictedVar) <> ".")
                    )
                ]
            )
     in theErrMsg
  -- An atom (kind * or Effect) applied to an argument.
  UnexpectedArgument _loc abs arg _constraints ->
    let theErrMsg =
          Pr.hang
            (Pr.bold "Kind mismatch arising from")
            ( Pr.sep
                "\n"
                [ annotatedSrc,
                  Pr.wrap
                    ( pabs
                        <> "doesn't expect an argument; however,"
                        <> "it is applied to"
                        <> Pr.group (parg <> ".")
                    )
                ]
            )
        annotatedSrc =
          showSource
            [ (varLoc abs, color1),
              (varLoc arg, color2)
            ]
        pabs = stylePretty color1 (prettyTyp abs)
        parg = stylePretty color2 (prettyTyp arg)
     in theErrMsg
  -- An application whose argument kind differs from the expected kind.
  ArgumentMismatch abs expected actual constraints ->
    let theErrMsg =
          Pr.hang
            (Pr.bold "Kind mismatch arising from")
            ( Pr.sep
                "\n"
                [ annotatedSrc,
                  Pr.wrap
                    ( mconcat
                        [ pabs,
                          " expects an argument of kind: ",
                          Pr.group (stylePretty color1 (prettyUVarKind env constraints expected) <> ";"),
                          "however, it is applied to ",
                          parg,
                          "which has kind: ",
                          Pr.group (stylePretty color2 (prettyUVarKind env constraints actual) <> ".")
                        ]
                    )
                ]
            )
        annotatedSrc =
          showSource
            [ (varLoc abs, color1),
              (varLoc actual, color2)
            ]
        pabs = stylePretty color1 (prettyTyp abs)
        parg = stylePretty color2 (prettyTyp actual)
     in theErrMsg
  -- Same shape as ArgumentMismatch, for the builtin arrow type.
  ArgumentMismatchArrow (_loc, _cod, _dom) ConstraintConflict' {conflictedVar, impliedConstraint, conflictedConstraint} constraints ->
    let theErrMsg =
          Pr.hang
            (Pr.bold "Kind mismatch arising from")
            ( Pr.sep
                "\n"
                [ annotatedSrc,
                  Pr.wrap
                    ( mconcat
                        [ "The arrow type",
                          Pr.group ("(" <> prettyArrow <> ")"),
                          "expects arguments of kind",
                          Pr.group (stylePretty color1 (prettySolvedConstraint env constraints impliedConstraint) <> ";"),
                          "however, it is applied to",
                          parg,
                          "which has kind:",
                          Pr.group (stylePretty color2 (prettySolvedConstraint env constraints conflictedConstraint) <> ".")
                        ]
                    )
                ]
            )
        prettyArrow = stylePretty color1 "->"
        annotatedSrc =
          showSource
            [ (varLoc conflictedVar, color2)
            ]
        parg = stylePretty color2 (prettyTyp conflictedVar)
     in theErrMsg
  -- A non-ability appeared in an effect list.
  EffectListMismatch ConstraintConflict' {conflictedVar, impliedConstraint, conflictedConstraint} constraints ->
    let theErrMsg =
          Pr.hang
            (Pr.bold "Kind mismatch arising from")
            ( Pr.sep
                "\n"
                [ annotatedSrc,
                  Pr.wrap
                    ( mconcat
                        [ "An ability list must consist solely of abilities;",
                          "however, this list contains",
                          parg,
                          "which has kind",
                          Pr.group (stylePretty color2 (prettySolvedConstraint env constraints conflictedConstraint) <> "."),
                          "Abilities are of kind ",
                          Pr.group (stylePretty color1 (prettySolvedConstraint env constraints impliedConstraint) <> ".")
                        ]
                    )
                ]
            )
        annotatedSrc =
          showSource
            [ (varLoc conflictedVar, color2)
            ]
        parg = stylePretty color2 (prettyTyp conflictedVar)
     in theErrMsg
  -- Fallback: a generic expected-vs-given kind conflict.
  ConstraintConflict _generatedConstraint ConstraintConflict' {conflictedVar, impliedConstraint, conflictedConstraint} constraints ->
    let prettySolvedConstraint' c = Pr.bold (prettySolvedConstraint env constraints c)
        theErrMsg =
          Pr.hang
            (Pr.bold "Kind mismatch arising from")
            ( Pr.sep
                "\n"
                [ annotatedSrc,
                  "Expected kind: " <> stylePretty color1 (prettySolvedConstraint' impliedConstraint),
                  "Given kind: " <> stylePretty color2 (prettySolvedConstraint' conflictedConstraint)
                ]
            )
        annotatedSrc =
          showSource
            [ (varLoc conflictedVar, color2)
            ]
     in theErrMsg
  where
    varLoc var = ABT.annotation $ uvarType var
    prettyTyp = prettyType . uvarType
    stylePretty :: Color -> Pretty ColorText -> Pretty ColorText
    stylePretty = Pr.map . AT.annotate

View File

@ -0,0 +1,443 @@
module Unison.KindInference.Generate
( typeConstraints,
termConstraints,
declComponentConstraints,
builtinConstraints,
)
where
import Control.Lens ((^.))
import Data.Foldable (foldlM)
import Data.Set qualified as Set
import U.Core.ABT qualified as ABT
import Unison.Builtin.Decls (rewriteTypeRef)
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation (builtinAnnotation))
import Unison.ConstructorReference (GConstructorReference (..))
import Unison.DataDeclaration (Decl, asDataDecl)
import Unison.DataDeclaration qualified as DD
import Unison.Kind qualified as Unison
import Unison.KindInference.Constraint.Context (ConstraintContext (..))
import Unison.KindInference.Constraint.Provenance (Provenance (..))
import Unison.KindInference.Constraint.Provenance qualified as Provenance
import Unison.KindInference.Constraint.Unsolved (Constraint (..))
import Unison.KindInference.Generate.Monad (Gen, GeneratedConstraint, freshVar, pushType, lookupType, scopedType)
import Unison.KindInference.UVar (UVar)
import Unison.Prelude
import Unison.Reference (Reference)
import Unison.Term qualified as Term
import Unison.Type qualified as Type
import Unison.Var (Type (User), Var (typed), freshIn)
-- | A tree of generated constraints, mirroring the structure they
-- were generated from, so a 'TreeWalk' can order them for the solver.
-- Per 'flatten': a 'Constraint' is emitted before its subtree's
-- constraints, a 'ParentConstraint' after them (under 'bottomUp'),
-- and 'StrictOrder' emits the left subtree entirely before the right.
data ConstraintTree v loc
  = Node [ConstraintTree v loc]
  | Constraint (GeneratedConstraint v loc) (ConstraintTree v loc)
  | ParentConstraint (GeneratedConstraint v loc) (ConstraintTree v loc)
  | StrictOrder (ConstraintTree v loc) (ConstraintTree v loc)
-- | A strategy for combining the (down, up) difference-list segments
-- produced while flattening a 'ConstraintTree'; see 'flatten'.
newtype TreeWalk = TreeWalk (forall a. ([a] -> [a]) -> [([a] -> [a], [a] -> [a])] -> [a] -> [a])

-- | The walk used in this module: each child's down-segment is
-- followed by its up-segment, so 'ParentConstraint's come after the
-- constraints of the subtree they wrap.
bottomUp :: TreeWalk
bottomUp = TreeWalk \down pairs0 -> foldr (\(d, u) b -> d . u . b) id pairs0 . down
-- | Flatten a 'ConstraintTree' into an ordered constraint list using
-- the given 'TreeWalk'. Segments are built as difference lists to
-- keep concatenation cheap.
flatten :: TreeWalk -> ConstraintTree v loc -> [GeneratedConstraint v loc]
flatten (TreeWalk f) = ($ []) . flattenTop
  where
    flattenTop :: ConstraintTree v loc -> [GeneratedConstraint v loc] -> [GeneratedConstraint v loc]
    flattenTop t0 =
      f id [flattenRec id t0]
    -- Produce the (down, up) difference-list pair for one subtree.
    flattenRec ::
      ([GeneratedConstraint v loc] -> [GeneratedConstraint v loc]) ->
      ConstraintTree v loc ->
      ([GeneratedConstraint v loc] -> [GeneratedConstraint v loc], [GeneratedConstraint v loc] -> [GeneratedConstraint v loc])
    flattenRec down = \case
      Node cts ->
        let pairs = map (flattenRec id) cts
         in (f down pairs, id)
      -- a plain Constraint joins the down-segment
      Constraint c ct -> flattenRec (down . (c :)) ct
      -- a ParentConstraint joins the up-segment
      ParentConstraint c ct ->
        let (down', up) = flattenRec down ct
         in (down', up . (c :))
      -- both sides are fully flattened so left strictly precedes right
      StrictOrder a b ->
        let as = flattenTop a
            bs = flattenTop b
         in (f down [(as . bs, id)], id)
-- | Build the constraint tree for a type; @resultVar@ is constrained
-- to stand for the kind of @term@.
typeConstraintTree :: (Var v, Ord loc) => UVar v loc -> Type.Type v loc -> Gen v loc (ConstraintTree v loc)
typeConstraintTree resultVar term@ABT.Term {annotation, out} = do
  case out of
    ABT.Abs _ _ -> error "[typeConstraintTree] malformed type: Abs without an enclosing Forall or IntroOuter"
    ABT.Var v ->
      -- type vars must already be in scope (pushed by Forall/IntroOuter)
      lookupType (Type.var annotation v) >>= \case
        Nothing -> error ("[typeConstraintTree] bug: encountered var " <> show v <> " missing from context")
        Just x -> pure $ Constraint (Unify (Provenance ContextLookup annotation) resultVar x) (Node [])
    ABT.Cycle _ -> error "[typeConstraintTree] malformed type: Encountered Cycle in a type?"
    ABT.Tm t0 -> case t0 of
      Type.Arrow dom cod -> do
        -- an arrow type has kind Type, and so do its domain and codomain
        let ctx = AppArrow annotation dom cod
        k1 <- freshVar dom
        domConstraints <- typeConstraintTree k1 dom
        k2 <- freshVar cod
        codConstraints <- typeConstraintTree k2 cod
        pure $
          Constraint
            (IsType resultVar (Provenance ctx annotation))
            ( Node
                [ ParentConstraint (IsType k1 (Provenance ctx $ ABT.annotation dom)) domConstraints,
                  ParentConstraint (IsType k2 (Provenance ctx $ ABT.annotation cod)) codConstraints
                ]
            )
      Type.App abs arg -> do
        -- abs must be an arrow kind whose domain unifies with arg's kind
        absVar <- freshVar abs
        absArgVar <- freshVar arg
        absConstraints <- typeConstraintTree absVar abs
        argVar <- freshVar arg
        argConstraints <- typeConstraintTree argVar arg
        let wellKindedAbs = IsArr absVar (Provenance (AppAbs absVar absArgVar) (ABT.annotation abs)) absArgVar resultVar
            applicationUnification = Unify (Provenance (AppArg absVar absArgVar argVar) (ABT.annotation arg)) absArgVar argVar
        pure $
          Constraint
            applicationUnification
            ( Node
                [ ParentConstraint wellKindedAbs absConstraints,
                  argConstraints
                ]
            )
      Type.Forall ABT.Term {annotation, out} -> case out of
        ABT.Abs v x ->
          -- keep the bound variable in scope while visiting the body
          scopedType (Type.var annotation v) \_ -> do
            typeConstraintTree resultVar x
        _ -> error "[typeConstraintTree] Forall wrapping a non-abs"
      Type.IntroOuter ABT.Term {annotation, out} -> case out of
        ABT.Abs v x -> handleIntroOuter v annotation (\c -> Constraint c <$> typeConstraintTree resultVar x)
        _ -> error "[typeConstraintTree] IntroOuter wrapping a non-abs"
      Type.Ann x kind -> do
        ct <- typeConstraintTree resultVar x
        gcs <- constrainToKind (Provenance Annotation annotation) resultVar (fromUnisonKind kind)
        pure (foldr Constraint ct gcs)
      Type.Ref r ->
        lookupType (Type.ref annotation r) >>= \case
          Nothing -> error ("[typeConstraintTree] Ref lookup failure: " <> show term)
          Just x -> pure $ Constraint (Unify (Provenance ContextLookup annotation) resultVar x) (Node [])
      Type.Effect effTyp b -> do
        -- the effect's constraints are generated but not tied to resultVar
        effKind <- freshVar effTyp
        effConstraints <- typeConstraintTree effKind effTyp
        restConstraints <- typeConstraintTree resultVar b
        pure $ Node [effConstraints, restConstraints]
      Type.Effects effs -> do
        Node <$> for effs \eff -> do
          effKind <- freshVar eff
          effConstraints <- typeConstraintTree effKind eff
          pure $ ParentConstraint (IsAbility effKind (Provenance EffectsList $ ABT.annotation eff)) effConstraints
-- | Generate kind constraints arising from a given type. The given
-- @UVar@ is constrained to have the kind of the given type.
typeConstraints :: (Var v, Ord loc) => UVar v loc -> Type.Type v loc -> Gen v loc [GeneratedConstraint v loc]
typeConstraints resultVar typ = do
  tree <- typeConstraintTree resultVar typ
  pure (flatten bottomUp tree)
-- | Emit a Unify constraint connecting an IntroOuter-bound variable
-- to the matching Forall-bound variable already in scope, passing it
-- to the continuation @k@.
handleIntroOuter :: Var v => v -> loc -> (GeneratedConstraint v loc -> Gen v loc r) -> Gen v loc r
handleIntroOuter v loc k = do
  let typ = Type.var loc v
  new <- freshVar typ
  orig <-
    lookupType typ >>= \case
      Nothing -> error "[malformed type] IntroOuter not in lexical scope of matching Forall"
      Just a -> pure a
  k (Unify (Provenance ScopeReference loc) new orig)
-- | Helper for @termConstraints@ that instantiates the outermost
-- foralls and keeps the type in scope (in the type map) while
-- checking lexically nested type annotations.
instantiateType ::
  forall v loc r.
  (Var v, Ord loc) =>
  Type.Type v loc ->
  (Type.Type v loc -> [GeneratedConstraint v loc] -> Gen v loc r) ->
  Gen v loc r
instantiateType type0 k =
  let go acc = \case
        ABT.Tm' (Type.Forall ABT.Term {annotation, out = ABT.Abs x t}) -> do
          scopedType (Type.var annotation x) \_ -> do
            go acc t
        ABT.Tm' (Type.IntroOuter ABT.Term {annotation, out = ABT.Abs x t}) -> do
          handleIntroOuter x annotation \c -> go (c : acc) t
        -- reached the body: hand back the stripped type and the
        -- accumulated constraints in generation order
        t -> k t (reverse acc)
   in go [] type0
-- | Check that all annotations in a term are well-kinded
termConstraints :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc [GeneratedConstraint v loc]
termConstraints term = fmap (flatten bottomUp) (termConstraintTree term)
-- | Build the constraint tree covering every type annotation in a
-- term (rewrite-machinery annotations are stripped first).
termConstraintTree :: forall v loc. (Var v, Ord loc) => Term.Term v loc -> Gen v loc (ConstraintTree v loc)
termConstraintTree = fmap Node . dfAnns processAnn cons nil . hackyStripAnns
  where
    -- One annotation: instantiate its outer binders, constrain it to
    -- kind Type, and prepend its tree to the remaining annotations'.
    processAnn :: loc -> Type.Type v loc -> Gen v loc [ConstraintTree v loc] -> Gen v loc [ConstraintTree v loc]
    processAnn ann typ mrest = do
      instantiateType typ \typ gcs -> do
        typKind <- freshVar typ
        annConstraints <- ParentConstraint (IsType typKind (Provenance TypeAnnotation ann)) <$> typeConstraintTree typKind typ
        let annConstraints' = foldr Constraint annConstraints gcs
        rest <- mrest
        pure (annConstraints' : rest)
    cons mlhs mrhs = (++) <$> mlhs <*> mrhs
    nil = pure []
-- | Process type annotations depth-first. Allows processing
-- annotations with lexical scoping.
--
-- @annAlg@ handles an annotation node (location, annotation type,
-- result for the annotated subterm); @cons@/@nil@ combine results
-- from sibling subterms.
dfAnns :: (loc -> Type.Type v loc -> b -> b) -> (b -> b -> b) -> b -> Term.Term v loc -> b
dfAnns annAlg cons nil = ABT.cata \ann abt0 -> case abt0 of
  ABT.Var _ -> nil
  ABT.Cycle x -> x
  ABT.Abs _ x -> x
  ABT.Tm t -> case t of
    Term.Ann trm typ -> annAlg ann typ trm
    x -> foldr cons nil x
-- Our rewrite signature machinery generates type annotations that are
-- not well kinded. Work around this for now by stripping those
-- annotations.
--
-- The Bool paired with each rebuilt subterm means "this subterm is a
-- constructor of the rewrite type"; annotations are stripped from the
-- argument of any application whose head has that flag set.
hackyStripAnns :: Ord v => Term.Term v loc -> Term.Term v loc
hackyStripAnns =
  snd . ABT.cata \ann abt0 -> case abt0 of
    ABT.Var v -> (False, ABT.var ann v)
    ABT.Cycle (_, x) -> (False, ABT.cycle ann x)
    ABT.Abs v (_, x) -> (False, ABT.abs ann v x)
    ABT.Tm tm0 -> case tm0 of
      Term.App (isHack, abs) (_, arg) ->
        let argMod = case isHack of
              True -> stripAnns
              False -> id
         in (isHack, Term.app ann abs (argMod arg))
      Term.Constructor cref@(ConstructorReference r _) ->
        let isHack = r == rewriteTypeRef
         in (isHack, Term.constructor ann cref)
      t -> (False, ABT.tm ann (snd <$> t))
  where
    -- Remove every Ann node in the subterm, keeping the annotated term.
    stripAnns = ABT.cata \ann abt0 -> case abt0 of
      ABT.Var v -> ABT.var ann v
      ABT.Cycle x -> ABT.cycle ann x
      ABT.Abs v x -> ABT.abs ann v x
      ABT.Tm tm0 -> case tm0 of
        Term.Ann trm _typ -> trm
        t -> ABT.tm ann t
-- | Generate kind constraints for a mutually recursive component of
-- data/ability declarations, flattened bottom-up for the solver.
declComponentConstraints ::
  forall v loc.
  (Var v, Ord loc) =>
  [(Reference, Decl v loc)] ->
  Gen v loc [GeneratedConstraint v loc]
declComponentConstraints = fmap (flatten bottomUp) . declComponentConstraintTree
-- | Build the constraint tree for a mutually recursive decl
-- component. All decl kind variables are pushed first so the decls
-- can refer to each other; the decl-level constraints are ordered
-- strictly before the constructor constraints.
declComponentConstraintTree ::
  forall v loc.
  (Var v, Ord loc) =>
  [(Reference, Decl v loc)] ->
  Gen v loc (ConstraintTree v loc)
declComponentConstraintTree decls = do
  decls <- for decls \(ref, decl) -> do
    -- Add a kind variable for every datatype
    declKind <- pushType (Type.ref (DD.annotation $ asDataDecl decl) ref)
    pure (ref, decl, declKind)
  (declConstraints, constructorConstraints) <- unzip <$> for decls \(ref, decl, declKind) -> do
    let declAnn = DD.annotation $ asDataDecl decl
    let declType = Type.ref declAnn ref
    -- Unify the datatype with @k_1 -> ... -> k_n -> *@ where @n@ is
    -- the number of type parameters
    let tyVars = map (\tyVar -> Type.var declAnn tyVar) (DD.bound $ asDataDecl decl)
    tyvarKinds <- for tyVars \tyVar -> do
      -- it would be nice to annotate these type vars with their
      -- precise location, but that information doesn't seem to be
      -- available via "DataDeclaration", so we currently settle for
      -- the whole decl annotation.
      k <- freshVar tyVar
      pure (k, tyVar)
    let tyvarKindsOnly = map fst tyvarKinds
    constructorConstraints <-
      Node <$> for (DD.constructors' $ asDataDecl decl) \(constructorAnn, _, constructorType) -> do
        withInstantiatedConstructorType declType tyvarKindsOnly constructorType \constructorType -> do
          constructorKind <- freshVar constructorType
          ct <- typeConstraintTree constructorKind constructorType
          pure $ ParentConstraint (IsType constructorKind (Provenance DeclDefinition constructorAnn)) ct
    (fullyAppliedKind, _fullyAppliedType, declConstraints) <-
      let phi (dk, dt, cts) (ak, at) = do
            -- introduce a kind uvar for each app node
            let t' = Type.app declAnn dt at
            v <- freshVar t'
            let cts' = Constraint (IsArr dk (Provenance DeclDefinition declAnn) ak v) cts
            pure (v, t', cts')
       in foldlM phi (declKind, declType, Node []) tyvarKinds
    -- The saturated application of the decl is an ability for effect
    -- decls and a Type for data decls.
    let finalDeclConstraints = case decl of
          Left _effectDecl -> Constraint (IsAbility fullyAppliedKind (Provenance DeclDefinition declAnn)) declConstraints
          Right _dataDecl -> Constraint (IsType fullyAppliedKind (Provenance DeclDefinition declAnn)) declConstraints
    pure (finalDeclConstraints, constructorConstraints)
  pure (Node declConstraints `StrictOrder` Node constructorConstraints)
-- | This is a helper to unify the kind constraints on type variables
-- across a decl's constructors.
--
-- With a decl like
--
-- @
-- unique type T a = C0 Nat a | C1 (a Nat)
-- @
--
-- @C0@ will have type @forall a. Nat -> a -> T a@ and @C1@ will have
-- type @forall a. (a Nat) -> T a@. We must unify the kind constraints
-- that the two constructors make on @a@ in order to determine the
-- kind of @a@ (or observe that there are contradictory
-- constraints). In this example @C0@ constrains @a@ to be of type *
-- because it is applied to the arrow type, whereas @C1@ constrains
-- @a@ to be of kind * -> * since it is applied to a Nat.
--
-- We unify these variables by instantiating the outermost foralls
-- with fresh kind variables, then follow any arrows to find the
-- result type which must have type @T b@ for some b, then unify @b@
-- with some kind variable representing the unification of @a@ for
-- each constructor.
withInstantiatedConstructorType ::
  forall v loc.
  (Var v, Ord loc) =>
  Type.Type v loc ->
  [UVar v loc] ->
  Type.Type v loc ->
  (Type.Type v loc -> Gen v loc (ConstraintTree v loc)) ->
  Gen v loc (ConstraintTree v loc)
withInstantiatedConstructorType declType tyParams0 constructorType0 k =
  let -- Strip outer foralls, keeping each bound var in scope while
      -- processing the rest of the constructor type.
      goForall constructorType = case constructorType of
        ABT.Tm' (Type.Forall ABT.Term {annotation, out = ABT.Abs x t}) -> do
          scopedType (Type.var annotation x) \_ -> do
            goForall t
        _ -> do
          cs <- goArrow constructorType
          rest <- k constructorType
          -- unification constraints must be solved before the
          -- constructor's own constraint tree
          pure $ StrictOrder (foldr Constraint (Node []) cs) rest
      -- Walk to the constructor's result type, which must mention declType.
      goArrow :: Type.Type v loc -> Gen v loc [GeneratedConstraint v loc]
      goArrow = \case
        Type.Arrow' _ o -> goArrow o
        Type.Effect' es _ -> goEffs es
        resultTyp@(Type.Apps' f xs)
          | f == declType -> unifyVars resultTyp xs
        f | f == declType -> pure []
        resultTyp -> error ("[goArrow] unexpected result type: " <> show resultTyp)
      -- For ability decls the decl occurs in the effect row of the result.
      goEffs = \case
        [] -> error ("[goEffs] couldn't find the expected ability: " <> show declType)
        e : es
          | e == declType -> pure []
          | Type.Apps' f xs <- e,
            f == declType ->
              unifyVars e xs
          | otherwise -> goEffs es
      -- Unify each type argument's in-scope UVar with the shared
      -- per-decl type-parameter UVar.
      unifyVars :: Type.Type v loc -> [Type.Type v loc] -> Gen v loc [GeneratedConstraint v loc]
      unifyVars typ vs = for (zip vs tyParams0) \(v, tp) -> do
        lookupType v >>= \case
          Nothing -> error ("[unifyVars] unknown type in decl result: " <> show v)
          Just x ->
            pure (Unify (Provenance DeclDefinition (ABT.annotation typ)) x tp)
   in goForall constructorType0
-- | Kind constraints for the builtin types, flattened bottom-up for
-- the solver.
builtinConstraints :: forall v loc. (Ord loc, BuiltinAnnotation loc, Var v) => Gen v loc [GeneratedConstraint v loc]
builtinConstraints = fmap (flatten bottomUp) builtinConstraintTree
-- | Kind constraints for builtin types, grouped by the kind each
-- builtin is declared to have.
builtinConstraintTree :: forall v loc. (Ord loc, BuiltinAnnotation loc, Var v) => Gen v loc (ConstraintTree v loc)
builtinConstraintTree =
  mergeTrees
    [ -- builtins of kind Type (*)
      traverse
        (constrain Type)
        [ Type.nat,
          Type.int,
          Type.float,
          Type.boolean,
          Type.text,
          Type.char,
          Type.bytes,
          Type.any,
          Type.termLink,
          Type.typeLink,
          Type.fileHandle,
          flip Type.ref Type.filePathRef,
          Type.threadId,
          Type.socket,
          Type.processHandle,
          Type.ibytearrayType,
          flip Type.ref Type.charClassRef,
          flip Type.ref Type.tlsRef,
          flip Type.ref Type.tlsClientConfigRef,
          flip Type.ref Type.tlsServerConfigRef,
          flip Type.ref Type.tlsSignedCertRef,
          flip Type.ref Type.tlsPrivateKeyRef,
          flip Type.ref Type.tlsCipherRef,
          flip Type.ref Type.tlsVersionRef,
          flip Type.ref Type.codeRef,
          flip Type.ref Type.valueRef,
          flip Type.ref Type.timeSpecRef,
          flip Type.ref Type.hashAlgorithmRef
        ],
      -- type constructors of kind * -> *
      traverse
        (constrain (Type :-> Type))
        [ Type.list,
          Type.iarrayType,
          flip Type.ref Type.mvarRef,
          flip Type.ref Type.tvarRef,
          flip Type.ref Type.ticketRef,
          flip Type.ref Type.promiseRef,
          flip Type.ref Type.patternRef
        ],
      -- abilities
      traverse
        (constrain Ability)
        [ Type.builtinIO,
          flip Type.ref Type.stmRef
        ],
      -- ability constructors parameterized by a type
      traverse
        (constrain (Type :-> Ability))
        [flip Type.ref Type.scopeRef],
      -- type constructors parameterized by an ability
      traverse
        (constrain (Ability :-> Type))
        [Type.mbytearrayType],
      traverse
        (constrain (Ability :-> Type :-> Type))
        [Type.effectType, Type.marrayType, Type.refType]
    ]
  where
    mergeTrees :: [Gen v loc [ConstraintTree v loc]] -> Gen v loc (ConstraintTree v loc)
    mergeTrees = fmap (Node . concat) . sequence
    -- Push a UVar for the builtin and constrain it to the given kind.
    constrain :: Kind -> (loc -> Type.Type v loc) -> Gen v loc (ConstraintTree v loc)
    constrain k t = do
      kindVar <- pushType (t builtinAnnotation)
      foldr Constraint (Node []) <$> constrainToKind (Provenance Builtin builtinAnnotation) kindVar k
-- | Emit the constraints that pin @resultVar0@ to a concrete 'Kind',
-- creating fresh intermediate UVars for the domain/codomain of each
-- arrow. Returned as a difference list applied to @[]@.
constrainToKind :: (Var v) => Provenance v loc -> UVar v loc -> Kind -> Gen v loc [GeneratedConstraint v loc]
constrainToKind prov resultVar0 = fmap ($ []) . go resultVar0
  where
    go resultVar = \case
      Type -> do
        pure (IsType resultVar prov :)
      Ability -> do
        pure (IsAbility resultVar prov :)
      lhs :-> rhs -> do
        -- placeholder type vars to label the fresh kind UVars; the
        -- symbol itself need not be globally fresh here
        let inputTypeVar = Type.var (prov ^. Provenance.loc) (freshIn Set.empty (typed (User "a")))
        let outputTypeVar = Type.var (prov ^. Provenance.loc) (freshIn Set.empty (typed (User "a")))
        input <- freshVar inputTypeVar
        output <- freshVar outputTypeVar
        ctl <- go input lhs
        ctr <- go output rhs
        pure ((IsArr resultVar prov input output :) . ctl . ctr)
-- | The kinds handled by kind inference: @Type@ (i.e. @*@),
-- @Ability@, and arrow kinds built with @:->@.
data Kind = Type | Ability | Kind :-> Kind

infixr 9 :->
-- | Translate a surface-syntax 'Unison.Kind' into the internal 'Kind'
-- representation used during constraint generation.
fromUnisonKind :: Unison.Kind -> Kind
fromUnisonKind k0 = case k0 of
  Unison.Star -> Type
  Unison.Arrow lhs rhs -> fromUnisonKind lhs :-> fromUnisonKind rhs

View File

@ -0,0 +1,96 @@
module Unison.KindInference.Generate.Monad
( Gen (..),
GenState (..),
GeneratedConstraint,
run,
freshVar,
pushType,
popType,
scopedType,
lookupType,
)
where
import Control.Monad.State.Strict
import Data.Functor.Compose
import Data.List.NonEmpty (NonEmpty ((:|)))
import Data.List.NonEmpty qualified as NonEmpty
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Unison.KindInference.Constraint.Provenance (Provenance)
import Unison.KindInference.Constraint.Unsolved (Constraint (..))
import Unison.KindInference.UVar (UVar (..))
import Unison.Prelude
import Unison.Symbol
import Unison.Type qualified as T
import Unison.Var
-- | A constraint as produced by the generation phase: variables are
-- 'UVar's and each constraint carries a 'Provenance' for error reporting.
type GeneratedConstraint v loc = Constraint (UVar v loc) v loc Provenance
-- | State threaded through constraint generation.
data GenState v loc = GenState
  { -- | every symbol handed out so far, so fresh names stay fresh
    unifVars :: !(Set Symbol),
    -- | types currently in lexical scope mapped to their UVars,
    -- innermost binding at the head
    typeMap :: !(Map (T.Type v loc) (NonEmpty (UVar v loc))),
    -- | UVars created since this state was (re)initialized, newest first
    newVars :: [UVar v loc]
  }
  deriving stock (Generic)
-- | The constraint-generation monad: a pure state transformer over
-- 'GenState' (derived via 'State').
newtype Gen v loc a = Gen
  { unGen :: GenState v loc -> (a, GenState v loc)
  }
  deriving
    ( Functor,
      Applicative,
      Monad,
      MonadState (GenState v loc)
    )
    via State (GenState v loc)
-- | Run a 'Gen' computation from the given initial state, returning
-- the result and the final state.
run :: Gen v loc a -> GenState v loc -> (a, GenState v loc)
run = unGen
-- | Create a unique @UVar@ associated with @typ@. The new symbol is
-- recorded in 'unifVars' (so later vars stay fresh) and the UVar is
-- prepended to 'newVars'.
freshVar :: Var v => T.Type v loc -> Gen v loc (UVar v loc)
freshVar typ = do
  st@GenState {unifVars, newVars} <- get
  let var :: Symbol
      var = freshIn unifVars (typed (Inference Other))
      uvar = UVar var typ
      unifVars' = Set.insert var unifVars
  put st {unifVars = unifVars', newVars = uvar : newVars}
  pure uvar
-- | Associate a fresh @UVar@ with @t@, push onto context
--
-- The fresh UVar becomes the head of the NonEmpty stack for @t@ in
-- the type map, shadowing any previous association until 'popType'.
pushType :: Var v => T.Type v loc -> Gen v loc (UVar v loc)
pushType t = do
  GenState {typeMap} <- get
  (var, newTypeMap) <-
    -- Compose lets Map.alterF both produce the fresh var and the
    -- updated map in a single traversal.
    let f = \case
          Nothing -> Compose $ (\v -> (v, Just (v :| []))) <$> freshVar t
          Just xs -> Compose $ (\v -> (v, Just (NonEmpty.cons v xs))) <$> freshVar t
     in getCompose $ Map.alterF f t typeMap
  modify \st -> st {typeMap = newTypeMap}
  pure var
-- | Look up the @UVar@ most recently pushed for @t@, if any.
lookupType :: Var v => T.Type v loc -> Gen v loc (Maybe (UVar v loc))
lookupType t =
  gets (fmap NonEmpty.head . Map.lookup t . typeMap)
-- | Undo a 'pushType': drop the most recent @UVar@ associated with
-- @t@, removing the entry entirely when the stack empties.
popType :: Var v => T.Type v loc -> Gen v loc ()
popType t = do
  modify \st -> st {typeMap = del (typeMap st)}
  where
    del m =
      let f = \case
            Nothing -> Nothing
            Just (_ :| ys) -> case ys of
              [] -> Nothing
              x : xs -> Just (x :| xs)
       in Map.alter f t m
-- | Run an action with a fresh @UVar@ for @t@ in scope, popping it
-- afterwards (bracket-style; note no exception safety is needed in
-- this pure state monad).
scopedType :: Var v => T.Type v loc -> (UVar v loc -> Gen v loc r) -> Gen v loc r
scopedType t m = do
  s <- pushType t
  r <- m s
  popType t
  pure r

View File

@ -0,0 +1,408 @@
module Unison.KindInference.Solve
( step,
verify,
initialState,
defaultUnconstrainedVars,
KindError (..),
ConstraintConflict (..),
)
where
import Unison.KindInference.Error (KindError(..), ConstraintConflict(..), improveError)
import Control.Lens (Prism', prism', review, (%~))
import Control.Monad.Reader (asks)
import Control.Monad.Reader qualified as M
import Control.Monad.State.Strict qualified as M
import Control.Monad.Trans.Except
import Data.List.NonEmpty (NonEmpty (..))
import Data.Set qualified as Set
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation)
import Unison.Debug (DebugFlag (KindInference), shouldDebug)
import Unison.KindInference.Constraint.Provenance (Provenance (..))
import Unison.KindInference.Constraint.Solved qualified as Solved
import Unison.KindInference.Constraint.StarProvenance (StarProvenance (..))
import Unison.KindInference.Constraint.Unsolved qualified as Unsolved
import Unison.KindInference.Generate (builtinConstraints)
import Unison.KindInference.Generate.Monad (Gen (..), GeneratedConstraint)
import Unison.KindInference.Solve.Monad
( ConstraintMap,
Descriptor (..),
Env (..),
Solve (..),
SolveState (..),
emptyState,
run,
runGen,
)
import Unison.KindInference.UVar (UVar (..))
import Unison.PatternMatchCoverage.Pretty as P
import Unison.PatternMatchCoverage.UFMap qualified as U
import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.Syntax.TypePrinter qualified as TP
import Unison.Util.Pretty qualified as P
import Unison.Var (Var)
-- | Constraints as they exist inside the solver: like generated
-- constraints, but an IsType constraint may also arise from
-- defaulting ('StarProvenance').
type UnsolvedConstraint v loc = Unsolved.Constraint (UVar v loc) v loc StarProvenance
-- | Embedding of generated constraints into unsolved constraints.
-- Every unsolved constraint round-trips except an @IsType@ whose
-- provenance is 'Default' (those are introduced by defaulting, not
-- generation).
_Generated :: forall v loc. Prism' (UnsolvedConstraint v loc) (GeneratedConstraint v loc)
_Generated = prism' (Unsolved.starProv %~ NotDefault) \case
  Unsolved.IsType s l -> case l of
    Default -> Nothing
    NotDefault l -> Just (Unsolved.IsType s l)
  Unsolved.IsAbility s l -> Just (Unsolved.IsAbility s l)
  Unsolved.IsArr s l a b -> Just (Unsolved.IsArr s l a b)
  Unsolved.Unify l a b -> Just (Unsolved.Unify l a b)
-- | Apply some generated constraints to a solve state, returning a
-- kind error if detected or a new solve state.
step ::
  (Var v, Ord loc, Show loc) =>
  Env ->
  SolveState v loc ->
  [GeneratedConstraint v loc] ->
  Either (NonEmpty (KindError v loc)) (SolveState v loc)
step e st cs =
  let action = do
        reduce cs >>= \case
          [] -> pure (Right ())
          e : es -> do
            -- We have an error, but do an occ check first to ensure
            -- we present the most sensible error.
            st <- M.get
            case verify st of
              Left e -> pure (Left e)
              Right _ -> do
                Left <$> traverse improveError (e :| es)
   in case unSolve action e st of
        (res, finalState) -> case res of
          Left e -> Left e
          Right () -> Right finalState
-- | Default any unconstrained vars to *
--
-- Walks the vars recorded in 'newUnifVars'; any canonical descriptor
-- still lacking a constraint gets @IsType Default@. Clears
-- 'newUnifVars' afterwards.
defaultUnconstrainedVars :: Var v => SolveState v loc -> SolveState v loc
defaultUnconstrainedVars st =
  let newConstraints = foldl' phi (constraints st) (newUnifVars st)
      phi b a = U.alter a handleNothing handleJust b
      handleNothing = error "impossible"
      handleJust _canonK ecSize d = case descriptorConstraint d of
        Nothing -> U.Canonical ecSize d {descriptorConstraint = Just $ Solved.IsType Default}
        Just _ -> U.Canonical ecSize d
   in st {constraints = newConstraints, newUnifVars = []}
-- | Render a single unsolved constraint for debug output, with its
-- provenance shown via 'Show' in brackets.
prettyConstraintD' :: Show loc => Var v => PrettyPrintEnv -> UnsolvedConstraint v loc -> P.Pretty P.ColorText
prettyConstraintD' ppe =
  P.wrap . \case
    Unsolved.IsType v p -> prettyUVar ppe v <> " ~ Type" <> prettyProv p
    Unsolved.IsAbility v p -> prettyUVar ppe v <> " ~ Ability" <> prettyProv p
    Unsolved.IsArr v p a b -> prettyUVar ppe v <> " ~ " <> prettyUVar ppe a <> " -> " <> prettyUVar ppe b <> prettyProv p
    Unsolved.Unify p a b -> prettyUVar ppe a <> " ~ " <> prettyUVar ppe b <> prettyProv p
  where
    prettyProv x =
      "[" <> P.string (show x) <> "]"
-- | Render a list of unsolved constraints, one per line (debug output).
prettyConstraints :: Show loc => Var v => PrettyPrintEnv -> [UnsolvedConstraint v loc] -> P.Pretty P.ColorText
prettyConstraints ppe cs = P.sep "\n" (prettyConstraintD' ppe <$> cs)
-- | Render a UVar as @type :: symbol@ for debug output.
prettyUVar :: Var v => PrettyPrintEnv -> UVar v loc -> P.Pretty P.ColorText
prettyUVar ppe (UVar s t) = TP.pretty ppe t <> " :: " <> P.prettyVar s
-- | Debug helper: emit a pretty document through 'trace' before
-- returning the second argument unchanged.
tracePretty :: P.Pretty P.ColorText -> a -> a
tracePretty = trace . P.toAnsiUnbroken
-- | State for the occurrence check ('occCheck').
data OccCheckState v loc = OccCheckState
  { -- | vars on the current DFS path (membership check)
    visitingSet :: Set (UVar v loc),
    -- | vars on the current DFS path, most recent first
    visitingStack :: [UVar v loc],
    -- | vars whose subtree has been fully processed
    solvedSet :: Set (UVar v loc),
    -- | the constraint map, updated as defaulting happens during the check
    solvedConstraints :: ConstraintMap v loc,
    -- | cycles detected so far
    kindErrors :: [KindError v loc]
  }
-- | Mark @x@ as being on the current DFS path. If it already is, a
-- cycle has been found: record a 'CycleDetected' error (taking the
-- location from the IsArr constraint that closed the cycle) and
-- return 'Cycle'.
markVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) CycleCheck
markVisiting x = do
  OccCheckState {visitingSet, visitingStack} <- M.get
  case Set.member x visitingSet of
    True -> do
      OccCheckState{solvedConstraints} <- M.get
      let loc = case U.lookupCanon x solvedConstraints of
            Just (_, _, Descriptor { descriptorConstraint = Just (Solved.IsArr (Provenance _ loc) _ _ )}, _) -> loc
            _ -> error "cycle without IsArr constraint"
      addError (CycleDetected loc x solvedConstraints)
      pure Cycle
    False -> do
      M.modify \st ->
        st
          { visitingSet = Set.insert x visitingSet,
            visitingStack = x : visitingStack
          }
      pure NoCycle
-- | Pop @x@ off the DFS path and mark it solved. Precondition: @x@ is
-- the top of 'visitingStack' (note the partial 'tail').
unmarkVisiting :: Var v => UVar v loc -> M.State (OccCheckState v loc) ()
unmarkVisiting x = M.modify \st ->
  st
    { visitingSet = Set.delete x (visitingSet st),
      visitingStack = tail (visitingStack st),
      solvedSet = Set.insert x (solvedSet st)
    }
-- | Record a kind error found during the occurrence check.
addError :: KindError v loc -> M.State (OccCheckState v loc) ()
addError ke = M.modify \st -> st {kindErrors = ke : kindErrors st}
-- | Has @x@ already been fully processed by the occurrence check?
isSolved :: Var v => UVar v loc -> M.State (OccCheckState v loc) Bool
isSolved x = M.gets (Set.member x . solvedSet)
-- | Result of 'markVisiting': whether the var closed a cycle.
data CycleCheck
  = Cycle
  | NoCycle
-- | occurence check and report any errors
--
-- Depth-first traversal of the constraint graph following IsArr
-- edges; unconstrained descriptors encountered along the way are
-- defaulted to @IsType Default@. Returns all detected cycles, or the
-- (possibly defaulted) constraint map.
occCheck ::
  forall v loc.
  Var v =>
  ConstraintMap v loc ->
  Either (NonEmpty (KindError v loc)) (ConstraintMap v loc)
occCheck constraints0 =
  let go ::
        [(UVar v loc)] ->
        M.State (OccCheckState v loc) ()
      go = \case
        [] -> pure ()
        u : us -> do
          isSolved u >>= \case
            True -> go us
            False -> do
              markVisiting u >>= \case
                Cycle -> pure ()
                NoCycle -> do
                  st@OccCheckState {solvedConstraints} <- M.get
                  let handleNothing = error "impossible"
                      handleJust _canonK ecSize d = case descriptorConstraint d of
                        Nothing -> ([], U.Canonical ecSize d {descriptorConstraint = Just $ Solved.IsType Default})
                        Just v ->
                          let descendants = case v of
                                Solved.IsType _ -> []
                                Solved.IsAbility _ -> []
                                Solved.IsArr _ a b -> [a, b]
                           in (descendants, U.Canonical ecSize d)
                  -- alterF simultaneously reads the descendants and
                  -- writes the (possibly defaulted) descriptor
                  let (descendants, solvedConstraints') = U.alterF u handleNothing handleJust solvedConstraints
                  M.put st {solvedConstraints = solvedConstraints'}
                  go descendants
              unmarkVisiting u
              go us
      OccCheckState {solvedConstraints, kindErrors} =
        M.execState
          (go (U.keys constraints0))
          OccCheckState
            { visitingSet = Set.empty,
              visitingStack = [],
              solvedSet = Set.empty,
              solvedConstraints = constraints0,
              kindErrors = []
            }
   in case kindErrors of
        [] -> Right solvedConstraints
        e : es -> Left (e :| es)
-- | loop through the constraints, eliminating constraints until we
-- have some set that cannot be reduced
--
-- A constraint that fails to add is retried on the next pass, since a
-- later constraint may unblock it; when a full pass makes no progress
-- the remaining failures are re-run once more to collect their errors.
reduce ::
  forall v loc.
  (Show loc, Var v, Ord loc) =>
  [GeneratedConstraint v loc] ->
  Solve v loc [KindError v loc]
reduce cs0 = dbg "reduce" cs0 (go False [])
  where
    -- b: did any constraint succeed this pass; acc: deferred constraints
    go b acc = \case
      [] -> case b of
        True -> dbg "go" acc (go False [])
        False -> for acc \c ->
          dbgSingle "failed to add constraint" c addConstraint >>= \case
            Left x -> pure x
            Right () -> error "impossible"
      c : cs ->
        addConstraint c >>= \case
          Left _ -> go b (c : acc) cs
          Right () -> go True acc cs
    -- debug-trace a batch of constraints when KindInference debugging is on
    dbg ::
      forall a.
      P.Pretty P.ColorText ->
      [GeneratedConstraint v loc] ->
      ([GeneratedConstraint v loc] -> Solve v loc a) ->
      Solve v loc a
    dbg hdr cs f =
      case shouldDebug KindInference of
        True -> do
          ppe <- asks prettyPrintEnv
          tracePretty (P.hang (P.bold hdr) (prettyConstraints ppe (map (review _Generated) cs))) (f cs)
        False -> f cs
    -- debug-trace a single constraint when KindInference debugging is on
    dbgSingle ::
      forall a.
      P.Pretty P.ColorText ->
      GeneratedConstraint v loc ->
      (GeneratedConstraint v loc -> Solve v loc a) ->
      Solve v loc a
    dbgSingle hdr c f =
      case shouldDebug KindInference of
        True -> do
          ppe <- asks prettyPrintEnv
          tracePretty (P.hang (P.bold hdr) (prettyConstraintD' ppe (review _Generated c))) (f c)
        False -> f c
-- | Add a single constraint, returning an error if there is a
-- contradictory constraint
--
-- On conflict the entire solve state is rolled back to what it was
-- before this constraint (and any constraints it implied) were applied.
addConstraint ::
  forall v loc.
  Ord loc =>
  Var v =>
  GeneratedConstraint v loc ->
  Solve v loc (Either (KindError v loc) ())
addConstraint constraint = do
  initialState <- M.get
  -- Process implied constraints until they are all solved or an error
  -- is encountered
  let processPostAction ::
        Either (ConstraintConflict v loc) [UnsolvedConstraint v loc] ->
        Solve v loc (Either (KindError v loc) ())
      processPostAction = \case
        -- failure
        Left cc -> do
          -- roll back state changes
          M.put initialState
          pure (Left (ConstraintConflict constraint cc (constraints initialState)))
        -- success
        Right [] -> pure (Right ())
        -- undetermined
        Right (x : xs) -> do
          -- we could return a list of kind errors that are implied by
          -- this constraint, but for now we just return the first
          -- contradiction.
          processPostAction . fmap concat =<< runExceptT ((traverse (ExceptT . addConstraint') (x : xs)))
  processPostAction =<< addConstraint' (review _Generated constraint)
-- | Apply one unsolved constraint to the union-find map. Returns the
-- constraints it implies (e.g. decomposed arrow components), or the
-- conflict if the var already carries an incompatible constraint.
-- Does not roll back on conflict; 'addConstraint' handles that.
addConstraint' ::
  forall v loc.
  Ord loc =>
  Var v =>
  UnsolvedConstraint v loc ->
  Solve v loc (Either (ConstraintConflict v loc) [UnsolvedConstraint v loc])
addConstraint' = \case
  Unsolved.IsAbility s p0 -> do
    handleConstraint s (Solved.IsAbility p0) \case
      Solved.IsAbility _ -> Just (Solved.IsAbility p0, [])
      _ -> Nothing
  Unsolved.IsArr s p0 a b -> do
    handleConstraint s (Solved.IsArr p0 a b) \case
      -- two arrow constraints: unify their domains and codomains
      Solved.IsArr _p1 c d ->
        let implied =
              [ Unsolved.Unify prov a c,
                Unsolved.Unify prov b d
              ]
            prov = p0
         in Just (Solved.IsArr prov a b, implied)
      _ -> Nothing
  Unsolved.IsType s p0 -> do
    handleConstraint s (Solved.IsType p0) \case
      Solved.IsType _ -> Just (Solved.IsType p0, [])
      _ -> Nothing
  Unsolved.Unify l a b -> Right <$> union l a b
  where
    -- Install @solvedConstraint@ on @s@'s canonical descriptor; @phi@
    -- reconciles it with an existing constraint (Nothing = conflict).
    handleConstraint ::
      UVar v loc ->
      Solved.Constraint (UVar v loc) v loc ->
      ( Solved.Constraint (UVar v loc) v loc ->
        Maybe (Solved.Constraint (UVar v loc) v loc, [UnsolvedConstraint v loc])
      ) ->
      Solve v loc (Either (ConstraintConflict v loc) [UnsolvedConstraint v loc])
    handleConstraint s solvedConstraint phi = do
      st@SolveState {constraints} <- M.get
      let (postAction, constraints') =
            U.alterF
              s
              (error "adding new uvar?")
              ( \_canon eqSize des@Descriptor {descriptorConstraint} ->
                  let newDescriptor = case descriptorConstraint of
                        Nothing -> (Right [], des {descriptorConstraint = Just solvedConstraint})
                        Just c1' -> case phi c1' of
                          Just (newConstraint, impliedConstraints) ->
                            (Right impliedConstraints, des {descriptorConstraint = Just newConstraint})
                          Nothing ->
                            let conflict =
                                  ConstraintConflict'
                                    { conflictedVar = s,
                                      impliedConstraint = solvedConstraint,
                                      conflictedConstraint = c1'
                                    }
                             in (Left conflict, des {descriptorConstraint = descriptorConstraint})
                   in U.Canonical eqSize <$> newDescriptor
              )
              constraints
      M.put st {constraints = constraints'}
      pure postAction
-- unify two uvars, returning implied constraints
--
-- When the merged, non-canonical class carried a constraint, it is
-- re-emitted against @a@ so the caller can re-add it to the merged
-- class; when both classes were already equal, noMerge just records
-- the map and implies nothing.
union :: (Ord loc, Var v) => Provenance v loc -> UVar v loc -> UVar v loc -> Solve v loc [UnsolvedConstraint v loc]
union _unionLoc a b = do
  SolveState {constraints} <- M.get
  res <- U.union a b constraints noMerge \_canonK nonCanonV constraints' -> do
    st <- M.get
    M.put st {constraints = constraints'}
    let impliedConstraints = case descriptorConstraint nonCanonV of
          Nothing -> []
          Just c ->
            let cd = case c of
                  Solved.IsType loc -> Unsolved.IsType a case loc of
                    Default -> Default
                    NotDefault loc -> NotDefault loc
                  Solved.IsArr loc l r -> Unsolved.IsArr a loc l r
                  Solved.IsAbility loc -> Unsolved.IsAbility a loc
             in [cd]
    pure (Just impliedConstraints)
  case res of
    -- U.union only returns Nothing when the merge callback does; ours never does
    Nothing -> error "impossible"
    Just impliedConstraints -> pure impliedConstraints
  where
    noMerge m = do
      st <- M.get
      M.put st {constraints = m}
      pure []
-- | Do an occurence check and return an error or the resulting solve
-- state
verify ::
  Var v =>
  SolveState v loc ->
  Either (NonEmpty (KindError v loc)) (SolveState v loc)
verify st =
  let solveState = occCheck (constraints st)
   in case solveState of
        Left e -> Left e
        Right m -> Right st {constraints = m}
-- | Seed the solve state with the builtin kind constraints; these
-- must never conflict ('assertGen' errors otherwise).
initializeState :: forall v loc. (BuiltinAnnotation loc, Ord loc, Show loc, Var v) => Solve v loc ()
initializeState = assertGen do
  builtinConstraints
-- | Generate and solve constraints, asserting no conflicts or
-- decomposition occurs
assertGen :: (Ord loc, Show loc, Var v) => Gen v loc [GeneratedConstraint v loc] -> Solve v loc ()
assertGen gen = do
  cs <- runGen gen
  env <- M.ask
  st <- M.get
  let comp = do
        st <- step env st cs
        verify st
  case comp of
    -- builtin constraints are trusted; any failure is a programmer error
    Left _ -> error "[assertGen]: constraint failure in among builtin constraints"
    Right st -> M.put st
-- | The starting solve state: empty state plus the builtin constraints.
initialState :: forall v loc. (BuiltinAnnotation loc, Show loc, Ord loc, Var v) => Env -> SolveState v loc
initialState env =
  let ((), finalState) = run env emptyState initializeState
   in finalState

View File

@ -0,0 +1,113 @@
module Unison.KindInference.Solve.Monad
( Solve (..),
Env (..),
SolveState (..),
Descriptor (..),
ConstraintMap,
run,
emptyState,
find,
genStateL,
runGen,
addUnconstrainedVar,
)
where
import Control.Lens (Lens', (%%~))
import Control.Monad.Reader qualified as M
import Control.Monad.State.Strict qualified as M
import Data.Functor.Identity
import Data.List.NonEmpty (NonEmpty)
import Data.Map.Strict qualified as M
import Data.Set qualified as Set
import Unison.KindInference.Constraint.Solved (Constraint (..))
import Unison.KindInference.Generate.Monad (Gen (..))
import Unison.KindInference.Generate.Monad qualified as Gen
import Unison.KindInference.UVar (UVar (..))
import Unison.PatternMatchCoverage.UFMap qualified as U
import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.Symbol
import Unison.Type qualified as T
import Unison.Var
-- | Read-only environment for the solver (pretty-printing for debug
-- and error output).
data Env = Env {prettyPrintEnv :: PrettyPrintEnv}

-- | Union-find map from unification variables to their descriptors.
type ConstraintMap v loc = U.UFMap (UVar v loc) (Descriptor v loc)
-- | Mutable state of the solver.
data SolveState v loc = SolveState
  { -- | every symbol handed out so far, kept so fresh names stay fresh
    unifVars :: !(Set Symbol),
    -- | vars created since the last defaulting pass, newest first
    newUnifVars :: [UVar v loc],
    -- | the union-find constraint map (uses the 'ConstraintMap' alias
    -- declared above rather than repeating its expansion)
    constraints :: !(ConstraintMap v loc),
    -- | types in scope mapped to their UVars (mirrors the Gen-side map)
    typeMap :: !(Map (T.Type v loc) (NonEmpty (UVar v loc)))
  }
-- | Per-equivalence-class data: the solved constraint, if any,
-- attached to the class's canonical variable.
data Descriptor v loc = Descriptor
  { descriptorConstraint :: Maybe (Constraint (UVar v loc) v loc)
  }
-- | The solver monad: reader over 'Env' plus state over 'SolveState'
-- (derived via @ReaderT Env (State ...)@).
newtype Solve v loc a = Solve {unSolve :: Env -> SolveState v loc -> (a, SolveState v loc)}
  deriving
    ( Functor,
      Applicative,
      Monad,
      M.MonadFix,
      M.MonadReader Env,
      M.MonadState (SolveState v loc)
    )
    via M.ReaderT Env (M.State (SolveState v loc))
-- | View the solve state as a generation state, sharing 'unifVars'
-- and 'typeMap'. The projected 'newVars' always starts empty, so
-- writing back discards it — callers ('runGen') must collect new vars
-- themselves. NOTE(review): this is deliberately not a lawful lens
-- for the newVars component.
genStateL :: Lens' (SolveState v loc) (Gen.GenState v loc)
genStateL f st =
  ( \genState ->
      st
        { unifVars = Gen.unifVars genState,
          typeMap = Gen.typeMap genState
        }
  )
    <$> f
      Gen.GenState
        { unifVars = unifVars st,
          typeMap = typeMap st,
          newVars = []
        }
-- | Run a 'Gen' computation inside 'Solve': shared state flows
-- through 'genStateL', every var the generator created is registered
-- as unconstrained, and all new vars are recorded in 'newUnifVars'
-- for later defaulting.
runGen :: Var v => Gen v loc a -> Solve v loc a
runGen gena = do
  st <- M.get
  -- pair the result with the vars created during generation
  let gena' = do
        res <- gena
        st <- M.get
        pure (res, Gen.newVars st)
  let ((cs, vs), st') = st & genStateL %%~ Gen.run gena'
  M.put st'
  traverse_ addUnconstrainedVar vs
  M.modify \st -> st {newUnifVars = vs ++ newUnifVars st}
  pure cs
-- | Insert a var into the union-find map with no constraint attached.
addUnconstrainedVar :: Var v => UVar v loc -> Solve v loc ()
addUnconstrainedVar uvar = do
  st@SolveState {constraints} <- M.get
  let constraints' = U.insert uvar Descriptor {descriptorConstraint = Nothing} constraints
  M.put st {constraints = constraints'}
-- | Run a solver action with the given environment and initial state.
run :: Env -> SolveState v loc -> Solve v loc a -> (a, SolveState v loc)
run env st0 (Solve f) = f env st0
-- | A solve state with no vars, no constraints, and nothing in scope.
emptyState :: SolveState v loc
emptyState =
  SolveState
    { unifVars = Set.empty,
      newUnifVars = [],
      constraints = U.empty,
      typeMap = M.empty
    }
-- | Look up the constraint on @k@'s equivalence class. Also writes
-- back the path-compressed union-find map returned by 'lookupCanon'.
-- Errors if @k@ was never inserted.
find :: Var v => UVar v loc -> Solve v loc (Maybe (Constraint (UVar v loc) v loc))
find k = do
  st@SolveState {constraints} <- M.get
  case U.lookupCanon k constraints of
    Just (_canon, _size, Descriptor {descriptorConstraint}, constraints') -> do
      M.put st {constraints = constraints'}
      pure descriptorConstraint
    Nothing -> error "find: Nothing"

View File

@ -0,0 +1,15 @@
{-# LANGUAGE DataKinds #-}
module Unison.KindInference.UVar
( UVar (..),
)
where
import Unison.Symbol
import Unison.Type qualified as T
-- | A kind-inference unification variable: a unique symbol paired
-- with the type it was created for (the type is carried for display
-- purposes, e.g. 'prettyUVar').
data UVar v loc = UVar
  { _uvarSymbol :: Symbol,
    uvarType :: T.Type v loc
  }
  deriving stock (Eq, Ord, Show)

View File

@ -2,7 +2,6 @@ module Unison.Parsers where
import Data.Text qualified as Text import Data.Text qualified as Text
import Unison.Builtin qualified as Builtin import Unison.Builtin qualified as Builtin
import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann) import Unison.Parser.Ann (Ann)
import Unison.Prelude import Unison.Prelude
import Unison.PrintError (defaultWidth, prettyParseError) import Unison.PrintError (defaultWidth, prettyParseError)
@ -79,7 +78,7 @@ unsafeParseFileBuiltinsOnly =
Parser.ParsingEnv Parser.ParsingEnv
{ uniqueNames = mempty, { uniqueNames = mempty,
uniqueTypeGuid = \_ -> pure Nothing, uniqueTypeGuid = \_ -> pure Nothing,
names = Names.NamesWithHistory Builtin.names0 mempty names = Builtin.names
} }
unsafeParseFile :: Monad m => String -> Parser.ParsingEnv m -> m (UnisonFile Symbol Ann) unsafeParseFile :: Monad m => String -> Parser.ParsingEnv m -> m (UnisonFile Symbol Ann)

View File

@ -676,7 +676,7 @@ union ::
NormalizedConstraints vt v loc -> NormalizedConstraints vt v loc ->
m (Maybe (NormalizedConstraints vt v loc)) m (Maybe (NormalizedConstraints vt v loc))
union v0 v1 nc@NormalizedConstraints {constraintMap} = union v0 v1 nc@NormalizedConstraints {constraintMap} =
UFMap.union v0 v1 constraintMap \chosenCanon nonCanonValue m -> UFMap.union v0 v1 constraintMap noMerge \chosenCanon nonCanonValue m ->
-- In this block we want to collect the constraints from the -- In this block we want to collect the constraints from the
-- non-canonical value and add them to the canonical value. -- non-canonical value and add them to the canonical value.
@ -717,6 +717,8 @@ union v0 v1 nc@NormalizedConstraints {constraintMap} =
IsNotEffectful -> [] IsNotEffectful -> []
IsEffectful -> [C.Effectful chosenCanon] IsEffectful -> [C.Effectful chosenCanon]
in addConstraints constraints nc {constraintMap = m} in addConstraints constraints nc {constraintMap = m}
where
noMerge m = pure nc { constraintMap = m }
modifyListC :: modifyListC ::
forall vt v loc m. forall vt v loc m.

View File

@ -14,6 +14,7 @@ module Unison.PatternMatchCoverage.UFMap
) )
where where
import Control.Monad.Trans.Class
import Control.Monad.Fix (MonadFix) import Control.Monad.Fix (MonadFix)
import Control.Monad.Trans.Except (ExceptT (..)) import Control.Monad.Trans.Except (ExceptT (..))
import Data.Foldable (foldl') import Data.Foldable (foldl')
@ -160,9 +161,10 @@ union ::
k -> k ->
k -> k ->
UFMap k v -> UFMap k v ->
(UFMap k v -> m r) ->
(k -> v -> UFMap k v -> m (Maybe r)) -> (k -> v -> UFMap k v -> m (Maybe r)) ->
m (Maybe r) m (Maybe r)
union k0 k1 mapinit mergeValues = toMaybe do union k0 k1 mapinit alreadyMerged mergeValues = toMaybe do
rec let lu :: rec let lu ::
k -> k ->
UFMap k v -> UFMap k v ->
@ -194,16 +196,21 @@ union k0 k1 mapinit mergeValues = toMaybe do
let (chosenCanon, canonValue, nonCanonValue) = case size0 > size1 of let (chosenCanon, canonValue, nonCanonValue) = case size0 > size1 of
True -> (kcanon0, v0, v1) True -> (kcanon0, v0, v1)
False -> (kcanon1, v1, v0) False -> (kcanon1, v1, v0)
map2 <- case kcanon0 == kcanon1 of
let res = True -> do
ExceptT $ res <- lift (alreadyMerged map1)
mergeValues chosenCanon nonCanonValue map1 <&> \case pure (vfinal0 `seq` res)
Nothing -> Left (MergeFailed v0 v1) False -> do
Just x -> Right x map2 <-
in -- Now that both lookups have completed we can safely force the let res =
-- final values ExceptT $
vfinal0 `seq` vfinal1 `seq` res mergeValues chosenCanon nonCanonValue map1 <&> \case
pure map2 Nothing -> Left (MergeFailed v0 v1)
Just x -> Right x
in -- Now that both lookups have completed we can safely force the
-- final values
vfinal0 `seq` vfinal1 `seq` res
pure map2
where where
toMaybe :: ExceptT (UnionHaltReason k v) m r -> m (Maybe r) toMaybe :: ExceptT (UnionHaltReason k v) m r -> m (Maybe r)
toMaybe (ExceptT action) = toMaybe (ExceptT action) =

View File

@ -1,5 +1,3 @@
{-# LANGUAGE OverloadedStrings #-}
module Unison.PrettyPrintEnv module Unison.PrettyPrintEnv
( PrettyPrintEnv (..), ( PrettyPrintEnv (..),
patterns, patterns,

View File

@ -1,31 +1,98 @@
{-# LANGUAGE OverloadedStrings #-} module Unison.PrettyPrintEnv.Names
( -- * Namer
Namer (..),
hqNamer,
namer,
module Unison.PrettyPrintEnv.Names (fromNames, fromSuffixNames) where -- * Suffixifier
Suffixifier,
dontSuffixify,
suffixifyByHash,
suffixifyByName,
-- * Pretty-print env
makePPE,
makeTermNames,
makeTypeNames,
)
where
import Data.Set qualified as Set import Data.Set qualified as Set
import Unison.HashQualified' qualified as HQ' import Unison.HashQualified' qualified as HQ'
import Unison.Name (Name) import Unison.Name (Name)
import Unison.Name qualified as Name import Unison.Name qualified as Name
import Unison.Names (Names)
import Unison.Names qualified as Names import Unison.Names qualified as Names
import Unison.NamesWithHistory (NamesWithHistory) import Unison.NamesWithHistory qualified as Names
import Unison.NamesWithHistory qualified as NamesWithHistory
import Unison.Prelude import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv (PrettyPrintEnv)) import Unison.PrettyPrintEnv (PrettyPrintEnv (PrettyPrintEnv))
import Unison.Util.Relation qualified as Rel import Unison.Reference (TypeReference)
import Unison.Referent (Referent)
fromNames :: Int -> NamesWithHistory -> PrettyPrintEnv ------------------------------------------------------------------------------------------------------------------------
fromNames len names = PrettyPrintEnv terms' types' -- Namer
where
terms' r = data Namer = Namer
NamesWithHistory.termName len r names { nameTerm :: Referent -> Set (HQ'.HashQualified Name),
& Set.toList nameType :: TypeReference -> Set (HQ'.HashQualified Name)
& fmap (\n -> (n, n)) }
& prioritize
types' r = namer :: Names -> Namer
NamesWithHistory.typeName len r names namer names =
& Set.toList Namer
& fmap (\n -> (n, n)) { nameTerm = Set.map HQ'.fromName . Names.namesForReferent names,
& prioritize nameType = Set.map HQ'.fromName . Names.namesForReference names
}
hqNamer :: Int -> Names -> Namer
hqNamer hashLen names =
Namer
{ nameTerm = \ref -> Names.termName hashLen ref names,
nameType = \ref -> Names.typeName hashLen ref names
}
------------------------------------------------------------------------------------------------------------------------
-- Suffixifier
data Suffixifier = Suffixifier
{ suffixifyTerm :: Name -> Name,
suffixifyType :: Name -> Name
}
dontSuffixify :: Suffixifier
dontSuffixify =
Suffixifier id id
suffixifyByName :: Names -> Suffixifier
suffixifyByName names =
Suffixifier
{ suffixifyTerm = \name -> Name.suffixifyByName name (Names.terms names),
suffixifyType = \name -> Name.suffixifyByName name (Names.types names)
}
suffixifyByHash :: Names -> Suffixifier
suffixifyByHash names =
Suffixifier
{ suffixifyTerm = \name -> Name.suffixifyByHash name (Names.terms names),
suffixifyType = \name -> Name.suffixifyByHash name (Names.types names)
}
------------------------------------------------------------------------------------------------------------------------
-- Pretty-print env
makePPE :: Namer -> Suffixifier -> PrettyPrintEnv
makePPE namer suffixifier =
PrettyPrintEnv
(makeTermNames namer suffixifier)
(makeTypeNames namer suffixifier)
makeTermNames :: Namer -> Suffixifier -> Referent -> [(HQ'.HashQualified Name, HQ'.HashQualified Name)]
makeTermNames Namer {nameTerm} Suffixifier {suffixifyTerm} =
prioritize . map (\name -> (name, suffixifyTerm <$> name)) . Set.toList . nameTerm
makeTypeNames :: Namer -> Suffixifier -> TypeReference -> [(HQ'.HashQualified Name, HQ'.HashQualified Name)]
makeTypeNames Namer {nameType} Suffixifier {suffixifyType} =
prioritize . map (\name -> (name, suffixifyType <$> name)) . Set.toList . nameType
-- | Sort the names for a given ref by the following factors (in priority order): -- | Sort the names for a given ref by the following factors (in priority order):
-- --
@ -38,24 +105,3 @@ prioritize =
sortOn \case sortOn \case
(fqn, HQ'.NameOnly name) -> (Name.isAbsolute name, Nothing, Name.countSegments (HQ'.toName fqn), Name.countSegments name) (fqn, HQ'.NameOnly name) -> (Name.isAbsolute name, Nothing, Name.countSegments (HQ'.toName fqn), Name.countSegments name)
(fqn, HQ'.HashQualified name hash) -> (Name.isAbsolute name, Just hash, Name.countSegments (HQ'.toName fqn), Name.countSegments name) (fqn, HQ'.HashQualified name hash) -> (Name.isAbsolute name, Just hash, Name.countSegments (HQ'.toName fqn), Name.countSegments name)
fromSuffixNames :: Int -> NamesWithHistory -> PrettyPrintEnv
fromSuffixNames len names = PrettyPrintEnv terms' types'
where
terms' r =
NamesWithHistory.termName len r names
& Set.toList
& fmap (\n -> (n, n))
& shortestUniqueSuffixes r (Names.terms $ NamesWithHistory.currentNames names)
& prioritize
types' r =
NamesWithHistory.typeName len r names
& Set.toList
& fmap (\n -> (n, n))
& shortestUniqueSuffixes r (Names.types $ NamesWithHistory.currentNames names)
& prioritize
-- | Reduce the provided names to their minimal unique suffix within the scope of the given
-- relation.
shortestUniqueSuffixes :: (Ord ref) => ref -> Rel.Relation Name ref -> [(a, HQ'.HashQualified Name)] -> [(a, HQ'.HashQualified Name)]
shortestUniqueSuffixes ref rel names = names <&> second (fmap (\name -> Name.shortestUniqueSuffix name ref rel))

View File

@ -1,11 +1,25 @@
{-# LANGUAGE OverloadedStrings #-} module Unison.PrettyPrintEnvDecl.Names
( fromNamesSuffixifiedByHash,
fromNamesSuffixifiedByName,
)
where
module Unison.PrettyPrintEnvDecl.Names where import Unison.Names (Names)
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.NamesWithHistory (NamesWithHistory)
import Unison.PrettyPrintEnv.Names (fromNames, fromSuffixNames)
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (PrettyPrintEnvDecl)) import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (PrettyPrintEnvDecl))
fromNamesDecl :: Int -> NamesWithHistory -> PrettyPrintEnvDecl fromNamesSuffixifiedByHash :: Int -> Names -> PrettyPrintEnvDecl
fromNamesDecl hashLength names = fromNamesSuffixifiedByHash hashLength names =
PrettyPrintEnvDecl (fromNames hashLength names) (fromSuffixNames hashLength names) PrettyPrintEnvDecl
(PPE.makePPE namer PPE.dontSuffixify)
(PPE.makePPE namer (PPE.suffixifyByHash names))
where
namer = PPE.hqNamer hashLength names
fromNamesSuffixifiedByName :: Int -> Names -> PrettyPrintEnvDecl
fromNamesSuffixifiedByName hashLength names =
PrettyPrintEnvDecl
(PPE.makePPE namer PPE.dontSuffixify)
(PPE.makePPE namer (PPE.suffixifyByName names))
where
namer = PPE.hqNamer hashLength names

View File

@ -1,4 +1,7 @@
module Unison.PrettyPrintEnvDecl.Sqlite where module Unison.PrettyPrintEnvDecl.Sqlite
( ppedForReferences,
)
where
import U.Codebase.Sqlite.NameLookups (ReversedName (..)) import U.Codebase.Sqlite.NameLookups (ReversedName (..))
import U.Codebase.Sqlite.NamedRef (NamedRef (..)) import U.Codebase.Sqlite.NamedRef (NamedRef (..))
@ -12,7 +15,6 @@ import Unison.Name (Name)
import Unison.Name qualified as Name import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment (..)) import Unison.NameSegment (NameSegment (..))
import Unison.Names qualified as Names import Unison.Names qualified as Names
import Unison.NamesWithHistory qualified as NamesWithHistory
import Unison.Prelude import Unison.Prelude
import Unison.PrettyPrintEnvDecl qualified as PPED import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.PrettyPrintEnvDecl.Names qualified as PPED import Unison.PrettyPrintEnvDecl.Names qualified as PPED
@ -48,7 +50,7 @@ ppedForReferences namesPerspective refs = do
pure result pure result
let allTermNamesToConsider = termNames <> longestTermSuffixMatches let allTermNamesToConsider = termNames <> longestTermSuffixMatches
let allTypeNamesToConsider = typeNames <> longestTypeSuffixMatches let allTypeNamesToConsider = typeNames <> longestTypeSuffixMatches
pure . PPED.fromNamesDecl hashLen . NamesWithHistory.fromCurrentNames $ Names.fromTermsAndTypes allTermNamesToConsider allTypeNamesToConsider pure . PPED.fromNamesSuffixifiedByHash hashLen $ Names.fromTermsAndTypes allTermNamesToConsider allTypeNamesToConsider
where where
namesForReference :: Ops.NamesPerspective -> LabeledDependency -> Sqlite.Transaction ([(Name, Referent)], [(Name, Reference)]) namesForReference :: Ops.NamesPerspective -> LabeledDependency -> Sqlite.Transaction ([(Name, Referent)], [(Name, Reference)])
namesForReference namesPerspective = \case namesForReference namesPerspective = \case

View File

@ -22,11 +22,11 @@ import Unison.ConstructorReference (ConstructorReference, GConstructorReference
import Unison.HashQualified (HashQualified) import Unison.HashQualified (HashQualified)
import Unison.Kind (Kind) import Unison.Kind (Kind)
import Unison.Kind qualified as Kind import Unison.Kind qualified as Kind
import Unison.KindInference.Error.Pretty (prettyKindError)
import Unison.Name (Name) import Unison.Name (Name)
import Unison.Name qualified as Name import Unison.Name qualified as Name
import Unison.Names qualified as Names import Unison.Names qualified as Names
import Unison.Names.ResolutionResult qualified as Names import Unison.Names.ResolutionResult qualified as Names
import Unison.NamesWithHistory qualified as NamesWithHistory
import Unison.Parser.Ann (Ann (..)) import Unison.Parser.Ann (Ann (..))
import Unison.Pattern (Pattern) import Unison.Pattern (Pattern)
import Unison.Prelude import Unison.Prelude
@ -609,6 +609,10 @@ renderTypeError e env src curPath = case e of
Pr.hang Pr.hang
"This case would be ignored because it's already covered by the preceding case(s):" "This case would be ignored because it's already covered by the preceding case(s):"
(annotatedAsErrorSite src loc) (annotatedAsErrorSite src loc)
KindInferenceFailure ke ->
let prettyTyp t = Pr.bold (renderType' env t)
showSource = showSourceMaybes src . map (\(loc, color) -> (,color) <$> rangeForAnnotated loc)
in prettyKindError prettyTyp showSource Type1 Type2 env ke
UnknownTerm {..} UnknownTerm {..}
| Var.typeOf unknownTermV == Var.MissingResult -> | Var.typeOf unknownTermV == Var.MissingResult ->
Pr.lines Pr.lines
@ -943,6 +947,7 @@ renderTypeError e env src curPath = case e of
fromString (show args), fromString (show args),
"\n" "\n"
] ]
C.KindInferenceFailure _ -> "kind inference failure"
C.DuplicateDefinitions vs -> C.DuplicateDefinitions vs ->
let go :: (v, [loc]) -> Pretty (AnnotatedText a) let go :: (v, [loc]) -> Pretty (AnnotatedText a)
go (v, locs) = go (v, locs) =
@ -1933,8 +1938,8 @@ prettyResolutionFailures s allFailures =
(Names.TypeResolutionFailure v _ Names.NotFound) -> (v, Nothing) (Names.TypeResolutionFailure v _ Names.NotFound) -> (v, Nothing)
ppeFromNames :: Names.Names -> PPE.PrettyPrintEnv ppeFromNames :: Names.Names -> PPE.PrettyPrintEnv
ppeFromNames names0 = ppeFromNames names =
PPE.fromNames PPE.todoHashLength (NamesWithHistory.NamesWithHistory {currentNames = names0, oldNames = mempty}) PPE.makePPE (PPE.hqNamer PPE.todoHashLength names) PPE.dontSuffixify
prettyRow :: (v, Maybe (NESet String)) -> [(Pretty ColorText, Pretty ColorText)] prettyRow :: (v, Maybe (NESet String)) -> [(Pretty ColorText, Pretty ColorText)]
prettyRow (v, mSet) = case mSet of prettyRow (v, mSet) = case mSet of

View File

@ -5,7 +5,9 @@ module Unison.Project.Util
projectBranchSegment, projectBranchSegment,
projectPathPrism, projectPathPrism,
projectBranchPathPrism, projectBranchPathPrism,
pattern UUIDNameSegment projectContextFromPath,
pattern UUIDNameSegment,
ProjectContext (..),
) )
where where
@ -118,3 +120,19 @@ projectBranchPathPrism =
"__projects" : UUIDNameSegment projectId : "branches" : UUIDNameSegment branchId : restPath -> "__projects" : UUIDNameSegment projectId : "branches" : UUIDNameSegment branchId : restPath ->
Just (ProjectAndBranch {project = ProjectId projectId, branch = ProjectBranchId branchId}, Path.fromList restPath) Just (ProjectAndBranch {project = ProjectId projectId, branch = ProjectBranchId branchId}, Path.fromList restPath)
_ -> Nothing _ -> Nothing
-- | The project information about the current path.
-- NOTE: if the user has cd'd into the project storage area but NOT into a branch, (where they shouldn't ever
-- be), this will result in a LooseCodePath.
data ProjectContext
= LooseCodePath Path.Absolute
| ProjectBranchPath ProjectId ProjectBranchId Path.Path {- path from branch root -}
deriving stock (Eq, Show)
projectContextFromPath :: Path.Absolute -> ProjectContext
projectContextFromPath path =
case path ^? projectBranchPathPrism of
Just (ProjectAndBranch {project = projectId, branch = branchId}, restPath) ->
ProjectBranchPath projectId branchId restPath
Nothing ->
LooseCodePath path

View File

@ -1416,6 +1416,8 @@ data POp
| -- STM | -- STM
ATOM ATOM
| TFRC -- try force | TFRC -- try force
| SDBL -- sandbox link list
| SDBV -- sandbox check for Values
deriving (Show, Eq, Ord, Enum, Bounded) deriving (Show, Eq, Ord, Enum, Bounded)
type ANormal = ABTN.Term ANormalF type ANormal = ABTN.Term ANormalF

View File

@ -22,8 +22,8 @@ import Data.Word (Word16, Word32, Word64)
import GHC.Stack import GHC.Stack
import Unison.ABT.Normalized (Term (..)) import Unison.ABT.Normalized (Term (..))
import Unison.Reference (Reference, Reference' (Builtin), pattern Derived) import Unison.Reference (Reference, Reference' (Builtin), pattern Derived)
import Unison.Runtime.Array qualified as PA
import Unison.Runtime.ANF as ANF hiding (Tag) import Unison.Runtime.ANF as ANF hiding (Tag)
import Unison.Runtime.Array qualified as PA
import Unison.Runtime.Exception import Unison.Runtime.Exception
import Unison.Runtime.Serialize import Unison.Runtime.Serialize
import Unison.Util.EnumContainers qualified as EC import Unison.Util.EnumContainers qualified as EC
@ -618,6 +618,8 @@ pOpCode op = case op of
DBTX -> 119 DBTX -> 119
IXOT -> 120 IXOT -> 120
IXOB -> 121 IXOB -> 121
SDBL -> 122
SDBV -> 123
pOpAssoc :: [(POp, Word16)] pOpAssoc :: [(POp, Word16)]
pOpAssoc = map (\op -> (op, pOpCode op)) [minBound .. maxBound] pOpAssoc = map (\op -> (op, pOpCode op)) [minBound .. maxBound]

View File

@ -690,7 +690,6 @@ splitls = binop0 4 $ \[n0, s, n, t, l, r] ->
[ (0, ([], seqViewEmpty)), [ (0, ([], seqViewEmpty)),
(1, ([BX, BX], TAbss [l, r] $ seqViewElem l r)) (1, ([BX, BX], TAbss [l, r] $ seqViewElem l r))
] ]
splitrs = binop0 4 $ \[n0, s, n, t, l, r] -> splitrs = binop0 4 $ \[n0, s, n, t, l, r] ->
unbox n0 Ty.natRef n unbox n0 Ty.natRef n
. TLetD t UN (TPrm SPLR [n, s]) . TLetD t UN (TPrm SPLR [n, s])
@ -928,15 +927,17 @@ watch =
raise :: SuperNormal Symbol raise :: SuperNormal Symbol
raise = raise =
unop0 3 $ \[r, f, n, k] -> unop0 3 $ \[r, f, n, k] ->
TMatch r . flip MatchRequest (TAbs f $ TVar f) TMatch r
. flip MatchRequest (TAbs f $ TVar f)
. Map.singleton Ty.exceptionRef . Map.singleton Ty.exceptionRef
$ mapSingleton 0 $ mapSingleton
( [BX], 0
TAbs f ( [BX],
. TShift Ty.exceptionRef k TAbs f
. TLetD n BX (TLit $ T "builtin.raise") . TShift Ty.exceptionRef k
$ TPrm EROR [n, f] . TLetD n BX (TLit $ T "builtin.raise")
) $ TPrm EROR [n, f]
)
gen'trace :: SuperNormal Symbol gen'trace :: SuperNormal Symbol
gen'trace = gen'trace =
@ -1023,6 +1024,19 @@ check'sandbox =
where where
(refs, val, b) = fresh (refs, val, b) = fresh
sandbox'links :: SuperNormal Symbol
sandbox'links = Lambda [BX] . TAbs ln $ TPrm SDBL [ln]
where
ln = fresh1
value'sandbox :: SuperNormal Symbol
value'sandbox =
Lambda [BX, BX]
. TAbss [refs, val]
$ TPrm SDBV [refs, val]
where
(refs, val) = fresh
stm'atomic :: SuperNormal Symbol stm'atomic :: SuperNormal Symbol
stm'atomic = stm'atomic =
Lambda [BX] Lambda [BX]
@ -2168,6 +2182,8 @@ builtinLookup =
("Link.Term.toText", (Untracked, term'link'to'text)), ("Link.Term.toText", (Untracked, term'link'to'text)),
("STM.atomically", (Tracked, stm'atomic)), ("STM.atomically", (Tracked, stm'atomic)),
("validateSandboxed", (Untracked, check'sandbox)), ("validateSandboxed", (Untracked, check'sandbox)),
("Value.validateSandboxed", (Tracked, value'sandbox)),
("sandboxLinks", (Tracked, sandbox'links)),
("IO.tryEval", (Tracked, try'eval)) ("IO.tryEval", (Tracked, try'eval))
] ]
++ foreignWrappers ++ foreignWrappers
@ -3062,6 +3078,8 @@ declareForeigns = do
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Many p \(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Many p
declareForeign Untracked "Pattern.capture" boxDirect . mkForeign $ declareForeign Untracked "Pattern.capture" boxDirect . mkForeign $
\(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Capture p \(TPat.CP p _) -> evaluate . TPat.cpattern $ TPat.Capture p
declareForeign Untracked "Pattern.captureAs" boxBoxDirect . mkForeign $
\(t, (TPat.CP p _)) -> evaluate . TPat.cpattern $ TPat.CaptureAs t p
declareForeign Untracked "Pattern.join" boxDirect . mkForeign $ \ps -> declareForeign Untracked "Pattern.join" boxDirect . mkForeign $ \ps ->
evaluate . TPat.cpattern . TPat.Join $ map (\(TPat.CP p _) -> p) ps evaluate . TPat.cpattern . TPat.Join $ map (\(TPat.CP p _) -> p) ps
declareForeign Untracked "Pattern.or" boxBoxDirect . mkForeign $ declareForeign Untracked "Pattern.or" boxBoxDirect . mkForeign $

View File

@ -5,13 +5,14 @@
{-# LANGUAGE ViewPatterns #-} {-# LANGUAGE ViewPatterns #-}
module Unison.Runtime.Decompile module Unison.Runtime.Decompile
( decompile ( decompile,
, DecompError (..) DecompResult,
, renderDecompError DecompError (..),
) where renderDecompError,
)
where
import Data.Set (singleton) import Data.Set (singleton)
import Prelude hiding (lines)
import Unison.ABT (substs) import Unison.ABT (substs)
import Unison.Codebase.Runtime (Error) import Unison.Codebase.Runtime (Error)
import Unison.ConstructorReference (GConstructorReference (..)) import Unison.ConstructorReference (GConstructorReference (..))
@ -64,10 +65,11 @@ import Unison.Type
typeLinkRef, typeLinkRef,
) )
import Unison.Util.Bytes qualified as By import Unison.Util.Bytes qualified as By
import Unison.Util.Pretty (lit, indentN, lines, wrap, syntaxToColor) import Unison.Util.Pretty (indentN, lines, lit, syntaxToColor, wrap)
import Unison.Util.Text qualified as Text import Unison.Util.Text qualified as Text
import Unison.Var (Var) import Unison.Var (Var)
import Unsafe.Coerce -- for Int -> Double import Unsafe.Coerce -- for Int -> Double
import Prelude hiding (lines)
con :: (Var v) => Reference -> Word64 -> Term v () con :: (Var v) => Reference -> Word64 -> Term v ()
con rf ct = constructor () (ConstructorReference rf $ fromIntegral ct) con rf ct = constructor () (ConstructorReference rf $ fromIntegral ct)

View File

@ -17,7 +17,6 @@ import Unison.ConstructorReference (GConstructorReference (..))
import Unison.DataDeclaration qualified as DD import Unison.DataDeclaration qualified as DD
import Unison.DataDeclaration.ConstructorId qualified as DD import Unison.DataDeclaration.ConstructorId qualified as DD
import Unison.FileParsers (ShouldUseTndr (..), computeTypecheckingEnvironment, synthesizeFile) import Unison.FileParsers (ShouldUseTndr (..), computeTypecheckingEnvironment, synthesizeFile)
import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann (..)) import Unison.Parser.Ann (Ann (..))
import Unison.Parsers qualified as Parsers import Unison.Parsers qualified as Parsers
import Unison.Prelude import Unison.Prelude
@ -43,7 +42,7 @@ parsingEnv =
Parser.ParsingEnv Parser.ParsingEnv
{ uniqueNames = mempty, { uniqueNames = mempty,
uniqueTypeGuid = \_ -> pure Nothing, uniqueTypeGuid = \_ -> pure Nothing,
names = Names.NamesWithHistory Builtin.names0 mempty names = Builtin.names
} }
typecheckingEnv :: Typechecker.Env Symbol Ann typecheckingEnv :: Typechecker.Env Symbol Ann
@ -1006,4 +1005,4 @@ showNotes source env =
intercalateMap "\n\n" $ PrintError.renderNoteAsANSI 60 env source Path.absoluteEmpty intercalateMap "\n\n" $ PrintError.renderNoteAsANSI 60 env source Path.absoluteEmpty
ppEnv :: PPE.PrettyPrintEnv ppEnv :: PPE.PrettyPrintEnv
ppEnv = PPE.fromNames 10 Builtin.names ppEnv = PPE.makePPE (PPE.hqNamer 10 Builtin.names) PPE.dontSuffixify

View File

@ -10,6 +10,7 @@
module Unison.Runtime.Interface module Unison.Runtime.Interface
( startRuntime, ( startRuntime,
withRuntime, withRuntime,
startNativeRuntime,
standalone, standalone,
runStandalone, runStandalone,
StoredCache, StoredCache,
@ -23,13 +24,16 @@ import Control.Concurrent.STM as STM
import Control.Monad import Control.Monad
import Data.Binary.Get (runGetOrFail) import Data.Binary.Get (runGetOrFail)
-- import Data.Bits (shiftL) -- import Data.Bits (shiftL)
import Data.ByteString qualified as BS
import Data.ByteString.Lazy qualified as BL import Data.ByteString.Lazy qualified as BL
import Data.Bytes.Get (MonadGet) import Data.Bytes.Get (MonadGet)
import Data.Bytes.Put (MonadPut, runPutL) import Data.Bytes.Put (MonadPut, putWord32be, runPutL, runPutS)
import Data.Bytes.Serial import Data.Bytes.Serial
import Data.Foldable import Data.Foldable
import Data.IORef import Data.IORef
import Data.List qualified as L
import Data.Map.Strict qualified as Map import Data.Map.Strict qualified as Map
import Data.Sequence qualified as Seq (fromList)
import Data.Set as Set import Data.Set as Set
( filter, ( filter,
fromList, fromList,
@ -40,9 +44,16 @@ import Data.Set as Set
) )
import Data.Set qualified as Set import Data.Set qualified as Set
import Data.Text (isPrefixOf, unpack) import Data.Text (isPrefixOf, unpack)
import System.Process
( CreateProcess (..),
StdStream (..),
proc,
waitForProcess,
withCreateProcess,
)
import Unison.Builtin.Decls qualified as RF import Unison.Builtin.Decls qualified as RF
import Unison.Codebase.CodeLookup (CodeLookup (..)) import Unison.Codebase.CodeLookup (CodeLookup (..))
import Unison.Codebase.MainTerm (builtinMain, builtinTest) import Unison.Codebase.MainTerm (builtinIOTestTypes, builtinMain)
import Unison.Codebase.Runtime (Error, Runtime (..)) import Unison.Codebase.Runtime (Error, Runtime (..))
import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..)) import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..))
import Unison.ConstructorReference qualified as RF import Unison.ConstructorReference qualified as RF
@ -56,9 +67,13 @@ import Unison.PrettyPrintEnv qualified as PPE
import Unison.Reference (Reference) import Unison.Reference (Reference)
import Unison.Reference qualified as RF import Unison.Reference qualified as RF
import Unison.Referent qualified as RF (pattern Ref) import Unison.Referent qualified as RF (pattern Ref)
import Unison.Runtime.ANF import Unison.Runtime.ANF as ANF
import Unison.Runtime.ANF.Rehash (rehashGroups) import Unison.Runtime.ANF.Rehash as ANF (rehashGroups)
import Unison.Runtime.ANF.Serialize (getGroup, putGroup) import Unison.Runtime.ANF.Serialize as ANF
( getGroup,
putGroup,
serializeValue,
)
import Unison.Runtime.Builtin import Unison.Runtime.Builtin
import Unison.Runtime.Decompile import Unison.Runtime.Decompile
import Unison.Runtime.Exception import Unison.Runtime.Exception
@ -88,6 +103,7 @@ import Unison.Runtime.Machine
refNumTm, refNumTm,
refNumsTm, refNumsTm,
refNumsTy, refNumsTy,
reifyValue,
) )
import Unison.Runtime.Pattern import Unison.Runtime.Pattern
import Unison.Runtime.Serialize as SER import Unison.Runtime.Serialize as SER
@ -218,6 +234,37 @@ recursiveRefDeps seen cl (RF.DerivedId i) =
Nothing -> pure mempty Nothing -> pure mempty
recursiveRefDeps _ _ _ = pure mempty recursiveRefDeps _ _ _ = pure mempty
recursiveIRefDeps ::
Map.Map Reference (SuperGroup Symbol) ->
Set Reference ->
[Reference] ->
Set Reference
recursiveIRefDeps cl seen0 rfs = srfs <> foldMap f rfs
where
seen = seen0 <> srfs
srfs = Set.fromList rfs
f = foldMap (recursiveGroupDeps cl seen) . flip Map.lookup cl
recursiveGroupDeps ::
Map.Map Reference (SuperGroup Symbol) ->
Set Reference ->
SuperGroup Symbol ->
Set Reference
recursiveGroupDeps cl seen0 grp = deps <> recursiveIRefDeps cl seen depl
where
depl = Prelude.filter (`Set.notMember` seen0) $ groupTermLinks grp
deps = Set.fromList depl
seen = seen0 <> deps
recursiveIntermedDeps ::
Map.Map Reference (SuperGroup Symbol) ->
[Reference] ->
[(Reference, SuperGroup Symbol)]
recursiveIntermedDeps cl rfs = mapMaybe f $ Set.toList ds
where
ds = recursiveIRefDeps cl mempty rfs
f rf = fmap (rf,) (Map.lookup rf cl)
collectDeps :: collectDeps ::
CodeLookup Symbol IO () -> CodeLookup Symbol IO () ->
Term Symbol -> Term Symbol ->
@ -312,13 +359,45 @@ performRehash rgrp0 ctx =
Left (msg, refs) -> error $ unpack msg ++ ": " ++ show refs Left (msg, refs) -> error $ unpack msg ++ ": " ++ show refs
Right p -> p Right p -> p
loadCode ::
CodeLookup Symbol IO () ->
PrettyPrintEnv ->
EvalCtx ->
[Reference] ->
IO (EvalCtx, [(Reference, SuperGroup Symbol)])
loadCode cl ppe ctx tmrs = do
igs <- readTVarIO (intermed $ ccache ctx)
q <-
refNumsTm (ccache ctx) <&> \m r -> case r of
RF.DerivedId {}
| Just r <- baseToIntermed ctx r -> r `Map.notMember` m
| Just r <- floatToIntermed ctx r -> r `Map.notMember` m
| otherwise -> True
_ -> False
let (new, old) = L.partition q tmrs
odeps = recursiveIntermedDeps igs $ toIntermed ctx <$> old
itms <-
traverse (\r -> (RF.unsafeId r,) <$> resolveTermRef cl r) new
let im = Tm.unhashComponent (Map.fromList itms)
(subvs, rgrp0, rbkr) = intermediateTerms ppe ctx im
lubvs r = case Map.lookup r subvs of
Just r -> r
Nothing -> error "loadCode: variable missing for float refs"
vm = Map.mapKeys RF.DerivedId . Map.map (lubvs . fst) $ im
int b r = if b then r else toIntermed ctx r
(ctx', _, rgrp) =
performRehash
(fmap (overGroupLinks int) rgrp0)
(floatRemapAdd vm ctx)
return (backrefAdd rbkr ctx', rgrp ++ odeps)
loadDeps :: loadDeps ::
CodeLookup Symbol IO () -> CodeLookup Symbol IO () ->
PrettyPrintEnv -> PrettyPrintEnv ->
EvalCtx -> EvalCtx ->
[(Reference, Either [Int] [Int])] -> [(Reference, Either [Int] [Int])] ->
[Reference] -> [Reference] ->
IO EvalCtx IO (EvalCtx, [(Reference, SuperGroup Symbol)])
loadDeps cl ppe ctx tyrs tmrs = do loadDeps cl ppe ctx tyrs tmrs = do
let cc = ccache ctx let cc = ccache ctx
sand <- readTVarIO (sandbox cc) sand <- readTVarIO (sandbox cc)
@ -328,31 +407,99 @@ loadDeps cl ppe ctx tyrs tmrs = do
r `Map.notMember` dspec ctx r `Map.notMember` dspec ctx
|| r `Map.notMember` m || r `Map.notMember` m
_ -> False _ -> False
q <-
refNumsTm (ccache ctx) <&> \m r -> case r of
RF.DerivedId {}
| Just r <- baseToIntermed ctx r -> r `Map.notMember` m
| Just r <- floatToIntermed ctx r -> r `Map.notMember` m
| otherwise -> True
_ -> False
ctx <- foldM (uncurry . allocType) ctx $ Prelude.filter p tyrs ctx <- foldM (uncurry . allocType) ctx $ Prelude.filter p tyrs
itms <- let tyAdd = Set.fromList $ fst <$> tyrs
traverse (\r -> (RF.unsafeId r,) <$> resolveTermRef cl r) $ out@(_, rgrp) <- loadCode cl ppe ctx tmrs
Prelude.filter q tmrs out <$ cacheAdd0 tyAdd rgrp (expandSandbox sand rgrp) cc
let im = Tm.unhashComponent (Map.fromList itms)
(subvs, rgrp0, rbkr) = intermediateTerms ppe ctx im compileValue :: Reference -> [(Reference, SuperGroup Symbol)] -> Value
lubvs r = case Map.lookup r subvs of compileValue base =
Just r -> r flip pair (rf base) . ANF.BLit . List . Seq.fromList . fmap cpair
Nothing -> error "loadDeps: variable missing for float refs" where
vm = Map.mapKeys RF.DerivedId . Map.map (lubvs . fst) $ im rf = ANF.BLit . TmLink . RF.Ref
int b r = if b then r else toIntermed ctx r cons x y = Data RF.pairRef 0 [] [x, y]
(ctx', _, rgrp) = tt = Data RF.unitRef 0 [] []
performRehash code sg = ANF.BLit (Code sg)
(fmap (overGroupLinks int) rgrp0) pair x y = cons x (cons y tt)
(floatRemapAdd vm ctx) cpair (r, sg) = pair (rf r) (code sg)
tyAdd = Set.fromList $ fst <$> tyrs
backrefAdd rbkr ctx' decompileCtx ::
<$ cacheAdd0 tyAdd rgrp (expandSandbox sand rgrp) cc EnumMap Word64 Reference -> EvalCtx -> Closure -> DecompResult Symbol
decompileCtx crs ctx = decompile ib $ backReferenceTm crs fr ir dt
where
ib = intermedToBase ctx
fr = floatRemap ctx
ir = intermedRemap ctx
dt = decompTm ctx
nativeEval ::
IORef EvalCtx ->
CodeLookup Symbol IO () ->
PrettyPrintEnv ->
Term Symbol ->
IO (Either Error ([Error], Term Symbol))
nativeEval ctxVar cl ppe tm = catchInternalErrors $ do
ctx <- readIORef ctxVar
(tyrs, tmrs) <- collectDeps cl tm
(ctx, codes) <- loadDeps cl ppe ctx tyrs tmrs
(ctx, tcodes, base) <- prepareEvaluation ppe tm ctx
writeIORef ctxVar ctx
nativeEvalInContext ppe ctx (codes ++ tcodes) base
interpEval ::
ActiveThreads ->
IO () ->
IORef EvalCtx ->
CodeLookup Symbol IO () ->
PrettyPrintEnv ->
Term Symbol ->
IO (Either Error ([Error], Term Symbol))
interpEval activeThreads cleanupThreads ctxVar cl ppe tm =
catchInternalErrors $ do
ctx <- readIORef ctxVar
(tyrs, tmrs) <- collectDeps cl tm
(ctx, _) <- loadDeps cl ppe ctx tyrs tmrs
(ctx, _, init) <- prepareEvaluation ppe tm ctx
initw <- refNumTm (ccache ctx) init
writeIORef ctxVar ctx
evalInContext ppe ctx activeThreads initw
`UnliftIO.finally` cleanupThreads
nativeCompile ::
Text ->
IORef EvalCtx ->
CodeLookup Symbol IO () ->
PrettyPrintEnv ->
Reference ->
FilePath ->
IO (Maybe Error)
nativeCompile _version ctxVar cl ppe base path = tryM $ do
ctx <- readIORef ctxVar
(tyrs, tmrs) <- collectRefDeps cl base
(_, codes) <- loadDeps cl ppe ctx tyrs tmrs
nativeCompileCodes codes base path
interpCompile ::
Text ->
IORef EvalCtx ->
CodeLookup Symbol IO () ->
PrettyPrintEnv ->
Reference ->
FilePath ->
IO (Maybe Error)
interpCompile version ctxVar cl ppe rf path = tryM $ do
ctx <- readIORef ctxVar
(tyrs, tmrs) <- collectRefDeps cl rf
(ctx, _) <- loadDeps cl ppe ctx tyrs tmrs
let cc = ccache ctx
lk m = flip Map.lookup m =<< baseToIntermed ctx rf
Just w <- lk <$> readTVarIO (refTm cc)
sto <- standalone cc w
BL.writeFile path . runPutL $ do
serialize $ version
serialize $ RF.showShort 8 rf
putNat w
putStoredCache sto
backrefLifted :: backrefLifted ::
Reference -> Reference ->
@ -461,13 +608,13 @@ prepareEvaluation ::
PrettyPrintEnv -> PrettyPrintEnv ->
Term Symbol -> Term Symbol ->
EvalCtx -> EvalCtx ->
IO (EvalCtx, Word64) IO (EvalCtx, [(Reference, SuperGroup Symbol)], Reference)
prepareEvaluation ppe tm ctx = do prepareEvaluation ppe tm ctx = do
missing <- cacheAdd rgrp (ccache ctx') missing <- cacheAdd rgrp (ccache ctx')
when (not . null $ missing) . fail $ when (not . null $ missing) . fail $
reportBug "E029347" $ reportBug "E029347" $
"Error in prepareEvaluation, cache is missing: " <> show missing "Error in prepareEvaluation, cache is missing: " <> show missing
(,) (backrefAdd rbkr ctx') <$> refNumTm (ccache ctx') rmn pure (backrefAdd rbkr ctx', rgrp, rmn)
where where
(rmn0, frem, rgrp0, rbkr) = intermediateTerm ppe ctx tm (rmn0, frem, rgrp0, rbkr) = intermediateTerm ppe ctx tm
int b r = if b then r else toIntermed ctx r int b r = if b then r else toIntermed ctx r
@ -500,6 +647,73 @@ backReferenceTm ws frs irs dcm c i = do
bs <- Map.lookup r dcm bs <- Map.lookup r dcm
Map.lookup i bs Map.lookup i bs
schemeProc :: [String] -> CreateProcess
schemeProc args =
(proc "native-compiler/bin/runner" args)
{ std_in = CreatePipe,
std_out = Inherit,
std_err = Inherit
}
-- Note: this currently does not support yielding values; instead it
-- just produces a result appropriate for unitary `run` commands. The
-- reason is that the executed code can cause output to occur, which
-- would interfere with using stdout to communicate the final value
-- back from the subprocess. We need a side channel to support both
-- output effects and result communication.
--
-- Strictly speaking, this also holds for input. Input effects will
-- just get EOF in this scheme, because the code communication has
-- taken over the input. This could probably be without a side
-- channel, but a side channel is probably better.
nativeEvalInContext ::
PrettyPrintEnv ->
EvalCtx ->
[(Reference, SuperGroup Symbol)] ->
Reference ->
IO (Either Error ([Error], Term Symbol))
nativeEvalInContext _ ctx codes base = do
let cc = ccache ctx
crs <- readTVarIO $ combRefs cc
let bytes = serializeValue . compileValue base $ codes
decodeResult (Left msg) = pure . Left $ fromString msg
decodeResult (Right val) =
reifyValue cc val >>= \case
Left _ -> pure . Left $ "missing references from result"
Right cl -> case decompileCtx crs ctx cl of
(errs, dv) -> pure $ Right (listErrors errs, dv)
callout (Just pin) _ _ ph = do
BS.hPut pin . runPutS . putWord32be . fromIntegral $ BS.length bytes
BS.hPut pin bytes
UnliftIO.hClose pin
let unit = Data RF.unitRef 0 [] []
sunit = Data RF.pairRef 0 [] [unit, unit]
waitForProcess ph
decodeResult $ Right sunit
-- TODO: actualy receive output from subprocess
-- decodeResult . deserializeValue =<< BS.hGetContents pout
callout _ _ _ _ =
pure . Left $ "withCreateProcess didn't provide handles"
withCreateProcess (schemeProc []) callout
nativeCompileCodes ::
[(Reference, SuperGroup Symbol)] ->
Reference ->
FilePath ->
IO ()
nativeCompileCodes codes base path = do
let bytes = serializeValue . compileValue base $ codes
callout (Just pin) _ _ ph = do
BS.hPut pin . runPutS . putWord32be . fromIntegral $ BS.length bytes
BS.hPut pin bytes
UnliftIO.hClose pin
waitForProcess ph
pure ()
callout _ _ _ _ = fail "withCreateProcess didn't provide handles"
withCreateProcess (schemeProc ["-o", path]) callout
evalInContext :: evalInContext ::
PrettyPrintEnv -> PrettyPrintEnv ->
EvalCtx -> EvalCtx ->
@ -510,16 +724,7 @@ evalInContext ppe ctx activeThreads w = do
r <- newIORef BlackHole r <- newIORef BlackHole
crs <- readTVarIO (combRefs $ ccache ctx) crs <- readTVarIO (combRefs $ ccache ctx)
let hook = watchHook r let hook = watchHook r
decom = decom = decompileCtx crs ctx
decompile
(intermedToBase ctx)
( backReferenceTm
crs
(floatRemap ctx)
(intermedRemap ctx)
(decompTm ctx)
)
finish = fmap (first listErrors . decom) finish = fmap (first listErrors . decom)
prettyError (PE _ p) = p prettyError (PE _ p) = p
@ -706,28 +911,22 @@ startRuntime sandboxed runtimeHost version = do
pure $ pure $
Runtime Runtime
{ terminate = pure (), { terminate = pure (),
evaluate = \cl ppe tm -> catchInternalErrors $ do evaluate = interpEval activeThreads cleanupThreads ctxVar,
ctx <- readIORef ctxVar compileTo = interpCompile version ctxVar,
(tyrs, tmrs) <- collectDeps cl tm
ctx <- loadDeps cl ppe ctx tyrs tmrs
(ctx, init) <- prepareEvaluation ppe tm ctx
writeIORef ctxVar ctx
evalInContext ppe ctx activeThreads init `UnliftIO.finally` cleanupThreads,
compileTo = \cl ppe rf path -> tryM $ do
ctx <- readIORef ctxVar
(tyrs, tmrs) <- collectRefDeps cl rf
ctx <- loadDeps cl ppe ctx tyrs tmrs
let cc = ccache ctx
lk m = flip Map.lookup m =<< baseToIntermed ctx rf
Just w <- lk <$> readTVarIO (refTm cc)
sto <- standalone cc w
BL.writeFile path . runPutL $ do
serialize $ version
serialize $ RF.showShort 8 rf
putNat w
putStoredCache sto,
mainType = builtinMain External, mainType = builtinMain External,
ioTestType = builtinTest External ioTestTypes = builtinIOTestTypes External
}
startNativeRuntime :: Text -> IO (Runtime Symbol)
startNativeRuntime version = do
ctxVar <- newIORef =<< baseContext False
pure $
Runtime
{ terminate = pure (),
evaluate = nativeEval ctxVar,
compileTo = nativeCompile version ctxVar,
mainType = builtinMain External,
ioTestTypes = builtinIOTestTypes External
} }
withRuntime :: (MonadUnliftIO m) => Bool -> RuntimeHost -> Text -> (Runtime Symbol -> m a) -> m a withRuntime :: (MonadUnliftIO m) => Bool -> RuntimeHost -> Text -> (Runtime Symbol -> m a) -> m a

View File

@ -61,12 +61,12 @@ import Unison.Runtime.ANF
internalBug, internalBug,
packTags, packTags,
pattern TApp, pattern TApp,
pattern TBLit,
pattern TFOp, pattern TFOp,
pattern TFrc, pattern TFrc,
pattern THnd, pattern THnd,
pattern TLets, pattern TLets,
pattern TLit, pattern TLit,
pattern TBLit,
pattern TMatch, pattern TMatch,
pattern TName, pattern TName,
pattern TPrm, pattern TPrm,
@ -390,6 +390,7 @@ data BPrim1
| TLTT -- value, Term.Link.toText | TLTT -- value, Term.Link.toText
-- debug -- debug
| DBTX -- debug text | DBTX -- debug text
| SDBL -- sandbox link list
deriving (Show, Eq, Ord) deriving (Show, Eq, Ord)
data BPrim2 data BPrim2
@ -424,6 +425,7 @@ data BPrim2
| TRCE -- trace | TRCE -- trace
-- code -- code
| SDBX -- sandbox | SDBX -- sandbox
| SDBV -- sandbox Value
deriving (Show, Eq, Ord) deriving (Show, Eq, Ord)
data MLit data MLit
@ -859,7 +861,7 @@ emitSection _ _ _ _ ctx (TLit l) =
| ANF.LY {} <- l = addCount 0 1 | ANF.LY {} <- l = addCount 0 1
| otherwise = addCount 1 0 | otherwise = addCount 1 0
emitSection _ _ _ _ ctx (TBLit l) = emitSection _ _ _ _ ctx (TBLit l) =
addCount 0 1 . countCtx ctx . Ins (emitBLit l) . Yield $ BArg1 0 addCount 0 1 . countCtx ctx . Ins (emitBLit l) . Yield $ BArg1 0
emitSection rns grpr grpn rec ctx (TMatch v bs) emitSection rns grpr grpn rec ctx (TMatch v bs)
| Just (i, BX) <- ctxResolve ctx v, | Just (i, BX) <- ctxResolve ctx v,
MatchData r cs df <- bs = MatchData r cs df <- bs =
@ -1040,7 +1042,6 @@ emitLet _ _ _ _ _ _ _ (TLit l) =
fmap (Ins $ emitLit l) fmap (Ins $ emitLit l)
emitLet _ _ _ _ _ _ _ (TBLit l) = emitLet _ _ _ _ _ _ _ (TBLit l) =
fmap (Ins $ emitBLit l) fmap (Ins $ emitBLit l)
-- emitLet rns grp _ _ _ ctx (TApp (FComb r) args) -- emitLet rns grp _ _ _ ctx (TApp (FComb r) args)
-- -- We should be able to tell if we are making a saturated call -- -- We should be able to tell if we are making a saturated call
-- -- or not here. We aren't carrying the information here yet, though. -- -- or not here. We aren't carrying the information here yet, though.
@ -1190,6 +1191,8 @@ emitPOp ANF.CVLD = emitBP1 CVLD
emitPOp ANF.LOAD = emitBP1 LOAD emitPOp ANF.LOAD = emitBP1 LOAD
emitPOp ANF.VALU = emitBP1 VALU emitPOp ANF.VALU = emitBP1 VALU
emitPOp ANF.SDBX = emitBP2 SDBX emitPOp ANF.SDBX = emitBP2 SDBX
emitPOp ANF.SDBL = emitBP1 SDBL
emitPOp ANF.SDBV = emitBP2 SDBV
-- error call -- error call
emitPOp ANF.EROR = emitBP2 THRO emitPOp ANF.EROR = emitBP2 THRO
emitPOp ANF.TRCE = emitBP2 TRCE emitPOp ANF.TRCE = emitBP2 TRCE
@ -1553,7 +1556,7 @@ prettySection ind sec =
. prettySection (ind + 1) pu . prettySection (ind + 1) pu
. foldr (\p r -> rqc p . r) id (mapToList bs) . foldr (\p r -> rqc p . r) id (mapToList bs)
where where
rqc (i , e) = rqc (i, e) =
showString "\n" showString "\n"
. shows i . shows i
. showString " ->\n" . showString " ->\n"

View File

@ -28,8 +28,13 @@ import Unison.Builtin.Decls (exceptionRef, ioFailureRef)
import Unison.Builtin.Decls qualified as Rf import Unison.Builtin.Decls qualified as Rf
import Unison.ConstructorReference qualified as CR import Unison.ConstructorReference qualified as CR
import Unison.Prelude hiding (Text) import Unison.Prelude hiding (Text)
import Unison.Reference (Reference, Reference' (Builtin), toShortHash) import Unison.Reference
import Unison.Referent (pattern Con, pattern Ref) ( Reference,
Reference' (Builtin),
isBuiltin,
toShortHash,
)
import Unison.Referent (Referent, pattern Con, pattern Ref)
import Unison.Runtime.ANF as ANF import Unison.Runtime.ANF as ANF
( CompileExn (..), ( CompileExn (..),
Mem (..), Mem (..),
@ -388,6 +393,14 @@ exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 DBTX i)
bstk <- bump bstk bstk <- bump bstk
bstk <$ pokeBi bstk (Util.Text.pack tx) bstk <$ pokeBi bstk (Util.Text.pack tx)
pure (denv, ustk, bstk, k) pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 SDBL i)
| sandboxed env =
die "attempted to use sandboxed operation: sandboxLinks"
| otherwise = do
tl <- peekOffBi bstk i
bstk <- bump bstk
pokeS bstk . encodeSandboxListResult =<< sandboxList env tl
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k _ (BPrim1 op i) = do exec !_ !denv !_activeThreads !ustk !bstk !k _ (BPrim1 op i) = do
(ustk, bstk) <- bprim1 ustk bstk op i (ustk, bstk) <- bprim1 ustk bstk op i
pure (denv, ustk, bstk, k) pure (denv, ustk, bstk, k)
@ -399,6 +412,17 @@ exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim2 SDBX i j) = do
ustk <- bump ustk ustk <- bump ustk
poke ustk $ if b then 1 else 0 poke ustk $ if b then 1 else 0
pure (denv, ustk, bstk, k) pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim2 SDBV i j)
| sandboxed env =
die "attempted to use sandboxed operation: Value.validateSandboxed"
| otherwise = do
s <- peekOffS bstk i
v <- peekOffBi bstk j
l <- decodeSandboxArgument s
res <- checkValueSandboxing env l v
bstk <- bump bstk
poke bstk $ encodeSandboxResult res
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k _ (BPrim2 EQLU i j) = do exec !_ !denv !_activeThreads !ustk !bstk !k _ (BPrim2 EQLU i j) = do
x <- peekOff bstk i x <- peekOff bstk i
y <- peekOff bstk j y <- peekOff bstk j
@ -1576,6 +1600,7 @@ bprim1 !ustk !bstk TLTT _ = pure (ustk, bstk)
bprim1 !ustk !bstk LOAD _ = pure (ustk, bstk) bprim1 !ustk !bstk LOAD _ = pure (ustk, bstk)
bprim1 !ustk !bstk VALU _ = pure (ustk, bstk) bprim1 !ustk !bstk VALU _ = pure (ustk, bstk)
bprim1 !ustk !bstk DBTX _ = pure (ustk, bstk) bprim1 !ustk !bstk DBTX _ = pure (ustk, bstk)
bprim1 !ustk !bstk SDBL _ = pure (ustk, bstk)
{-# INLINE bprim1 #-} {-# INLINE bprim1 #-}
bprim2 :: bprim2 ::
@ -1781,6 +1806,7 @@ bprim2 !ustk !bstk THRO _ _ = pure (ustk, bstk) -- impossible
bprim2 !ustk !bstk TRCE _ _ = pure (ustk, bstk) -- impossible bprim2 !ustk !bstk TRCE _ _ = pure (ustk, bstk) -- impossible
bprim2 !ustk !bstk CMPU _ _ = pure (ustk, bstk) -- impossible bprim2 !ustk !bstk CMPU _ _ = pure (ustk, bstk) -- impossible
bprim2 !ustk !bstk SDBX _ _ = pure (ustk, bstk) -- impossible bprim2 !ustk !bstk SDBX _ _ = pure (ustk, bstk) -- impossible
bprim2 !ustk !bstk SDBV _ _ = pure (ustk, bstk) -- impossible
{-# INLINE bprim2 #-} {-# INLINE bprim2 #-}
yield :: yield ::
@ -1949,6 +1975,22 @@ decodeSandboxArgument s = fmap join . for (toList s) $ \case
_ -> pure [] -- constructor _ -> pure [] -- constructor
_ -> die "decodeSandboxArgument: unrecognized value" _ -> die "decodeSandboxArgument: unrecognized value"
encodeSandboxListResult :: [Reference] -> Sq.Seq Closure
encodeSandboxListResult =
Sq.fromList . fmap (Foreign . Wrap Rf.termLinkRef . Ref)
encodeSandboxResult :: Either [Reference] [Reference] -> Closure
encodeSandboxResult (Left rfs) =
encodeLeft . Foreign . Wrap Rf.listRef $ encodeSandboxListResult rfs
encodeSandboxResult (Right rfs) =
encodeRight . Foreign . Wrap Rf.listRef $ encodeSandboxListResult rfs
encodeLeft :: Closure -> Closure
encodeLeft = DataB1 Rf.eitherRef leftTag
encodeRight :: Closure -> Closure
encodeRight = DataB1 Rf.eitherRef rightTag
addRefs :: addRefs ::
TVar Word64 -> TVar Word64 ->
TVar (M.Map Reference Word64) -> TVar (M.Map Reference Word64) ->
@ -1992,6 +2034,12 @@ codeValidate tml cc = do
extra = Foreign . Wrap Rf.textRef . Util.Text.pack $ show cs extra = Foreign . Wrap Rf.textRef . Util.Text.pack $ show cs
in pure . Just $ Failure ioFailureRef msg extra in pure . Just $ Failure ioFailureRef msg extra
sandboxList :: CCache -> Referent -> IO [Reference]
sandboxList cc (Ref r) = do
sands <- readTVarIO $ sandbox cc
pure . maybe [] S.toList $ M.lookup r sands
sandboxList _ _ = pure []
checkSandboxing :: checkSandboxing ::
CCache -> CCache ->
[Reference] -> [Reference] ->
@ -2007,6 +2055,31 @@ checkSandboxing cc allowed0 c = do
where where
allowed = S.fromList allowed0 allowed = S.fromList allowed0
-- Checks a Value for sandboxing. A Left result indicates that some
-- dependencies of the Value are unknown. A Right result indicates
-- builtins transitively referenced by the Value that are disallowed.
checkValueSandboxing ::
CCache ->
[Reference] ->
ANF.Value ->
IO (Either [Reference] [Reference])
checkValueSandboxing cc allowed0 v = do
sands <- readTVarIO $ sandbox cc
have <- readTVarIO $ intermed cc
let f False r
| Nothing <- M.lookup r have,
not (isBuiltin r) =
(S.singleton r, mempty)
| Just rs <- M.lookup r sands =
(mempty, rs `S.difference` allowed)
f _ _ = (mempty, mempty)
case valueLinks f v of
(miss, sbx)
| S.null miss -> pure . Right $ S.toList sbx
| otherwise -> pure . Left $ S.toList miss
where
allowed = S.fromList allowed0
cacheAdd0 :: cacheAdd0 ::
S.Set Reference -> S.Set Reference ->
[(Reference, SuperGroup Symbol)] -> [(Reference, SuperGroup Symbol)] ->
@ -2358,6 +2431,15 @@ unitTag
packTags rt 0 packTags rt 0
| otherwise = error "internal error: unitTag" | otherwise = error "internal error: unitTag"
leftTag, rightTag :: Word64
(leftTag, rightTag)
| Just n <- M.lookup Rf.eitherRef builtinTypeNumbering,
et <- toEnum (fromIntegral n),
lt <- toEnum (fromIntegral Rf.eitherLeftId),
rt <- toEnum (fromIntegral Rf.eitherRightId) =
(packTags et lt, packTags et rt)
| otherwise = error "internal error: either tags"
universalCompare :: universalCompare ::
(Foreign -> Foreign -> Ordering) -> (Foreign -> Foreign -> Ordering) ->
Closure -> Closure ->

View File

@ -449,6 +449,7 @@ instance Tag BPrim1 where
tag2word VALU = 23 tag2word VALU = 23
tag2word TLTT = 24 tag2word TLTT = 24
tag2word DBTX = 25 tag2word DBTX = 25
tag2word SDBL = 26
word2tag 0 = pure SIZT word2tag 0 = pure SIZT
word2tag 1 = pure USNC word2tag 1 = pure USNC
@ -476,6 +477,7 @@ instance Tag BPrim1 where
word2tag 23 = pure VALU word2tag 23 = pure VALU
word2tag 24 = pure TLTT word2tag 24 = pure TLTT
word2tag 25 = pure DBTX word2tag 25 = pure DBTX
word2tag 26 = pure SDBL
word2tag n = unknownTag "BPrim1" n word2tag n = unknownTag "BPrim1" n
instance Tag BPrim2 where instance Tag BPrim2 where
@ -504,6 +506,7 @@ instance Tag BPrim2 where
tag2word SDBX = 22 tag2word SDBX = 22
tag2word IXOT = 23 tag2word IXOT = 23
tag2word IXOB = 24 tag2word IXOB = 24
tag2word SDBV = 25
word2tag 0 = pure EQLU word2tag 0 = pure EQLU
word2tag 1 = pure CMPU word2tag 1 = pure CMPU
@ -530,4 +533,5 @@ instance Tag BPrim2 where
word2tag 22 = pure SDBX word2tag 22 = pure SDBX
word2tag 23 = pure IXOT word2tag 23 = pure IXOT
word2tag 24 = pure IXOB word2tag 24 = pure IXOB
word2tag 25 = pure SDBV
word2tag n = unknownTag "BPrim2" n word2tag n = unknownTag "BPrim2" n

View File

@ -93,9 +93,18 @@ resolveUnresolvedModifier unresolvedModifier var =
UnresolvedModifier'Structural -> pure (DD.Structural <$ unresolvedModifier) UnresolvedModifier'Structural -> pure (DD.Structural <$ unresolvedModifier)
UnresolvedModifier'UniqueWithGuid guid -> pure (DD.Unique guid <$ unresolvedModifier) UnresolvedModifier'UniqueWithGuid guid -> pure (DD.Unique guid <$ unresolvedModifier)
UnresolvedModifier'UniqueWithoutGuid guid0 -> do UnresolvedModifier'UniqueWithoutGuid guid0 -> do
ParsingEnv {uniqueTypeGuid} <- ask unique <- resolveUniqueModifier var guid0
guid <- fromMaybe guid0 <$> lift (lift (uniqueTypeGuid (Name.unsafeFromVar var))) pure $ unique <$ unresolvedModifier
pure (DD.Unique guid <$ unresolvedModifier)
resolveUniqueModifier :: (Monad m, Var v) => v -> Text -> P v m DD.Modifier
resolveUniqueModifier var guid0 = do
ParsingEnv {uniqueTypeGuid} <- ask
guid <- fromMaybe guid0 <$> lift (lift (uniqueTypeGuid (Name.unsafeFromVar var)))
pure $ DD.Unique guid
defaultUniqueModifier :: (Monad m, Var v) => v -> P v m DD.Modifier
defaultUniqueModifier var =
uniqueName 32 >>= resolveUniqueModifier var
-- unique[someguid] type Blah = ... -- unique[someguid] type Blah = ...
modifier :: (Monad m, Var v) => P v m (Maybe (L.Token UnresolvedModifier)) modifier :: (Monad m, Var v) => P v m (Maybe (L.Token UnresolvedModifier))
@ -132,7 +141,7 @@ dataDeclaration ::
Maybe (L.Token UnresolvedModifier) -> Maybe (L.Token UnresolvedModifier) ->
P v m (v, DataDeclaration v Ann, Accessors v) P v m (v, DataDeclaration v Ann, Accessors v)
dataDeclaration maybeUnresolvedModifier = do dataDeclaration maybeUnresolvedModifier = do
keywordTok <- fmap void (reserved "type") <|> openBlockWith "type" _ <- fmap void (reserved "type") <|> openBlockWith "type"
(name, typeArgs) <- (name, typeArgs) <-
(,) (,)
<$> TermParser.verifyRelativeVarName prefixDefinitionName <$> TermParser.verifyRelativeVarName prefixDefinitionName
@ -181,7 +190,13 @@ dataDeclaration maybeUnresolvedModifier = do
closingAnn :: Ann closingAnn :: Ann
closingAnn = last (ann eq : ((\(_, _, t) -> ann t) <$> constructors)) closingAnn = last (ann eq : ((\(_, _, t) -> ann t) <$> constructors))
case maybeUnresolvedModifier of case maybeUnresolvedModifier of
Nothing -> P.customFailure $ MissingTypeModifier ("type" <$ keywordTok) name Nothing -> do
modifier <- defaultUniqueModifier (L.payload name)
pure
( L.payload name,
DD.mkDataDecl' modifier closingAnn typeArgVs constructors,
accessors
)
Just unresolvedModifier -> do Just unresolvedModifier -> do
modifier <- resolveUnresolvedModifier unresolvedModifier (L.payload name) modifier <- resolveUnresolvedModifier unresolvedModifier (L.payload name)
pure pure
@ -196,7 +211,7 @@ effectDeclaration ::
Maybe (L.Token UnresolvedModifier) -> Maybe (L.Token UnresolvedModifier) ->
P v m (v, EffectDeclaration v Ann) P v m (v, EffectDeclaration v Ann)
effectDeclaration maybeUnresolvedModifier = do effectDeclaration maybeUnresolvedModifier = do
keywordTok <- fmap void (reserved "ability") <|> openBlockWith "ability" _ <- fmap void (reserved "ability") <|> openBlockWith "ability"
name <- TermParser.verifyRelativeVarName prefixDefinitionName name <- TermParser.verifyRelativeVarName prefixDefinitionName
typeArgs <- many (TermParser.verifyRelativeVarName prefixDefinitionName) typeArgs <- many (TermParser.verifyRelativeVarName prefixDefinitionName)
let typeArgVs = L.payload <$> typeArgs let typeArgVs = L.payload <$> typeArgs
@ -208,7 +223,12 @@ effectDeclaration maybeUnresolvedModifier = do
last $ ann blockStart : ((\(_, _, t) -> ann t) <$> constructors) last $ ann blockStart : ((\(_, _, t) -> ann t) <$> constructors)
case maybeUnresolvedModifier of case maybeUnresolvedModifier of
Nothing -> P.customFailure $ MissingTypeModifier ("ability" <$ keywordTok) name Nothing -> do
modifier <- defaultUniqueModifier (L.payload name)
pure
( L.payload name,
DD.mkEffectDecl' modifier closingAnn typeArgVs constructors
)
Just unresolvedModifier -> do Just unresolvedModifier -> do
modifier <- resolveUnresolvedModifier unresolvedModifier (L.payload name) modifier <- resolveUnresolvedModifier unresolvedModifier (L.payload name)
pure pure

View File

@ -1,5 +1,6 @@
module Unison.Syntax.DeclPrinter (prettyDecl, prettyDeclHeader, prettyDeclOrBuiltinHeader) where module Unison.Syntax.DeclPrinter (prettyDecl, prettyDeclW, prettyDeclHeader, prettyDeclOrBuiltinHeader, AccessorName) where
import Control.Monad.Writer (Writer, runWriter, tell)
import Data.List (isPrefixOf) import Data.List (isPrefixOf)
import Data.Map qualified as Map import Data.Map qualified as Map
import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..)) import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..))
@ -13,6 +14,7 @@ import Unison.DataDeclaration qualified as DD
import Unison.DataDeclaration.Dependencies qualified as DD import Unison.DataDeclaration.Dependencies qualified as DD
import Unison.HashQualified qualified as HQ import Unison.HashQualified qualified as HQ
import Unison.Name (Name) import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.PrettyPrintEnv (PrettyPrintEnv) import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.PrettyPrintEnv qualified as PPE import Unison.PrettyPrintEnv qualified as PPE
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (..)) import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (..))
@ -32,6 +34,19 @@ import Unison.Var qualified as Var
type SyntaxText = S.SyntaxText' Reference type SyntaxText = S.SyntaxText' Reference
type AccessorName = HQ.HashQualified Name
prettyDeclW ::
(Var v) =>
PrettyPrintEnvDecl ->
Reference ->
HQ.HashQualified Name ->
DD.Decl v a ->
Writer [AccessorName] (Pretty SyntaxText)
prettyDeclW ppe r hq d = case d of
Left e -> pure $ prettyEffectDecl ppe r hq e
Right dd -> prettyDataDecl ppe r hq dd
prettyDecl :: prettyDecl ::
(Var v) => (Var v) =>
PrettyPrintEnvDecl -> PrettyPrintEnvDecl ->
@ -39,9 +54,7 @@ prettyDecl ::
HQ.HashQualified Name -> HQ.HashQualified Name ->
DD.Decl v a -> DD.Decl v a ->
Pretty SyntaxText Pretty SyntaxText
prettyDecl ppe r hq d = case d of prettyDecl ppe r hq d = fst . runWriter $ prettyDeclW ppe r hq d
Left e -> prettyEffectDecl ppe r hq e
Right dd -> prettyDataDecl ppe r hq dd
prettyEffectDecl :: prettyEffectDecl ::
(Var v) => (Var v) =>
@ -70,7 +83,7 @@ prettyGADT env ctorType r name dd =
constructor (n, (_, _, t)) = constructor (n, (_, _, t)) =
prettyPattern (PPED.unsuffixifiedPPE env) ctorType name (ConstructorReference r n) prettyPattern (PPED.unsuffixifiedPPE env) ctorType name (ConstructorReference r n)
<> fmt S.TypeAscriptionColon " :" <> fmt S.TypeAscriptionColon " :"
`P.hang` TypePrinter.prettySyntax (PPED.suffixifiedPPE env) t `P.hang` TypePrinter.prettySyntax (PPED.suffixifiedPPE env) t
header = prettyEffectHeader name (DD.EffectDeclaration dd) <> fmt S.ControlKeyword " where" header = prettyEffectHeader name (DD.EffectDeclaration dd) <> fmt S.ControlKeyword " where"
prettyPattern :: prettyPattern ::
@ -97,24 +110,35 @@ prettyDataDecl ::
Reference -> Reference ->
HQ.HashQualified Name -> HQ.HashQualified Name ->
DataDeclaration v a -> DataDeclaration v a ->
Pretty SyntaxText Writer [AccessorName] (Pretty SyntaxText)
prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd = prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd =
(header <>) . P.sep (fmt S.DelimiterChar (" | " `P.orElse` "\n | ")) $ (header <>)
constructor . P.sep (fmt S.DelimiterChar (" | " `P.orElse` "\n | "))
<$> zip <$> constructor
[0 ..] `traverse` zip
(DD.constructors' dd) [0 ..]
(DD.constructors' dd)
where where
constructor (n, (_, _, Type.ForallsNamed' _ t)) = constructor' n t constructor (n, (_, _, Type.ForallsNamed' _ t)) = constructor' n t
constructor (n, (_, _, t)) = constructor' n t constructor (n, (_, _, t)) = constructor' n t
constructor' n t = case Type.unArrows t of constructor' n t = case Type.unArrows t of
Nothing -> prettyPattern unsuffixifiedPPE CT.Data name (ConstructorReference r n) Nothing -> pure $ prettyPattern unsuffixifiedPPE CT.Data name (ConstructorReference r n)
Just ts -> case fieldNames unsuffixifiedPPE r name dd of Just ts -> case fieldNames unsuffixifiedPPE r name dd of
Nothing -> Nothing ->
P.group . P.hang' (prettyPattern unsuffixifiedPPE CT.Data name (ConstructorReference r n)) " " $ pure
P.spaced (runPretty suffixifiedPPE (traverse (TypePrinter.prettyRaw Map.empty 10) (init ts))) . P.group
Just fs -> . P.hang' (prettyPattern unsuffixifiedPPE CT.Data name (ConstructorReference r n)) " "
P.group $ $ P.spaced (runPretty suffixifiedPPE (traverse (TypePrinter.prettyRaw Map.empty 10) (init ts)))
Just fs -> do
tell
[ case accessor of
Nothing -> HQ.NameOnly $ declName `Name.joinDot` fieldName
Just accessor -> HQ.NameOnly $ declName `Name.joinDot` fieldName `Name.joinDot` accessor
| HQ.NameOnly declName <- [name],
HQ.NameOnly fieldName <- fs,
accessor <- [Nothing, Just "set", Just "modify"]
]
pure . P.group $
fmt S.DelimiterChar "{ " fmt S.DelimiterChar "{ "
<> P.sep <> P.sep
(fmt S.DelimiterChar "," <> " " `P.orElse` "\n ") (fmt S.DelimiterChar "," <> " " `P.orElse` "\n ")
@ -124,7 +148,7 @@ prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd =
P.group $ P.group $
styleHashQualified'' (fmt (S.TypeReference r)) fname styleHashQualified'' (fmt (S.TypeReference r)) fname
<> fmt S.TypeAscriptionColon " :" <> fmt S.TypeAscriptionColon " :"
`P.hang` runPretty suffixifiedPPE (TypePrinter.prettyRaw Map.empty (-1) typ) `P.hang` runPretty suffixifiedPPE (TypePrinter.prettyRaw Map.empty (-1) typ)
header = prettyDataHeader name dd <> fmt S.DelimiterChar (" = " `P.orElse` "\n = ") header = prettyDataHeader name dd <> fmt S.DelimiterChar (" = " `P.orElse` "\n = ")
-- Comes up with field names for a data declaration which has the form of a -- Comes up with field names for a data declaration which has the form of a
@ -176,8 +200,9 @@ fieldNames env r name dd = do
prettyModifier :: DD.Modifier -> Pretty SyntaxText prettyModifier :: DD.Modifier -> Pretty SyntaxText
prettyModifier DD.Structural = fmt S.DataTypeModifier "structural" prettyModifier DD.Structural = fmt S.DataTypeModifier "structural"
prettyModifier (DD.Unique _uid) = prettyModifier (DD.Unique _uid) = mempty -- don't print anything since 'unique' is the default
fmt S.DataTypeModifier "unique" -- <> ("[" <> P.text uid <> "] ") -- leaving this comment for the historical record so the syntax for uid is not forgotten
-- fmt S.DataTypeModifier "unique" -- <> ("[" <> P.text uid <> "] ")
prettyDataHeader :: prettyDataHeader ::
(Var v) => HQ.HashQualified Name -> DD.DataDeclaration v a -> Pretty SyntaxText (Var v) => HQ.HashQualified Name -> DD.DataDeclaration v a -> Pretty SyntaxText

View File

@ -11,7 +11,7 @@ import Unison.DataDeclaration qualified as DD
import Unison.Name qualified as Name import Unison.Name qualified as Name
import Unison.Names qualified as Names import Unison.Names qualified as Names
import Unison.Names.ResolutionResult qualified as Names import Unison.Names.ResolutionResult qualified as Names
import Unison.NamesWithHistory qualified as NamesWithHistory import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann) import Unison.Parser.Ann (Ann)
import Unison.Prelude import Unison.Prelude
import Unison.Syntax.DeclParser (declarations) import Unison.Syntax.DeclParser (declarations)
@ -41,16 +41,16 @@ file = do
-- which are parsed and applied to the type decls and term stanzas -- which are parsed and applied to the type decls and term stanzas
(namesStart, imports) <- TermParser.imports <* optional semi (namesStart, imports) <- TermParser.imports <* optional semi
(dataDecls, effectDecls, parsedAccessors) <- declarations (dataDecls, effectDecls, parsedAccessors) <- declarations
env <- case UFN.environmentFor (NamesWithHistory.currentNames namesStart) dataDecls effectDecls of env <- case UFN.environmentFor namesStart dataDecls effectDecls of
Right (Right env) -> pure env Right (Right env) -> pure env
Right (Left es) -> P.customFailure $ TypeDeclarationErrors es Right (Left es) -> P.customFailure $ TypeDeclarationErrors es
Left es -> resolutionFailures (toList es) Left es -> resolutionFailures (toList es)
let accessors :: [[(v, Ann, Term v Ann)]] let accessors :: [[(v, Ann, Term v Ann)]]
accessors = accessors =
[ DD.generateRecordAccessors (toPair <$> fields) (L.payload typ) r [ DD.generateRecordAccessors (toPair <$> fields) (L.payload typ) r
| (typ, fields) <- parsedAccessors, | (typ, fields) <- parsedAccessors,
Just (r, _) <- [Map.lookup (L.payload typ) (UF.datas env)] Just (r, _) <- [Map.lookup (L.payload typ) (UF.datas env)]
] ]
toPair (tok, typ) = (L.payload tok, ann tok <> ann typ) toPair (tok, typ) = (L.payload tok, ann tok <> ann typ)
let importNames = [(Name.unsafeFromVar v, Name.unsafeFromVar v2) | (v, v2) <- imports] let importNames = [(Name.unsafeFromVar v, Name.unsafeFromVar v2) | (v, v2) <- imports]
let locals = Names.importing importNames (UF.names env) let locals = Names.importing importNames (UF.names env)
@ -61,7 +61,7 @@ file = do
-- --
-- There's some more complicated logic below to have suffix-based name resolution -- There's some more complicated logic below to have suffix-based name resolution
-- make use of _terms_ from the local file. -- make use of _terms_ from the local file.
local (\e -> e {names = NamesWithHistory.push locals namesStart}) $ do local (\e -> e {names = Names.push locals namesStart}) $ do
names <- asks names names <- asks names
stanzas0 <- sepBy semi stanza stanzas0 <- sepBy semi stanza
let stanzas = fmap (TermParser.substImports names imports) <$> stanzas0 let stanzas = fmap (TermParser.substImports names imports) <$> stanzas0
@ -78,26 +78,26 @@ file = do
-- All locally declared term variables, running example: -- All locally declared term variables, running example:
-- [foo.alice, bar.alice, zonk.bob] -- [foo.alice, bar.alice, zonk.bob]
fqLocalTerms :: [v] fqLocalTerms :: [v]
fqLocalTerms = (stanzas0 >>= getVars) <> (view _1 <$> join accessors) fqLocalTerms = (stanzas0 >>= getVars) <> (view _1 <$> join accessors)
-- suffixified local term bindings shadow any same-named thing from the outer codebase scope -- suffixified local term bindings shadow any same-named thing from the outer codebase scope
-- example: `foo.bar` in local file scope will shadow `foo.bar` and `bar` in codebase scope -- example: `foo.bar` in local file scope will shadow `foo.bar` and `bar` in codebase scope
let (curNames, resolveLocals) = let (curNames, resolveLocals) =
( Names.shadowTerms locals (NamesWithHistory.currentNames names), ( Names.shadowTerms locals names,
resolveLocals resolveLocals
) )
where where
-- Each unique suffix mapped to its fully qualified name -- Each unique suffix mapped to its fully qualified name
canonicalVars :: Map v v canonicalVars :: Map v v
canonicalVars = UFN.variableCanonicalizer fqLocalTerms canonicalVars = UFN.variableCanonicalizer fqLocalTerms
-- All unique local term name suffixes - these we want to -- All unique local term name suffixes - these we want to
-- avoid resolving to a term that's in the codebase -- avoid resolving to a term that's in the codebase
locals :: [Name.Name] locals :: [Name.Name]
locals = (Name.unsafeFromVar <$> Map.keys canonicalVars) locals = (Name.unsafeFromVar <$> Map.keys canonicalVars)
-- A function to replace unique local term suffixes with their -- A function to replace unique local term suffixes with their
-- fully qualified name -- fully qualified name
replacements = [ (v, Term.var () v2) | (v,v2) <- Map.toList canonicalVars, v /= v2 ] replacements = [(v, Term.var () v2) | (v, v2) <- Map.toList canonicalVars, v /= v2]
resolveLocals = ABT.substsInheritAnnotation replacements resolveLocals = ABT.substsInheritAnnotation replacements
let bindNames = Term.bindSomeNames Name.unsafeFromVar (Set.fromList fqLocalTerms) curNames . resolveLocals let bindNames = Term.bindSomeNames Name.unsafeFromVar (Set.fromList fqLocalTerms) curNames . resolveLocals
terms <- case List.validate (traverseOf _3 bindNames) terms of terms <- case List.validate (traverseOf _3 bindNames) terms of

View File

@ -32,9 +32,9 @@ import Unison.ConstructorType qualified as CT
import Unison.HashQualified qualified as HQ import Unison.HashQualified qualified as HQ
import Unison.Name (Name) import Unison.Name (Name)
import Unison.Name qualified as Name import Unison.Name qualified as Name
import Unison.Names (Names)
import Unison.Names qualified as Names import Unison.Names qualified as Names
import Unison.NamesWithHistory (NamesWithHistory) import Unison.NamesWithHistory qualified as Names
import Unison.NamesWithHistory qualified as NamesWithHistory
import Unison.Parser.Ann (Ann) import Unison.Parser.Ann (Ann)
import Unison.Pattern (Pattern) import Unison.Pattern (Pattern)
import Unison.Pattern qualified as Pattern import Unison.Pattern qualified as Pattern
@ -111,7 +111,7 @@ typeLink' :: (Monad m, Var v) => P v m (L.Token Reference)
typeLink' = do typeLink' = do
id <- hqPrefixId id <- hqPrefixId
ns <- asks names ns <- asks names
case NamesWithHistory.lookupHQType (L.payload id) ns of case Names.lookupHQType Names.IncludeSuffixes (L.payload id) ns of
s s
| Set.size s == 1 -> pure $ const (Set.findMin s) <$> id | Set.size s == 1 -> pure $ const (Set.findMin s) <$> id
| otherwise -> customFailure $ UnknownType id s | otherwise -> customFailure $ UnknownType id s
@ -120,7 +120,7 @@ termLink' :: (Monad m, Var v) => P v m (L.Token Referent)
termLink' = do termLink' = do
id <- hqPrefixId id <- hqPrefixId
ns <- asks names ns <- asks names
case NamesWithHistory.lookupHQTerm (L.payload id) ns of case Names.lookupHQTerm Names.IncludeSuffixes (L.payload id) ns of
s s
| Set.size s == 1 -> pure $ const (Set.findMin s) <$> id | Set.size s == 1 -> pure $ const (Set.findMin s) <$> id
| otherwise -> customFailure $ UnknownTerm id s | otherwise -> customFailure $ UnknownTerm id s
@ -129,7 +129,7 @@ link' :: (Monad m, Var v) => P v m (Either (L.Token Reference) (L.Token Referent
link' = do link' = do
id <- hqPrefixId id <- hqPrefixId
ns <- asks names ns <- asks names
case (NamesWithHistory.lookupHQTerm (L.payload id) ns, NamesWithHistory.lookupHQType (L.payload id) ns) of case (Names.lookupHQTerm Names.IncludeSuffixes (L.payload id) ns, Names.lookupHQType Names.IncludeSuffixes (L.payload id) ns) of
(s, s2) | Set.size s == 1 && Set.null s2 -> pure . Right $ const (Set.findMin s) <$> id (s, s2) | Set.size s == 1 && Set.null s2 -> pure . Right $ const (Set.findMin s) <$> id
(s, s2) | Set.size s2 == 1 && Set.null s -> pure . Left $ const (Set.findMin s2) <$> id (s, s2) | Set.size s2 == 1 && Set.null s -> pure . Left $ const (Set.findMin s2) <$> id
(s, s2) -> customFailure $ UnknownId id s s2 (s, s2) -> customFailure $ UnknownId id s s2
@ -279,7 +279,7 @@ parsePattern = label "pattern" root
names <- asks names names <- asks names
-- probably should avoid looking up in `names` if `L.payload tok` -- probably should avoid looking up in `names` if `L.payload tok`
-- starts with a lowercase -- starts with a lowercase
case NamesWithHistory.lookupHQPattern (L.payload tok) ct names of case Names.lookupHQPattern Names.IncludeSuffixes (L.payload tok) ct names of
s s
| Set.null s -> die tok s | Set.null s -> die tok s
| Set.size s > 1 -> die tok s | Set.size s > 1 -> die tok s
@ -420,7 +420,7 @@ resolveHashQualified tok = do
names <- asks names names <- asks names
case L.payload tok of case L.payload tok of
HQ.NameOnly n -> pure $ Term.var (ann tok) (Name.toVar n) HQ.NameOnly n -> pure $ Term.var (ann tok) (Name.toVar n)
_ -> case NamesWithHistory.lookupHQTerm (L.payload tok) names of _ -> case Names.lookupHQTerm Names.IncludeSuffixes (L.payload tok) names of
s s
| Set.null s -> failCommitted $ UnknownTerm tok s | Set.null s -> failCommitted $ UnknownTerm tok s
| Set.size s > 1 -> failCommitted $ UnknownTerm tok s | Set.size s > 1 -> failCommitted $ UnknownTerm tok s
@ -1113,7 +1113,7 @@ importp = do
(Just (Right prefix), Nothing) -> do (Just (Right prefix), Nothing) -> do
-- `wildcard import` -- `wildcard import`
names <- asks names names <- asks names
pure $ Names.expandWildcardImport (L.payload prefix) (NamesWithHistory.currentNames names) pure $ Names.expandWildcardImport (L.payload prefix) names
(Just (Right prefix), Just suffixes) -> pure do (Just (Right prefix), Just suffixes) -> pure do
suffix <- L.payload <$> suffixes suffix <- L.payload <$> suffixes
pure (suffix, Name.joinDot (L.payload prefix) suffix) pure (suffix, Name.joinDot (L.payload prefix) suffix)
@ -1131,17 +1131,17 @@ instance (Show v) => Show (BlockElement v) where
-- subst -- subst
-- use Foo.Bar + blah -- use Foo.Bar + blah
-- use Bar.Baz zonk zazzle -- use Bar.Baz zonk zazzle
imports :: (Monad m, Var v) => P v m (NamesWithHistory, [(v, v)]) imports :: (Monad m, Var v) => P v m (Names, [(v, v)])
imports = do imports = do
let sem = P.try (semi <* P.lookAhead (reserved "use")) let sem = P.try (semi <* P.lookAhead (reserved "use"))
imported <- mconcat . reverse <$> sepBy sem importp imported <- mconcat . reverse <$> sepBy sem importp
ns' <- NamesWithHistory.importing imported <$> asks names ns' <- Names.importing imported <$> asks names
pure (ns', [(Name.toVar suffix, Name.toVar full) | (suffix, full) <- imported]) pure (ns', [(Name.toVar suffix, Name.toVar full) | (suffix, full) <- imported])
-- A key feature of imports is we want to be able to say: -- A key feature of imports is we want to be able to say:
-- `use foo.bar Baz qux` without having to specify whether `Baz` or `qux` are -- `use foo.bar Baz qux` without having to specify whether `Baz` or `qux` are
-- terms or types. -- terms or types.
substImports :: (Var v) => NamesWithHistory -> [(v, v)] -> Term v Ann -> Term v Ann substImports :: (Var v) => Names -> [(v, v)] -> Term v Ann -> Term v Ann
substImports ns imports = substImports ns imports =
ABT.substsInheritAnnotation ABT.substsInheritAnnotation
[ (suffix, Term.var () full) [ (suffix, Term.var () full)
@ -1151,7 +1151,7 @@ substImports ns imports =
-- not in Names, but in a later term binding -- not in Names, but in a later term binding
[ (suffix, Type.var () full) [ (suffix, Type.var () full)
| (suffix, full) <- imports, | (suffix, full) <- imports,
NamesWithHistory.hasTypeNamed (Name.unsafeFromVar full) ns Names.hasTypeNamed Names.IncludeSuffixes (Name.unsafeFromVar full) ns
] ]
block' :: (Monad m, Var v) => IsTop -> String -> P v m (L.Token ()) -> P v m (L.Token ()) -> TermP v m block' :: (Monad m, Var v) => IsTop -> String -> P v m (L.Token ()) -> P v m (L.Token ()) -> TermP v m

View File

@ -18,6 +18,8 @@ import Control.Monad.State (evalState)
import Control.Monad.State qualified as State import Control.Monad.State qualified as State
import Data.Char (isPrint) import Data.Char (isPrint)
import Data.List import Data.List
import Data.List qualified as List
import Data.List.NonEmpty qualified as NEL
import Data.Map qualified as Map import Data.Map qualified as Map
import Data.Set qualified as Set import Data.Set qualified as Set
import Data.Text (unpack) import Data.Text (unpack)
@ -36,6 +38,7 @@ import Unison.HashQualified qualified as HQ
import Unison.HashQualified' qualified as HQ' import Unison.HashQualified' qualified as HQ'
import Unison.Name (Name) import Unison.Name (Name)
import Unison.Name qualified as Name import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment)
import Unison.NameSegment qualified as NameSegment import Unison.NameSegment qualified as NameSegment
import Unison.Pattern (Pattern) import Unison.Pattern (Pattern)
import Unison.Pattern qualified as Pattern import Unison.Pattern qualified as Pattern
@ -228,7 +231,7 @@ pretty0
tm' <- pretty0 (ac 10 Normal im doc) tm tm' <- pretty0 (ac 10 Normal im doc) tm
tp' <- TypePrinter.pretty0 im 0 t tp' <- TypePrinter.pretty0 im 0 t
pure . paren (p >= 0) $ tm' <> PP.hang (fmt S.TypeAscriptionColon " :") tp' pure . paren (p >= 0) $ tm' <> PP.hang (fmt S.TypeAscriptionColon " :") tp'
Int' i -> pure . fmt S.NumericLiteral $ (if i >= 0 then l "+" else mempty) <> l (show i) Int' i -> pure . fmt S.NumericLiteral . l $ (if i >= 0 then ("+" ++ show i) else (show i))
Nat' u -> pure . fmt S.NumericLiteral . l $ show u Nat' u -> pure . fmt S.NumericLiteral . l $ show u
Float' f -> pure . fmt S.NumericLiteral . l $ show f Float' f -> pure . fmt S.NumericLiteral . l $ show f
-- TODO How to handle Infinity, -Infinity and NaN? Parser cannot parse -- TODO How to handle Infinity, -Infinity and NaN? Parser cannot parse
@ -297,14 +300,10 @@ pretty0
<> fmt S.ControlKeyword "with" <> fmt S.ControlKeyword "with"
`hangHandler` ph `hangHandler` ph
] ]
App' x (Constructor' (ConstructorReference DD.UnitRef 0)) -> do
px <- pretty0 (ac (if isBlock x then 0 else 10) Normal im doc) x
pure . paren (p >= 11 || isBlock x && p >= 3) $
fmt S.DelayForceChar (l "!") <> PP.indentNAfterNewline 1 px
Delay' x Delay' x
| isLet x || p < 0 -> do | isLet x || p < 0 -> do
let (im', uses) = calcImports im x let (im', uses) = calcImports im x
let hang = if isSoftHangable x then PP.softHang else PP.hang let hang = if isSoftHangable x && null uses then PP.softHang else PP.hang
px <- pretty0 (ac 0 Block im' doc) x px <- pretty0 (ac 0 Block im' doc) x
pure . paren (p >= 3) $ pure . paren (p >= 3) $
fmt S.ControlKeyword "do" `hang` PP.lines (uses <> [px]) fmt S.ControlKeyword "do" `hang` PP.lines (uses <> [px])
@ -399,6 +398,7 @@ pretty0
fmt S.ControlKeyword " with" `PP.hang` pbs fmt S.ControlKeyword " with" `PP.hang` pbs
] ]
else (fmt S.ControlKeyword "match " <> ps <> fmt S.ControlKeyword " with") `PP.hang` pbs else (fmt S.ControlKeyword "match " <> ps <> fmt S.ControlKeyword " with") `PP.hang` pbs
Apps' f args -> paren (p >= 10) <$> (PP.hang <$> goNormal 9 f <*> PP.spacedTraverse (goNormal 10) args)
t -> pure $ l "error: " <> l (show t) t -> pure $ l "error: " <> l (show t)
where where
goNormal prec tm = pretty0 (ac prec Normal im doc) tm goNormal prec tm = pretty0 (ac prec Normal im doc) tm
@ -460,8 +460,6 @@ pretty0
<> [lhs, arr] <> [lhs, arr]
go tm = goNormal 10 tm go tm = goNormal 10 tm
PP.hang kw <$> fmap PP.lines (traverse go rs) PP.hang kw <$> fmap PP.lines (traverse go rs)
(Apps' f@(Constructor' _) args, _) ->
paren (p >= 10) <$> (PP.hang <$> goNormal 9 f <*> PP.spacedTraverse (goNormal 10) args)
(Bytes' bs, _) -> (Bytes' bs, _) ->
pure $ fmt S.BytesLiteral "0xs" <> PP.shown (Bytes.fromWord8s (map fromIntegral bs)) pure $ fmt S.BytesLiteral "0xs" <> PP.shown (Bytes.fromWord8s (map fromIntegral bs))
BinaryAppsPred' apps lastArg -> do BinaryAppsPred' apps lastArg -> do
@ -491,28 +489,23 @@ pretty0
y = thing2 y = thing2
...) ...)
-} -}
(Apps' f (unsnoc -> Just (args, lastArg)), _) | isSoftHangable lastArg -> do (App' x (Constructor' (ConstructorReference DD.UnitRef 0)), _) | isLeaf x -> do
fun <- goNormal 9 f px <- pretty0 (ac (if isBlock x then 0 else 9) Normal im doc) x
args' <- traverse (goNormal 10) args pure . paren (p >= 11 || isBlock x && p >= 3) $
lastArg' <- goNormal 0 lastArg fmt S.DelayForceChar (l "!") <> PP.indentNAfterNewline 1 px
let softTab = PP.softbreak <> ("" `PP.orElse` " ") (Apps' f (unsnoc -> Just (args, lastArg)), _)
pure . paren (p >= 3) $ | isSoftHangable lastArg -> do
PP.group (PP.group (PP.group (PP.sep softTab (fun : args') <> softTab)) <> lastArg') fun <- goNormal 9 f
args' <- traverse (goNormal 10) args
lastArg' <- goNormal 0 lastArg
let softTab = PP.softbreak <> ("" `PP.orElse` " ")
pure . paren (p >= 3) $
PP.group (PP.group (PP.group (PP.sep softTab (fun : args') <> softTab)) <> lastArg')
(Ands' xs lastArg, _) -> (Ands' xs lastArg, _) ->
-- Old code, without monadic booleanOps:
-- paren (p >= 10)
-- . booleanOps (fmt S.ControlKeyword "&&") xs
-- <$> pretty0 (ac 10 Normal im doc) lastArg
-- New code, where booleanOps is monadic like pretty0:
paren (p >= 10) <$> do paren (p >= 10) <$> do
lastArg' <- pretty0 (ac 10 Normal im doc) lastArg lastArg' <- pretty0 (ac 10 Normal im doc) lastArg
booleanOps (fmt S.ControlKeyword "&&") xs lastArg' booleanOps (fmt S.ControlKeyword "&&") xs lastArg'
(Ors' xs lastArg, _) -> (Ors' xs lastArg, _) ->
-- Old code:
-- paren (p >= 10)
-- . booleanOps (fmt S.ControlKeyword "||") xs
-- <$> pretty0 (ac 10 Normal im doc) lastArg
-- New code:
paren (p >= 10) <$> do paren (p >= 10) <$> do
lastArg' <- pretty0 (ac 10 Normal im doc) lastArg lastArg' <- pretty0 (ac 10 Normal im doc) lastArg
booleanOps (fmt S.ControlKeyword "||") xs lastArg' booleanOps (fmt S.ControlKeyword "||") xs lastArg'
@ -576,7 +569,6 @@ pretty0
nonForcePred :: Term3 v PrintAnnotation -> Bool nonForcePred :: Term3 v PrintAnnotation -> Bool
nonForcePred = \case nonForcePred = \case
Constructor' (ConstructorReference DD.UnitRef 0) -> False
Constructor' (ConstructorReference DD.DocRef _) -> False Constructor' (ConstructorReference DD.DocRef _) -> False
_ -> True _ -> True
@ -2127,8 +2119,9 @@ nameEndsWith ppe suffix r = case PrettyPrintEnv.termName ppe (Referent.Ref r) of
-- Algorithm is the following: -- Algorithm is the following:
-- 1. Form the set of all local variables used anywhere in the term -- 1. Form the set of all local variables used anywhere in the term
-- 2. When picking a name for a term, see if it is contained in this set. -- 2. When picking a name for a term, see if it is contained in this set.
-- If yes, use the qualified name for the term (which PPE conveniently provides) -- If yes: use a minimally qualified name which is longer than the suffixed name,
-- If no, use the suffixed name for the term -- but doesn't conflict with any local vars.
-- If no: use the suffixed name for the term
-- --
-- The algorithm does the same for type references in signatures. -- The algorithm does the same for type references in signatures.
-- --
@ -2150,7 +2143,32 @@ avoidShadowing tm (PrettyPrintEnv terms types) =
Set.fromList [n | v <- ABT.allVars tm, n <- varToName v] Set.fromList [n | v <- ABT.allVars tm, n <- varToName v]
usedTypeNames = usedTypeNames =
Set.fromList [n | Ann' _ ty <- ABT.subterms tm, v <- ABT.allVars ty, n <- varToName v] Set.fromList [n | Ann' _ ty <- ABT.subterms tm, v <- ABT.allVars ty, n <- varToName v]
tweak :: Set Name -> (HQ'.HashQualified Name, HQ'.HashQualified Name) -> (HQ'.HashQualified Name, HQ'.HashQualified Name)
tweak used (fullName, HQ'.NameOnly suffixedName) tweak used (fullName, HQ'.NameOnly suffixedName)
| Set.member suffixedName used = (fullName, fullName) | Set.member suffixedName used =
let revFQNSegments :: NEL.NonEmpty NameSegment
revFQNSegments = Name.reverseSegments (HQ'.toName fullName)
minimallySuffixed :: HQ'.HashQualified Name
minimallySuffixed =
revFQNSegments
-- Get all suffixes (it's inits instead of tails because name segments are in reverse order)
& NEL.inits
-- Drop the empty 'init'
& NEL.tail
& mapMaybe (fmap Name.fromReverseSegments . NEL.nonEmpty) -- Convert back into names
-- Drop the suffixes that we know are shorter than the suffixified name
& List.drop (Name.countSegments suffixedName)
-- Drop the suffixes that are equal to local variables
& filter ((\n -> n `Set.notMember` used))
& listToMaybe
& maybe fullName HQ'.NameOnly
in (fullName, minimallySuffixed)
tweak _ p = p tweak _ p = p
varToName v = toList (Name.fromText (Var.name v)) varToName v = toList (Name.fromText (Var.name v))
isLeaf :: Term2 vt at ap v a -> Bool
isLeaf (Var' {}) = True
isLeaf (Constructor' {}) = True
isLeaf (Request' {}) = True
isLeaf (Ref' {}) = True
isLeaf _ = False

View File

@ -41,7 +41,7 @@ typeAtom =
HQ.NameOnly n -> pure $ Type.var (ann tok) (Name.toVar n) HQ.NameOnly n -> pure $ Type.var (ann tok) (Name.toVar n)
hq -> do hq -> do
names <- asks names names <- asks names
let matches = Names.lookupHQType hq names let matches = Names.lookupHQType Names.IncludeSuffixes hq names
if Set.size matches /= 1 if Set.size matches /= 1
then P.customFailure (UnknownType tok matches) then P.customFailure (UnknownType tok matches)
else pure $ Type.ref (ann tok) (Set.findMin matches) else pure $ Type.ref (ann tok) (Set.findMin matches)

View File

@ -18,7 +18,7 @@ module Unison.Typechecker
Resolution (..), Resolution (..),
Name, Name,
NamedReference (..), NamedReference (..),
Context.PatternMatchCoverageCheckSwitch (..), Context.PatternMatchCoverageCheckAndKindInferenceSwitch (..),
) )
where where
@ -32,11 +32,14 @@ import Control.Monad.State
modify, modify,
) )
import Control.Monad.Writer import Control.Monad.Writer
import Data.Foldable
import Data.Map qualified as Map import Data.Map qualified as Map
import Data.Sequence.NonEmpty qualified as NESeq (toSeq) import Data.Sequence.NonEmpty qualified as NESeq (toSeq)
import Data.Set qualified as Set
import Data.Text qualified as Text import Data.Text qualified as Text
import Unison.ABT qualified as ABT import Unison.ABT qualified as ABT
import Unison.Blank qualified as B import Unison.Blank qualified as B
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation)
import Unison.Name qualified as Name import Unison.Name qualified as Name
import Unison.Prelude import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv) import Unison.PrettyPrintEnv (PrettyPrintEnv)
@ -105,9 +108,9 @@ makeLenses ''Env
-- a function to resolve the type of @Ref@ constructors -- a function to resolve the type of @Ref@ constructors
-- contained in that term. -- contained in that term.
synthesize :: synthesize ::
(Monad f, Var v, Ord loc) => (Monad f, Var v, BuiltinAnnotation loc, Ord loc, Show loc) =>
PrettyPrintEnv -> PrettyPrintEnv ->
Context.PatternMatchCoverageCheckSwitch -> Context.PatternMatchCoverageCheckAndKindInferenceSwitch ->
Env v loc -> Env v loc ->
Term v loc -> Term v loc ->
ResultT (Notes v loc) f (Type v loc) ResultT (Notes v loc) f (Type v loc)
@ -171,14 +174,14 @@ data Resolution v loc = Resolution
-- | Infer the type of a 'Unison.Term', using type-directed name resolution -- | Infer the type of a 'Unison.Term', using type-directed name resolution
-- to attempt to resolve unknown symbols. -- to attempt to resolve unknown symbols.
synthesizeAndResolve :: synthesizeAndResolve ::
(Monad f, Var v, Monoid loc, Ord loc) => PrettyPrintEnv -> Env v loc -> TDNR f v loc (Type v loc) (Monad f, Var v, Monoid loc, BuiltinAnnotation loc, Ord loc, Show loc) => PrettyPrintEnv -> Env v loc -> TDNR f v loc (Type v loc)
synthesizeAndResolve ppe env = do synthesizeAndResolve ppe env = do
tm <- get tm <- get
(tp, notes) <- (tp, notes) <-
listen . lift $ listen . lift $
synthesize synthesize
ppe ppe
Context.PatternMatchCoverageCheckSwitch'Enabled Context.PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled
env env
tm tm
typeDirectedNameResolution ppe notes tp env typeDirectedNameResolution ppe notes tp env
@ -211,7 +214,7 @@ liftResult = lift . MaybeT . WriterT . pure . runIdentity . runResultT
-- 3. No match at all. Throw an unresolved symbol at the user. -- 3. No match at all. Throw an unresolved symbol at the user.
typeDirectedNameResolution :: typeDirectedNameResolution ::
forall v loc f. forall v loc f.
(Monad f, Var v, Ord loc, Monoid loc) => (Monad f, Var v, BuiltinAnnotation loc, Ord loc, Monoid loc, Show loc) =>
PrettyPrintEnv -> PrettyPrintEnv ->
Notes v loc -> Notes v loc ->
Type v loc -> Type v loc ->
@ -228,16 +231,13 @@ typeDirectedNameResolution ppe oldNotes oldType env = do
case catMaybes resolutions of case catMaybes resolutions of
[] -> pure oldType [] -> pure oldType
rs -> rs ->
let goAgain = applySuggestions rs >>= \case
any ((== 1) . length . dedupe . filter Context.isExact . suggestions) rs True -> do
in if goAgain synthesizeAndResolve ppe tdnrEnv
then do False -> do
traverse_ substSuggestion rs -- The type hasn't changed
synthesizeAndResolve ppe tdnrEnv liftResult $ suggest rs
else do pure oldType
-- The type hasn't changed
liftResult $ suggest rs
pure oldType
where where
addTypedComponent :: Context.InfoNote v loc -> State (Env v loc) () addTypedComponent :: Context.InfoNote v loc -> State (Env v loc) ()
addTypedComponent (Context.TopLevelComponent vtts) = addTypedComponent (Context.TopLevelComponent vtts) =
@ -267,23 +267,50 @@ typeDirectedNameResolution ppe oldNotes oldType env = do
Var.MissingResult -> v Var.MissingResult -> v
_ -> Var.named name _ -> Var.named name
substSuggestion :: Resolution v loc -> TDNR f v loc () extractSubstitution :: [Context.Suggestion v loc] -> Maybe (Either v Referent)
extractSubstitution suggestions =
let groupedByName :: [([Name.Name], Either v Referent)] =
map (\(a, b) -> (b, a))
. Map.toList
. fmap Set.toList
. foldl'
( \b Context.Suggestion {suggestionName, suggestionReplacement} ->
Map.insertWith
Set.union
suggestionReplacement
(Set.singleton (Name.unsafeFromText suggestionName))
b
)
Map.empty
$ filter Context.isExact suggestions
matches :: Set (Either v Referent) = Name.preferShallowLibDepth groupedByName
in case toList matches of
[x] -> Just x
_ -> Nothing
applySuggestions :: [Resolution v loc] -> TDNR f v loc Bool
applySuggestions = foldlM phi False
where
phi b a = do
didSub <- substSuggestion a
pure $! b || didSub
substSuggestion :: Resolution v loc -> TDNR f v loc Bool
substSuggestion substSuggestion
( Resolution ( Resolution
name name
_ _
loc loc
v v
( filter Context.isExact -> (extractSubstitution -> Just replacement)
[Context.Suggestion _ _ replacement Context.Exact]
)
) = ) =
do do
modify (substBlank (Text.unpack name) loc solved) modify (substBlank (Text.unpack name) loc solved)
lift . btw $ Context.Decision (suggestedVar v name) loc solved lift . btw $ Context.Decision (suggestedVar v name) loc solved
pure True
where where
solved = either (Term.var loc) (Term.fromReferent loc) replacement solved = either (Term.var loc) (Term.fromReferent loc) replacement
substSuggestion _ = pure () substSuggestion _ = pure False
-- Resolve a `Blank` to a term -- Resolve a `Blank` to a term
substBlank :: String -> loc -> Term v loc -> Term v loc -> Term v loc substBlank :: String -> loc -> Term v loc -> Term v loc -> Term v loc
@ -301,7 +328,7 @@ typeDirectedNameResolution ppe oldNotes oldType env = do
Context.InfoNote v loc -> Context.InfoNote v loc ->
Result (Notes v loc) (Maybe (Resolution v loc)) Result (Notes v loc) (Maybe (Resolution v loc))
resolveNote env (Context.SolvedBlank (B.Resolve loc n) v it) = resolveNote env (Context.SolvedBlank (B.Resolve loc n) v it) =
fmap (Just . Resolution (Text.pack n) it loc v . dedupe . join) fmap (Just . Resolution (Text.pack n) it loc v . join)
. traverse (resolve it) . traverse (resolve it)
. join . join
. maybeToList . maybeToList
@ -337,7 +364,7 @@ typeDirectedNameResolution ppe oldNotes oldType env = do
-- contained in the term. Returns @typ@ if successful, -- contained in the term. Returns @typ@ if successful,
-- and a note about typechecking failure otherwise. -- and a note about typechecking failure otherwise.
check :: check ::
(Monad f, Var v, Ord loc) => (Monad f, Var v, BuiltinAnnotation loc, Ord loc, Show loc) =>
PrettyPrintEnv -> PrettyPrintEnv ->
Env v loc -> Env v loc ->
Term v loc -> Term v loc ->
@ -346,7 +373,7 @@ check ::
check ppe env term typ = check ppe env term typ =
synthesize synthesize
ppe ppe
Context.PatternMatchCoverageCheckSwitch'Enabled Context.PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled
env env
(Term.ann (ABT.annotation term) term typ) (Term.ann (ABT.annotation term) term typ)
@ -360,8 +387,8 @@ check ppe env term typ =
-- tweak (Type.ForallNamed' v body) = Type.forall() v (tweak body) -- tweak (Type.ForallNamed' v body) = Type.forall() v (tweak body)
-- tweak t = Type.arrow() t t -- tweak t = Type.arrow() t t
-- | Returns `True` if the expression is well-typed, `False` otherwise -- | Returns `True` if the expression is well-typed, `False` otherwise
wellTyped :: (Monad f, Var v, Ord loc) => PrettyPrintEnv -> Env v loc -> Term v loc -> f Bool wellTyped :: (Monad f, Var v, BuiltinAnnotation loc, Ord loc, Show loc) => PrettyPrintEnv -> Env v loc -> Term v loc -> f Bool
wellTyped ppe env term = go <$> runResultT (synthesize ppe Context.PatternMatchCoverageCheckSwitch'Enabled env term) wellTyped ppe env term = go <$> runResultT (synthesize ppe Context.PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled env term)
where where
go (may, _) = isJust may go (may, _) = isJust may

View File

@ -20,7 +20,7 @@ module Unison.Typechecker.Context
Type, Type,
TypeVar, TypeVar,
Result (..), Result (..),
PatternMatchCoverageCheckSwitch (..), PatternMatchCoverageCheckAndKindInferenceSwitch (..),
errorTerms, errorTerms,
innermostErrorTerm, innermostErrorTerm,
lookupAnn, lookupAnn,
@ -70,6 +70,7 @@ import Data.Text qualified as Text
import Unison.ABT qualified as ABT import Unison.ABT qualified as ABT
import Unison.Blank qualified as B import Unison.Blank qualified as B
import Unison.Builtin.Decls qualified as DDB import Unison.Builtin.Decls qualified as DDB
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation)
import Unison.ConstructorReference import Unison.ConstructorReference
( ConstructorReference, ( ConstructorReference,
GConstructorReference (..), GConstructorReference (..),
@ -81,6 +82,7 @@ import Unison.DataDeclaration
) )
import Unison.DataDeclaration qualified as DD import Unison.DataDeclaration qualified as DD
import Unison.DataDeclaration.ConstructorId (ConstructorId) import Unison.DataDeclaration.ConstructorId (ConstructorId)
import Unison.KindInference qualified as KindInference
import Unison.Pattern (Pattern) import Unison.Pattern (Pattern)
import Unison.Pattern qualified as Pattern import Unison.Pattern qualified as Pattern
import Unison.PatternMatchCoverage (checkMatch) import Unison.PatternMatchCoverage (checkMatch)
@ -216,15 +218,15 @@ mapErrors f r = case r of
CompilerBug bug es is -> CompilerBug bug (f <$> es) is CompilerBug bug es is -> CompilerBug bug (f <$> es) is
s@(Success _ _) -> s s@(Success _ _) -> s
data PatternMatchCoverageCheckSwitch data PatternMatchCoverageCheckAndKindInferenceSwitch
= PatternMatchCoverageCheckSwitch'Enabled = PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled
| PatternMatchCoverageCheckSwitch'Disabled | PatternMatchCoverageCheckAndKindInferenceSwitch'Disabled
newtype MT v loc f a = MT newtype MT v loc f a = MT
{ runM :: { runM ::
-- for debug output -- for debug output
PrettyPrintEnv -> PrettyPrintEnv ->
PatternMatchCoverageCheckSwitch -> PatternMatchCoverageCheckAndKindInferenceSwitch ->
-- Data declarations in scope -- Data declarations in scope
DataDeclarations v loc -> DataDeclarations v loc ->
-- Effect declarations in scope -- Effect declarations in scope
@ -399,6 +401,7 @@ data Cause v loc
| DataEffectMismatch Unknown Reference (DataDeclaration v loc) | DataEffectMismatch Unknown Reference (DataDeclaration v loc)
| UncoveredPatterns loc (NonEmpty (Pattern ())) | UncoveredPatterns loc (NonEmpty (Pattern ()))
| RedundantPattern loc | RedundantPattern loc
| KindInferenceFailure (KindInference.KindError v loc)
| InaccessiblePattern loc | InaccessiblePattern loc
deriving (Show) deriving (Show)
@ -772,8 +775,8 @@ getDataDeclarations = MT \_ _ datas _ env -> pure (datas, env)
getEffectDeclarations :: M v loc (EffectDeclarations v loc) getEffectDeclarations :: M v loc (EffectDeclarations v loc)
getEffectDeclarations = MT \_ _ _ effects env -> pure (effects, env) getEffectDeclarations = MT \_ _ _ effects env -> pure (effects, env)
getPatternMatchCoverageCheckSwitch :: M v loc PatternMatchCoverageCheckSwitch getPatternMatchCoverageCheckAndKindInferenceSwitch :: M v loc PatternMatchCoverageCheckAndKindInferenceSwitch
getPatternMatchCoverageCheckSwitch = MT \_ pmcSwitch _ _ env -> pure (pmcSwitch, env) getPatternMatchCoverageCheckAndKindInferenceSwitch = MT \_ pmcSwitch _ _ env -> pure (pmcSwitch, env)
compilerCrash :: CompilerBug v loc -> M v loc a compilerCrash :: CompilerBug v loc -> M v loc a
compilerCrash bug = liftResult $ compilerBug bug compilerCrash bug = liftResult $ compilerBug bug
@ -1300,9 +1303,9 @@ synthesizeWanted e
want <- coalesceWanted cwant swant want <- coalesceWanted cwant swant
ctx <- getContext ctx <- getContext
let matchType = apply ctx outputType let matchType = apply ctx outputType
getPatternMatchCoverageCheckSwitch >>= \case getPatternMatchCoverageCheckAndKindInferenceSwitch >>= \case
PatternMatchCoverageCheckSwitch'Enabled -> ensurePatternCoverage e matchType scrutinee scrutineeType cases PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled -> ensurePatternCoverage e matchType scrutinee scrutineeType cases
PatternMatchCoverageCheckSwitch'Disabled -> pure () PatternMatchCoverageCheckAndKindInferenceSwitch'Disabled -> pure ()
pure $ (matchType, want) pure $ (matchType, want)
where where
l = loc e l = loc e
@ -3054,9 +3057,9 @@ verifyDataDeclarations decls = forM_ (Map.toList decls) $ \(_ref, decl) -> do
-- | public interface to the typechecker -- | public interface to the typechecker
synthesizeClosed :: synthesizeClosed ::
(Var v, Ord loc) => (BuiltinAnnotation loc, Var v, Ord loc, Show loc) =>
PrettyPrintEnv -> PrettyPrintEnv ->
PatternMatchCoverageCheckSwitch -> PatternMatchCoverageCheckAndKindInferenceSwitch ->
[Type v loc] -> [Type v loc] ->
TL.TypeLookup v loc -> TL.TypeLookup v loc ->
Term v loc -> Term v loc ->
@ -3073,8 +3076,32 @@ synthesizeClosed ppe pmcSwitch abilities lookupType term0 =
verifyDataDeclarations datas verifyDataDeclarations datas
*> verifyDataDeclarations (DD.toDataDecl <$> effects) *> verifyDataDeclarations (DD.toDataDecl <$> effects)
*> verifyClosedTerm term *> verifyClosedTerm term
doKindInference ppe datas effects term
synthesizeClosed' abilities term synthesizeClosed' abilities term
doKindInference ::
( Var v,
Ord loc,
BuiltinAnnotation loc,
Show loc
) =>
PrettyPrintEnv ->
DataDeclarations v loc ->
Map Reference (EffectDeclaration v loc) ->
Term v loc ->
MT v loc (Result v loc) ()
doKindInference ppe datas effects term = do
getPatternMatchCoverageCheckAndKindInferenceSwitch >>= \case
PatternMatchCoverageCheckAndKindInferenceSwitch'Disabled -> pure ()
PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled -> do
let kindInferRes = do
let decls = (Left <$> effects) <> (Right <$> datas)
st <- KindInference.inferDecls ppe decls
KindInference.kindCheckAnnotations ppe st (TypeVar.lowerTerm term)
case kindInferRes of
Left (ke Nel.:| _kes) -> failWith (KindInferenceFailure ke)
Right () -> pure ()
verifyClosedTerm :: forall v loc. (Ord v) => Term v loc -> Result v loc () verifyClosedTerm :: forall v loc. (Ord v) => Term v loc -> Result v loc ()
verifyClosedTerm t = do verifyClosedTerm t = do
ok1 <- verifyClosed t id ok1 <- verifyClosed t id
@ -3108,7 +3135,7 @@ annotateRefs synth = ABT.visit f
run :: run ::
(Var v, Ord loc, Functor f) => (Var v, Ord loc, Functor f) =>
PrettyPrintEnv -> PrettyPrintEnv ->
PatternMatchCoverageCheckSwitch -> PatternMatchCoverageCheckAndKindInferenceSwitch ->
DataDeclarations v loc -> DataDeclarations v loc ->
EffectDeclarations v loc -> EffectDeclarations v loc ->
MT v loc f a -> MT v loc f a ->
@ -3155,7 +3182,7 @@ isSubtype' type1 type2 = succeeds $ do
-- See documentation at 'Unison.Typechecker.fitsScheme' -- See documentation at 'Unison.Typechecker.fitsScheme'
fitsScheme :: (Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool fitsScheme :: (Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool
fitsScheme type1 type2 = run PPE.empty PatternMatchCoverageCheckSwitch'Enabled Map.empty Map.empty $ fitsScheme type1 type2 = run PPE.empty PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled Map.empty Map.empty $
succeeds $ do succeeds $ do
let vars = Set.toList $ Set.union (ABT.freeVars type1) (ABT.freeVars type2) let vars = Set.toList $ Set.union (ABT.freeVars type1) (ABT.freeVars type2)
reserveAll (TypeVar.underlying <$> vars) reserveAll (TypeVar.underlying <$> vars)
@ -3196,7 +3223,7 @@ isRedundant userType0 inferredType0 = do
isSubtype :: isSubtype ::
(Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool (Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool
isSubtype t1 t2 = isSubtype t1 t2 =
run PPE.empty PatternMatchCoverageCheckSwitch'Enabled Map.empty Map.empty (isSubtype' t1 t2) run PPE.empty PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled Map.empty Map.empty (isSubtype' t1 t2)
isEqual :: isEqual ::
(Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool (Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool

Some files were not shown because too many files have changed in this diff Show More