Merge remote-tracking branch 'origin/trunk' into topic/native-compiler

Dan Doel 2024-02-07 16:43:47 -05:00
commit 5f946e202e
216 changed files with 5363 additions and 4721 deletions

View File

@ -119,21 +119,6 @@ jobs:
unzip -d /tmp/ucm/ui /tmp/unisonLocal.zip
tar -c -z -f ucm-linux.tar.gz -C /tmp/ucm .
- name: build and push docker container
run: docker build . --file Dockerfile --tag $IMAGE_NAME --label "runnumber=${GITHUB_RUN_ID}"
- name: Log in to docker registry
run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin
- name: Push docker image
run: |
IMAGE_NAME=${{github.repository}}
IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME
IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
VERSION=${{inputs.version}}
docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
docker push $IMAGE_ID:$VERSION
- name: Upload linux artifact
uses: actions/upload-artifact@v2
with:

View File

@ -67,6 +67,7 @@ The format for this list: name, GitHub handle
* Nicole Prindle (@nprindle)
* Harald Gliebe (@hagl)
* Phil de Joux (@philderbeast)
* Daroc Alden (@setupminimal)
* Travis Staton (@tstat)
* Dan Freeman (@dfreeman)
* Emil Hotkowski (@emilhotkowski)

View File

@ -9,8 +9,8 @@ RUN apt-get update && \
update-locale LANG=en_US.UTF-8
COPY /tmp/ucm/ucm /usr/local/bin/ucm
COPY /tmp/ucm/ui /usr/local/share/ucm
COPY tmp/ucm/ucm /usr/local/bin/ucm
COPY tmp/ucm/ui /usr/local/share/ucm
ENV UCM_WEB_UI=/usr/local/share/ucm
ENV UCM_PORT=8080
@ -20,4 +20,4 @@ RUN chmod 555 /usr/local/bin/ucm
EXPOSE 8080
ENTRYPOINT ["/usr/local/bin/ucm"]
CMD ["--codebase","/unison"]
CMD ["--codebase","/unison"]

View File

@ -4,6 +4,7 @@ module Unison.Hashing.V2.Convert2
v2ToH2Type,
v2ToH2TypeD,
h2ToV2Reference,
v2ToH2Referent,
v2ToH2Branch,
v2ToH2Term,
v2ToH2Decl,

View File

@ -11,6 +11,7 @@ module U.Codebase.Sqlite.Branch.Format
localToDbBranch,
localToDbDiff,
localToHashBranch,
localToBranch,
-- dbToLocalDiff,
)
where

View File

@ -3,8 +3,8 @@
module U.Codebase.Sqlite.Branch.Full where
import Control.Lens
import Data.Bitraversable
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import U.Codebase.HashTags
import U.Codebase.Reference (Reference', TermReference', TypeReference')
import U.Codebase.Reference qualified as Reference
@ -91,13 +91,33 @@ metadataSetFormatReferences_ ::
metadataSetFormatReferences_ f (Inline refs) = Inline <$> Set.traverse f refs
quadmap :: forall t h p c t' h' p' c'. (Ord t', Ord h') => (t -> t') -> (h -> h') -> (p -> p') -> (c -> c') -> Branch' t h p c -> Branch' t' h' p' c'
quadmap ft fh fp fc (Branch terms types patches children) =
quadmap ft fh fp fc branch =
runIdentity $ quadmapM (Identity . ft) (Identity . fh) (Identity . fp) (Identity . fc) branch
quadmapM :: forall t h p c t' h' p' c' m. (Ord t', Ord h', Applicative m) => (t -> m t') -> (h -> m h') -> (p -> m p') -> (c -> m c') -> Branch' t h p c -> m (Branch' t' h' p' c')
quadmapM ft fh fp fc (Branch terms types patches children) =
Branch
(Map.bimap ft doTerms terms)
(Map.bimap ft doTypes types)
(Map.bimap ft fp patches)
(Map.bimap ft fc children)
<$> (Map.bitraverse ft doTerms terms)
<*> (Map.bitraverse ft doTypes types)
<*> (Map.bitraverse ft fp patches)
<*> (Map.bitraverse ft fc children)
where
doTerms = Map.bimap (bimap (bimap ft fh) (bimap ft fh)) doMetadata
doTypes = Map.bimap (bimap ft fh) doMetadata
doMetadata (Inline s) = Inline . Set.map (bimap ft fh) $ s
doTerms = Map.bitraverse (bitraverse (bitraverse ft fh) (bitraverse ft fh)) doMetadata
doTypes = Map.bitraverse (bitraverse ft fh) doMetadata
doMetadata (Inline s) = Inline <$> Set.traverse (bitraverse ft fh) s
-- | Traversal over text references in a branch
t_ :: (Ord t', Ord h) => Traversal (Branch' t h p c) (Branch' t' h p c) t t'
t_ f = quadmapM f pure pure pure
-- | Traversal over hash references in a branch
h_ :: (Ord t, Ord h') => Traversal (Branch' t h p c) (Branch' t h' p c) h h'
h_ f = quadmapM pure f pure pure
-- | Traversal over patch references in a branch
p_ :: (Ord t, Ord h) => Traversal (Branch' t h p c) (Branch' t h p' c) p p'
p_ f = quadmapM pure pure f pure
-- | Traversal over child references in a branch
c_ :: (Ord t, Ord h) => Traversal (Branch' t h p c) (Branch' t h p c') c c'
c_ f = quadmapM pure pure pure f
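The t_, h_, p_, and c_ traversals above are new in this change; as a quick orientation, here is an illustrative sketch (not part of the commit) of using one of them for a pure rewrite. The `renumberText` argument is a hypothetical caller-supplied mapping.

```haskell
-- Illustrative sketch: rewrite every text id in a DbBranch via the t_ traversal.
import Control.Lens (over)
import U.Codebase.Sqlite.Branch.Full (DbBranch, t_)
import U.Codebase.Sqlite.DbId (TextId)

remapBranchTexts :: (TextId -> TextId) -> DbBranch -> DbBranch
remapBranchTexts renumberText = over t_ renumberText
```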

View File

@ -1,6 +1,6 @@
-- | This module contains decoders for blobs stored in SQLite.
module U.Codebase.Sqlite.Decode
( DecodeError,
( DecodeError (..),
-- * @object.bytes@
decodeBranchFormat,

View File

@ -2,8 +2,8 @@
module U.Codebase.Sqlite.LocalIds where
import Control.Lens
import Data.Bifoldable (Bifoldable (bifoldMap))
import Data.Bifunctor (Bifunctor (bimap))
import Data.Bitraversable (Bitraversable (bitraverse))
import Data.Bits (Bits)
import Data.Vector (Vector)
@ -48,3 +48,9 @@ instance Bifoldable LocalIds' where
instance Bifunctor LocalIds' where
bimap f g (LocalIds t d) = LocalIds (f <$> t) (g <$> d)
t_ :: Traversal (LocalIds' t h) (LocalIds' t' h) t t'
t_ f (LocalIds t d) = LocalIds <$> traverse f t <*> pure d
h_ :: Traversal (LocalIds' t h) (LocalIds' t h') h h'
h_ f (LocalIds t d) = LocalIds <$> pure t <*> traverse f d
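One advantage of exposing these as traversals rather than only a Bifunctor instance is that they support effectful rewrites; a hedged sketch (not from the commit) of resolving the hash side inside some monad:

```haskell
-- Illustrative sketch: rewrite the hash side of a LocalIds' with effects,
-- e.g. by looking each hash id up in a database.
import Control.Lens (traverseOf)
import U.Codebase.Sqlite.LocalIds (LocalIds', h_)

resolveHashes :: Applicative m => (h -> m h') -> LocalIds' t h -> m (LocalIds' t h')
resolveHashes resolve = traverseOf h_ resolve
```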

View File

@ -1,3 +1,6 @@
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE DataKinds #-}
-- | This module facilitates the creation of "localized" versions of objects, suitable for storage.
--
-- Localization is a stateful process in which the real database identifiers contained within an object, e.g. 'DbBranch', are canonicalized
@ -24,21 +27,24 @@
-- where all terms, types, etc. within the @branch@ structure refer to offsets in the associated vectors.
module U.Codebase.Sqlite.LocalizeObject
( localizeBranch,
localizeBranchG,
localizePatch,
localizePatchG,
)
where
import Control.Lens
import Control.Monad.Trans.State.Strict (StateT)
import Control.Monad.State.Strict
import Control.Monad.Trans.State.Strict qualified as State
import Data.Bitraversable (bitraverse)
import Data.Generics.Product.Typed (HasType (typed))
import Data.Generics.Product (HasField (..))
import Data.Map.Strict qualified as Map
import U.Codebase.Reference (Reference')
import U.Codebase.Referent (Referent')
import U.Codebase.Sqlite.Branch.Format (BranchLocalIds)
import U.Codebase.Sqlite.Branch.Format qualified as Branch
import U.Codebase.Sqlite.Branch.Full (Branch' (..), DbBranch, LocalBranch)
import U.Codebase.Sqlite.Branch.Full qualified as Branch
import U.Codebase.Sqlite.DbId (BranchObjectId, CausalHashId, HashId, ObjectId, PatchObjectId, TextId)
import U.Codebase.Sqlite.LocalIds
( LocalBranchChildId (..),
LocalDefnId (..),
@ -46,13 +52,13 @@ import U.Codebase.Sqlite.LocalIds
LocalPatchObjectId (..),
LocalTextId (..),
)
import U.Codebase.Sqlite.Patch.Format (PatchLocalIds)
import U.Codebase.Sqlite.Patch.Format (PatchLocalIds, PatchLocalIds')
import U.Codebase.Sqlite.Patch.Format qualified as Patch
import U.Codebase.Sqlite.Patch.Full (LocalPatch, Patch, Patch' (..))
import U.Codebase.Sqlite.Patch.TermEdit (LocalTermEdit, TermEdit)
import U.Codebase.Sqlite.Patch.TypeEdit (LocalTypeEdit, TypeEdit)
import U.Codebase.Sqlite.Reference (LocalReference, LocalReferenceH, Reference, ReferenceH)
import U.Codebase.Sqlite.Referent (LocalReferent, LocalReferentH, Referent, ReferentH)
import U.Codebase.Sqlite.Patch.TermEdit (LocalTermEdit, TermEdit')
import U.Codebase.Sqlite.Patch.TypeEdit (LocalTypeEdit, TypeEdit')
import U.Codebase.Sqlite.Reference (LocalReference, LocalReferenceH)
import U.Codebase.Sqlite.Referent (LocalReferent, LocalReferentH)
import Unison.Prelude
import Unison.Util.Map qualified as Map
import Unison.Util.Set qualified as Set
@ -62,7 +68,11 @@ import Unison.Util.Set qualified as Set
-- | Localize a branch object.
localizeBranch :: DbBranch -> (BranchLocalIds, LocalBranch)
localizeBranch (Branch terms types patches children) =
localizeBranch = localizeBranchG
-- | Generalized form of 'localizeBranch'.
localizeBranchG :: forall t d p c. (Ord t, Ord d, Ord p, Ord c) => Branch' t d p c -> (Branch.BranchLocalIds' t d p c, LocalBranch)
localizeBranchG (Branch terms types patches children) =
(runIdentity . runLocalizeBranch) do
Branch
<$> Map.bitraverse localizeText (Map.bitraverse localizeReferent localizeBranchMetadata) terms
@ -70,23 +80,28 @@ localizeBranch (Branch terms types patches children) =
<*> Map.bitraverse localizeText localizePatchReference patches
<*> Map.bitraverse localizeText localizeBranchReference children
where
localizeBranchMetadata :: (ContainsDefns s, ContainsText s, Monad m) => Branch.DbMetadataSet -> StateT s m Branch.LocalMetadataSet
localizeBranchMetadata ::
Branch.MetadataSetFormat' t d ->
State (LocalizeBranchState t d p c) (Branch.MetadataSetFormat' LocalTextId LocalDefnId)
localizeBranchMetadata (Branch.Inline refs) =
Branch.Inline <$> Set.traverse localizeReference refs
-- | Localize a patch object.
localizePatch :: Patch -> (PatchLocalIds, LocalPatch)
localizePatch (Patch termEdits typeEdits) =
localizePatch = localizePatchG
localizePatchG :: forall t h d. (Ord t, Ord h, Ord d) => Patch' t h d -> (PatchLocalIds' t h d, LocalPatch)
localizePatchG (Patch termEdits typeEdits) =
(runIdentity . runLocalizePatch) do
Patch
<$> Map.bitraverse localizeReferentH (Set.traverse localizeTermEdit) termEdits
<*> Map.bitraverse localizeReferenceH (Set.traverse localizeTypeEdit) typeEdits
where
localizeTermEdit :: (ContainsText s, ContainsDefns s, Monad m) => TermEdit -> StateT s m LocalTermEdit
localizeTermEdit :: (TermEdit' t d) -> State (LocalizePatchState t h d) LocalTermEdit
localizeTermEdit =
bitraverse localizeText localizeDefn
localizeTypeEdit :: (ContainsText s, ContainsDefns s, Monad m) => TypeEdit -> StateT s m LocalTypeEdit
localizeTypeEdit :: TypeEdit' t d -> State (LocalizePatchState t h d) LocalTypeEdit
localizeTypeEdit =
bitraverse localizeText localizeDefn
@ -94,38 +109,51 @@ localizePatch (Patch termEdits typeEdits) =
-- General-purpose localization
-- Contains references to branch objects.
type ContainsBranches s =
HasType (Map (BranchObjectId, CausalHashId) LocalBranchChildId) s
class Ord c => ContainsBranches c s where
branches_ :: Lens' s (Map c LocalBranchChildId)
-- Contains references to definition objects i.e. term/decl component objects.
type ContainsDefns s =
HasType (Map ObjectId LocalDefnId) s
class Ord d => ContainsDefns d s where
defns_ :: Lens' s (Map d LocalDefnId)
-- Contains references to objects by their hash.
type ContainsHashes =
HasType (Map HashId LocalHashId)
class Ord h => ContainsHashes h s where
hashes_ :: Lens' s (Map h LocalHashId)
-- Contains references to patch objects.
type ContainsPatches =
HasType (Map PatchObjectId LocalPatchObjectId)
class Ord p => ContainsPatches p s where
patches_ :: Lens' s (Map p LocalPatchObjectId)
-- Contains text.
type ContainsText =
HasType (Map TextId LocalTextId)
class Ord t => ContainsText t s where
texts_ :: Lens' s (Map t LocalTextId)
-- The inner state of the localization of a branch object.
type LocalizeBranchState =
( Map TextId LocalTextId,
Map ObjectId LocalDefnId,
Map PatchObjectId LocalPatchObjectId,
Map (BranchObjectId, CausalHashId) LocalBranchChildId
)
data LocalizeBranchState t d p c = LocalizeBranchState
{ texts :: Map t LocalTextId,
defns :: Map d LocalDefnId,
patches :: Map p LocalPatchObjectId,
branches :: Map c LocalBranchChildId
}
deriving (Show, Generic)
-- Run a computation that localizes a branch object, returning the local ids recorded within.
runLocalizeBranch :: (Monad m) => StateT LocalizeBranchState m a -> m (BranchLocalIds, a)
instance Ord t => ContainsText t (LocalizeBranchState t d p c) where
texts_ = field @"texts"
instance Ord d => ContainsDefns d (LocalizeBranchState t d p c) where
defns_ = field @"defns"
instance Ord p => ContainsPatches p (LocalizeBranchState t d p c) where
patches_ = field @"patches"
instance Ord c => ContainsBranches c (LocalizeBranchState t d p c) where
branches_ = field @"branches"
-- | Run a computation that localizes a branch object, returning the local ids recorded within.
runLocalizeBranch :: forall m t d p c a. (Monad m, Ord t, Ord d, Ord p, Ord c) => StateT (LocalizeBranchState t d p c) m a -> m (Branch.BranchLocalIds' t d p c, a)
runLocalizeBranch action = do
(result, (localTexts, localDefns, localPatches, localChildren)) <- State.runStateT action (mempty @LocalizeBranchState)
let branchLocalIds :: BranchLocalIds
(result, (LocalizeBranchState localTexts localDefns localPatches localChildren)) <- State.runStateT action (LocalizeBranchState mempty mempty mempty mempty)
let branchLocalIds :: Branch.BranchLocalIds' t d p c
branchLocalIds =
Branch.LocalIds
{ Branch.branchTextLookup = Map.valuesVector (Map.swap localTexts),
@ -136,17 +164,27 @@ runLocalizeBranch action = do
pure (branchLocalIds, result)
-- The inner state of the localization of a patch object.
type LocalizePatchState =
( Map TextId LocalTextId,
Map HashId LocalHashId,
Map ObjectId LocalDefnId
)
data LocalizePatchState t h d = LocalizePatchState
{ texts :: Map t LocalTextId,
hashes :: Map h LocalHashId,
defns :: Map d LocalDefnId
}
deriving (Show, Generic)
instance Ord t => ContainsText t (LocalizePatchState t h d) where
texts_ = field @"texts"
instance Ord h => ContainsHashes h (LocalizePatchState t h d) where
hashes_ = field @"hashes"
instance Ord d => ContainsDefns d (LocalizePatchState t h d) where
defns_ = field @"defns"
-- Run a computation that localizes a patch object, returning the local ids recorded within.
runLocalizePatch :: (Monad m) => StateT LocalizePatchState m a -> m (PatchLocalIds, a)
runLocalizePatch :: forall t h d a m. (Monad m, Ord t, Ord h, Ord d) => StateT (LocalizePatchState t h d) m a -> m (PatchLocalIds' t h d, a)
runLocalizePatch action = do
(result, (localTexts, localHashes, localDefns)) <- State.runStateT action (mempty @LocalizePatchState)
let patchLocalIds :: PatchLocalIds
(result, (LocalizePatchState localTexts localHashes localDefns)) <- State.runStateT action (LocalizePatchState mempty mempty mempty)
let patchLocalIds :: PatchLocalIds' t h d
patchLocalIds =
Patch.LocalIds
{ Patch.patchTextLookup = Map.valuesVector (Map.swap localTexts),
@ -156,52 +194,52 @@ runLocalizePatch action = do
pure (patchLocalIds, result)
-- Localize a branch object reference in any monad that encapsulates the stateful localization of an object that contains branch references.
localizeBranchReference :: (ContainsBranches s, Monad m) => (BranchObjectId, CausalHashId) -> StateT s m LocalBranchChildId
localizeBranchReference :: (ContainsBranches c s, Monad m) => c -> StateT s m LocalBranchChildId
localizeBranchReference =
zoom typed . localize
zoom branches_ . localize
-- Localize a definition object reference in any monad that encapsulates the stateful localization of an object that contains definition
-- references.
localizeDefn :: (ContainsDefns s, Monad m) => ObjectId -> StateT s m LocalDefnId
localizeDefn :: (ContainsDefns d s, Monad m) => d -> StateT s m LocalDefnId
localizeDefn =
zoom typed . localize
zoom defns_ . localize
-- Localize a hash reference in any monad that encapsulates the stateful localization of an object that contains hash references.
localizeHash :: (ContainsHashes s, Monad m) => HashId -> StateT s m LocalHashId
localizeHash :: (ContainsHashes h s, Monad m) => h -> StateT s m LocalHashId
localizeHash =
zoom typed . localize
zoom hashes_ . localize
-- Localize a patch object reference in any monad that encapsulates the stateful localization of an object that contains patch references.
localizePatchReference :: (ContainsPatches s, Monad m) => PatchObjectId -> StateT s m LocalPatchObjectId
localizePatchReference :: (ContainsPatches p s, Monad m) => p -> StateT s m LocalPatchObjectId
localizePatchReference =
zoom typed . localize
zoom patches_ . localize
-- Localize a reference in any monad that encapsulates the stateful localization of an object that contains references.
localizeReference :: (ContainsDefns s, ContainsText s, Monad m) => Reference -> StateT s m LocalReference
localizeReference :: (ContainsDefns d s, ContainsText t s, Monad m) => Reference' t d -> StateT s m LocalReference
localizeReference =
bitraverse localizeText localizeDefn
-- Localize a possibly-missing reference in any monad that encapsulates the stateful localization of an object that contains
-- possibly-missing references.
localizeReferenceH :: (ContainsHashes s, ContainsText s, Monad m) => ReferenceH -> StateT s m LocalReferenceH
localizeReferenceH :: (ContainsHashes h s, ContainsText t s, Monad m) => Reference' t h -> StateT s m LocalReferenceH
localizeReferenceH =
bitraverse localizeText localizeHash
-- Localize a referent in any monad that encapsulates the stateful localization of an object that contains referents.
localizeReferent :: (ContainsDefns s, ContainsText s, Monad m) => Referent -> StateT s m LocalReferent
localizeReferent :: forall d t s m. (ContainsDefns d s, ContainsText t s, Monad m) => (Referent' (Reference' t d) (Reference' t d)) -> StateT s m LocalReferent
localizeReferent =
bitraverse localizeReference localizeReference
-- Localize a possibly-missing referent in any monad that encapsulates the stateful localization of an object that contains possibly-missing
-- referents.
localizeReferentH :: (ContainsHashes s, ContainsText s, Monad m) => ReferentH -> StateT s m LocalReferentH
localizeReferentH :: (ContainsHashes h s, ContainsText t s, Monad m, r ~ Reference' t h) => Referent' r r -> StateT s m LocalReferentH
localizeReferentH =
bitraverse localizeReferenceH localizeReferenceH
-- Localize a text reference in any monad that encapsulates the stateful localization of an object that contains text.
localizeText :: (ContainsText s, Monad m) => TextId -> StateT s m LocalTextId
localizeText :: (ContainsText t s, Monad m) => t -> StateT s m LocalTextId
localizeText =
zoom typed . localize
zoom texts_ . localize
-- Resolve a real id to its corresponding local id, either by looking it up in a map, or else using the next available local id, which is
-- recorded for next time.
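The `localize` helper this comment describes falls outside the hunk shown above. For orientation only, a lookup-or-allocate helper of that shape could look roughly like the sketch below; it assumes the local id type is a newtype over Word64 (as the Local*Id types are) and is not the commit's actual implementation.

```haskell
-- Sketch only: look the real id up, or allocate the next local id
-- (counting up from 0) and remember the assignment for next time.
import Control.Monad.Trans.State.Strict (StateT)
import Control.Monad.Trans.State.Strict qualified as State
import Data.Coerce (Coercible, coerce)
import Data.Map.Strict (Map)
import Data.Map.Strict qualified as Map
import Data.Word (Word64)

localizeSketch ::
  (Coercible localId Word64, Monad m, Ord realId) =>
  realId ->
  StateT (Map realId localId) m localId
localizeSketch realId = do
  mapping <- State.get
  case Map.lookup realId mapping of
    Just localId -> pure localId
    Nothing -> do
      let nextLocalId = coerce (fromIntegral (Map.size mapping) :: Word64)
      State.put (Map.insert realId nextLocalId mapping)
      pure nextLocalId
```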

View File

@ -7,6 +7,7 @@ module U.Codebase.Sqlite.Patch.Format
SyncPatchFormat' (..),
applyPatchDiffs,
localPatchToPatch,
localPatchToPatch',
localPatchDiffToPatchDiff,
localPatchToHashPatch,
)
@ -74,6 +75,15 @@ localToPatch' :: (Ord t, Ord h, Ord d) => PatchLocalIds' t h d -> (Patch' LocalT
localToPatch' li =
Patch.Full.trimap (lookupPatchLocalText li) (lookupPatchLocalHash li) (lookupPatchLocalDefn li)
-- | Generic version of `localPatchToPatch` that works with any `PatchLocalIds'`.
localPatchToPatch' ::
(Ord t, Ord h, Ord d) =>
PatchLocalIds' t h d ->
Patch' LocalTextId LocalHashId LocalDefnId ->
Patch' t h d
localPatchToPatch' li =
Patch.Full.trimap (lookupPatchLocalText li) (lookupPatchLocalHash li) (lookupPatchLocalDefn li)
-- | Type specialized version of `localToPatch'`.
localPatchToPatch :: PatchLocalIds -> LocalPatch -> Patch
localPatchToPatch = localToPatch'

View File

@ -1,6 +1,7 @@
module U.Codebase.Sqlite.Patch.Full where
import Control.Lens
import Data.Bitraversable (Bitraversable, bitraverse)
import Data.Map (Map)
import Data.Set (Set)
import Data.Set qualified as Set
@ -47,6 +48,16 @@ data Patch' t h o = Patch
typeEdits :: Map (Reference' t h) (Set (TypeEdit' t o))
}
patchT_ :: (Ord t', Ord h, Ord o) => Traversal (Patch' t h o) (Patch' t' h o) t t'
patchT_ f Patch {termEdits, typeEdits} = do
newTermEdits <-
traverseOf (Map.bitraversed (Referent.refs_ . Reference.t_) (Set.traverse . traverseFirst)) f termEdits
newTypeEdits <- traverseOf (Map.bitraversed (Reference.t_) (Set.traverse . traverseFirst)) f typeEdits
pure Patch {termEdits = newTermEdits, typeEdits = newTypeEdits}
where
traverseFirst :: Bitraversable b => Traversal (b a c) (b a' c) a a'
traverseFirst f = bitraverse f pure
patchH_ :: (Ord t, Ord h') => Traversal (Patch' t h o) (Patch' t h' o) h h'
patchH_ f Patch {termEdits, typeEdits} = do
newTermEdits <- termEdits & Map.traverseKeys . Referent.refs_ . Reference.h_ %%~ f

View File

@ -149,6 +149,7 @@ module U.Codebase.Sqlite.Queries
setRemoteProjectBranchName,
insertBranchRemoteMapping,
ensureBranchRemoteMapping,
deleteBranchRemoteMapping,
-- * indexes
@ -2792,6 +2793,7 @@ data EntityLocation
EntityInMainStorage
| -- | `temp_entity`
EntityInTempStorage
deriving (Eq, Show, Ord)
-- | Where is an entity stored?
entityLocation :: Hash32 -> Transaction (Maybe EntityLocation)
@ -4033,6 +4035,20 @@ ensureBranchRemoteMapping pid bid rpid host rbid =
DO NOTHING
|]
deleteBranchRemoteMapping ::
ProjectId ->
ProjectBranchId ->
URI ->
Transaction ()
deleteBranchRemoteMapping pid bid host =
execute
[sql|
DELETE FROM project_branch_remote_mapping
WHERE local_project_id = :pid
AND local_branch_id = :bid
AND remote_host = :host
|]
-- | Convert reversed name segments into glob for searching based on suffix
--
-- >>> toSuffixGlob ("foo" NonEmpty.:| ["bar"])

View File

@ -19,6 +19,7 @@ module U.Codebase.Sqlite.Serialization
getTempPatchFormat,
getTempTermFormat,
getTermAndType,
getTypeFromTermAndType,
getTermFormat,
getWatchResultFormat,
lookupDeclElement,
@ -43,6 +44,10 @@ module U.Codebase.Sqlite.Serialization
putSingleTerm,
putDeclElement,
getSingleTerm,
putLocalIdsWith,
getLocalIdsWith,
putLocalBranch,
putLocalPatch,
)
where
@ -160,7 +165,7 @@ putLocalIdsWith putText putDefn LocalIds {textLookup, defnLookup} = do
putFoldable putText textLookup
putFoldable putDefn defnLookup
getLocalIds :: (MonadGet m) => m LocalIds
getLocalIds :: (MonadGet m, Num t, Bits t, Num h, Bits h) => m (LocalIds' t h)
getLocalIds = getLocalIdsWith getVarInt getVarInt
skipLocalIds :: (MonadGet m) => m ()
@ -315,10 +320,16 @@ putSingleTerm t = putABT putSymbol putUnit putF t
getTermComponent :: (MonadGet m) => m TermFormat.LocallyIndexedComponent
getTermComponent =
TermFormat.LocallyIndexedComponent
<$> getFramedArray (getTuple3 getLocalIds (getFramed getSingleTerm) getTType)
<$> getFramedArray (getTuple3 getLocalIds (getFramed getSingleTerm) getTermElementType)
getTermAndType :: (MonadGet m) => m (TermFormat.Term, TermFormat.Type)
getTermAndType = (,) <$> getFramed getSingleTerm <*> getTType
getTermAndType = (,) <$> getFramed getSingleTerm <*> getTermElementType
-- | Decode ONLY the type of a term-component element.
-- This is useful during sync, or any time we need an element's type but don't
-- want to decode the whole term (which can be expensive).
getTypeFromTermAndType :: (MonadGet m) => m (TermFormat.Type)
getTypeFromTermAndType = skipFramed *> getTermElementType
getSingleTerm :: (MonadGet m) => m TermFormat.Term
getSingleTerm = getABT getSymbol getUnit getF
@ -399,7 +410,7 @@ getSingleTerm = getABT getSymbol getUnit getF
lookupTermElement :: (MonadGet m) => Reference.Pos -> m (LocalIds, TermFormat.Term, TermFormat.Type)
lookupTermElement i =
getWord8 >>= \case
0 -> unsafeFramedArrayLookup (getTuple3 getLocalIds (getFramed getSingleTerm) getTType) $ fromIntegral i
0 -> unsafeFramedArrayLookup (getTuple3 getLocalIds (getFramed getSingleTerm) getTermElementType) $ fromIntegral i
tag -> unknownTag "lookupTermElement" tag
lookupTermElementDiscardingType :: (MonadGet m) => Reference.Pos -> m (LocalIds, TermFormat.Term)
@ -411,11 +422,12 @@ lookupTermElementDiscardingType i =
lookupTermElementDiscardingTerm :: (MonadGet m) => Reference.Pos -> m (LocalIds, TermFormat.Type)
lookupTermElementDiscardingTerm i =
getWord8 >>= \case
0 -> unsafeFramedArrayLookup ((,) <$> getLocalIds <* skipFramed <*> getTType) $ fromIntegral i
0 -> unsafeFramedArrayLookup ((,) <$> getLocalIds <* skipFramed <*> getTermElementType) $ fromIntegral i
tag -> unknownTag "lookupTermElementDiscardingTerm" tag
getTType :: (MonadGet m) => m TermFormat.Type
getTType = getType getReference
-- | Decode a type which is stored alongside a term-component element.
getTermElementType :: (MonadGet m) => m TermFormat.Type
getTermElementType = getType getReference
getType :: forall m r. (MonadGet m) => m r -> m (Type.TypeR r Symbol)
getType getReference = getABT getSymbol getUnit go
@ -526,21 +538,13 @@ putBranchFormat b = case b of
BranchFormat.Full li b -> do
putWord8 0
putBranchLocalIds li
putBranchFull b
putLocalBranch b
BranchFormat.Diff r li d -> do
putWord8 1
putVarInt r
putBranchLocalIds li
putBranchDiff d
where
putBranchFull (BranchFull.Branch terms types patches children) = do
putMap putVarInt (putMap putReferent putMetadataSetFormat) terms
putMap putVarInt (putMap putReference putMetadataSetFormat) types
putMap putVarInt putVarInt patches
putMap putVarInt putVarInt children
where
putMetadataSetFormat (BranchFull.Inline s) =
putWord8 0 *> putFoldable putReference s
putBranchDiff (BranchDiff.Diff terms types patches children) = do
putMap putVarInt (putMap putReferent putDiffOp) terms
putMap putVarInt (putMap putReference putDiffOp) types
@ -562,6 +566,16 @@ putBranchFormat b = case b of
BranchDiff.ChildRemove -> putWord8 0
BranchDiff.ChildAddReplace b -> putWord8 1 *> putVarInt b
putLocalBranch :: (MonadPut m) => BranchFull.LocalBranch -> m ()
putLocalBranch (BranchFull.Branch terms types patches children) = do
putMap putVarInt (putMap putReferent putMetadataSetFormat) terms
putMap putVarInt (putMap putReference putMetadataSetFormat) types
putMap putVarInt putVarInt patches
putMap putVarInt putVarInt children
where
putMetadataSetFormat (BranchFull.Inline s) =
putWord8 0 *> putFoldable putReference s
putBranchLocalIds :: (MonadPut m) => BranchFormat.BranchLocalIds -> m ()
putBranchLocalIds (BranchFormat.LocalIds ts os ps cs) = do
putFoldable putVarInt ts
@ -574,7 +588,7 @@ putPatchFormat = \case
PatchFormat.Full ids p -> do
putWord8 0
putPatchLocalIds ids
putPatchFull p
putLocalPatch p
PatchFormat.Diff r ids p -> do
putWord8 1
putVarInt r
@ -608,14 +622,14 @@ getTermEdit =
0 -> pure TermEdit.Deprecate
1 -> TermEdit.Replace <$> getReferent <*> getTyping
x -> unknownTag "getTermEdit" x
where
getTyping :: (MonadGet m) => m TermEdit.Typing
getTyping =
getWord8 >>= \case
0 -> pure TermEdit.Same
1 -> pure TermEdit.Subtype
2 -> pure TermEdit.Different
x -> unknownTag "getTyping" x
getTyping :: (MonadGet m) => m TermEdit.Typing
getTyping =
getWord8 >>= \case
0 -> pure TermEdit.Same
1 -> pure TermEdit.Subtype
2 -> pure TermEdit.Different
x -> unknownTag "getTyping" x
getTypeEdit :: (MonadGet m) => m TypeEdit.LocalTypeEdit
getTypeEdit =
@ -631,8 +645,8 @@ getPatchLocalIds =
<*> getVector getVarInt
<*> getVector getVarInt
putPatchFull :: (MonadPut m) => PatchFull.LocalPatch -> m ()
putPatchFull (PatchFull.Patch termEdits typeEdits) = do
putLocalPatch :: (MonadPut m) => PatchFull.LocalPatch -> m ()
putLocalPatch (PatchFull.Patch termEdits typeEdits) = do
putMap putReferent (putFoldable putTermEdit) termEdits
putMap putReference (putFoldable putTypeEdit) typeEdits

View File

@ -1,17 +1,25 @@
module U.Codebase.HashTags where
import Unison.Hash (Hash)
import Unison.Hash32 (Hash32)
import Unison.Prelude
-- | Represents a hash of a type or term component
newtype ComponentHash = ComponentHash {unComponentHash :: Hash}
deriving stock (Eq, Ord, Show)
deriving stock (Eq, Ord)
newtype BranchHash = BranchHash {unBranchHash :: Hash} deriving (Eq, Ord)
newtype BranchHash = BranchHash {unBranchHash :: Hash}
deriving stock (Eq, Ord)
-- | Represents a hash of a causal containing values of the provided type.
newtype CausalHash = CausalHash {unCausalHash :: Hash} deriving (Eq, Ord)
newtype CausalHash = CausalHash {unCausalHash :: Hash}
deriving stock (Eq, Ord)
newtype PatchHash = PatchHash {unPatchHash :: Hash} deriving (Eq, Ord)
newtype PatchHash = PatchHash {unPatchHash :: Hash}
deriving stock (Eq, Ord)
instance Show ComponentHash where
show h = "ComponentHash (" ++ show (unComponentHash h) ++ ")"
instance Show BranchHash where
show h = "BranchHash (" ++ show (unBranchHash h) ++ ")"
@ -21,3 +29,55 @@ instance Show CausalHash where
instance Show PatchHash where
show h = "PatchHash (" ++ show (unPatchHash h) ++ ")"
instance From ComponentHash Text where
from = from @Hash @Text . unComponentHash
instance From BranchHash Text where
from = from @Hash @Text . unBranchHash
instance From CausalHash Text where
from = from @Hash @Text . unCausalHash
instance From PatchHash Text where
from = from @Hash @Text . unPatchHash
instance From ComponentHash Hash
instance From BranchHash Hash
instance From CausalHash Hash
instance From PatchHash Hash
instance From Hash ComponentHash
instance From Hash BranchHash
instance From Hash CausalHash
instance From Hash PatchHash
instance From ComponentHash Hash32 where
from = from @Hash @Hash32 . unComponentHash
instance From BranchHash Hash32 where
from = from @Hash @Hash32 . unBranchHash
instance From CausalHash Hash32 where
from = from @Hash @Hash32 . unCausalHash
instance From PatchHash Hash32 where
from = from @Hash @Hash32 . unPatchHash
instance From Hash32 ComponentHash where
from = ComponentHash . from @Hash32 @Hash
instance From Hash32 BranchHash where
from = BranchHash . from @Hash32 @Hash
instance From Hash32 CausalHash where
from = CausalHash . from @Hash32 @Hash
instance From Hash32 PatchHash where
from = PatchHash . from @Hash32 @Hash
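For orientation, one way the new witch instances above might be used; this is an illustrative sketch, not code from the commit.

```haskell
{-# LANGUAGE TypeApplications #-}

-- Illustrative sketch: converting a CausalHash via the From instances above.
import Data.Text (Text)
import U.Codebase.HashTags (CausalHash)
import Unison.Hash32 (Hash32)
import Witch (from, into)

causalHashText :: CausalHash -> Text
causalHashText = into @Text

causalHashToHash32 :: CausalHash -> Hash32
causalHashToHash32 = from
```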

View File

@ -9,7 +9,7 @@
module U.Util.Serialization where
import Control.Applicative (Applicative (liftA2), liftA3)
import Control.Monad (foldM, replicateM, when, replicateM_)
import Control.Monad (foldM, replicateM, replicateM_, when)
import Data.Bits (Bits, clearBit, setBit, shiftL, shiftR, testBit, (.|.))
import Data.ByteString (ByteString, readFile, writeFile)
import qualified Data.ByteString as BS

View File

@ -2,12 +2,19 @@
[![asciicast](https://asciinema.org/a/Kwa7NscffA3R8KCHxq1OavRm0.svg)](https://asciinema.org/a/Kwa7NscffA3R8KCHxq1OavRm0)
* [Overview](#overview)
* [Installation and setup](#installation-and-setup)
* [Settings](#settings)
* [NeoVim](#neovim)
* [VSCode](#vscode)
## Overview
Supported features:
* Autocompletion
* Inline type and parser error messages
* Format on save (you can disable this in your editor if you like)
* Show type on hover
Notes:
@ -34,6 +41,17 @@ You can set this persistently in powershell using:
See [this issue](https://github.com/unisonweb/unison/issues/3487) for more details.
### Settings
Supported settings and their defaults are listed below. See your language server client's documentation for where to provide these.
```json
{
// A suggestion for the formatter about how wide (in columns) to print definitions.
"formattingWidth": 80
}
```
### NeoVim
Before configuring the LSP, install the Vim plugin for filetype detection and syntax highlighting.

View File

@ -46,44 +46,36 @@ Smoke test of the new release. Try `brew upgrade unison-language`, launch it, la
## 4
Announce on #general Slack channel. Template below.
Announce on #general Discord channel. Template below.
---
Release announcement template (be sure to update the release urls) -
We've just released a new version of Unison, $RELEASE_NAME.
We've just released a new version of Unison, $RELEASE_NAME, release notes here (link to the issue). Install/upgrade instructions in the thread.
---
Mac upgrade is just `brew upgrade unison-language`.
**macOS or Linux w/ Homebrew:**
Install or upgrade is just `brew install unisonweb/unison/unison-language`.
A fresh install via:
If you have previously done `brew install unison-language --head` to install a dev build, uninstall that first via `brew uninstall unison-language`.
**macOS or Linux manual install:**
macOS
```
brew tap unisonweb/unison
brew install unison-language
mkdir -p unisonlanguage && cd unisonlanguage
curl -L https://github.com/unisonweb/unison/releases/latest/download/ucm-macos.tar.gz \
| tar -xz
./ucm
```
Linux
```
mkdir -p unisonlanguage && cd unisonlanguage
curl -L https://github.com/unisonweb/unison/releases/latest/download/ucm-linux.tar.gz \
| tar -xz
./ucm
```
If you have previously done brew install unison-language --head to install a dev build, uninstall that first via brew uninstall unison-language.
_Linux manual install:_
```
mkdir unisonlanguage
curl -L https://github.com/unisonweb/unison/releases/download/release%2FM2h/ucm-linux.tar.gz --output unisonlanguage/ucm.tar.gz
tar -xzf unisonlanguage/ucm.tar.gz -C unisonlanguage
./unisonlanguage/ucm
```
_Mac manual install:_
```
mkdir unisonlanguage
curl -L https://github.com/unisonweb/unison/releases/download/release%2FM2h/ucm-macos.tar.gz --output unisonlanguage/ucm.tar.gz
tar -xzf unisonlanguage/ucm.tar.gz -C unisonlanguage
./unisonlanguage/ucm
```
_Windows manual install:_
**Windows manual install:**
* Recommended: [Set your default Terminal application](https://devblogs.microsoft.com/commandline/windows-terminal-as-your-default-command-line-experience/) to “Windows Terminal”.
* Download [the release](https://github.com/unisonweb/unison/releases/download/release%2FM5h/ucm-windows.zip) and extract it to a location of your choosing.
* Run `ucm.exe`

View File

@ -10,6 +10,7 @@ dependencies:
- text
- unison-prelude
- unison-util-base32hex
- witch
library:
source-dirs: src

View File

@ -37,6 +37,9 @@ instance Show Hash where
newtype HashFor t = HashFor {genericHash :: Hash}
deriving newtype (Show, Eq, Ord, Generic)
instance From Hash Text where
from = toBase32HexText
-- | Convert a hash to a byte string.
toByteString :: Hash -> ByteString
toByteString = B.Short.fromShort . toShort

View File

@ -32,6 +32,15 @@ import Unison.Prelude
newtype Hash32 = UnsafeFromBase32Hex Base32Hex
deriving (Eq, Ord, Show) via (Text)
instance From Hash32 Text where
from = toText
instance From Hash32 Hash where
from = toHash
instance From Hash Hash32 where
from = fromHash
fromHash :: Hash -> Hash32
fromHash =
unsafeFromBase32Hex . Hash.toBase32Hex

View File

@ -56,4 +56,5 @@ library
, text
, unison-prelude
, unison-util-base32hex
, witch
default-language: Haskell2010

View File

@ -229,7 +229,7 @@ wrapImplPreserveSpaces = \case
(Lit s) -> fromMaybe False (fmap (isSpaceNotNewline . fst) $ LL.uncons s)
_ -> False
f p | startsWithSpace p = p `orElse` newline
f p = p
f p = p `orElse` (newline <> p)
isSpaceNotNewline :: Char -> Bool
isSpaceNotNewline c = isSpace c && not (c == '\n')

View File

@ -243,4 +243,3 @@ deriveRangeFromDomain :: Ord b => a -> NESet b -> Map b a -> Map b a
deriveRangeFromDomain x ys acc =
foldr (flip Map.insert x) acc ys
{-# INLINE deriveRangeFromDomain #-}

View File

@ -59,6 +59,7 @@ dependencies:
- http-client
- http-media
- http-types
- IntervalMap
- lens
- lucid
- megaparsec

View File

@ -44,6 +44,7 @@ module Unison.Codebase
SqliteCodebase.Operations.before,
getShallowBranchAtPath,
getShallowCausalAtPath,
getBranchAtPath,
Operations.expectCausalBranchByCausalHash,
getShallowCausalFromRoot,
getShallowRootBranch,
@ -115,7 +116,6 @@ where
import Control.Monad.Except (ExceptT (ExceptT), runExceptT)
import Control.Monad.Trans.Except (throwE)
import Data.List as List
import Data.Map qualified as Map
import Data.Set qualified as Set
import U.Codebase.Branch qualified as V2
@ -242,23 +242,15 @@ getShallowBranchAtPath path mayBranch = do
childBranch <- V2Causal.value childCausal
getShallowBranchAtPath p (Just childBranch)
-- | Get a branch from the codebase.
getBranchForHash :: (Monad m) => Codebase m v a -> CausalHash -> m (Maybe (Branch m))
getBranchForHash codebase h =
-- Attempt to find the Branch in the current codebase cache and root up to 3 levels deep
-- If not found, attempt to find it in the Codebase (sqlite)
let nestedChildrenForDepth :: Int -> Branch m -> [Branch m]
nestedChildrenForDepth depth b =
if depth == 0
then []
else b : (Map.elems (Branch._children (Branch.head b)) >>= nestedChildrenForDepth (depth - 1))
headHashEq = (h ==) . Branch.headHash
find rb = List.find headHashEq (nestedChildrenForDepth 3 rb)
in do
rootBranch <- getRootBranch codebase
maybe (getBranchForHashImpl codebase h) (pure . Just) (find rootBranch)
-- | Get a v1 branch from the root following the given path.
getBranchAtPath ::
(MonadIO m) =>
Codebase m v a ->
Path.Absolute ->
m (Branch m)
getBranchAtPath codebase path = do
V2Causal.Causal {causalHash} <- runTransaction codebase $ getShallowCausalAtPath (Path.unabsolute path) Nothing
expectBranchForHash codebase causalHash
-- | Like 'getBranchForHash', but for when the hash is known to be in the codebase.
expectBranchForHash :: (Monad m) => Codebase m v a -> CausalHash -> m (Branch m)

View File

@ -1,15 +1,26 @@
module Unison.Codebase.Branch.Names
( namesDiff,
toNames,
toPrettyPrintEnvDecl,
)
where
import Unison.Codebase.Branch
import Unison.Names (Names (..))
import Unison.NamesWithHistory qualified as Names
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.PrettyPrintEnvDecl.Names qualified as PPED
import Unison.Util.Relation qualified as R
import Prelude hiding (head, read, subtract)
-- | Get the pretty-printing environment for names in the provided branch.
toPrettyPrintEnvDecl :: Int -> Branch0 m -> PPED.PrettyPrintEnvDecl
toPrettyPrintEnvDecl hashLength b =
let names = toNames b
in PPED.makePPED (PPE.hqNamer hashLength names) (PPE.suffixifyByHash names)
-- | Get the names in the provided branch.
toNames :: Branch0 m -> Names
toNames b =
Names

View File

@ -381,7 +381,7 @@ sqliteCodebase debugName root localOrRemote lockOption migrationStrategy action
getTermComponentWithTypes,
getRootBranch,
putRootBranch,
getBranchForHashImpl = getBranchForHash,
getBranchForHash,
putBranch,
syncFromDirectory,
syncToDirectory,

View File

@ -50,7 +50,6 @@ import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment (..))
import Unison.Names (Names (Names))
import Unison.Names qualified as Names
import Unison.Names.Scoped (ScopedNames (..))
import Unison.Parser.Ann (Ann)
import Unison.Prelude
import Unison.Reference (Reference)
@ -518,7 +517,7 @@ filterReferentsHavingTypeImpl ::
filterReferentsHavingTypeImpl doGetDeclType typRef termRefs =
Ops.filterTermsByReferentHavingType (Cv.reference1to2 typRef) (Cv.referentid1to2 <$> toList termRefs)
>>= traverse (Cv.referentid2to1 doGetDeclType)
<&> Set.fromList
<&> Set.fromList
-- | The number of base32 characters needed to distinguish any two references in the codebase.
hashLength :: Transaction Int
@ -588,7 +587,7 @@ namesAtPath ::
BranchHash ->
-- Include names from the project which contains this path.
Path ->
Transaction ScopedNames
Transaction Names
namesAtPath bh path = do
let namesRoot = PathSegments . coerce . Path.toList $ path
namesPerspective@Ops.NamesPerspective {relativePerspective} <- Ops.namesPerspectiveForRootAndPath bh namesRoot
@ -596,22 +595,15 @@ namesAtPath bh path = do
NamesInPerspective {termNamesInPerspective, typeNamesInPerspective} <- Ops.allNamesInPerspective namesPerspective
let termsInPath = convertTerms termNamesInPerspective
let typesInPath = convertTypes typeNamesInPerspective
let rootTerms = Rel.fromList termsInPath
let rootTypes = Rel.fromList typesInPath
let absoluteRootNames = Names.makeAbsolute $ Names {terms = rootTerms, types = rootTypes}
let relativeScopedNames =
case relativePath of
Path.Empty -> (Names.makeRelative $ absoluteRootNames)
Path.Empty -> (Names {terms = Rel.fromList termsInPath, types = Rel.fromList typesInPath})
p ->
let reversedPathSegments = reverse . Path.toList $ p
relativeTerms = mapMaybe (stripPathPrefix reversedPathSegments) termsInPath
relativeTypes = mapMaybe (stripPathPrefix reversedPathSegments) typesInPath
in (Names {terms = Rel.fromList relativeTerms, types = Rel.fromList relativeTypes})
pure $
ScopedNames
{ relativeScopedNames,
absoluteRootNames
}
pure $ relativeScopedNames
where
convertTypes names =
names <&> \(S.NamedRef {reversedSegments, ref}) ->

View File

@ -79,7 +79,7 @@ data Codebase m v a = Codebase
Text -> -- Reason for the change, will be recorded in the reflog
Branch m ->
m (),
getBranchForHashImpl :: CausalHash -> m (Maybe (Branch m)),
getBranchForHash :: CausalHash -> m (Maybe (Branch m)),
-- | Put a branch into the codebase, which includes its children, its patches, and the branch itself, if they don't
-- already exist.
--

View File

@ -83,7 +83,7 @@ hashFieldAccessors ::
)
hashFieldAccessors ppe declName vars declRef dd = do
let accessors :: [(v, (), Term.Term v ())]
accessors = DD.generateRecordAccessors (map (,()) vars) declName declRef
accessors = DD.generateRecordAccessors mempty (map (,()) vars) declName declRef
let typeLookup :: TypeLookup v ()
typeLookup =
TypeLookup

View File

@ -13,7 +13,6 @@
-- Afterwards, the 'SolveState' holds the kinds of all decls and we
-- can check that type annotations in terms that may mention the
-- decls are well-kinded with 'kindCheckAnnotations'.
module Unison.KindInference
( inferDecls,
kindCheckAnnotations,
@ -28,7 +27,7 @@ import Data.Map.Strict qualified as Map
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation)
import Unison.DataDeclaration
import Unison.KindInference.Generate (declComponentConstraints, termConstraints)
import Unison.KindInference.Solve (KindError, verify, initialState, step, defaultUnconstrainedVars)
import Unison.KindInference.Solve (KindError, defaultUnconstrainedVars, initialState, step, verify)
import Unison.KindInference.Solve.Monad (Env (..), SolveState, run, runGen)
import Unison.Prelude
import Unison.PrettyPrintEnv qualified as PrettyPrintEnv

View File

@ -1,6 +1,7 @@
module Unison.KindInference.Constraint.Context
( ConstraintContext(..)
) where
( ConstraintContext (..),
)
where
import Unison.KindInference.UVar (UVar)
import Unison.Type (Type)

View File

@ -6,7 +6,7 @@ module Unison.KindInference.Constraint.Unsolved
)
where
import Control.Lens (Traversal, Lens, Lens')
import Control.Lens (Lens, Lens', Traversal)
import Unison.KindInference.Constraint.Provenance (Provenance)
import Unison.KindInference.Constraint.Provenance qualified as Provenance

View File

@ -20,7 +20,7 @@ import Unison.KindInference.Constraint.Context (ConstraintContext (..))
import Unison.KindInference.Constraint.Provenance (Provenance (..))
import Unison.KindInference.Constraint.Provenance qualified as Provenance
import Unison.KindInference.Constraint.Unsolved (Constraint (..))
import Unison.KindInference.Generate.Monad (Gen, GeneratedConstraint, freshVar, pushType, lookupType, scopedType)
import Unison.KindInference.Generate.Monad (Gen, GeneratedConstraint, freshVar, lookupType, pushType, scopedType)
import Unison.KindInference.UVar (UVar)
import Unison.Prelude
import Unison.Reference (Reference)
@ -241,41 +241,42 @@ declComponentConstraintTree decls = do
-- Add a kind variable for every datatype
declKind <- pushType (Type.ref (DD.annotation $ asDataDecl decl) ref)
pure (ref, decl, declKind)
(declConstraints, constructorConstraints) <- unzip <$> for decls \(ref, decl, declKind) -> do
let declAnn = DD.annotation $ asDataDecl decl
let declType = Type.ref declAnn ref
-- Unify the datatype with @k_1 -> ... -> k_n -> *@ where @n@ is
-- the number of type parameters
let tyVars = map (\tyVar -> Type.var declAnn tyVar) (DD.bound $ asDataDecl decl)
tyvarKinds <- for tyVars \tyVar -> do
-- it would be nice to annotate these type vars with their
-- precise location, but that information doesn't seem to be
-- available via "DataDeclaration", so we currently settle for
-- the whole decl annotation.
k <- freshVar tyVar
pure (k, tyVar)
(declConstraints, constructorConstraints) <-
unzip <$> for decls \(ref, decl, declKind) -> do
let declAnn = DD.annotation $ asDataDecl decl
let declType = Type.ref declAnn ref
-- Unify the datatype with @k_1 -> ... -> k_n -> *@ where @n@ is
-- the number of type parameters
let tyVars = map (\tyVar -> Type.var declAnn tyVar) (DD.bound $ asDataDecl decl)
tyvarKinds <- for tyVars \tyVar -> do
-- it would be nice to annotate these type vars with their
-- precise location, but that information doesn't seem to be
-- available via "DataDeclaration", so we currently settle for
-- the whole decl annotation.
k <- freshVar tyVar
pure (k, tyVar)
let tyvarKindsOnly = map fst tyvarKinds
constructorConstraints <-
Node <$> for (DD.constructors' $ asDataDecl decl) \(constructorAnn, _, constructorType) -> do
withInstantiatedConstructorType declType tyvarKindsOnly constructorType \constructorType -> do
constructorKind <- freshVar constructorType
ct <- typeConstraintTree constructorKind constructorType
pure $ ParentConstraint (IsType constructorKind (Provenance DeclDefinition constructorAnn)) ct
let tyvarKindsOnly = map fst tyvarKinds
constructorConstraints <-
Node <$> for (DD.constructors' $ asDataDecl decl) \(constructorAnn, _, constructorType) -> do
withInstantiatedConstructorType declType tyvarKindsOnly constructorType \constructorType -> do
constructorKind <- freshVar constructorType
ct <- typeConstraintTree constructorKind constructorType
pure $ ParentConstraint (IsType constructorKind (Provenance DeclDefinition constructorAnn)) ct
(fullyAppliedKind, _fullyAppliedType, declConstraints) <-
let phi (dk, dt, cts) (ak, at) = do
-- introduce a kind uvar for each app node
let t' = Type.app declAnn dt at
v <- freshVar t'
let cts' = Constraint (IsArr dk (Provenance DeclDefinition declAnn) ak v) cts
pure (v, t', cts')
in foldlM phi (declKind, declType, Node []) tyvarKinds
(fullyAppliedKind, _fullyAppliedType, declConstraints) <-
let phi (dk, dt, cts) (ak, at) = do
-- introduce a kind uvar for each app node
let t' = Type.app declAnn dt at
v <- freshVar t'
let cts' = Constraint (IsArr dk (Provenance DeclDefinition declAnn) ak v) cts
pure (v, t', cts')
in foldlM phi (declKind, declType, Node []) tyvarKinds
let finalDeclConstraints = case decl of
Left _effectDecl -> Constraint (IsAbility fullyAppliedKind (Provenance DeclDefinition declAnn)) declConstraints
Right _dataDecl -> Constraint (IsType fullyAppliedKind (Provenance DeclDefinition declAnn)) declConstraints
pure (finalDeclConstraints, constructorConstraints)
let finalDeclConstraints = case decl of
Left _effectDecl -> Constraint (IsAbility fullyAppliedKind (Provenance DeclDefinition declAnn)) declConstraints
Right _dataDecl -> Constraint (IsType fullyAppliedKind (Provenance DeclDefinition declAnn)) declConstraints
pure (finalDeclConstraints, constructorConstraints)
pure (Node declConstraints `StrictOrder` Node constructorConstraints)
-- | This is a helper to unify the kind constraints on type variables

View File

@ -8,7 +8,6 @@ module Unison.KindInference.Solve
)
where
import Unison.KindInference.Error (KindError(..), ConstraintConflict(..), improveError)
import Control.Lens (Prism', prism', review, (%~))
import Control.Monad.Reader (asks)
import Control.Monad.Reader qualified as M
@ -22,6 +21,7 @@ import Unison.KindInference.Constraint.Provenance (Provenance (..))
import Unison.KindInference.Constraint.Solved qualified as Solved
import Unison.KindInference.Constraint.StarProvenance (StarProvenance (..))
import Unison.KindInference.Constraint.Unsolved qualified as Unsolved
import Unison.KindInference.Error (ConstraintConflict (..), KindError (..), improveError)
import Unison.KindInference.Generate (builtinConstraints)
import Unison.KindInference.Generate.Monad (Gen (..), GeneratedConstraint)
import Unison.KindInference.Solve.Monad
@ -123,9 +123,9 @@ markVisiting x = do
OccCheckState {visitingSet, visitingStack} <- M.get
case Set.member x visitingSet of
True -> do
OccCheckState{solvedConstraints} <- M.get
OccCheckState {solvedConstraints} <- M.get
let loc = case U.lookupCanon x solvedConstraints of
Just (_, _, Descriptor { descriptorConstraint = Just (Solved.IsArr (Provenance _ loc) _ _ )}, _) -> loc
Just (_, _, Descriptor {descriptorConstraint = Just (Solved.IsArr (Provenance _ loc) _ _)}, _) -> loc
_ -> error "cycle without IsArr constraint"
addError (CycleDetected loc x solvedConstraints)
pure Cycle

View File

@ -718,7 +718,7 @@ union v0 v1 nc@NormalizedConstraints {constraintMap} =
IsEffectful -> [C.Effectful chosenCanon]
in addConstraints constraints nc {constraintMap = m}
where
noMerge m = pure nc { constraintMap = m }
noMerge m = pure nc {constraintMap = m}
modifyListC ::
forall vt v loc m.

View File

@ -14,8 +14,8 @@ module Unison.PatternMatchCoverage.UFMap
)
where
import Control.Monad.Trans.Class
import Control.Monad.Fix (MonadFix)
import Control.Monad.Trans.Class
import Control.Monad.Trans.Except (ExceptT (..))
import Data.Foldable (foldl')
import Data.Functor ((<&>))

View File

@ -37,6 +37,8 @@ biasTo targets PrettyPrintEnvDecl {unsuffixifiedPPE, suffixifiedPPE} =
empty :: PrettyPrintEnvDecl
empty = PrettyPrintEnvDecl PPE.empty PPE.empty
-- | Will use names from the fallback pped if no names were found in the primary.
-- @addFallback primary fallback@
addFallback :: PrettyPrintEnvDecl -> PrettyPrintEnvDecl -> PrettyPrintEnvDecl
addFallback (PrettyPrintEnvDecl unsuff1 suff1) (PrettyPrintEnvDecl unsuff2 suff2) =
PrettyPrintEnvDecl (unsuff1 `PPE.addFallback` unsuff2) (suff1 `PPE.addFallback` suff2)
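A tiny illustration of the argument order described by the doc comment (primary first, fallback second); illustrative only, with `projectNames` and `libNames` as hypothetical PPEDs supplied by a caller.

```haskell
-- Illustrative sketch: names come from projectNames first; libNames only
-- fills in references that projectNames cannot name.
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl, addFallback)

combinedNames :: PrettyPrintEnvDecl -> PrettyPrintEnvDecl -> PrettyPrintEnvDecl
combinedNames projectNames libNames = projectNames `addFallback` libNames
```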

View File

@ -1,13 +1,27 @@
{-# LANGUAGE RecordWildCards #-}
module Unison.PrintError where
module Unison.PrintError
( Env,
defaultWidth,
prettyParseError,
prettyResolutionFailures,
prettyVar,
printNoteWithSource,
renderCompilerBug,
renderNoteAsANSI,
renderParseErrorAsANSI,
renderParseErrors,
)
where
import Control.Lens ((%~))
import Control.Lens.Tuple (_1, _2, _3)
import Data.List (find, intersperse)
import Data.Function (on)
import Data.List (find, intersperse, sortBy)
import Data.List.Extra (nubOrd)
import Data.List.NonEmpty qualified as Nel
import Data.Map qualified as Map
import Data.Ord (comparing)
import Data.Proxy
import Data.Sequence (Seq (..))
import Data.Set qualified as Set
@ -17,14 +31,15 @@ import Data.Text qualified as Text
import Text.Megaparsec qualified as P
import Unison.ABT qualified as ABT
import Unison.Builtin.Decls (unitRef, pattern TupleType')
import Unison.Codebase.Path qualified as Path
import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..))
import Unison.HashQualified (HashQualified)
import Unison.HashQualified' qualified as HQ'
import Unison.Kind (Kind)
import Unison.Kind qualified as Kind
import Unison.KindInference.Error.Pretty (prettyKindError)
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment (..))
import Unison.Names qualified as Names
import Unison.Names.ResolutionResult qualified as Names
import Unison.Parser.Ann (Ann (..))
@ -33,7 +48,7 @@ import Unison.Prelude
import Unison.PrettyPrintEnv qualified as PPE
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.Reference qualified as R
import Unison.Referent (Referent, pattern Ref)
import Unison.Referent (Referent, toReference, pattern Ref)
import Unison.Result (Note (..))
import Unison.Result qualified as Result
import Unison.Settings qualified as Settings
@ -77,12 +92,6 @@ pattern Type2 = Color.Green
pattern ErrorSite :: Color
pattern ErrorSite = Color.HiRed
pattern TypeKeyword :: Color
pattern TypeKeyword = Color.Yellow
pattern AbilityKeyword :: Color
pattern AbilityKeyword = Color.Green
pattern Identifier :: Color
pattern Identifier = Color.Bold
@ -111,18 +120,6 @@ fromOverHere src spots0 removing =
1 -> "\n from right here:\n\n" <> showSource src spots
_ -> "\n from these spots, respectively:\n\n" <> showSource src spots
showTypeWithProvenance ::
(Var v, Annotated a, Ord style) =>
Env ->
String ->
style ->
Type v a ->
Pretty (AnnotatedText style)
showTypeWithProvenance env src color typ =
style color (renderType' env typ)
<> ".\n"
<> fromOverHere' src [styleAnnotated color typ] []
styleAnnotated :: (Annotated a) => sty -> a -> Maybe (Range, sty)
styleAnnotated sty a = (,sty) <$> rangeForAnnotated a
@ -169,9 +166,8 @@ renderTypeError ::
TypeError v loc ->
Env ->
String ->
Path.Absolute ->
Pretty ColorText
renderTypeError e env src curPath = case e of
renderTypeError e env src = case e of
BooleanMismatch {..} ->
mconcat
[ Pr.wrap $
@ -627,51 +623,112 @@ renderTypeError e env src curPath = case e of
_ -> Pr.wrap $ "It should be of type " <> Pr.group (style Type1 (renderType' env expectedType) <> ".")
UnknownTerm {..} ->
let (correct, wrongTypes, wrongNames) =
foldr sep id suggestions ([], [], [])
sep (C.Suggestion name typ _ match) r =
foldr
sep
id
( sortBy
( comparing length <> compare
`on` (Text.splitOn "." . C.suggestionName)
)
suggestions
)
([], [], [])
sep s@(C.Suggestion _ _ _ match) r =
case match of
C.Exact -> (_1 %~ ((name, typ) :)) . r
C.WrongType -> (_2 %~ ((name, typ) :)) . r
C.WrongName -> (_3 %~ ((name, typ) :)) . r
libPath = Path.absoluteToPath' curPath Path.:> "lib"
C.Exact -> (_1 %~ (s :)) . r
C.WrongType -> (_2 %~ (s :)) . r
C.WrongName -> (_3 %~ (s :)) . r
undefinedSymbolHelp =
mconcat
[ ( case expectedType of
Type.Var' (TypeVar.Existential {}) ->
Pr.wrap "I also don't know what type it should be."
_ ->
mconcat
[ Pr.wrap "I think its type should be:",
"\n\n",
Pr.indentN 4 (style Type1 (renderType' env expectedType))
]
),
"\n\n",
Pr.hang
"Some common causes of this error include:"
( Pr.bulleted
[ Pr.wrap "Your current namespace is too deep to contain the definition in its subtree",
Pr.wrap "The definition is part of a library which hasn't been added to this project",
Pr.wrap "You have a typo in the name"
]
)
]
in mconcat
[ "I couldn't find any definitions matching the name ",
[ "I couldn't figure out what ",
style ErrorSite (Var.nameStr unknownTermV),
" inside the namespace ",
prettyPath' (Path.absoluteToPath' curPath),
"\n\n",
" refers to here:\n\n",
annotatedAsErrorSite src termSite,
"\n",
Pr.hang
"Some common causes of this error include:"
( Pr.bulleted
[ Pr.wrap "Your current namespace is too deep to contain the definition in its subtree",
Pr.wrap "The definition is part of a library which hasn't been added to this project"
]
)
<> "\n\n"
<> "To add a library to this project use the command: "
<> Pr.backticked ("fork <.path.to.lib> " <> Pr.shown (libPath Path.:> "<libname>")),
"\n\n",
case expectedType of
Type.Var' (TypeVar.Existential {}) -> "There are no constraints on its type."
_ ->
"Whatever it is, its type should conform to "
<> style Type1 (renderType' env expectedType)
<> ".",
"\n\n",
-- ++ showTypeWithProvenance env src Type1 expectedType
case correct of
[] -> case wrongTypes of
[] -> case wrongNames of
[] -> mempty
[] -> undefinedSymbolHelp
wrongs -> formatWrongs wrongNameText wrongs
wrongs -> formatWrongs wrongTypeText wrongs
wrongs ->
let helpMeOut =
Pr.wrap
( mconcat
[ "Help me out by",
Pr.bold "using a more specific name here",
"or",
Pr.bold "adding a type annotation."
]
)
in Pr.wrap
( "The name "
<> style Identifier (Var.nameStr unknownTermV)
<> " is ambiguous. I tried to resolve it by type but"
)
<> " "
<> case expectedType of
Type.Var' (TypeVar.Existential {}) -> Pr.wrap ("its type could be anything." <> helpMeOut) <> "\n"
_ ->
mconcat
[ ( Pr.wrap $
mconcat
[ "no term with that name would pass typechecking.",
"I think its type should be:"
]
),
"\n\n",
Pr.indentN 4 (style Type1 (renderType' env expectedType)),
"\n\n",
Pr.wrap
( mconcat
[ "If that's not what you expected, you may have a type error somewhere else in your code.",
helpMeOut
]
)
]
<> "\n\n"
<> formatWrongs wrongTypeText wrongs
suggs ->
mconcat
[ "I found some terms in scope that have matching names and types. ",
"Maybe you meant one of these:\n\n",
intercalateMap "\n" formatSuggestion suggs
[ Pr.wrap
( mconcat
[ mconcat
[ "The name ",
style Identifier (Var.nameStr unknownTermV),
" is ambiguous. "
],
case expectedType of
Type.Var' (TypeVar.Existential {}) -> "I couldn't narrow it down by type, as any type would work here."
_ ->
"Its type should be:\n\n"
<> Pr.indentN 4 (style Type1 (renderType' env expectedType))
]
),
"\n\n",
Pr.wrap "I found some terms in scope that have matching names and types. Maybe you meant one of these:",
"\n\n",
intercalateMap "\n" (renderSuggestion env) suggs
]
]
DuplicateDefinitions {..} ->
@ -733,47 +790,48 @@ renderTypeError e env src curPath = case e of
]
where
wrongTypeText pl =
mconcat
[ "I found ",
pl "a term" "some terms",
" in scope with ",
pl "a " "",
"matching name",
pl "" "s",
" but ",
pl "a " "",
"different type",
pl "" "s",
". ",
"If ",
pl "this" "one of these",
" is what you meant, try using the fully qualified name and I might ",
"be able to give you a more illuminating error message: \n\n"
]
Pr.paragraphyText
( mconcat
[ "I found ",
pl "a term" "some terms",
" in scope with ",
pl "a " "",
"matching name",
pl "" "s",
" but ",
pl "a " "",
"different type",
pl "" "s",
". ",
"If ",
pl "this" "one of these",
" is what you meant, try using its full name:"
]
)
<> "\n\n"
wrongNameText pl =
mconcat
[ "I found ",
pl "a term" "some terms",
" in scope with ",
pl "a " "",
"matching type",
pl "" "s",
" but ",
pl "a " "",
"different name",
pl "" "s",
". ",
"Maybe you meant ",
pl "this" "one of these",
":\n\n"
]
formatSuggestion :: (Text, C.Type v loc) -> Pretty ColorText
formatSuggestion (name, typ) =
" - " <> fromString (Text.unpack name) <> " : " <> renderType' env typ
Pr.paragraphyText
( mconcat
[ "I found ",
pl "a term" "some terms",
" in scope with ",
pl "a " "",
"matching type",
pl "" "s",
" but ",
pl "a " "",
"different name",
pl "" "s",
". ",
"Maybe you meant ",
pl "this" "one of these",
":\n\n"
]
)
formatWrongs txt wrongs =
let sz = length wrongs
pl a b = if sz == 1 then a else b
in mconcat [txt pl, intercalateMap "\n" formatSuggestion wrongs]
in mconcat [txt pl, intercalateMap "\n" (renderSuggestion env) wrongs]
ordinal :: (IsString s) => Int -> s
ordinal n =
fromString $
@ -1132,7 +1190,12 @@ renderType env f t = renderType0 env f (0 :: Int) (cleanup t)
renderSuggestion ::
(IsString s, Semigroup s, Var v) => Env -> C.Suggestion v loc -> s
renderSuggestion env sug =
fromString (Text.unpack $ C.suggestionName sug)
renderTerm
env
( case C.suggestionReplacement sug of
Right ref -> Term.ref () (toReference ref)
Left v -> Term.var () v
)
<> " : "
<> renderType'
env
@ -1141,9 +1204,6 @@ renderSuggestion env sug =
spaces :: (IsString a, Monoid a) => (b -> a) -> [b] -> a
spaces = intercalateMap " "
arrows :: (IsString a, Monoid a) => (b -> a) -> [b] -> a
arrows = intercalateMap " ->"
commas :: (IsString a, Monoid a) => (b -> a) -> [b] -> a
commas = intercalateMap ", "
@ -1174,17 +1234,6 @@ showConstructor env r =
fromString . HQ.toString $
PPE.patternName env r
styleInOverallType ::
(Var v, Annotated a, Eq a) =>
Env ->
C.Type v a ->
C.Type v a ->
Color ->
Pretty ColorText
styleInOverallType e overallType leafType c = renderType e f overallType
where
f loc s = if loc == ABT.annotation leafType then Color.style c <$> s else s
_posToEnglish :: (IsString s) => L.Pos -> s
_posToEnglish (L.Pos l c) =
fromString $ "Line " ++ show l ++ ", Column " ++ show c
@ -1218,16 +1267,18 @@ rangeToEnglish (Range (L.Pos l c) (L.Pos l' c')) =
then "line " ++ show l
else "lines " ++ show l ++ "" ++ show l'
annotatedToEnglish :: (Annotated a, IsString s) => a -> s
annotatedToEnglish :: (Annotated a, IsString s, Semigroup s) => a -> s
annotatedToEnglish a = case ann a of
Intrinsic -> "an intrinsic"
External -> "an external"
Intrinsic -> "<intrinsic>"
External -> "<external>"
GeneratedFrom a -> "generated from: " <> annotatedToEnglish a
Ann start end -> rangeToEnglish $ Range start end
rangeForAnnotated :: (Annotated a) => a -> Maybe Range
rangeForAnnotated a = case ann a of
Intrinsic -> Nothing
External -> Nothing
GeneratedFrom a -> rangeForAnnotated a
Ann start end -> Just $ Range start end
showLexerOutput :: Bool
@ -1238,10 +1289,9 @@ renderNoteAsANSI ::
Pr.Width ->
Env ->
String ->
Path.Absolute ->
Note v a ->
String
renderNoteAsANSI w e s curPath n = Pr.toANSI w $ printNoteWithSource e s curPath n
renderNoteAsANSI w e s n = Pr.toANSI w $ printNoteWithSource e s n
renderParseErrorAsANSI :: (Var v) => Pr.Width -> String -> Parser.Err v -> String
renderParseErrorAsANSI w src = Pr.toANSI w . prettyParseError src
@ -1250,19 +1300,18 @@ printNoteWithSource ::
(Var v, Annotated a, Show a, Ord a) =>
Env ->
String ->
Path.Absolute ->
Note v a ->
Pretty ColorText
printNoteWithSource env _s _curPath (TypeInfo n) = prettyTypeInfo n env
printNoteWithSource _env s _curPath (Parsing e) = prettyParseError s e
printNoteWithSource env s curPath (TypeError e) = prettyTypecheckError e env s curPath
printNoteWithSource _env _s _curPath (NameResolutionFailures _es) = undefined
printNoteWithSource _env s _curPath (UnknownSymbol v a) =
printNoteWithSource env _s (TypeInfo n) = prettyTypeInfo n env
printNoteWithSource _env s (Parsing e) = prettyParseError s e
printNoteWithSource env s (TypeError e) = prettyTypecheckError e env s
printNoteWithSource _env _s (NameResolutionFailures _es) = undefined
printNoteWithSource _env s (UnknownSymbol v a) =
fromString ("Unknown symbol `" ++ Text.unpack (Var.name v) ++ "`\n\n")
<> annotatedAsErrorSite s a
printNoteWithSource env s _curPath (CompilerBug (Result.TypecheckerBug c)) =
printNoteWithSource env s (CompilerBug (Result.TypecheckerBug c)) =
renderCompilerBug env s c
printNoteWithSource _env _s _curPath (CompilerBug c) =
printNoteWithSource _env _s (CompilerBug c) =
fromString $ "Compiler bug: " <> show c
_printPosRange :: String -> L.Pos -> L.Pos -> String
@ -1629,16 +1678,17 @@ renderParseErrors s = \case
then unknownTypesMsg
else unknownTypesMsg <> "\n\n" <> dupDataAndAbilitiesMsg
in (msgs, allRanges)
go (Parser.DidntExpectExpression _tok (Just t@(L.payload -> L.SymbolyId "::" Nothing))) =
let msg =
mconcat
[ "This looks like the start of an expression here but I was expecting a binding.",
"\nDid you mean to use a single " <> style Code ":",
" here for a type signature?",
"\n\n",
tokenAsErrorSite s t
]
in (msg, [rangeForToken t])
go (Parser.DidntExpectExpression _tok (Just t@(L.payload -> L.SymbolyId (HQ'.NameOnly name))))
| name == Name.fromSegment (NameSegment "::") =
let msg =
mconcat
[ "This looks like the start of an expression here but I was expecting a binding.",
"\nDid you mean to use a single " <> style Code ":",
" here for a type signature?",
"\n\n",
tokenAsErrorSite s t
]
in (msg, [rangeForToken t])
go (Parser.DidntExpectExpression tok _nextTok) =
let msg =
mconcat
@ -1858,24 +1908,14 @@ showSource src annotations =
showSource1 :: (Ord a) => String -> (Range, a) -> Pretty (AnnotatedText a)
showSource1 src annotation = showSource src [annotation]
findTerm :: Seq (C.PathElement v loc) -> Maybe loc
findTerm = go
where
go (C.InSynthesize t :<| _) = Just $ ABT.annotation t
go (C.InCheck t _ :<| _) = Just $ ABT.annotation t
go (C.InSynthesizeApp _ t _ :<| _) = Just $ ABT.annotation t
go (_ :<| t) = go t
go Empty = Nothing
prettyTypecheckError ::
(Var v, Ord loc, Show loc, Parser.Annotated loc) =>
C.ErrorNote v loc ->
Env ->
String ->
Path.Absolute ->
Pretty ColorText
prettyTypecheckError note env src curPath =
renderTypeError (typeErrorFromNote note) env src curPath
prettyTypecheckError note env src =
renderTypeError (typeErrorFromNote note) env src
prettyTypeInfo ::
(Var v, Ord loc, Show loc, Parser.Annotated loc) =>
@ -1957,6 +1997,3 @@ useExamples =
(Pr.blue "use .foo bar.baz", Pr.wrap "Introduces `bar.baz` as a local alias for the absolute name `.foo.bar.baz`")
]
]
prettyPath' :: Path.Path' -> Pretty ColorText
prettyPath' p' = Pr.blue (Pr.shown p')

View File

@ -12,7 +12,6 @@ import Text.RawString.QQ (r)
import Unison.Builtin qualified as Builtin
import Unison.Codebase.CodeLookup (CodeLookup (..))
import Unison.Codebase.CodeLookup.Util qualified as CL
import Unison.Codebase.Path qualified as Path
import Unison.ConstructorReference (GConstructorReference (..))
import Unison.DataDeclaration qualified as DD
import Unison.DataDeclaration.ConstructorId qualified as DD
@ -1002,7 +1001,7 @@ type EitherResult = Either String TFile
showNotes :: (Foldable f) => String -> PrintError.Env -> f Note -> String
showNotes source env =
intercalateMap "\n\n" $ PrintError.renderNoteAsANSI 60 env source Path.absoluteEmpty
intercalateMap "\n\n" $ PrintError.renderNoteAsANSI 60 env source
ppEnv :: PPE.PrettyPrintEnv
ppEnv = PPE.makePPE (PPE.hqNamer 10 Builtin.names) PPE.dontSuffixify

View File

@ -5,8 +5,8 @@ where
import Control.Lens
import Control.Monad.Reader (MonadReader (..))
import Data.List.NonEmpty qualified as NonEmpty
import Data.Map qualified as Map
import Data.Text qualified as Text
import Text.Megaparsec qualified as P
import Unison.ABT qualified as ABT
import Unison.DataDeclaration (DataDeclaration, EffectDeclaration)
@ -113,11 +113,11 @@ modifier = do
where
unique = do
tok <- openBlockWith "unique"
optional (openBlockWith "[" *> wordyIdString <* closeBlock) >>= \case
optional (openBlockWith "[" *> importWordyId <* closeBlock) >>= \case
Nothing -> do
guid <- uniqueName 32
pure (UnresolvedModifier'UniqueWithoutGuid guid <$ tok)
Just guid -> pure (UnresolvedModifier'UniqueWithGuid (Text.pack (L.payload guid)) <$ tok)
Just guid -> pure (UnresolvedModifier'UniqueWithGuid (Name.toText (L.payload guid)) <$ tok)
structural = do
tok <- openBlockWith "structural"
pure (UnresolvedModifier'Structural <$ tok)
@ -141,7 +141,7 @@ dataDeclaration ::
Maybe (L.Token UnresolvedModifier) ->
P v m (v, DataDeclaration v Ann, Accessors v)
dataDeclaration maybeUnresolvedModifier = do
_ <- fmap void (reserved "type") <|> openBlockWith "type"
typeToken <- fmap void (reserved "type") <|> openBlockWith "type"
(name, typeArgs) <-
(,)
<$> TermParser.verifyRelativeVarName prefixDefinitionName
@ -150,7 +150,7 @@ dataDeclaration maybeUnresolvedModifier = do
eq <- reserved "="
let -- go gives the type of the constructor, given the types of
-- the constructor arguments, e.g. Cons becomes forall a . a -> List a -> List a
go :: L.Token v -> [Type v Ann] -> (Ann, v, Type v Ann)
go :: L.Token v -> [Type v Ann] -> (Ann {- Ann spanning the constructor and its args -}, (Ann, v, Type v Ann))
go ctorName ctorArgs =
let arrow i o = Type.arrow (ann i <> ann o) i o
app f arg = Type.app (ann f <> ann arg) f arg
@ -160,14 +160,16 @@ dataDeclaration maybeUnresolvedModifier = do
-- or just `Optional a` in the case of `None`
ctorType = foldr arrow ctorReturnType ctorArgs
ctorAnn = ann ctorName <> maybe (ann ctorName) ann (lastMay ctorArgs)
in ( ann ctorName,
Var.namespaced [L.payload name, L.payload ctorName],
Type.foralls ctorAnn typeArgVs ctorType
in ( ctorAnn,
( ann ctorName,
Var.namespaced [L.payload name, L.payload ctorName],
Type.foralls ctorAnn typeArgVs ctorType
)
)
prefixVar = TermParser.verifyRelativeVarName prefixDefinitionName
dataConstructor :: P v m (Ann, v, Type v Ann)
dataConstructor :: P v m (Ann, (Ann, v, Type v Ann))
dataConstructor = go <$> prefixVar <*> many TypeParser.valueTypeLeaf
record :: P v m ([(Ann, v, Type v Ann)], [(L.Token v, [(L.Token v, Type v Ann)])])
record :: P v m ([(Ann, (Ann, v, Type v Ann))], [(L.Token v, [(L.Token v, Type v Ann)])], Ann)
record = do
_ <- openBlockWith "{"
let field :: P v m [(L.Token v, Type v Ann)]
@ -179,29 +181,35 @@ dataDeclaration maybeUnresolvedModifier = do
Just _ -> maybe [f] (f :) <$> (optional semi *> optional field)
)
fields <- field
_ <- closeBlock
closingToken <- closeBlock
let lastSegment = name <&> (\v -> Var.named (Name.toText $ Name.unqualified (Name.unsafeFromVar v)))
pure ([go lastSegment (snd <$> fields)], [(name, fields)])
(constructors, accessors) <-
msum [record, (,[]) <$> sepBy (reserved "|") dataConstructor]
pure ([go lastSegment (snd <$> fields)], [(name, fields)], ann closingToken)
(constructors, accessors, closingAnn) <-
msum [Left <$> record, Right <$> sepBy (reserved "|") dataConstructor] <&> \case
Left (constructors, accessors, closingAnn) -> (constructors, accessors, closingAnn)
Right constructors -> do
let closingAnn :: Ann
closingAnn = NonEmpty.last (ann eq NonEmpty.:| ((\(constrSpanAnn, _) -> constrSpanAnn) <$> constructors))
in (constructors, [], closingAnn)
_ <- closeBlock
let -- the annotation of the last constructor if present,
-- otherwise ann of name
closingAnn :: Ann
closingAnn = last (ann eq : ((\(_, _, t) -> ann t) <$> constructors))
case maybeUnresolvedModifier of
Nothing -> do
modifier <- defaultUniqueModifier (L.payload name)
-- ann spanning the whole Decl.
let declSpanAnn = ann typeToken <> closingAnn
pure
( L.payload name,
DD.mkDataDecl' modifier closingAnn typeArgVs constructors,
DD.mkDataDecl' modifier declSpanAnn typeArgVs (snd <$> constructors),
accessors
)
Just unresolvedModifier -> do
modifier <- resolveUnresolvedModifier unresolvedModifier (L.payload name)
-- ann spanning the whole Decl.
-- Technically the typeToken is redundant here, but this is more future-proof.
let declSpanAnn = ann typeToken <> ann modifier <> closingAnn
pure
( L.payload name,
DD.mkDataDecl' (L.payload modifier) (ann modifier <> closingAnn) typeArgVs constructors,
DD.mkDataDecl' (L.payload modifier) declSpanAnn typeArgVs (snd <$> constructors),
accessors
)
@ -211,7 +219,7 @@ effectDeclaration ::
Maybe (L.Token UnresolvedModifier) ->
P v m (v, EffectDeclaration v Ann)
effectDeclaration maybeUnresolvedModifier = do
_ <- fmap void (reserved "ability") <|> openBlockWith "ability"
abilityToken <- fmap void (reserved "ability") <|> openBlockWith "ability"
name <- TermParser.verifyRelativeVarName prefixDefinitionName
typeArgs <- many (TermParser.verifyRelativeVarName prefixDefinitionName)
let typeArgVs = L.payload <$> typeArgs
@ -225,17 +233,22 @@ effectDeclaration maybeUnresolvedModifier = do
case maybeUnresolvedModifier of
Nothing -> do
modifier <- defaultUniqueModifier (L.payload name)
-- ann spanning the whole ability declaration.
let abilitySpanAnn = ann abilityToken <> closingAnn
pure
( L.payload name,
DD.mkEffectDecl' modifier closingAnn typeArgVs constructors
DD.mkEffectDecl' modifier abilitySpanAnn typeArgVs constructors
)
Just unresolvedModifier -> do
modifier <- resolveUnresolvedModifier unresolvedModifier (L.payload name)
-- ann spanning the whole ability declaration.
-- Technically the abilityToken is redundant here, but this is more future-proof.
let abilitySpanAnn = ann abilityToken <> ann modifier <> closingAnn
pure
( L.payload name,
DD.mkEffectDecl'
(L.payload modifier)
(ann modifier <> closingAnn)
abilitySpanAnn
typeArgVs
constructors
)

View File

@ -1,4 +1,6 @@
module Unison.Syntax.FileParser where
module Unison.Syntax.FileParser
( file
) where
import Control.Lens
import Control.Monad.Reader (asks, local)
@ -9,14 +11,16 @@ import Unison.ABT qualified as ABT
import Unison.DataDeclaration (DataDeclaration)
import Unison.DataDeclaration qualified as DD
import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment (..))
import Unison.Names qualified as Names
import Unison.Names.ResolutionResult qualified as Names
import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann)
import Unison.Parser.Ann qualified as Ann
import Unison.Prelude
import Unison.Syntax.DeclParser (declarations)
import Unison.Syntax.Lexer qualified as L
import Unison.Syntax.Name qualified as Name (unsafeFromVar)
import Unison.Syntax.Name qualified as Name (toString, unsafeFromVar)
import Unison.Syntax.Parser
import Unison.Syntax.TermParser qualified as TermParser
import Unison.Term (Term)
@ -47,7 +51,7 @@ file = do
Left es -> resolutionFailures (toList es)
let accessors :: [[(v, Ann, Term v Ann)]]
accessors =
[ DD.generateRecordAccessors (toPair <$> fields) (L.payload typ) r
[ DD.generateRecordAccessors Ann.GeneratedFrom (toPair <$> fields) (L.payload typ) r
| (typ, fields) <- parsedAccessors,
Just (r, _) <- [Map.lookup (L.payload typ) (UF.datas env)]
]
@ -193,9 +197,10 @@ stanza = watchExpression <|> unexpectedAction <|> binding
(kind, guid, ann) <- watched
_ <- guardEmptyWatch ann
msum
[ WatchBinding kind ann <$> TermParser.binding,
WatchExpression kind guid ann <$> TermParser.blockTerm
[ TermParser.binding <&> (\trm@(((trmSpanAnn, _), _)) -> WatchBinding kind (ann <> trmSpanAnn) trm),
TermParser.blockTerm <&> (\trm -> WatchExpression kind guid (ann <> ABT.annotation trm) trm)
]
guardEmptyWatch ann =
P.try $ do
op <- optional (L.payload <$> P.lookAhead closeBlock)
@ -212,14 +217,14 @@ stanza = watchExpression <|> unexpectedAction <|> binding
binding@((_, v), _) <- TermParser.binding
pure $ case doc of
Nothing -> Binding binding
Just doc -> Bindings [((ann doc, Var.joinDot v (Var.named "doc")), doc), binding]
Just (spanAnn, doc) -> Bindings [((spanAnn, Var.joinDot v (Var.named "doc")), doc), binding]
watched :: (Monad m, Var v) => P v m (UF.WatchKind, Text, Ann)
watched = P.try do
kind <- optional wordyIdString
kind <- (fmap . fmap . fmap) Name.toString (optional importWordyId)
guid <- uniqueName 10
op <- optional (L.payload <$> P.lookAhead symbolyIdString)
guard (op == Just ">")
op <- optional (L.payload <$> P.lookAhead importSymbolyId)
guard (op == Just (Name.fromSegment (NameSegment ">")))
tok <- anyToken
guard $ maybe True (`L.touches` tok) kind
pure (maybe UF.RegularWatch L.payload kind, guid, maybe mempty ann kind <> ann tok)

View File

@ -25,13 +25,17 @@ import Data.Set qualified as Set
import Data.Text qualified as Text
import Data.Tuple.Extra qualified as TupleE
import Text.Megaparsec qualified as P
import U.Core.ABT qualified as ABT
import Unison.ABT qualified as ABT
import Unison.Builtin.Decls qualified as DD
import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..))
import Unison.ConstructorType qualified as CT
import Unison.HashQualified qualified as HQ
import Unison.HashQualified' qualified as HQ'
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment (..))
import Unison.NameSegment qualified as NameSegment
import Unison.Names (Names)
import Unison.Names qualified as Names
import Unison.NamesWithHistory qualified as Names
@ -96,7 +100,7 @@ rewriteBlock = do
rewriteTermlike kw mk = do
kw <- quasikeyword kw
lhs <- term
rhs <- block "==>"
(_spanAnn, rhs) <- block "==>"
pure (mk (ann kw <> ann rhs) lhs rhs)
rewriteTerm = rewriteTermlike "term" DD.rewriteTerm
rewriteCase = rewriteTermlike "case" DD.rewriteCase
@ -203,10 +207,10 @@ matchCase = do
[ Nothing <$ P.try (quasikeyword "otherwise"),
Just <$> infixAppOrBooleanOp
]
t <- block "->"
(_spanAnn, t) <- block "->"
pure (guard, t)
let unguardedBlock = label "case match" do
t <- block "->"
(_spanAnn, t) <- block "->"
pure (Nothing, t)
-- a pattern's RHS is either one or more guards, or a single unguarded block.
guardsAndBlocks <- guardedBlocks <|> (pure @[] <$> unguardedBlock)
@ -256,7 +260,9 @@ parsePattern = label "pattern" root
text = (\t -> Pattern.Text (ann t) (L.payload t)) <$> string
char = (\c -> Pattern.Char (ann c) (L.payload c)) <$> character
parenthesizedOrTuplePattern :: P v m (Pattern Ann, [(Ann, v)])
parenthesizedOrTuplePattern = tupleOrParenthesized parsePattern unit pair
parenthesizedOrTuplePattern = do
(_spanAnn, (pat, pats)) <- tupleOrParenthesized parsePattern unit pair
pure (pat, pats)
unit ann = (Pattern.Constructor ann (ConstructorReference DD.unitRef 0) [], [])
pair (p1, v1) (p2, v2) =
( Pattern.Constructor (ann p1 <> ann p2) (ConstructorReference DD.pairRef 0) [p1, p2],
@ -344,11 +350,14 @@ lam p = label "lambda" $ mkLam <$> P.try (some prefixDefinitionName <* reserved
mkLam vs b = Term.lam' (ann (head vs) <> ann b) (map L.payload vs) b
letBlock, handle, ifthen :: (Monad m, Var v) => TermP v m
letBlock = label "let" $ block "let"
letBlock = label "let" $ (snd <$> block "let")
handle = label "handle" do
b <- block "handle"
handler <- block "with"
pure $ Term.handle (ann b) handler b
(handleSpan, b) <- block "handle"
(_withSpan, handler) <- block "with"
-- We don't use the annotation span from 'with' here because it will
-- include a dedent if it's at the end of a block, which causes the trailing
-- newline to be overwritten when pretty-printing and messes things up.
pure $ Term.handle (handleSpan <> ann handler) handler b
checkCasesArities :: (Ord v, Annotated a) => NonEmpty (Int, a) -> P v m (Int, NonEmpty a)
checkCasesArities cases@((i, _) NonEmpty.:| rest) =
@ -377,9 +386,9 @@ lamCase = do
ifthen = label "if" do
start <- peekAny
c <- block "if"
t <- block "then"
f <- block "else"
(_spanAnn, c) <- block "if"
(_spanAnn, t) <- block "then"
(_spanAnn, f) <- block "else"
pure $ Term.iff (ann start <> ann f) c t f
text :: (Var v) => TermP v m
@ -402,16 +411,22 @@ hashQualifiedPrefixTerm = resolveHashQualified =<< hqPrefixId
hashQualifiedInfixTerm :: (Monad m, Var v) => TermP v m
hashQualifiedInfixTerm = resolveHashQualified =<< hqInfixId
quasikeyword :: (Ord v) => String -> P v m (L.Token ())
quasikeyword :: Ord v => Text -> P v m (L.Token ())
quasikeyword kw = queryToken \case
L.WordyId s Nothing | s == kw -> Just ()
L.WordyId (HQ'.NameOnly n) | nameIsKeyword n kw -> Just ()
_ -> Nothing
symbolyQuasikeyword :: (Ord v) => String -> P v m (L.Token ())
symbolyQuasikeyword :: (Ord v) => Text -> P v m (L.Token ())
symbolyQuasikeyword kw = queryToken \case
L.SymbolyId s Nothing | s == kw -> Just ()
L.SymbolyId (HQ'.NameOnly n) | nameIsKeyword n kw -> Just ()
_ -> Nothing
nameIsKeyword :: Name -> Text -> Bool
nameIsKeyword name keyword =
case (Name.isRelative name, Name.reverseSegments name) of
(True, segment NonEmpty.:| []) -> NameSegment.toText segment == keyword
_ -> False
-- If the hash qualified is name only, it is treated as a var, if it
-- has a short hash, we resolve that short hash immediately and fail
-- committed if that short hash can't be found in the current environment
@ -440,10 +455,10 @@ termLeaf =
keywordBlock,
list term,
delayQuote,
delayBlock,
(snd <$> delayBlock),
bang,
docBlock,
doc2Block
doc2Block <&> \(spanAnn, trm) -> trm {ABT.annotation = ABT.annotation trm <> spanAnn}
]
-- Syntax for documentation v2 blocks, which are surrounded by {{ }}.
@ -479,41 +494,53 @@ termLeaf =
-- variables that will be looked up in the environment like anything else. This
-- means that the documentation syntax can have its meaning changed by
-- overriding what functions the names `syntax.doc*` correspond to.
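--
-- For instance (roughly; the exact nesting is determined by the lexer), a doc
-- literal like {{ Hello world }} is parsed here into an application of
-- `syntax.docUntitledSection` to a list containing a `syntax.docParagraph` of
-- `syntax.docWord` terms, all of which are ordinary term variables resolved by name.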
doc2Block :: forall m v. (Monad m, Var v) => TermP v m
doc2Block =
doc2Block :: forall m v. (Monad m, Var v) => P v m (Ann {- Annotation for the whole spanning block -}, Term v Ann)
doc2Block = do
P.lookAhead (openBlockWith "syntax.docUntitledSection") *> elem
where
elem :: TermP v m
-- For terms which aren't blocks the spanning annotation is the same as the
-- term annotation.
selfAnnotated :: Term v Ann -> (Ann, Term v Ann)
selfAnnotated t = (ann t, t)
elem :: P v m (Ann {- Annotation for the whole spanning block -}, Term v Ann)
elem =
text <|> do
t <- openBlock
(selfAnnotated <$> text) <|> do
startTok <- openBlock
let -- here, `t` will be something like `Open "syntax.docWord"`
-- so `f` will be a term var with the name "syntax.docWord".
f = f' t
f = f' startTok
f' t = Term.var (ann t) (Var.nameds (L.payload t))
-- what follows are some common syntactic forms used for parsing child elements
-- regular is parsed into `f child1 child2 child3` for however many children
regular = do
cs <- P.many elem <* closeBlock
pure $ Term.apps' f cs
cs <- P.many (snd <$> elem)
endTok <- closeBlock
let trm = Term.apps' f cs
pure (ann startTok <> ann endTok, trm)
-- variadic is parsed into: `f [child1, child2, ...]`
variadic = variadic' f
variadic' f = do
cs <- P.many elem <* closeBlock
pure $ Term.apps' f [Term.list (ann cs) cs]
cs <- P.many (snd <$> elem)
endTok <- closeBlock
let trm = Term.apps' f [Term.list (ann cs) cs]
pure (ann startTok <> ann endTok, trm)
-- sectionLike is parsed into: `f tm [child1, child2, ...]`
sectionLike = do
arg1 <- elem
cs <- P.many elem <* closeBlock
pure $ Term.apps' f [arg1, Term.list (ann cs) cs]
arg1 <- (snd <$> elem)
cs <- P.many (snd <$> elem)
endTok <- closeBlock
let trm = Term.apps' f [arg1, Term.list (ann cs) cs]
pure (ann startTok <> ann endTok, trm)
evalLike wrap = do
tm <- term <* closeBlock
pure $ Term.apps' f [wrap tm]
tm <- term
endTok <- closeBlock
let trm = Term.apps' f [wrap tm]
pure (ann startTok <> ann endTok, trm)
-- converts `tm` to `'tm`
--
@ -522,8 +549,7 @@ doc2Block =
-- code which renders documents. (We want the doc display to get
-- the unevaluated expression `1 + 1` and not `2`)
addDelay tm = Term.delay (ann tm) tm
case L.payload t of
case L.payload startTok of
"syntax.docJoin" -> variadic
"syntax.docUntitledSection" -> variadic
"syntax.docColumn" -> variadic
@ -534,33 +560,45 @@ doc2Block =
"syntax.docBulletedList" -> variadic
"syntax.docSourceAnnotations" -> variadic
"syntax.docSourceElement" -> do
link <- elem
anns <- P.optional $ reserved "@" *> elem
closeBlock $> Term.apps' f [link, fromMaybe (Term.list (ann link) mempty) anns]
link <- (snd <$> elem)
anns <- P.optional $ reserved "@" *> (snd <$> elem)
endTok <- closeBlock
let trm = Term.apps' f [link, fromMaybe (Term.list (ann link) mempty) anns]
pure (ann startTok <> ann endTok, trm)
"syntax.docNumberedList" -> do
nitems@((n, _) : _) <- P.some nitem <* closeBlock
nitems@((n, _) : _) <- P.some nitem
endTok <- closeBlock
let items = snd <$> nitems
pure $ Term.apps' f [n, Term.list (ann items) items]
let trm = Term.apps' f [n, Term.list (ann items) items]
pure (ann startTok <> ann endTok, trm)
where
nitem = do
n <- number
t <- openBlockWith "syntax.docColumn"
let f = f' ("syntax.docColumn" <$ t)
child <- variadic' f
(_spanAnn, child) <- variadic' f
pure (n, child)
"syntax.docSection" -> sectionLike
-- @source{ type Blah, foo, type Bar }
"syntax.docEmbedTermLink" -> do
tm <- addDelay <$> (hashQualifiedPrefixTerm <|> hashQualifiedInfixTerm)
closeBlock $> Term.apps' f [tm]
endTok <- closeBlock
let trm = Term.apps' f [tm]
pure (ann startTok <> ann endTok, trm)
"syntax.docEmbedSignatureLink" -> do
tm <- addDelay <$> (hashQualifiedPrefixTerm <|> hashQualifiedInfixTerm)
closeBlock $> Term.apps' f [tm]
endTok <- closeBlock
let trm = Term.apps' f [tm]
pure (ann startTok <> ann endTok, trm)
"syntax.docEmbedTypeLink" -> do
r <- typeLink'
closeBlock $> Term.apps' f [Term.typeLink (ann r) (L.payload r)]
"syntax.docExample" ->
(term <* closeBlock) <&> \case
endTok <- closeBlock
let trm = Term.apps' f [Term.typeLink (ann r) (L.payload r)]
pure (ann startTok <> ann endTok, trm)
"syntax.docExample" -> do
trm <- term
endTok <- closeBlock
pure . (ann startTok <> ann endTok,) $ case trm of
tm@(Term.Apps' _ xs) ->
let fvs = List.Extra.nubOrd $ concatMap (toList . Term.freeVars) xs
n = Term.nat (ann tm) (fromIntegral (length fvs))
@ -570,11 +608,11 @@ doc2Block =
"syntax.docTransclude" -> evalLike id
"syntax.docEvalInline" -> evalLike addDelay
"syntax.docExampleBlock" -> do
tm <- block'' False True "syntax.docExampleBlock" (pure (void t)) closeBlock
pure $ Term.apps' f [Term.nat (ann tm) 0, addDelay tm]
(spanAnn, tm) <- block'' False True "syntax.docExampleBlock" (pure (void startTok)) closeBlock
pure $ (spanAnn, Term.apps' f [Term.nat (ann tm) 0, addDelay tm])
"syntax.docEval" -> do
tm <- block' False "syntax.docEval" (pure (void t)) closeBlock
pure $ Term.apps' f [addDelay tm]
(spanAnn, tm) <- block' False "syntax.docEval" (pure (void startTok)) closeBlock
pure $ (spanAnn, Term.apps' f [addDelay tm])
_ -> regular
docBlock :: (Monad m, Var v) => TermP v m
@ -947,10 +985,10 @@ delayQuote = P.label "quote" do
e <- termLeaf
pure $ DD.delayTerm (ann start <> ann e) e
delayBlock :: (Monad m, Var v) => TermP v m
delayBlock :: (Monad m, Var v) => P v m (Ann {- Ann spanning the whole block -}, Term v Ann)
delayBlock = P.label "do" do
b <- block "do"
pure $ DD.delayTerm (ann b) b
(spanAnn, b) <- block "do"
pure $ (spanAnn, DD.delayTerm (ann b) b)
bang :: (Monad m, Var v) => TermP v m
bang = P.label "bang" do
@ -960,9 +998,9 @@ bang = P.label "bang" do
seqOp :: (Ord v) => P v m Pattern.SeqOp
seqOp =
(Pattern.Snoc <$ matchToken (L.SymbolyId ":+" Nothing))
<|> (Pattern.Cons <$ matchToken (L.SymbolyId "+:" Nothing))
<|> (Pattern.Concat <$ matchToken (L.SymbolyId "++" Nothing))
Pattern.Snoc <$ matchToken (L.SymbolyId (HQ'.fromName (Name.fromSegment (NameSegment ":+"))))
<|> Pattern.Cons <$ matchToken (L.SymbolyId (HQ'.fromName (Name.fromSegment (NameSegment "+:"))))
<|> Pattern.Concat <$ matchToken (L.SymbolyId (HQ'.fromName (Name.fromSegment (NameSegment "++"))))
term4 :: (Monad m, Var v) => TermP v m
term4 = f <$> some termLeaf
@ -1024,7 +1062,7 @@ destructuringBind = do
let boundVars' = snd <$> boundVars
P.lookAhead (openBlockWith "=")
pure (p, boundVars')
scrute <- block "=" -- Dwight K. Scrute ("The People's Scrutinee")
(_spanAnn, scrute) <- block "=" -- Dwight K. Scrute ("The People's Scrutinee")
let guard = Nothing
let absChain vs t = foldr (\v t -> ABT.abs' (ann t) v t) t vs
thecase t = Term.MatchCase p (fmap (absChain boundVars) guard) $ absChain boundVars t
@ -1062,29 +1100,35 @@ binding = label "binding" do
Nothing -> do
-- we haven't seen a type annotation, so look ahead to '=' before committing
(lhsLoc, name, args) <- P.try (lhs <* P.lookAhead (openBlockWith "="))
body <- block "="
(_bodySpanAnn, body) <- block "="
verifyRelativeName' (fmap Name.unsafeFromVar name)
pure $ mkBinding (lhsLoc <> ann body) (L.payload name) args body
let binding = mkBinding lhsLoc args body
-- We don't actually use the span annotation from the block (yet) because it
-- may contain a bunch of whitespace and comments following a top-level definition.
let spanAnn = ann lhsLoc <> ann binding
pure $ ((spanAnn, (L.payload name)), binding)
Just (nameT, typ) -> do
(lhsLoc, name, args) <- lhs
verifyRelativeName' (fmap Name.unsafeFromVar name)
when (L.payload name /= L.payload nameT) $
customFailure $
SignatureNeedsAccompanyingBody nameT
body <- block "="
pure $
fmap
(\e -> Term.ann (ann nameT <> ann e) e typ)
(mkBinding (ann lhsLoc <> ann body) (L.payload name) args body)
(_bodySpanAnn, body) <- block "="
let binding = mkBinding lhsLoc args body
-- We don't actually use the span annotation from the block (yet) because it
-- may contain a bunch of whitespace and comments following a top-level definition.
let spanAnn = ann nameT <> ann binding
pure $ ((spanAnn, L.payload name), Term.ann (ann nameT <> ann binding) binding typ)
where
mkBinding loc f [] body = ((loc, f), body)
mkBinding loc f args body =
((loc, f), Term.lam' (loc <> ann body) (L.payload <$> args) body)
mkBinding :: Ann -> [L.Token v] -> Term.Term v Ann -> Term.Term v Ann
mkBinding _lhsLoc [] body = body
mkBinding lhsLoc args body =
(Term.lam' (lhsLoc <> ann body) (L.payload <$> args) body)
customFailure :: (P.MonadParsec e s m) => e -> m a
customFailure = P.customFailure
block :: forall m v. (Monad m, Var v) => String -> TermP v m
block :: forall m v. (Monad m, Var v) => String -> P v m (Ann, Term v Ann)
block s = block' False s (openBlockWith s) closeBlock
-- example: use Foo.bar.Baz + ++ x
@ -1154,25 +1198,32 @@ substImports ns imports =
Names.hasTypeNamed Names.IncludeSuffixes (Name.unsafeFromVar full) ns
]
block' :: (Monad m, Var v) => IsTop -> String -> P v m (L.Token ()) -> P v m (L.Token ()) -> TermP v m
block' ::
(Monad m, Var v) =>
IsTop ->
String ->
P v m (L.Token ()) ->
P v m (L.Token ()) ->
P v m (Ann {- ann which spans the whole block -}, Term v Ann)
block' isTop = block'' isTop False
block'' ::
forall m v b.
(Monad m, Var v) =>
forall m v end.
(Monad m, Var v, Annotated end) =>
IsTop ->
Bool -> -- `True` means insert `()` at end of block if it ends with a statement
String ->
P v m (L.Token ()) ->
P v m b ->
TermP v m
P v m end ->
P v m (Ann {- ann which spans the whole block -}, Term v Ann)
block'' isTop implicitUnitAtEnd s openBlock closeBlock = do
open <- openBlock
(names, imports) <- imports
_ <- optional semi
statements <- local (\e -> e {names = names}) $ sepBy semi statement
_ <- closeBlock
substImports names imports <$> go open statements
end <- closeBlock
body <- substImports names imports <$> go open statements
pure (ann open <> ann end, body)
where
statement = asum [Binding <$> binding, DestructuringBind <$> destructuringBind, Action <$> blockTerm]
go :: L.Token () -> [BlockElement v] -> P v m (Term v Ann)
@ -1247,7 +1298,9 @@ number' i u f = fmap go numeric
| otherwise = u (read <$> num)
tupleOrParenthesizedTerm :: (Monad m, Var v) => TermP v m
tupleOrParenthesizedTerm = label "tuple" $ tupleOrParenthesized term DD.unitTerm pair
tupleOrParenthesizedTerm = label "tuple" $ do
(spanAnn, tm) <- tupleOrParenthesized term DD.unitTerm pair
pure $ tm {ABT.annotation = spanAnn}
where
pair t1 t2 =
Term.app

View File

@ -1737,18 +1737,18 @@ prettyDoc2 ::
prettyDoc2 ac tm = do
ppe <- getPPE
let brace p =
fmt S.DocDelimiter "{{"
<> PP.softbreak
<> p
<> PP.softbreak
<> fmt
S.DocDelimiter
"}}"
if PP.isMultiLine p
then fmt S.DocDelimiter "{{" <> PP.newline <> p <> PP.newline <> fmt S.DocDelimiter "}}"
else fmt S.DocDelimiter "{{" <> PP.softbreak <> p <> PP.softbreak <> fmt S.DocDelimiter "}}"
bail tm = brace <$> pretty0 ac tm
contains :: Char -> Pretty SyntaxText -> Bool
contains c p =
PP.toPlainUnbroken (PP.syntaxToColor p)
& elem c
-- Finds the longest run of a character and returns one more than that
longestRun c s =
case filter (\s -> take 2 s == [c, c]) $
group (PP.toPlainUnbroken $ PP.syntaxToColor s) of
List.group (PP.toPlainUnbroken $ PP.syntaxToColor s) of
[] -> 2
x -> 1 + maximum (map length x)
oneMore c inner = replicate (longestRun c inner) c
@ -1782,7 +1782,12 @@ prettyDoc2 ac tm = do
pure $ PP.text t
(toDocCode ppe -> Just d) -> do
inner <- rec d
let quotes = PP.string $ oneMore '\'' inner
let quotes =
-- Prefer ` if there aren't any in the inner text,
-- otherwise use one more than the longest run of ' in the inner text
if contains '`' inner
then PP.string $ oneMore '\'' inner
else PP.string "`"
pure $ PP.group $ quotes <> inner <> quotes
(toDocJoin ppe -> Just ds) -> foldMapM rec ds
(toDocItalic ppe -> Just d) -> do

View File

@ -1,10 +1,20 @@
module Unison.Syntax.TypeParser where
{-# LANGUAGE OverloadedStrings #-}
module Unison.Syntax.TypeParser
( computationType
, valueType
, valueTypeLeaf
) where
import Control.Monad.Reader (asks)
import Data.Set qualified as Set
import Text.Megaparsec qualified as P
import Unison.ABT qualified as ABT
import Unison.Builtin.Decls qualified as DD
import Unison.HashQualified qualified as HQ
import Unison.HashQualified' qualified as HQ'
import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment (NameSegment))
import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann (..))
import Unison.Prelude
@ -93,8 +103,10 @@ sequenceTyp = do
let a = ann open <> ann close
pure $ Type.app a (Type.list a) t
tupleOrParenthesizedType :: (Var v) => TypeP v m -> TypeP v m
tupleOrParenthesizedType rec = tupleOrParenthesized rec DD.unitType pair
tupleOrParenthesizedType :: Var v => TypeP v m -> TypeP v m
tupleOrParenthesizedType rec = do
(spanAnn, ty) <- tupleOrParenthesized rec DD.unitType pair
pure (ty {ABT.annotation = ABT.annotation ty <> spanAnn})
where
pair t1 t2 =
let a = ann t1 <> ann t2
@ -113,6 +125,6 @@ forall :: (Var v) => TypeP v m -> TypeP v m
forall rec = do
kw <- reserved "forall" <|> reserved ""
vars <- fmap (fmap L.payload) . some $ prefixDefinitionName
_ <- matchToken $ L.SymbolyId "." Nothing
_ <- matchToken $ L.SymbolyId (HQ'.fromName (Name.fromSegment (NameSegment ".")))
t <- rec
pure $ Type.foralls (ann kw <> ann t) vars t

View File

@ -47,9 +47,10 @@ minimize (Term.LetRecNamedAnnotatedTop' isTop blockAnn bs e) =
(compare `on` fst)
grouped = group bindings
dupes = filter ok grouped
where
ok (v, as) | Var.name v == "_" = False
| otherwise = length as > 1
where
ok (v, as)
| Var.name v == "_" = False
| otherwise = length as > 1
in if not $ null dupes
then Left $ Nel.fromList dupes
else

View File

@ -218,6 +218,15 @@ mapErrors f r = case r of
CompilerBug bug es is -> CompilerBug bug (f <$> es) is
s@(Success _ _) -> s
-- Allows modifying the stored notes in a scoped way.
-- This is based on the `pass` function in e.g. Control.Monad.Writer
adjustResultNotes ::
Result v loc (a, InfoNote v loc -> InfoNote v loc) ->
Result v loc a
adjustResultNotes (Success notes (r, f)) = Success (fmap f notes) r
adjustResultNotes (TypeError e i) = TypeError e i
adjustResultNotes (CompilerBug c e i) = CompilerBug c e i
data PatternMatchCoverageCheckAndKindInferenceSwitch
= PatternMatchCoverageCheckAndKindInferenceSwitch'Enabled
| PatternMatchCoverageCheckAndKindInferenceSwitch'Disabled
@ -251,6 +260,15 @@ liftTotalM (MT m) = MT $ \ppe pmcSwitch datas effects env -> case m ppe pmcSwitc
Left bug -> CompilerBug bug mempty mempty
Right a -> Success mempty a
-- Allows modifying the stored notes in a scoped way.
-- This is based on the `pass` function in e.g. Control.Monad.Writer
adjustNotes ::
M v loc (a, InfoNote v loc -> InfoNote v loc) -> M v loc a
adjustNotes (MT m) = MT $ \ppe pmcSwitch datas effects env ->
adjustResultNotes (twiddle <$> m ppe pmcSwitch datas effects env)
where
twiddle ((a, c), b) = ((a, b), c)
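-- An illustrative sketch only (not part of this change): 'withRewrittenNotes' is a
-- hypothetical wrapper showing the intended 'pass'-style usage, where the body
-- returns its result together with a transformer applied to the notes it emitted.
withRewrittenNotes ::
  (Var v, Ord loc) =>
  (InfoNote v loc -> InfoNote v loc) ->
  M v loc a ->
  M v loc a
withRewrittenNotes rewrite body = adjustNotes $ do
  a <- body
  pure (a, rewrite)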
-- errorNote :: Cause v loc -> M v loc ()
-- errorNote = liftResult . errorNote
@ -338,6 +356,27 @@ data InfoNote v loc
topLevelComponent :: (Var v) => [(v, Type.Type v loc, RedundantTypeAnnotation)] -> InfoNote v loc
topLevelComponent = TopLevelComponent . fmap (over _2 removeSyntheticTypeVars)
-- Given a list of Elements that are going to be discarded from a
-- context, substitutes the information into the solved blank types
-- of an InfoNote. This should give better TDNR results, because it
-- allows the stored solutions to incorporate information from later
-- in the type checking process, instead of it being entirely reliant
-- on information in the local scope of the reference to be resolved.
--
-- Note: this does not take any care to abstract over the variables
-- stored in the notes, so it is _heavily_ reliant on the fact that we
-- never reuse variable names/numberings in the typechecker. If this
-- becomes untrue, then we need to revisit this and instead properly
-- generalize types stored in the notes.
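--
-- For example, if the discarded context solves an existential α to Nat, a
-- 'SolvedBlank' note whose stored type still mentions α is rewritten here so
-- that α is replaced by Nat.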
substituteSolved ::
(Var v, Ord loc) =>
[Element v loc] ->
InfoNote v loc ->
InfoNote v loc
substituteSolved ctx (SolvedBlank b v t) =
SolvedBlank b v (applyCtx ctx t)
substituteSolved _ i = i
-- The typechecker generates synthetic type variables as part of type inference.
-- This function converts these synthetic type variables to regular named type
-- variables guaranteed to not collide with any other type variables.
@ -434,8 +473,7 @@ data Info v loc = Info
solvedExistentials :: Map v (Monotype v loc), -- `v` is solved to some monotype
universalVars :: Set v, -- set of universals seen so far
termVarAnnotations :: Map v (Type v loc),
allVars :: Set v, -- all variables seen so far
previouslyTypecheckedVars :: Set v -- term vars already typechecked
allVars :: Set v -- all variables seen so far
}
-- | The empty context
@ -472,11 +510,24 @@ retract0 e (Context ctx) = case focusAt (\(e', _) -> e' == e) ctx of
-- of `body` and the discarded context (not including the marker), respectively.
-- Freshened `markerHint` is used to create the marker.
markThenRetract :: (Var v, Ord loc) => v -> M v loc a -> M v loc (a, [Element v loc])
markThenRetract markerHint body = do
v <- freshenVar markerHint
markThenRetract hint body =
markThenCallWithRetract hint \retract -> adjustNotes do
r <- body
ctx <- retract
pure ((r, ctx), substituteSolved ctx)
markThenRetract0 :: (Var v, Ord loc) => v -> M v loc a -> M v loc ()
markThenRetract0 markerHint body = () <$ markThenRetract markerHint body
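-- | Like 'markThenRetract', but instead of retracting automatically it hands the
-- retraction action to the given continuation, so the caller decides when to
-- discard the scoped portion of the context and can inspect the discarded elements.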
markThenCallWithRetract ::
(Var v, Ord loc) =>
v ->
(M v loc [Element v loc] -> M v loc a) ->
M v loc a
markThenCallWithRetract hint k = do
v <- freshenVar hint
extendContext (Marker v)
a <- body
(a,) <$> doRetract (Marker v)
k (doRetract (Marker v))
where
doRetract :: (Var v, Ord loc) => Element v loc -> M v loc [Element v loc]
doRetract e = do
@ -498,9 +549,6 @@ markThenRetract markerHint body = do
setContext t
pure discarded
markThenRetract0 :: (Var v, Ord loc) => v -> M v loc a -> M v loc ()
markThenRetract0 markerHint body = () <$ markThenRetract markerHint body
-- unsolved' :: Context v loc -> [(B.Blank loc, v)]
-- unsolved' (Context ctx) = [(b,v) | (Existential b v, _) <- ctx]
@ -591,6 +639,16 @@ modifyContext f = do
appendContext :: (Var v, Ord loc) => [Element v loc] -> M v loc ()
appendContext = traverse_ extendContext
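-- | Mark the given existentials (and their solutions) with 'B.Retain' so that
-- later retractions (see 'synthesizeBinding') know not to generalize them away
-- before type-directed name resolution has had a chance to use them.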
markRetained :: (Var v, Ord loc) => Set v -> M v loc ()
markRetained keep = setContext . marks =<< getContext
where
marks (Context eis) = Context (fmap mark eis)
mark (Existential B.Blank v, i)
| v `Set.member` keep = (Var (TypeVar.Existential B.Retain v), i)
mark (Solved B.Blank v t, i)
| v `Set.member` keep = (Solved B.Retain v t, i)
mark p = p
extendContext :: (Var v) => Element v loc -> M v loc ()
extendContext e =
isReserved (varOf e) >>= \case
@ -654,14 +712,6 @@ freshenTypeVar v =
in (Var.freshenId id (TypeVar.underlying v), e {freshId = id + 1})
)
isClosed :: (Var v) => Term v loc -> M v loc Bool
isClosed e = Set.null <$> freeVars e
freeVars :: (Var v) => Term v loc -> M v loc (Set v)
freeVars e = do
ctx <- getContext
pure $ ABT.freeVars e `Set.difference` previouslyTypecheckedVars (info ctx)
-- todo: do we want this to return a location for the aspect of the type that was not well formed
-- todo: or maybe a note / list of notes, or an M
@ -690,7 +740,7 @@ wellformedType c t = case t of
-- | Return the `Info` associated with the last element of the context, or the zero `Info`.
info :: (Ord v) => Context v loc -> Info v loc
info (Context []) = Info mempty mempty mempty mempty mempty mempty
info (Context []) = Info mempty mempty mempty mempty mempty
info (Context ((_, i) : _)) = i
-- | Add an element onto the end of this `Context`. Takes `O(log N)` time,
@ -699,19 +749,19 @@ info (Context ((_, i) : _)) = i
extend' :: (Var v) => Element v loc -> Context v loc -> Either (CompilerBug v loc) (Context v loc)
extend' e c@(Context ctx) = Context . (: ctx) . (e,) <$> i'
where
Info es ses us uas vs pvs = info c
Info es ses us uas vs = info c
-- see figure 7
i' = case e of
Var v -> case v of
-- UvarCtx - ensure no duplicates
TypeVar.Universal v ->
if Set.notMember v vs
then pure $ Info es ses (Set.insert v us) uas (Set.insert v vs) pvs
then pure $ Info es ses (Set.insert v us) uas (Set.insert v vs)
else crash $ "variable " <> show v <> " already defined in the context"
-- EvarCtx - ensure no duplicates, and that this existential is not solved earlier in context
TypeVar.Existential _ v ->
if Set.notMember v vs
then pure $ Info (Set.insert v es) ses us uas (Set.insert v vs) pvs
then pure $ Info (Set.insert v es) ses us uas (Set.insert v vs)
else crash $ "variable " <> show v <> " already defined in the context"
-- SolvedEvarCtx - ensure `v` is fresh, and the solution is well-formed wrt the context
Solved _ v sa@(Type.getPolytype -> t)
@ -719,7 +769,7 @@ extend' e c@(Context ctx) = Context . (: ctx) . (e,) <$> i'
| not (wellformedType c t) -> crash $ "type " <> show t <> " is not well-formed wrt the context"
| otherwise ->
pure $
Info (Set.insert v es) (Map.insert v sa ses) us uas (Set.insert v vs) pvs
Info (Set.insert v es) (Map.insert v sa ses) us uas (Set.insert v vs)
-- VarCtx - ensure `v` is fresh, and annotation is well-formed wrt the context
Ann v t
| Set.member v vs -> crash $ "variable " <> show v <> " already defined in the context"
@ -732,12 +782,11 @@ extend' e c@(Context ctx) = Context . (: ctx) . (e,) <$> i'
us
(Map.insert v t uas)
(Set.insert v vs)
((if Set.null (Type.freeVars t) then Set.insert v else id) pvs)
-- MarkerCtx - note that since a Marker is always the first mention of a variable, suffices to
-- just check that `v` is not previously mentioned
Marker v ->
if Set.notMember v vs
then pure $ Info es ses us uas (Set.insert v vs) pvs
then pure $ Info es ses us uas (Set.insert v vs)
else crash $ "marker variable " <> show v <> " already defined in the context"
crash reason = Left $ IllegalContextExtension c e reason
@ -1152,21 +1201,7 @@ synthesizeWanted tm@(Term.Request' r) =
fmap (wantRequest tm) . ungeneralize . Type.purifyArrows
=<< getEffectConstructorType r
synthesizeWanted (Term.Let1Top' top binding e) = do
isClosed <- isClosed binding
-- note: no need to freshen binding, it can't refer to v
((tb, wb), ctx2) <- markThenRetract Var.inferOther $ do
_ <- extendExistential Var.inferOther
synthesize binding
-- regardless of whether we generalize existentials, we'll need to
-- process the wanted abilities with respect to things falling out
-- of scope.
wb <- substAndDefaultWanted wb ctx2
-- If the binding has no free variables, we generalize over its
-- existentials
tbinding <-
if isClosed
then pure $ generalizeExistentials ctx2 tb
else applyM . applyCtx ctx2 $ tb
(tbinding, wb) <- synthesizeBinding top binding
v' <- ABT.freshen e freshenVar
when (Var.isAction (ABT.variable e)) $
-- enforce that actions in a block have type ()
@ -1311,6 +1346,89 @@ synthesizeWanted e
l = loc e
synthesizeWanted _e = compilerCrash PatternMatchFailure
-- | Synthesizes a type for a local binding, for use in synthesizing
-- or checking `Let1` expressions. There is a bit of wrapping around
-- the call to `synthesize` to attempt to generalize certain
-- definitions.
--
-- We want to generalize self-contained definitions when possible, so
-- that things like:
--
-- id x = x
-- id ()
-- id "hello"
--
-- will work. However, note that just checking that the definition is
-- self-contained is insufficient, because:
--
-- r = IO.ref '(bug "whatever")
--
-- is self-contained (in the free variable sense), but would yield a
-- polymorphic reference. So, I think it is also necessary to check
-- that the binding has no wanted abilities. This automatically covers
-- the local function definitions we want.
--
-- ---
--
-- The current strategy for generalization is a bit sophisticated as
-- well. We want to generalize local definitions when possible.
-- However, when doing type directed name resolution, we _don't_ want
-- to generalize over variables that will help us figure out which
-- selection to make.
--
-- So, when we _do_ generalize, we first partition the discarded
-- context into the portion that is involved in TDNR solutions, and
-- the portion that isn't. We generalize the variables that aren't
-- involved in TDNR, and re-push the variables that are, so that they
-- can be refined later. This is a bit unusual for the algorithm we
-- use, but it seems like it should be safe.
synthesizeBinding ::
(Var v) =>
(Ord loc) =>
Bool ->
Term v loc ->
M v loc (Type v loc, Wanted v loc)
synthesizeBinding top binding = do
markThenCallWithRetract Var.inferOther \retract -> adjustNotes do
(tb, wb) <- synthesize binding
if not (null wb)
then fmap (\t -> ((t, wb), id)) (applyM tb)
else
if top
then do
ctx <- retract
pure ((generalizeExistentials ctx tb, []), substituteSolved ctx)
else do
ctx <- retract
-- Note: this is conservative about what we avoid
-- generalizing. Right now only TDNR causes variables to be
-- retained. It might be possible to make this happen for any
-- `Recorded` to do more inference for unknown variable errors
-- (or whatever the other cases are for), at the expense of
-- less generalization in the process of reporting those.
let retain (B.Recorded B.Resolve {}) = True
retain B.Retain = True
retain _ = False
erecs = [v | Existential b v <- ctx, retain b]
srecs =
[ v
| Solved b _ sa <- ctx,
retain b,
TypeVar.Existential _ v <-
Set.toList . ABT.freeVars . applyCtx ctx $ Type.getPolytype sa
]
keep = Set.fromList (erecs ++ srecs)
p (Existential _ v)
| v `Set.member` keep =
Left . Var $ TypeVar.Existential B.Retain v
p e = Right e
(repush, discard) = partitionEithers $ fmap p ctx
appendContext repush
markRetained keep
let tf = generalizeExistentials discard (applyCtx ctx tb)
pure ((tf, []), substituteSolved ctx)
getDataConstructorsAtType :: forall v loc. (Ord loc, Var v) => Type v loc -> M v loc (EnumeratedConstructors (TypeVar v loc) v loc)
getDataConstructorsAtType t0 = do
dataConstructors <- getDataConstructors t0
@ -2307,10 +2425,10 @@ checkWanted want (Term.Lam' body) (Type.Arrow'' i es o) = do
body <- pure $ ABT.bindInheritAnnotation body (Term.var () x)
checkWithAbilities es body o
pure want
checkWanted want (Term.Let1' binding m) t = do
v <- ABT.freshen m freshenVar
(tbinding, wbinding) <- synthesize binding
checkWanted want (Term.Let1Top' top binding m) t = do
(tbinding, wbinding) <- synthesizeBinding top binding
want <- coalesceWanted wbinding want
v <- ABT.freshen m freshenVar
markThenRetractWanted v $ do
when (Var.isAction (ABT.variable m)) $
-- enforce that actions in a block have type ()

View File

@ -1,12 +1,12 @@
module Unison.Typechecker.Extractor where
import Unison.KindInference (KindError)
import Control.Monad.Reader
import Data.List qualified as List
import Data.List.NonEmpty (NonEmpty)
import Data.Set qualified as Set
import Unison.Blank qualified as B
import Unison.ConstructorReference (ConstructorReference)
import Unison.KindInference (KindError)
import Unison.Pattern (Pattern)
import Unison.Prelude hiding (whenM)
import Unison.Term qualified as Term

View File

@ -9,20 +9,21 @@ import Unison.DataDeclaration (DataDeclaration, EffectDeclaration (..))
import Unison.DataDeclaration qualified as DD
import Unison.DataDeclaration.Names qualified as DD.Names
import Unison.Hashing.V2.Convert qualified as Hashing
import Unison.Name qualified as Name
import Unison.Names (Names (..))
import Unison.Names.ResolutionResult qualified as Names
import Unison.NamesWithHistory qualified as Names
import Unison.Prelude
import Unison.Reference qualified as Reference
import Unison.Referent qualified as Referent
import Unison.Name qualified as Name
import Unison.Syntax.Name qualified as Name
import Unison.Term qualified as Term
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Env (Env (..))
import Unison.UnisonFile.Error (Error (DupDataAndAbility, UnknownType))
import Unison.UnisonFile.Type (TypecheckedUnisonFile (TypecheckedUnisonFileId), UnisonFile (UnisonFileId))
import Unison.Util.Relation qualified as Relation
import Unison.Util.List qualified as List
import Unison.Util.Relation qualified as Relation
import Unison.Var (Var)
import Unison.Var qualified as Var
import Unison.WatchKind qualified as WK
@ -33,6 +34,9 @@ toNames uf = datas <> effects
datas = foldMap (DD.Names.dataDeclToNames' Name.unsafeFromVar) (Map.toList (UF.dataDeclarationsId uf))
effects = foldMap (DD.Names.effectDeclToNames' Name.unsafeFromVar) (Map.toList (UF.effectDeclarationsId uf))
addNamesFromUnisonFile :: (Var v) => UnisonFile v a -> Names -> Names
addNamesFromUnisonFile unisonFile names = Names.shadowing (toNames unisonFile) names
typecheckedToNames :: (Var v) => TypecheckedUnisonFile v a -> Names
typecheckedToNames uf = Names (terms <> ctors) types
where
@ -57,6 +61,9 @@ typecheckedToNames uf = Names (terms <> ctors) types
. UF.hashConstructors
$ uf
addNamesFromTypeCheckedUnisonFile :: (Var v) => TypecheckedUnisonFile v a -> Names -> Names
addNamesFromTypeCheckedUnisonFile unisonFile names = Names.shadowing (typecheckedToNames unisonFile) names
typecheckedUnisonFile0 :: (Ord v) => TypecheckedUnisonFile v a
typecheckedUnisonFile0 = TypecheckedUnisonFileId Map.empty Map.empty mempty mempty mempty
@ -100,7 +107,7 @@ bindNames names (UnisonFileId d e ts ws) = do
--
-- It's used below in `environmentFor` and also during the term resolution
-- process.
variableCanonicalizer :: forall v . Var v => [v] -> Map v v
variableCanonicalizer :: forall v. Var v => [v] -> Map v v
variableCanonicalizer vs =
done $ List.multimap do
v <- vs
@ -108,7 +115,7 @@ variableCanonicalizer vs =
suffix <- Name.suffixes n
pure (Var.named (Name.toText suffix), v)
where
done xs = Map.fromList [ (k, v) | (k, nubOrd -> [v]) <- Map.toList xs ] <> Map.fromList [(v,v) | v <- vs]
done xs = Map.fromList [(k, v) | (k, nubOrd -> [v]) <- Map.toList xs] <> Map.fromList [(v, v) | v <- vs]
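-- For example (illustrative): given vars named `foo.bar.baz` and `qux`, the
-- resulting map sends `baz`, `bar.baz`, and `foo.bar.baz` all to the `foo.bar.baz`
-- var and `qux` to itself; a suffix shared by more than one var is dropped as ambiguous.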
-- This function computes hashes for data and effect declarations, and
-- also returns a function for resolving strings to (Reference, ConstructorId)

View File

@ -0,0 +1,153 @@
module Unison.UnisonFile.Summary
( FileSummary (..),
allWatches,
allTypeDecls,
mkFileSummary,
fileDefLocations,
)
where
import Control.Lens
import Data.Foldable
import Data.Map qualified as Map
import Data.Set qualified as Set
import Unison.DataDeclaration qualified as DD
import Unison.Names (Names)
import Unison.Parser.Ann
import Unison.Prelude
import Unison.Reference qualified as Reference
import Unison.Symbol
import Unison.Symbol qualified as Symbol
import Unison.Term (Term)
import Unison.Term qualified as Term
import Unison.Type (Type)
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Names qualified as UF
import Unison.Var qualified as Var
import Unison.WatchKind (pattern TestWatch)
import Unison.WatchKind qualified as WK
-- | A file that parses might not always type-check, but often we just want to get as much
-- information as we have available. This provides a type where we can summarize the
-- information available in a Unison file.
--
-- If the file typechecked, all the Ref Ids and types will be filled in;
-- otherwise they will be Nothing.
data FileSummary = FileSummary
{ dataDeclsBySymbol :: Map Symbol (Reference.Id, DD.DataDeclaration Symbol Ann),
dataDeclsByReference :: Map Reference.Id (Map Symbol (DD.DataDeclaration Symbol Ann)),
effectDeclsBySymbol :: Map Symbol (Reference.Id, DD.EffectDeclaration Symbol Ann),
effectDeclsByReference :: Map Reference.Id (Map Symbol (DD.EffectDeclaration Symbol Ann)),
termsBySymbol :: Map Symbol (Ann, Maybe Reference.Id, Term Symbol Ann, Maybe (Type Symbol Ann)),
termsByReference :: Map (Maybe Reference.Id) (Map Symbol (Ann, Term Symbol Ann, Maybe (Type Symbol Ann))),
testWatchSummary :: [(Ann, Maybe Symbol, Maybe Reference.Id, Term Symbol Ann, Maybe (Type Symbol Ann))],
exprWatchSummary :: [(Ann, Maybe Symbol, Maybe Reference.Id, Term Symbol Ann, Maybe (Type Symbol Ann), Maybe WK.WatchKind)],
fileNames :: Names
}
deriving stock (Show)
allWatches :: FileSummary -> [(Ann, Maybe Symbol, Maybe Reference.Id, Term Symbol Ann, Maybe (Type Symbol Ann), Maybe WK.WatchKind)]
allWatches FileSummary {testWatchSummary, exprWatchSummary} =
exprWatchSummary
<> (testWatchSummary <&> \(ann, sym, refId, tm, typ) -> (ann, sym, refId, tm, typ, Just WK.TestWatch))
allTypeDecls :: FileSummary -> Map Symbol (Reference.Id, Either (DD.EffectDeclaration Symbol Ann) (DD.DataDeclaration Symbol Ann))
allTypeDecls FileSummary {dataDeclsBySymbol, effectDeclsBySymbol} =
let dataDecls = dataDeclsBySymbol <&> \(refId, dd) -> (refId, Right dd)
effectDecls = effectDeclsBySymbol <&> \(refId, ed) -> (refId, Left ed)
in dataDecls <> effectDecls
-- | Summarize the information available to us from the current state of the file.
-- See 'FileSummary' for more information.
mkFileSummary :: Maybe (UF.UnisonFile Symbol Ann) -> Maybe (UF.TypecheckedUnisonFile Symbol Ann) -> Maybe FileSummary
mkFileSummary parsed typechecked = case (parsed, typechecked) of
(Nothing, Nothing) -> Nothing
(_, Just tf@(UF.TypecheckedUnisonFileId {dataDeclarationsId', effectDeclarationsId', hashTermsId})) ->
let (trms, testWatches, exprWatches) =
hashTermsId & ifoldMap \sym (ann, ref, wk, trm, typ) ->
case wk of
Nothing -> (Map.singleton sym (ann, Just ref, trm, getUserTypeAnnotation sym <|> Just typ), mempty, mempty)
Just TestWatch -> (mempty, [(ann, assertUserSym sym, Just ref, trm, getUserTypeAnnotation sym <|> Just typ)], mempty)
Just wk -> (mempty, mempty, [(ann, assertUserSym sym, Just ref, trm, getUserTypeAnnotation sym <|> Just typ, Just wk)])
in Just $
FileSummary
{ dataDeclsBySymbol = dataDeclarationsId',
dataDeclsByReference = declsRefMap dataDeclarationsId',
effectDeclsBySymbol = effectDeclarationsId',
effectDeclsByReference = declsRefMap effectDeclarationsId',
termsBySymbol = trms,
termsByReference = termsRefMap trms,
testWatchSummary = testWatches,
exprWatchSummary = exprWatches,
fileNames = UF.typecheckedToNames tf
}
(Just uf@(UF.UnisonFileId {dataDeclarationsId, effectDeclarationsId, terms, watches}), _) ->
let trms =
terms & foldMap \(sym, ann, trm) ->
(Map.singleton sym (ann, Nothing, trm, Nothing))
(testWatches, exprWatches) =
watches & ifoldMap \wk tms ->
tms & foldMap \(v, ann, trm) ->
case wk of
TestWatch -> ([(ann, assertUserSym v, Nothing, trm, Nothing)], mempty)
_ -> (mempty, [(ann, assertUserSym v, Nothing, trm, Nothing, Just wk)])
in Just $
FileSummary
{ dataDeclsBySymbol = dataDeclarationsId,
dataDeclsByReference = declsRefMap dataDeclarationsId,
effectDeclsBySymbol = effectDeclarationsId,
effectDeclsByReference = declsRefMap effectDeclarationsId,
termsBySymbol = trms,
termsByReference = termsRefMap trms,
testWatchSummary = testWatches,
exprWatchSummary = exprWatches,
fileNames = UF.toNames uf
}
where
declsRefMap :: (Ord v, Ord r) => Map v (r, a) -> Map r (Map v a)
declsRefMap m =
m
& Map.toList
& fmap (\(v, (r, a)) -> (r, Map.singleton v a))
& Map.fromListWith (<>)
termsRefMap :: (Ord v, Ord r) => Map v (ann, r, a, b) -> Map r (Map v (ann, a, b))
termsRefMap m =
m
& Map.toList
& fmap (\(v, (ann, r, a, b)) -> (r, Map.singleton v (ann, a, b)))
& Map.fromListWith (<>)
-- Gets the user-provided type annotation for a term, if there is one.
-- When present, the returned signature's Anns point at locations within the file.
getUserTypeAnnotation :: Symbol -> Maybe (Type Symbol Ann)
getUserTypeAnnotation v = do
UF.UnisonFileId {terms, watches} <- parsed
trm <- (terms <> fold watches) ^? folded . filteredBy (_1 . only v) . _3
typ <- Term.getTypeAnnotation trm
pure typ
-- \| If a symbol is a 'User' symbol, return (Just sym), otherwise return Nothing.
assertUserSym :: Symbol -> Maybe Symbol
assertUserSym sym = case sym of
Symbol.Symbol _ (Var.User {}) -> Just sym
_ -> Nothing
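-- An illustrative sketch only (not part of this change): 'fileTermCount' is a
-- hypothetical helper showing how a consumer might combine 'mkFileSummary' with
-- the 'FileSummary' fields, regardless of whether the file typechecked.
fileTermCount :: Maybe (UF.UnisonFile Symbol Ann) -> Maybe (UF.TypecheckedUnisonFile Symbol Ann) -> Int
fileTermCount parsed typechecked =
  case mkFileSummary parsed typechecked of
    Nothing -> 0
    Just summary -> Map.size (termsBySymbol summary)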
-- | Compute the locations of user-defined definitions within the file
fileDefLocations :: FileSummary -> Map Symbol (Set Ann)
fileDefLocations fs@FileSummary {dataDeclsBySymbol, effectDeclsBySymbol, termsBySymbol} =
fold
[ dataDeclsBySymbol <&> \(_, decl) ->
decl
& DD.annotation
& Set.singleton,
effectDeclsBySymbol <&> \(_, decl) ->
decl
& DD.toDataDecl
& DD.annotation
& Set.singleton,
(allWatches fs)
& foldMap \(ann, maySym, _id, _trm, _typ, _wk) ->
case maySym of
Nothing -> mempty
Just sym -> Map.singleton sym (Set.singleton ann),
termsBySymbol <&> \(ann, _id, _trm, _typ) -> Set.singleton ann
]

View File

@ -10,7 +10,6 @@ import System.Directory (doesFileExist)
import System.FilePath (joinPath, replaceExtension, splitPath)
import System.FilePath.Find (always, extension, find, (==?))
import Unison.Builtin qualified as Builtin
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Runtime (Runtime, evaluateWatches)
import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann)
@ -89,7 +88,7 @@ go rt files how = do
showNotes :: (Foldable f) => String -> PrintError.Env -> f Note -> String
showNotes source env =
intercalateMap "\n\n" $ PrintError.renderNoteAsANSI 60 env source Path.absoluteEmpty
intercalateMap "\n\n" $ PrintError.renderNoteAsANSI 60 env source
decodeResult ::
String -> SynthResult -> EitherResult -- String (UF.TypecheckedUnisonFile Symbol Ann)

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.36.0.
-- This file has been generated from package.yaml by hpack version 0.35.2.
--
-- see: https://github.com/sol/hpack
@ -177,6 +177,7 @@ library
Unison.UnisonFile
Unison.UnisonFile.Env
Unison.UnisonFile.Names
Unison.UnisonFile.Summary
Unison.UnisonFile.Type
Unison.Util.Convert
Unison.Util.CycleTable
@ -226,7 +227,8 @@ library
ViewPatterns
ghc-options: -Wall -O0 -fno-warn-name-shadowing -fno-warn-missing-pattern-synonym-signatures
build-depends:
ListLike
IntervalMap
, ListLike
, NanoID
, aeson
, ansi-terminal
@ -416,7 +418,8 @@ test-suite parser-typechecker-tests
ViewPatterns
ghc-options: -Wall -O0 -fno-warn-name-shadowing -fno-warn-missing-pattern-synonym-signatures -W -threaded -rtsopts "-with-rtsopts=-N -T" -v0
build-depends:
ListLike
IntervalMap
, ListLike
, NanoID
, aeson
, ansi-terminal

View File

@ -75,7 +75,7 @@ module Unison.Cli.MonadUtils
-- * Latest touched Unison file
getLatestFile,
getLatestParsedFile,
getNamesFromLatestParsedFile,
getNamesFromLatestFile,
getTermFromLatestParsedFile,
expectLatestFile,
expectLatestParsedFile,
@ -258,7 +258,8 @@ modifyRootBranch f = do
getCurrentBranch :: Cli (Branch IO)
getCurrentBranch = do
path <- getCurrentPath
getBranchAt path
Cli.Env {codebase} <- ask
liftIO $ Codebase.getBranchAtPath codebase path
-- | Get the current branch0.
getCurrentBranch0 :: Cli (Branch0 IO)
@ -573,12 +574,14 @@ getTermFromLatestParsedFile (HQ.NameOnly n) = do
_ -> Nothing
getTermFromLatestParsedFile _ = pure Nothing
getNamesFromLatestParsedFile :: Cli Names
getNamesFromLatestParsedFile = do
uf <- getLatestParsedFile
pure $ case uf of
-- | Gets the names from the latest typechecked unison file, or latest parsed file if it
-- didn't typecheck.
getNamesFromLatestFile :: Cli Names
getNamesFromLatestFile = do
use #latestTypecheckedFile <&> \case
Just (Right tf) -> UFN.typecheckedToNames tf
Just (Left uf) -> UFN.toNames uf
Nothing -> mempty
Just uf -> UFN.toNames uf
-- | Get the latest typechecked unison file, or return early if there isn't one.
expectLatestTypecheckedFile :: Cli (TypecheckedUnisonFile Symbol Ann)

View File

@ -1,65 +1,15 @@
-- | Utilities that have to do with constructing names objects.
module Unison.Cli.NamesUtils
( basicParseNames,
basicPrettyPrintNamesA,
displayNames,
getBasicPrettyPrintNames,
makePrintNamesFromLabeled',
makeShadowedPrintNamesFromHQ,
( currentNames,
)
where
import Unison.Cli.Monad (Cli)
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Codebase.Path (Path)
import Unison.Codebase.Path qualified as Path
import Unison.Cli.MonadUtils (getCurrentBranch0)
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Names (Names)
import Unison.NamesWithHistory qualified as Names
import Unison.Server.Backend qualified as Backend
import Unison.UnisonFile (TypecheckedUnisonFile)
import Unison.UnisonFile.Names qualified as UF
import Unison.Var (Var)
basicParseNames :: Cli Names
basicParseNames =
fst <$> basicNames' Backend.Within
basicPrettyPrintNamesA :: Cli Names
basicPrettyPrintNamesA = snd <$> basicNames' Backend.AllNames
-- implementation detail of basicParseNames and basicPrettyPrintNames
basicNames' :: (Path -> Backend.NameScoping) -> Cli (Names, Names)
basicNames' nameScoping = do
root' <- Cli.getRootBranch
currentPath' <- Cli.getCurrentPath
let (parse, pretty, _local) = Backend.namesForBranch root' (nameScoping $ Path.unabsolute currentPath')
pure (parse, pretty)
-- | Produce a `Names` needed to display all the hashes used in the given file.
displayNames ::
(Var v) =>
TypecheckedUnisonFile v a ->
Cli Names
displayNames unisonFile =
-- voodoo
makeShadowedPrintNamesFromLabeled
(UF.typecheckedToNames unisonFile)
getBasicPrettyPrintNames :: Cli Names
getBasicPrettyPrintNames = do
rootBranch <- Cli.getRootBranch
currentPath <- Cli.getCurrentPath
pure (Backend.prettyNamesForBranch rootBranch (Backend.AllNames (Path.unabsolute currentPath)))
makePrintNamesFromLabeled' :: Cli Names
makePrintNamesFromLabeled' =
basicPrettyPrintNamesA
makeShadowedPrintNamesFromHQ :: Names -> Cli Names
makeShadowedPrintNamesFromHQ shadowing = do
basicNames <- basicPrettyPrintNamesA
pure $ Names.shadowing shadowing basicNames
makeShadowedPrintNamesFromLabeled :: Names -> Cli Names
makeShadowedPrintNamesFromLabeled shadowing =
Names.shadowing shadowing <$> makePrintNamesFromLabeled'
-- | Produce a 'Names' object which contains names for the current branch.
currentNames :: Cli Names
currentNames = do
Branch.toNames <$> getCurrentBranch0
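-- A typical usage sketch (mirroring call sites elsewhere in this change):
--
--   names <- Cli.currentNames
--   pped  <- Cli.prettyPrintEnvDeclFromNames names
--
-- which yields names and a pretty-printer scoped to the current branch only.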

View File

@ -1,33 +1,32 @@
-- | Utilities that have to do with constructing pretty-print environments, given stateful information in the Cli monad
-- state/environment, such as the current path.
module Unison.Cli.PrettyPrintUtils
( prettyPrintEnvDecl,
( prettyPrintEnvDeclFromNames,
currentPrettyPrintEnvDecl,
)
where
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Path (Path)
import Unison.Codebase.Path qualified as Path
import Unison.Names (Names)
import Unison.Prelude
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl)
import Unison.PrettyPrintEnvDecl qualified as PPE hiding (biasTo)
import Unison.PrettyPrintEnvDecl.Names qualified as PPED
import Unison.Server.Backend qualified as Backend
prettyPrintEnvDecl :: Names -> Cli PrettyPrintEnvDecl
prettyPrintEnvDecl ns =
-- | Builds a pretty print env decl from a names object.
prettyPrintEnvDeclFromNames :: Names -> Cli PPE.PrettyPrintEnvDecl
prettyPrintEnvDeclFromNames ns =
Cli.runTransaction Codebase.hashLength <&> \hashLen ->
PPED.makePPED (PPE.hqNamer hashLen ns) (PPE.suffixifyByHash ns)
-- | Get a pretty print env decl for the current names at the current path.
currentPrettyPrintEnvDecl :: (Path -> Backend.NameScoping) -> Cli PrettyPrintEnvDecl
currentPrettyPrintEnvDecl scoping = do
root' <- Cli.getRootBranch
currentPath <- Cli.getCurrentPath
hqLen <- Cli.runTransaction Codebase.hashLength
pure $ Backend.getCurrentPrettyNames hqLen (scoping (Path.unabsolute currentPath)) root'
--
-- Prefer using 'prettyPrintEnvDeclFromNames' when you've already got
-- a 'Names' value around, since using 'currentPrettyPrintEnvDecl' rebuilds the underlying
-- names object.
currentPrettyPrintEnvDecl :: Cli PPE.PrettyPrintEnvDecl
currentPrettyPrintEnvDecl = do
Cli.currentNames >>= prettyPrintEnvDeclFromNames

View File

@ -3,6 +3,7 @@ module Unison.Cli.ProjectUtils
( -- * Project/path helpers
getCurrentProject,
expectCurrentProject,
expectCurrentProjectIds,
getCurrentProjectIds,
getCurrentProjectBranch,
getProjectBranchForPath,
@ -12,6 +13,7 @@ module Unison.Cli.ProjectUtils
projectBranchPath,
projectBranchSegment,
projectBranchPathPrism,
resolveBranchRelativePath,
branchRelativePathToAbsolute,
-- * Name hydration
@ -50,7 +52,7 @@ import Unison.Codebase.Editor.Output (Output (LocalProjectBranchDoesntExist))
import Unison.Codebase.Editor.Output qualified as Output
import Unison.Codebase.Path (Path')
import Unison.Codebase.Path qualified as Path
import Unison.CommandLine.BranchRelativePath (BranchRelativePath)
import Unison.CommandLine.BranchRelativePath (BranchRelativePath, ResolvedBranchRelativePath)
import Unison.CommandLine.BranchRelativePath qualified as BranchRelativePath
import Unison.Prelude
import Unison.Project (ProjectAndBranch (..), ProjectBranchName, ProjectName)
@ -59,24 +61,37 @@ import Unison.Sqlite qualified as Sqlite
import Witch (unsafeFrom)
branchRelativePathToAbsolute :: BranchRelativePath -> Cli Path.Absolute
branchRelativePathToAbsolute = \case
branchRelativePathToAbsolute brp = resolveBranchRelativePath brp <&> \case
BranchRelativePath.ResolvedLoosePath p -> p
BranchRelativePath.ResolvedBranchRelative projectBranch mRel ->
let projectBranchIds = getIds projectBranch
handleRel = case mRel of
Nothing -> id
Just rel -> flip Path.resolve rel
in handleRel (projectBranchPath projectBranchIds)
where
getIds = \case
ProjectAndBranch project branch -> ProjectAndBranch (view #projectId project) (view #branchId branch)
resolveBranchRelativePath :: BranchRelativePath -> Cli ResolvedBranchRelativePath
resolveBranchRelativePath = \case
BranchRelativePath.BranchRelative brp -> case brp of
These projectBranch path -> do
projectBranch <- getIds <$> expectProjectAndBranchByTheseNames (toThese projectBranch)
pure (Path.resolve (projectBranchPath projectBranch) path)
This projectBranch -> do
projectBranch <- getIds <$> expectProjectAndBranchByTheseNames (toThese projectBranch)
pure (projectBranchPath projectBranch)
projectBranch <- expectProjectAndBranchByTheseNames (toThese projectBranch)
pure (BranchRelativePath.ResolvedBranchRelative projectBranch Nothing)
That path -> do
projectBranch <- expectCurrentProjectIds
pure (Path.resolve (projectBranchPath projectBranch) path)
BranchRelativePath.LoosePath path -> Cli.resolvePath' path
(projectBranch, _) <- expectCurrentProjectBranch
pure (BranchRelativePath.ResolvedBranchRelative projectBranch (Just path))
These projectBranch path -> do
projectBranch <- expectProjectAndBranchByTheseNames (toThese projectBranch)
pure (BranchRelativePath.ResolvedBranchRelative projectBranch (Just path))
BranchRelativePath.LoosePath path ->
BranchRelativePath.ResolvedLoosePath <$> Cli.resolvePath' path
where
toThese = \case
Left branchName -> That branchName
Right (projectName, branchName) -> These projectName branchName
getIds = \case
ProjectAndBranch project branch -> ProjectAndBranch (view #projectId project) (view #branchId branch)
-- | Get the current project that a user is on.
getCurrentProject :: Cli (Maybe Sqlite.Project)

View File

@ -1,6 +1,6 @@
{-# HLINT ignore "Use tuple-section" #-}
{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-}
{-# HLINT ignore "Use tuple-section" #-}
module Unison.Codebase.Editor.HandleInput
( loop,
)
@ -9,7 +9,7 @@ where
-- TODO: Don't import backend
import Control.Error.Util qualified as ErrorUtil
import Control.Lens
import Control.Lens hiding (from)
import Control.Monad.Reader (ask)
import Control.Monad.State (StateT)
import Control.Monad.State qualified as State
@ -39,14 +39,12 @@ import U.Codebase.Sqlite.ProjectBranch qualified as Sqlite
import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.ABT qualified as ABT
import Unison.Builtin qualified as Builtin
import Unison.Builtin.Decls qualified as DD
import Unison.Builtin.Terms qualified as Builtin
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.NamesUtils (basicParseNames, displayNames, getBasicPrettyPrintNames, makePrintNamesFromLabeled')
import Unison.Cli.Pretty qualified as Pretty
import Unison.Cli.PrettyPrintUtils (currentPrettyPrintEnvDecl, prettyPrintEnvDecl)
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Cli.ProjectUtils qualified as ProjectUtils
import Unison.Cli.TypeCheck (typecheckTerm)
import Unison.Codebase (Codebase)
@ -65,9 +63,13 @@ import Unison.Codebase.Editor.HandleInput.AuthLogin (authLogin)
import Unison.Codebase.Editor.HandleInput.Branch (handleBranch)
import Unison.Codebase.Editor.HandleInput.BranchRename (handleBranchRename)
import Unison.Codebase.Editor.HandleInput.Branches (handleBranches)
import Unison.Codebase.Editor.HandleInput.DebugDefinition qualified as DebugDefinition
import Unison.Codebase.Editor.HandleInput.DebugFoldRanges qualified as DebugFoldRanges
import Unison.Codebase.Editor.HandleInput.DeleteBranch (handleDeleteBranch)
import Unison.Codebase.Editor.HandleInput.DeleteProject (handleDeleteProject)
import Unison.Codebase.Editor.HandleInput.EditNamespace (handleEditNamespace)
import Unison.Codebase.Editor.HandleInput.FindAndReplace (handleStructuredFindI, handleStructuredFindReplaceI)
import Unison.Codebase.Editor.HandleInput.FormatFile qualified as Format
import Unison.Codebase.Editor.HandleInput.Load (EvalMode (Sandboxed), evalUnisonFile, handleLoad, loadUnisonFile)
import Unison.Codebase.Editor.HandleInput.MoveAll (handleMoveAll)
import Unison.Codebase.Editor.HandleInput.MoveBranch (doMoveBranch)
@ -85,6 +87,7 @@ import Unison.Codebase.Editor.HandleInput.Push (handleGist, handlePushRemoteBran
import Unison.Codebase.Editor.HandleInput.ReleaseDraft (handleReleaseDraft)
import Unison.Codebase.Editor.HandleInput.Run (handleRun)
import Unison.Codebase.Editor.HandleInput.RuntimeUtils qualified as RuntimeUtils
import Unison.Codebase.Editor.HandleInput.ShowDefinition (showDefinitions)
import Unison.Codebase.Editor.HandleInput.TermResolution (resolveMainRef, resolveTermRef)
import Unison.Codebase.Editor.HandleInput.Tests qualified as Tests
import Unison.Codebase.Editor.HandleInput.UI (openUI)
@ -119,6 +122,7 @@ import Unison.Codebase.TermEdit.Typing qualified as TermEdit
import Unison.Codebase.TypeEdit (TypeEdit)
import Unison.Codebase.TypeEdit qualified as TypeEdit
import Unison.Codebase.Verbosity qualified as Verbosity
import Unison.CommandLine.BranchRelativePath (BranchRelativePath)
import Unison.CommandLine.Completion qualified as Completion
import Unison.CommandLine.DisplayValues qualified as DisplayValues
import Unison.CommandLine.InputPattern qualified as IP
@ -273,9 +277,11 @@ loop e = do
void $ propagatePatch description patch' currentPath
Cli.respond Success
previewResponse sourceName sr uf = do
names <- displayNames uf
ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl names
Cli.respond $ Typechecked (Text.pack sourceName) ppe sr uf
names <- Cli.currentNames
let namesWithDefinitionsFromFile = UF.addNamesFromTypeCheckedUnisonFile uf names
filePPED <- Cli.prettyPrintEnvDeclFromNames namesWithDefinitionsFromFile
let suffixifiedPPE = PPE.suffixifiedPPE filePPED
Cli.respond $ Typechecked (Text.pack sourceName) suffixifiedPPE sr uf
in Cli.time "InputPattern" case input of
ApiI -> do
Cli.Env {serverBaseUrl} <- ask
@ -416,7 +422,7 @@ loop e = do
ForkLocalBranchI src0 dest0 -> do
(srcb, branchEmpty) <-
case src0 of
Left hash -> (, WhichBranchEmptyHash hash) <$> Cli.resolveShortCausalHash hash
Left hash -> (,WhichBranchEmptyHash hash) <$> Cli.resolveShortCausalHash hash
Right path' -> do
absPath <- ProjectUtils.branchRelativePathToAbsolute path'
let srcp = Path.convert absPath
@ -539,11 +545,10 @@ loop e = do
Cli.respondNumbered (Output.ShowDiffAfterUndo ppe diff)
UiI path' -> openUI path'
DocToMarkdownI docName -> do
basicPrettyPrintNames <- getBasicPrettyPrintNames
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
hqLength <- Cli.runTransaction Codebase.hashLength
let pped = PPED.makePPED (PPE.hqNamer hqLength basicPrettyPrintNames) (PPE.suffixifyByHash basicPrettyPrintNames)
basicPrettyPrintNames <- basicParseNames
let nameSearch = NameSearch.makeNameSearch hqLength basicPrettyPrintNames
let nameSearch = NameSearch.makeNameSearch hqLength names
Cli.Env {codebase, runtime} <- ask
mdText <- liftIO $ do
docRefs <- Backend.docsForDefinitionName codebase nameSearch Names.IncludeSuffixes docName
@ -553,9 +558,9 @@ loop e = do
Cli.respond $ Output.MarkdownOut (Text.intercalate "\n---\n" mdText)
DocsToHtmlI namespacePath' sourceDirectory -> do
Cli.Env {codebase, sandboxedRuntime} <- ask
rootBranch <- Cli.getRootBranch
absPath <- Path.unabsolute <$> Cli.resolvePath' namespacePath'
_evalErrs <- liftIO $ (Backend.docsInBranchToHtmlFiles sandboxedRuntime codebase rootBranch absPath sourceDirectory)
absPath <- Cli.resolvePath' namespacePath'
branch <- liftIO $ Codebase.getBranchAtPath codebase absPath
_evalErrs <- liftIO $ (Backend.docsInBranchToHtmlFiles sandboxedRuntime codebase branch sourceDirectory)
pure ()
AliasTermI src' dest' -> do
Cli.Env {codebase} <- ask
@ -663,22 +668,20 @@ loop e = do
fixupOutput :: Path.HQSplit -> HQ.HashQualified Name
fixupOutput = fmap Path.unsafeToName . HQ'.toHQ . Path.unsplitHQ
NamesI global query -> do
currentPath' <- Path.unabsolute <$> Cli.getCurrentPath
hqLength <- Cli.runTransaction Codebase.hashLength
root <- Cli.getRootBranch
(names, pped) <-
if global || any Name.isAbsolute query
then do
let root0 = Branch.head root
let names = Names.makeAbsolute $ Branch.toNames root0
-- Use an absolutely qualified ppe for view.global
let names = Names.makeAbsolute $ Branch.toNames root0
let pped = PPED.makePPED (PPE.hqNamer hqLength names) (PPE.suffixifyByHash names)
pure (names, pped)
else do
currentBranch <- Cli.getCurrentBranch0
let currentNames = Branch.toNames currentBranch
let pped = Backend.getCurrentPrettyNames hqLength (Backend.Within currentPath') root
pure (currentNames, pped)
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
pure (names, pped)
let unsuffixifiedPPE = PPED.unsuffixifiedPPE pped
terms = Names.lookupHQTerm Names.IncludeSuffixes query names
@ -689,8 +692,7 @@ loop e = do
types' = map (\r -> (r, PPE.allTypeNames unsuffixifiedPPE r)) (Set.toList types)
Cli.respond $ ListNames global hqLength types' terms'
DocsI srcs -> do
basicPrettyPrintNames <- getBasicPrettyPrintNames
for_ srcs (docsI basicPrettyPrintNames)
for_ srcs docsI
CreateAuthorI authorNameSegment authorFullName -> do
Cli.Env {codebase} <- ask
initialBranch <- Cli.getCurrentBranch
@ -766,12 +768,12 @@ loop e = do
case (null endangerments, insistence) of
(True, _) -> pure (Cli.respond Success)
(False, Force) -> do
ppeDecl <- currentPrettyPrintEnvDecl Backend.Within
ppeDecl <- Cli.currentPrettyPrintEnvDecl
pure do
Cli.respond Success
Cli.respondNumbered $ DeletedDespiteDependents ppeDecl endangerments
(False, Try) -> do
ppeDecl <- currentPrettyPrintEnvDecl Backend.Within
ppeDecl <- Cli.currentPrettyPrintEnvDecl
Cli.respondNumbered $ CantDeleteNamespace ppeDecl endangerments
Cli.returnEarlyWithoutOutput
parentPathAbs <- Cli.resolvePath' parentPath
@ -783,10 +785,10 @@ loop e = do
afterDelete
DeleteTarget'ProjectBranch name -> handleDeleteBranch name
DeleteTarget'Project name -> handleDeleteProject name
DisplayI outputLoc names -> do
basicPrettyPrintNames <- getBasicPrettyPrintNames
traverse_ (displayI basicPrettyPrintNames outputLoc) names
DisplayI outputLoc namesToDisplay -> do
traverse_ (displayI outputLoc) namesToDisplay
ShowDefinitionI outputLoc showDefinitionScope query -> handleShowDefinition outputLoc showDefinitionScope query
EditNamespaceI paths -> handleEditNamespace LatestFileLocation paths
FindPatchI -> do
branch <- Cli.getCurrentBranch0
let patches =
@ -801,16 +803,15 @@ loop e = do
pathArgAbs <- Cli.resolvePath' pathArg
entries <- liftIO (Backend.lsAtPath codebase Nothing pathArgAbs)
-- caching the result as an absolute path, for easier jumping around
Cli.setNumberedArgs $ fmap entryToHQString entries
currentBranch <- Cli.getCurrentBranch
let buildPPE = do
schLength <- Codebase.runTransaction codebase Codebase.branchHashLength
pure $
Backend.basicSuffixifiedNames
schLength
currentBranch
(Backend.AllNames (Path.unabsolute pathArgAbs))
pped <- Cli.currentPrettyPrintEnvDecl
let suffixifiedPPE = PPED.suffixifiedPPE pped
-- This used to be a delayed action which only forced the loading of the root
-- branch when it was necessary for printing the results, but that got wiped out
-- when we ported to the new Cli monad.
-- It would be nice to restore it, but it's pretty rare that it actually results
-- in an improvement, so perhaps it's not worth the effort.
let buildPPE = pure suffixifiedPPE
Cli.respond $ ListShallow buildPPE entries
where
entryToHQString :: ShallowListEntry v Ann -> String
@ -934,8 +935,9 @@ loop e = do
let adds = SlurpResult.adds sr
Cli.stepAtNoSync (Path.unabsolute currentPath, doSlurpAdds adds uf)
Cli.runTransaction . Codebase.addDefsToCodebase codebase . SlurpResult.filterUnisonFile sr $ uf
ppe <- prettyPrintEnvDecl =<< displayNames uf
Cli.respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr
pped <- Cli.prettyPrintEnvDeclFromNames $ UF.addNamesFromTypeCheckedUnisonFile uf currentNames
let suffixifiedPPE = PPED.suffixifiedPPE pped
Cli.respond $ SlurpOutput input suffixifiedPPE sr
Cli.syncRoot description
SaveExecuteResultI resultName -> handleAddRun input resultName
PreviewAddI requestedNames -> do
@ -1024,8 +1026,9 @@ loop e = do
Cli.respond Success
ListEditsI maybePath -> do
patch <- Cli.getPatchAt (fromMaybe Cli.defaultPatchPath maybePath)
ppe <- suffixifiedPPE =<< makePrintNamesFromLabeled'
Cli.respondNumbered $ ListEdits patch ppe
pped <- Cli.currentPrettyPrintEnvDecl
let suffixifiedPPE = PPED.suffixifiedPPE pped
Cli.respondNumbered $ ListEdits patch suffixifiedPPE
PullRemoteBranchI sourceTarget sMode pMode verbosity -> doPullRemoteBranch sourceTarget sMode pMode verbosity
PushRemoteBranchI pushRemoteBranchInput -> handlePushRemoteBranch pushRemoteBranchInput
ListDependentsI hq -> handleDependents hq
@ -1064,6 +1067,25 @@ loop e = do
_ -> pure ()
Nothing -> do
Cli.respond DebugFuzzyOptionsNoResolver
DebugFormatI -> do
Cli.Env {writeSource, loadSource} <- ask
void $ runMaybeT do
(filePath, _) <- MaybeT Cli.getLatestFile
pf <- lift Cli.getLatestParsedFile
tf <- lift Cli.getLatestTypecheckedFile
names <- lift Cli.currentNames
let buildPPED uf tf =
Cli.prettyPrintEnvDeclFromNames $ (fromMaybe mempty $ (UF.typecheckedToNames <$> tf) <|> (UF.toNames <$> uf)) `Names.shadowing` names
let formatWidth = 80
currentPath <- lift $ Cli.getCurrentPath
updates <- MaybeT $ Format.formatFile buildPPED formatWidth currentPath pf tf Nothing
source <-
liftIO (loadSource (Text.pack filePath)) >>= \case
Cli.InvalidSourceNameError -> lift $ Cli.returnEarly $ Output.InvalidSourceName filePath
Cli.LoadError -> lift $ Cli.returnEarly $ Output.SourceLoadFailed filePath
Cli.LoadSuccess contents -> pure contents
let updatedSource = Format.applyTextReplacements updates source
liftIO $ writeSource (Text.pack filePath) updatedSource
DebugDumpNamespacesI -> do
let seen h = State.gets (Set.member h)
set h = State.modify (Set.insert h)
@ -1117,6 +1139,10 @@ loop e = do
traceM $ show name ++ ",Type," ++ Text.unpack (Reference.toText r)
for_ (Relation.toList . Branch.deepTerms $ rootBranch0) \(r, name) ->
traceM $ show name ++ ",Term," ++ Text.unpack (Referent.toText r)
DebugTermI isVerbose hqName -> DebugDefinition.debugTerm isVerbose hqName
DebugLSPFoldRangesI -> do
DebugFoldRanges.debugFoldRanges
DebugTypeI hqName -> DebugDefinition.debugDecl hqName
DebugClearWatchI {} ->
Cli.runTransaction Codebase.clearWatches
DebugDoctorI {} -> do
@ -1173,10 +1199,10 @@ inputDescription :: Input -> Cli Text
inputDescription input =
case input of
SaveExecuteResultI _str -> pure "save-execute-result"
ForkLocalBranchI _src0 _dest0 -> do
-- src <- hp' src0
-- dest <- p' dest0
pure ("fork ") -- todo
ForkLocalBranchI src0 dest0 -> do
src <- either (pure . Text.pack . show) brp src0
dest <- brp dest0
pure ("fork " <> src <> " " <> dest)
MergeLocalBranchI src0 dest0 mode -> do
src <- looseCodeOrProjectToText src0
dest <- looseCodeOrProjectToText dest0
@ -1331,10 +1357,17 @@ inputDescription input =
DebugDoctorI {} -> wat
DebugDumpNamespaceSimpleI {} -> wat
DebugDumpNamespacesI {} -> wat
DebugTermI verbose hqName ->
if verbose
then pure ("debug.term.verbose " <> HQ.toText hqName)
else pure ("debug.term " <> HQ.toText hqName)
DebugTypeI hqName -> pure ("debug.type " <> HQ.toText hqName)
DebugLSPFoldRangesI -> pure "debug.lsp.fold-ranges"
DebugNameDiffI {} -> wat
DebugNumberedArgsI {} -> wat
DebugTabCompletionI _input -> wat
DebugFuzzyOptionsI cmd input -> pure . Text.pack $ "debug.fuzzy-completions " <> unwords (cmd : toList input)
DebugFormatI -> pure "debug.format"
DebugTypecheckedUnisonFileI {} -> wat
DeprecateTermI {} -> wat
DeprecateTypeI {} -> wat
@ -1369,6 +1402,8 @@ inputDescription input =
ReleaseDraftI {} -> wat
ShowDefinitionByPrefixI {} -> wat
ShowDefinitionI {} -> wat
EditNamespaceI paths ->
pure $ Text.unwords ("edit.namespace" : (Path.toText <$> paths))
ShowReflogI {} -> wat
SwitchBranchI {} -> wat
TestI {} -> wat
@ -1381,6 +1416,8 @@ inputDescription input =
hp' = either (pure . Text.pack . show) p'
p' :: Path' -> Cli Text
p' = fmap tShow . Cli.resolvePath'
brp :: BranchRelativePath -> Cli Text
brp = fmap from . ProjectUtils.resolveBranchRelativePath
ops' :: Maybe Path.Split' -> Cli Text
ops' = maybe (pure ".") ps'
opatch :: Maybe Path.Split' -> Cli Text
@ -1411,31 +1448,25 @@ handleFindI ::
Cli ()
handleFindI isVerbose fscope ws input = do
Cli.Env {codebase} <- ask
root' <- Cli.getRootBranch
currentPath' <- Cli.getCurrentPath
currentBranch0 <- Cli.getCurrentBranch0
let getNames :: FindScope -> Names
getNames findScope =
let cp = Path.unabsolute currentPath'
nameScope = case findScope of
FindLocal -> Backend.Within cp
FindLocalAndDeps -> Backend.Within cp
FindGlobal -> Backend.AllNames cp
scopeFilter = case findScope of
FindLocal ->
let f n =
case Name.segments n of
"lib" Nel.:| _ : _ -> False
_ -> True
in Names.filter f
FindGlobal -> id
FindLocalAndDeps ->
let f n =
case Name.segments n of
"lib" Nel.:| (_ : "lib" : _) -> False
_ -> True
in Names.filter f
in scopeFilter (Backend.prettyNamesForBranch root' nameScope)
(pped, names) <- case fscope of
FindLocal -> do
let names = Branch.toNames (Branch.withoutLib currentBranch0)
-- Don't exclude anything from the pretty printer, since the type signatures we print for
-- results may contain things in lib.
pped <- Cli.currentPrettyPrintEnvDecl
pure (pped, names)
FindLocalAndDeps -> do
let names = Branch.toNames (Branch.withoutTransitiveLibs currentBranch0)
-- Don't exclude anything from the pretty printer, since the type signatures we print for
-- results may contain things in lib.
pped <- Cli.currentPrettyPrintEnvDecl
pure (pped, names)
FindGlobal -> do
globalNames <- Names.makeAbsolute . Branch.toNames <$> Cli.getRootBranch0
pped <- Cli.prettyPrintEnvDeclFromNames globalNames
pure (pped, globalNames)
let suffixifiedPPE = PPED.suffixifiedPPE pped
let getResults :: Names -> Cli [SearchResult]
getResults names =
case ws of
@ -1465,13 +1496,20 @@ handleFindI isVerbose fscope ws input = do
let respondResults results = do
Cli.setNumberedArgs $ fmap searchResultToHQString results
results' <- Cli.runTransaction (Backend.loadSearchResults codebase results)
ppe <- suffixifiedPPE =<< makePrintNamesFromLabeled'
Cli.respond $ ListOfDefinitions fscope ppe isVerbose results'
results <- getResults (getNames fscope)
Cli.respond $ ListOfDefinitions fscope suffixifiedPPE isVerbose results'
results <- getResults names
case (results, fscope) of
([], FindLocal) -> do
Cli.respond FindNoLocalMatches
respondResults =<< getResults (getNames FindLocalAndDeps)
-- We've already searched everything else, so now we search JUST the
-- names in lib.
let mayOnlyLibBranch = currentBranch0 & Branch.children %%~ (\cs -> Map.singleton "lib" <$> Map.lookup "lib" cs)
case mayOnlyLibBranch of
Nothing -> respondResults []
Just onlyLibBranch -> do
let onlyLibNames = Branch.toNames onlyLibBranch
results <- getResults onlyLibNames
respondResults results
_ -> respondResults results
handleDependencies :: HQ.HashQualified Name -> Cli ()
@ -1479,7 +1517,8 @@ handleDependencies hq = do
Cli.Env {codebase} <- ask
-- todo: add flag to handle transitive efficiently
lds <- resolveHQToLabeledDependencies hq
ppe <- PPE.suffixifiedPPE <$> currentPrettyPrintEnvDecl Backend.WithinStrict
pped <- Cli.currentPrettyPrintEnvDecl
let suffixifiedPPE = PPED.suffixifiedPPE pped
when (null lds) do
Cli.returnEarly (LabeledReferenceNotFound hq)
results <- for (toList lds) \ld -> do
@ -1504,15 +1543,15 @@ handleDependencies hq = do
Just tp -> Type.labeledDependencies tp
tm _ = pure mempty
in LD.fold tp tm ld
let types = [(PPE.typeName ppe r, r) | LabeledDependency.TypeReference r <- toList dependencies]
let terms = [(PPE.termName ppe r, r) | LabeledDependency.TermReferent r <- toList dependencies]
let types = [(PPE.typeName suffixifiedPPE r, r) | LabeledDependency.TypeReference r <- toList dependencies]
let terms = [(PPE.termName suffixifiedPPE r, r) | LabeledDependency.TermReferent r <- toList dependencies]
pure (types, terms)
let types = nubOrdOn snd . Name.sortByText (HQ.toText . fst) $ (join $ fst <$> results)
let terms = nubOrdOn snd . Name.sortByText (HQ.toText . fst) $ (join $ snd <$> results)
Cli.setNumberedArgs $
map (Text.unpack . Reference.toText . snd) types
<> map (Text.unpack . Reference.toText . Referent.toReference . snd) terms
Cli.respond $ ListDependencies ppe lds (fst <$> types) (fst <$> terms)
Cli.respond $ ListDependencies suffixifiedPPE lds (fst <$> types) (fst <$> terms)
handleDependents :: HQ.HashQualified Name -> Cli ()
handleDependents hq = do
@ -1520,7 +1559,7 @@ handleDependents hq = do
lds <- resolveHQToLabeledDependencies hq
-- Use an unsuffixified PPE here, so we display full names (relative to the current path),
-- rather than the shortest possible unambiguous name.
pped <- currentPrettyPrintEnvDecl Backend.WithinStrict
pped <- Cli.currentPrettyPrintEnvDecl
let fqppe = PPE.unsuffixifiedPPE pped
let ppe = PPE.suffixifiedPPE pped
when (null lds) do
@ -1583,8 +1622,8 @@ handleDiffNamespaceToPatch description input = do
}
-- Display the patch that we are about to create.
ppe <- suffixifiedPPE =<< makePrintNamesFromLabeled'
Cli.respondNumbered (ListEdits patch ppe)
suffixifiedPPE <- PPED.suffixifiedPPE <$> Cli.currentPrettyPrintEnvDecl
Cli.respondNumbered (ListEdits patch suffixifiedPPE)
(patchPath, patchName) <- Cli.resolveSplit' (input ^. #patch)
@ -1621,63 +1660,34 @@ handleDiffNamespaceToPatch description input = do
-- | Handle a @ShowDefinitionI@ input command, i.e. `view` or `edit`.
handleShowDefinition :: OutputLocation -> ShowDefinitionScope -> NonEmpty (HQ.HashQualified Name) -> Cli ()
handleShowDefinition outputLoc showDefinitionScope query = do
Cli.Env {codebase, writeSource} <- ask
Cli.Env {codebase} <- ask
hqLength <- Cli.runTransaction Codebase.hashLength
-- If the query is empty, run a fuzzy search.
root <- Cli.getRootBranch
let root0 = Branch.head root
currentPath' <- Path.unabsolute <$> Cli.getCurrentPath
let hasAbsoluteQuery = any (any Name.isAbsolute) query
(names, unbiasedPPED) <- case (hasAbsoluteQuery, showDefinitionScope) of
-- If any of the queries are absolute, use global names.
-- TODO: We should instead print each definition using the names from its project-branch root.
(True, _) -> do
let namingScope = Backend.AllNames currentPath'
let parseNames = Backend.parseNamesForBranch root namingScope
let ppe = Backend.getCurrentPrettyNames hqLength (Backend.Within currentPath') root
pure (parseNames, ppe)
(_, ShowDefinitionGlobal) -> do
root <- Cli.getRootBranch
let root0 = Branch.head root
let names = Names.makeAbsolute $ Branch.toNames root0
-- Use an absolutely qualified ppe for view.global
let ppe = PPED.makePPED (PPE.hqNamer hqLength names) (PPE.suffixifyByHash names)
pure (names, ppe)
pped <- Cli.prettyPrintEnvDeclFromNames names
pure (names, pped)
(_, ShowDefinitionGlobal) -> do
root <- Cli.getRootBranch
let root0 = Branch.head root
let names = Names.makeAbsolute $ Branch.toNames root0
pped <- Cli.prettyPrintEnvDeclFromNames names
pure (names, pped)
(_, ShowDefinitionLocal) -> do
currentBranch <- Cli.getCurrentBranch0
let currentNames = Branch.toNames currentBranch
let ppe = Backend.getCurrentPrettyNames hqLength (Backend.Within currentPath') root
pure (currentNames, ppe)
currentNames <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames currentNames
pure (currentNames, pped)
let pped = PPED.biasTo (mapMaybe HQ.toName (toList query)) unbiasedPPED
Backend.DefinitionResults terms types misses <- do
let nameSearch = NameSearch.makeNameSearch hqLength names
Cli.runTransaction (Backend.definitionsByName codebase nameSearch includeCycles Names.IncludeSuffixes (toList query))
outputPath <- getOutputPath
case outputPath of
_ | null terms && null types -> pure ()
Nothing -> do
-- If we're writing to console we don't add test-watch syntax
let isTest _ = False
let isSourceFile = False
-- No filepath, render code to console.
let renderedCodePretty = renderCodePretty pped isSourceFile isTest terms types
Cli.respond $ DisplayDefinitions renderedCodePretty
Just fp -> do
-- We build an 'isTest' check to prepend "test>" to tests in a scratch file.
testRefs <- Cli.runTransaction (Codebase.filterTermsByReferenceIdHavingType codebase (DD.testResultType mempty) (Map.keysSet terms & Set.mapMaybe Reference.toId))
let isTest r = Set.member r testRefs
let isSourceFile = True
let renderedCodePretty = renderCodePretty pped isSourceFile isTest terms types
let renderedCodeText = Text.pack $ P.toPlain 80 renderedCodePretty
-- We set latestFile to be programmatically generated, if we
-- are viewing these definitions to a file - this will skip the
-- next update for that file (which will happen immediately)
#latestFile ?= (fp, True)
liftIO $ writeSource (Text.pack fp) renderedCodeText
let numDefinitions = Map.size terms + Map.size types
Cli.respond $ LoadedDefinitionsToSourceFile fp numDefinitions
when (not (null misses)) (Cli.respond (SearchTermsNotFound misses))
showDefinitions outputLoc pped terms types misses
where
renderCodePretty pped isSourceFile isTest terms types =
P.syntaxToColor . P.sep "\n\n" $
Pretty.prettyTypeDisplayObjects pped types <> Pretty.prettyTermDisplayObjects pped isSourceFile isTest terms
-- `view`: don't include cycles; `edit`: include cycles
includeCycles =
case outputLoc of
@ -1685,26 +1695,14 @@ handleShowDefinition outputLoc showDefinitionScope query = do
FileLocation _ -> Backend.IncludeCycles
LatestFileLocation -> Backend.IncludeCycles
-- Get the file path to send the definition(s) to. `Nothing` means the terminal.
getOutputPath :: Cli (Maybe FilePath)
getOutputPath =
case outputLoc of
ConsoleLocation -> pure Nothing
FileLocation path -> pure (Just path)
LatestFileLocation -> do
loopState <- State.get
pure case loopState ^. #latestFile of
Nothing -> Just "scratch.u"
Just (path, _) -> Just path
-- todo: compare to `getHQTerms` / `getHQTypes`. Is one universally better?
resolveHQToLabeledDependencies :: HQ.HashQualified Name -> Cli (Set LabeledDependency)
resolveHQToLabeledDependencies = \case
HQ.NameOnly n -> do
parseNames <- basicParseNames
names <- Cli.currentNames
let terms, types :: Set LabeledDependency
terms = Set.map LD.referent . Name.searchBySuffix n $ Names.terms parseNames
types = Set.map LD.typeRef . Name.searchBySuffix n $ Names.types parseNames
terms = Set.map LD.referent . Name.searchBySuffix n $ Names.terms names
types = Set.map LD.typeRef . Name.searchBySuffix n $ Names.types names
pure $ terms <> types
-- rationale: the hash should be unique enough that the name never helps
HQ.HashQualified _n sh -> resolveHashOnly sh
@ -1723,13 +1721,13 @@ doDisplay :: OutputLocation -> Names -> Term Symbol () -> Cli ()
doDisplay outputLoc names tm = do
Cli.Env {codebase} <- ask
loopState <- State.get
ppe <- prettyPrintEnvDecl names
pped <- Cli.prettyPrintEnvDeclFromNames names
let suffixifiedPPE = PPED.suffixifiedPPE pped
(tms, typs) <- maybe mempty UF.indexByReference <$> Cli.getLatestTypecheckedFile
let useCache = True
evalTerm tm =
fmap ErrorUtil.hush . fmap (fmap Term.unannotate) $
RuntimeUtils.evalUnisonTermE True (PPE.suffixifiedPPE ppe) useCache (Term.amap (const External) tm)
RuntimeUtils.evalUnisonTermE True suffixifiedPPE useCache (Term.amap (const External) tm)
loadTerm (Reference.DerivedId r) = case Map.lookup r tms of
Nothing -> fmap (fmap Term.unannotate) $ Cli.runTransaction (Codebase.getTerm codebase r)
Just (_, tm, _) -> pure (Just $ Term.unannotate tm)
@ -1741,7 +1739,7 @@ doDisplay outputLoc names tm = do
loadTypeOfTerm' (Referent.Ref (Reference.DerivedId r))
| Just (_, _, ty) <- Map.lookup r tms = pure $ Just (void ty)
loadTypeOfTerm' r = fmap (fmap void) . Cli.runTransaction . Codebase.getTypeOfReferent codebase $ r
rendered <- DisplayValues.displayTerm ppe loadTerm loadTypeOfTerm' evalTerm loadDecl tm
rendered <- DisplayValues.displayTerm pped loadTerm loadTypeOfTerm' evalTerm loadDecl tm
mayFP <- case outputLoc of
ConsoleLocation -> pure Nothing
FileLocation path -> Just <$> Directory.canonicalizePath path
@ -1773,11 +1771,8 @@ doShowTodoOutput patch scopePath = do
( Text.unpack . Reference.toText . view _2
<$> fst (TO.todoFrontierDependents todo)
)
-- only needs the local references to check for obsolete defs
ppe <- do
names <- makePrintNamesFromLabeled'
prettyPrintEnvDecl names
Cli.respondNumbered $ TodoOutput ppe todo
pped <- Cli.currentPrettyPrintEnvDecl
Cli.respondNumbered $ TodoOutput pped todo
checkTodo :: Codebase m Symbol Ann -> Patch -> Names -> Sqlite.Transaction (TO.TodoOutput Symbol Ann)
checkTodo codebase patch names0 = do
@ -1915,10 +1910,6 @@ searchBranchScored names0 score queries =
pair qn =
(\score -> (Just score, result)) <$> score qn name
basicPPE :: Cli PPE.PrettyPrintEnv
basicPPE =
basicParseNames >>= suffixifiedPPE
compilerPath :: Path.Path'
compilerPath = Path.Path' {Path.unPath' = Left abs}
where
@ -1953,7 +1944,7 @@ getSchemeGenLibDir =
doGenerateSchemeBoot ::
Bool -> Maybe PPE.PrettyPrintEnv -> Maybe String -> Cli ()
doGenerateSchemeBoot force mppe mdir = do
ppe <- maybe basicPPE pure mppe
ppe <- maybe (PPED.suffixifiedPPE <$> Cli.currentPrettyPrintEnvDecl) pure mppe
dir <- maybe getSchemeGenLibDir pure mdir
let bootf = dir </> "unison" </> "boot-generated.ss"
swrapf = dir </> "unison" </> "simple-wrappers.ss"
@ -2072,6 +2063,8 @@ checkDeletes typesTermsTuples doutput inputs = do
toRel setRef name = R.fromList (fmap (name,) (toList setRef))
let toDelete = fmap (\(_, names, types, terms) -> Names (toRel terms names) (toRel types names)) splitsNames
-- make sure endangered is completely contained in paths
-- TODO: We should just check for endangerments from the project root, not the
-- global root!
rootNames <- Branch.toNames <$> Cli.getRootBranch0
-- get only once for the entire deletion set
let allTermsToDelete :: Set LabeledDependency
@ -2094,18 +2087,18 @@ checkDeletes typesTermsTuples doutput inputs = do
(map (BranchUtil.makeDeleteTypeName split) . Set.toList $ types)
++ (map (BranchUtil.makeDeleteTermName split) . Set.toList $ terms)
)
before <- Cli.getRootBranch0
before <- Cli.getCurrentBranch0
description <- inputDescription inputs
Cli.stepManyAt description deleteTypesTerms
case doutput of
DeleteOutput'Diff -> do
after <- Cli.getRootBranch0
after <- Cli.getCurrentBranch0
(ppe, diff) <- diffHelper before after
Cli.respondNumbered (ShowDiffAfterDeleteDefinitions ppe diff)
DeleteOutput'NoDiff -> do
Cli.respond Success
else do
ppeDecl <- currentPrettyPrintEnvDecl Backend.Within
ppeDecl <- Cli.prettyPrintEnvDeclFromNames rootNames
let combineRefs = List.foldl (Map.unionWith NESet.union) Map.empty endangeredDeletions
Cli.respondNumbered (CantDeleteDefinitions ppeDecl combineRefs)
@ -2120,7 +2113,7 @@ getEndangeredDependents ::
Names ->
-- | All entities we want to delete (including the target)
Set LabeledDependency ->
-- | All names from the root branch
-- | Names from the current branch
Names ->
-- | map from references going extinct to the set of endangered dependents
Sqlite.Transaction (Map LabeledDependency (NESet LabeledDependency))
@ -2158,37 +2151,51 @@ getEndangeredDependents targetToDelete otherDesiredDeletions rootNames = do
pure extinctToEndangered
displayI ::
Names ->
OutputLocation ->
HQ.HashQualified Name ->
Cli ()
displayI names outputLoc hq = do
displayI outputLoc hq = do
let useRoot = any Name.isAbsolute hq
(names, pped) <-
if useRoot
then do
root <- Cli.getRootBranch
let root0 = Branch.head root
let names = Names.makeAbsolute $ Branch.toNames root0
pped <- Cli.prettyPrintEnvDeclFromNames names
pure (names, pped)
else do
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
pure (names, pped)
let suffixifiedPPE = PPE.suffixifiedPPE pped
let bias = maybeToList $ HQ.toName hq
latestTypecheckedFile <- Cli.getLatestTypecheckedFile
case addWatch (HQ.toString hq) latestTypecheckedFile of
Nothing -> do
let results = Names.lookupHQTerm Names.IncludeSuffixes hq names
pped <- prettyPrintEnvDecl names
ref <-
Set.asSingleton results & onNothing do
Cli.returnEarly
if Set.null results
then SearchTermsNotFound [hq]
else TermAmbiguous (PPE.suffixifiedPPE pped) hq results
else TermAmbiguous suffixifiedPPE hq results
let tm = Term.fromReferent External ref
tm <- RuntimeUtils.evalUnisonTerm True (PPE.biasTo bias $ PPE.suffixifiedPPE pped) True tm
tm <- RuntimeUtils.evalUnisonTerm True (PPE.biasTo bias $ suffixifiedPPE) True tm
doDisplay outputLoc names (Term.unannotate tm)
Just (toDisplay, unisonFile) -> do
ppe <- PPE.biasTo bias <$> executePPE unisonFile
(_, watches) <- evalUnisonFile Sandboxed ppe unisonFile []
let namesWithDefinitionsFromFile = UF.addNamesFromTypeCheckedUnisonFile unisonFile names
filePPED <- Cli.prettyPrintEnvDeclFromNames namesWithDefinitionsFromFile
let suffixifiedFilePPE = PPE.biasTo bias $ PPE.suffixifiedPPE filePPED
(_, watches) <- evalUnisonFile Sandboxed suffixifiedFilePPE unisonFile []
(_, _, _, _, tm, _) <-
Map.lookup toDisplay watches & onNothing (error $ "Evaluation dropped a watch expression: " <> HQ.toString hq)
ns <- displayNames unisonFile
let ns = UF.addNamesFromTypeCheckedUnisonFile unisonFile names
doDisplay outputLoc ns tm
docsI :: Names -> Path.HQSplit' -> Cli ()
docsI prettyPrintNames src =
fileByName
docsI :: Path.HQSplit' -> Cli ()
docsI src = do
findInScratchfileByName
where
{- Given `docs foo`, we look for docs in 3 places, in this order:
(fileByName) First check the file for `foo.doc`, and if found do `display foo.doc`
@ -2203,16 +2210,15 @@ docsI prettyPrintNames src =
dotDoc :: HQ.HashQualified Name
dotDoc = hq <&> \n -> Name.joinDot n "doc"
fileByName :: Cli ()
fileByName = do
ns <- maybe mempty UF.typecheckedToNames <$> Cli.getLatestTypecheckedFile
case Names.lookupHQTerm Names.IncludeSuffixes dotDoc ns of
s
| Set.size s == 1 ->
-- the displayI command expects full term names, so we resolve
-- the hash back to its full name in the file
displayI prettyPrintNames ConsoleLocation (Names.longestTermName 10 (Set.findMin s) ns)
_ -> displayI prettyPrintNames ConsoleLocation dotDoc
findInScratchfileByName :: Cli ()
findInScratchfileByName = do
namesInFile <- Cli.getNamesFromLatestFile
case Names.lookupHQTerm Names.IncludeSuffixes dotDoc namesInFile of
s | Set.size s == 1 -> do
-- the displayI command expects full term names, so we resolve
-- the hash back to its full name in the file
displayI ConsoleLocation (Names.longestTermName 10 (Set.findMin s) namesInFile)
_ -> displayI ConsoleLocation dotDoc
loadDisplayInfo ::
Codebase m Symbol Ann ->
@ -2235,15 +2241,10 @@ loadTypeDisplayObject codebase = \case
maybe (MissingObject $ Reference.idToShortHash id) UserObject
<$> Codebase.getTypeDeclaration codebase id
lexedSource :: Text -> Text -> Cli (Names, (Text, [L.Token L.Lexeme]))
lexedSource :: Text -> Text -> Cli (Text, [L.Token L.Lexeme])
lexedSource name src = do
let tokens = L.lexer (Text.unpack name) (Text.unpack src)
parseNames <- basicParseNames
pure (parseNames, (src, tokens))
suffixifiedPPE :: Names -> Cli PPE.PrettyPrintEnv
suffixifiedPPE ns =
Cli.runTransaction Codebase.hashLength <&> \hashLen -> PPE.makePPE (PPE.hqNamer hashLen ns) (PPE.suffixifyByHash ns)
pure (src, tokens)
parseSearchType :: SrcLoc -> String -> Cli (Type Symbol Ann)
parseSearchType srcLoc typ = Type.removeAllEffectVars <$> parseType srcLoc typ
@ -2254,9 +2255,8 @@ type SrcLoc = String
parseType :: SrcLoc -> String -> Cli (Type Symbol Ann)
parseType input src = do
-- `show Input` is the name of the "file" being lexed
(names0, lexed) <- lexedSource (Text.pack input) (Text.pack src)
parseNames <- basicParseNames
let names = Names.push names0 parseNames
lexed <- lexedSource (Text.pack input) (Text.pack src)
names <- Cli.currentNames
let parsingEnv =
Parser.ParsingEnv
{ uniqueNames = mempty,
@ -2298,22 +2298,13 @@ addWatch watchName (Just uf) = do
)
_ -> addWatch watchName Nothing
executePPE ::
(Var v) =>
TypecheckedUnisonFile v a ->
Cli PPE.PrettyPrintEnv
executePPE unisonFile =
suffixifiedPPE =<< displayNames unisonFile
hqNameQuery :: Names.SearchType -> [HQ.HashQualified Name] -> Cli QueryResult
hqNameQuery searchType query = do
Cli.Env {codebase} <- ask
root' <- Cli.getRootBranch
currentPath <- Cli.getCurrentPath
names <- Cli.currentNames
Cli.runTransaction do
hqLength <- Codebase.hashLength
let parseNames = Backend.parseNamesForBranch root' (Backend.AllNames (Path.unabsolute currentPath))
let nameSearch = NameSearch.makeNameSearch hqLength parseNames
let nameSearch = NameSearch.makeNameSearch hqLength names
Backend.hqNameQuery codebase nameSearch searchType query
looseCodeOrProjectToPath :: Either Path' (ProjectAndBranch Sqlite.Project Sqlite.ProjectBranch) -> Path'

View File

@ -11,10 +11,9 @@ import Data.Text qualified as Text
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.NamesUtils (displayNames)
import Unison.Cli.PrettyPrintUtils (prettyPrintEnvDecl)
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Codebase.Editor.HandleInput.Update (doSlurpAdds)
import Unison.Codebase.Editor.Input (Input)
import Unison.Codebase.Editor.Output (Output (NoLastRunResult, SaveTermNameConflict, SlurpOutput))
@ -31,6 +30,7 @@ import Unison.Symbol (Symbol)
import Unison.Syntax.Name qualified as Name
import Unison.UnisonFile (TypecheckedUnisonFile)
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Names qualified as UF
handleAddRun :: Input -> Name -> Cli ()
handleAddRun input resultName = do
@ -38,14 +38,16 @@ handleAddRun input resultName = do
uf <- addSavedTermToUnisonFile resultName
Cli.Env {codebase} <- ask
currentPath <- Cli.getCurrentPath
currentNames <- Branch.toNames <$> Cli.getCurrentBranch0
currentNames <- Cli.currentNames
let sr = Slurp.slurpFile uf (Set.singleton resultVar) Slurp.AddOp currentNames
let adds = SlurpResult.adds sr
Cli.stepAtNoSync (Path.unabsolute currentPath, doSlurpAdds adds uf)
Cli.runTransaction . Codebase.addDefsToCodebase codebase . SlurpResult.filterUnisonFile sr $ uf
ppe <- prettyPrintEnvDecl =<< displayNames uf
let namesWithDefinitionsFromFile = UF.addNamesFromTypeCheckedUnisonFile uf currentNames
pped <- Cli.prettyPrintEnvDeclFromNames namesWithDefinitionsFromFile
let suffixifiedPPE = PPE.suffixifiedPPE pped
Cli.syncRoot (Text.pack (InputPattern.patternName InputPatterns.saveExecuteResult) <> " " <> Name.toText resultName)
Cli.respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr
Cli.respond $ SlurpOutput input suffixifiedPPE sr
addSavedTermToUnisonFile :: Name -> Cli (TypecheckedUnisonFile Symbol Ann)
addSavedTermToUnisonFile resultName = do

View File

@ -0,0 +1,59 @@
module Unison.Codebase.Editor.HandleInput.DebugDefinition
( debugTerm,
debugDecl,
)
where
import Control.Monad.Reader
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Editor.Output (Output (..))
import Unison.ConstructorReference (GConstructorReference (ConstructorReference))
import Unison.DataDeclaration.ConstructorId (ConstructorId)
import Unison.HashQualified qualified as HQ
import Unison.Name (Name)
import Unison.NamesWithHistory qualified as Names
import Unison.Prelude
import Unison.Reference (TermReference, TypeReference)
import Unison.Reference qualified as Reference
import Unison.Referent qualified as Referent
debugTermReference :: Bool -> TermReference -> Cli ()
debugTermReference verbose ref = do
Cli.Env {codebase} <- ask
case ref of
Reference.DerivedId refId -> do
Cli.runTransaction (Codebase.getTerm codebase refId) >>= \case
Nothing -> Cli.respond $ TermNotFound' (Reference.toShortHash ref)
Just term -> do
Cli.respond $ DebugTerm verbose (Right term)
Reference.Builtin builtinTxt -> do
Cli.respond $ DebugTerm verbose (Left builtinTxt)
debugTypeReference :: TypeReference -> Maybe ConstructorId -> Cli ()
debugTypeReference ref mayConId = do
Cli.Env {codebase} <- ask
case ref of
Reference.DerivedId refId -> do
Cli.runTransaction (Codebase.getTypeDeclaration codebase refId) >>= \case
Nothing -> Cli.respond $ TypeNotFound' (Reference.toShortHash ref)
Just decl -> do
Cli.respond $ DebugDecl (Right decl) mayConId
Reference.Builtin builtinTxt -> do
Cli.respond $ DebugDecl (Left builtinTxt) mayConId
debugTerm :: Bool -> HQ.HashQualified Name -> Cli ()
debugTerm verbose hqName = do
names <- Cli.currentNames
let matches = Names.lookupHQTerm Names.IncludeSuffixes hqName names
for_ matches \case
Referent.Ref termReference -> debugTermReference verbose termReference
Referent.Con (ConstructorReference typeRef conId) _conTyp -> debugTypeReference typeRef (Just conId)
debugDecl :: HQ.HashQualified Name -> Cli ()
debugDecl hqName = do
names <- Cli.currentNames
let matches = Names.lookupHQType Names.IncludeSuffixes hqName names
for_ matches \typeRef -> debugTypeReference typeRef Nothing
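-- These are reached via the `debug.term`, `debug.term.verbose` and `debug.type` UCM
-- commands (see the DebugTermI / DebugTypeI cases in HandleInput); e.g. `debug.term foo`
-- prints the raw term behind every definition the name `foo` resolves to.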

View File

@ -0,0 +1,61 @@
module Unison.Codebase.Editor.HandleInput.DebugFoldRanges (debugFoldRanges) where
import Control.Lens
import Control.Monad.Reader
import Data.Text qualified as Text
import Language.LSP.Protocol.Lens
import Language.LSP.Protocol.Types qualified as LSP
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Codebase.Editor.HandleInput.FormatFile (TextReplacement (..))
import Unison.Codebase.Editor.HandleInput.FormatFile qualified as FormatFile
import Unison.Codebase.Editor.Output
import Unison.LSP.Conversions qualified as CV
import Unison.LSP.FoldingRange (foldingRangesForFile)
import Unison.Prelude
import Unison.Util.Range qualified as U
debugFoldRanges :: Cli ()
debugFoldRanges = do
Cli.Env {loadSource} <- ask
(filePath, _) <- Cli.expectLatestFile
parsedFile <- Cli.expectLatestParsedFile
let foldingRanges =
foldingRangesForFile parsedFile
& fmap
( \fr ->
LSP.Range
(LSP.Position (fr ^. startLine) (fromMaybe 0 $ fr ^. startCharacter))
( case (fr ^. endCharacter) of
Just c -> LSP.Position (fr ^. endLine) c
-- If there's no end char specified, go all the way to the beginning of the next line
Nothing -> LSP.Position ((fr ^. endLine) + 1) 0
)
)
sourceTxt <-
liftIO (loadSource (Text.pack filePath)) >>= \case
Cli.InvalidSourceNameError -> Cli.returnEarly $ InvalidSourceName filePath
Cli.LoadError -> Cli.returnEarly $ SourceLoadFailed filePath
Cli.LoadSuccess contents -> pure contents
Cli.respond $ AnnotatedFoldRanges $ annotateRanges sourceTxt foldingRanges
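-- So `debug.lsp.fold-ranges` (DebugLSPFoldRangesI in HandleInput) prints the latest
-- file's source with every computed folding region bracketed by 《 and 》, using
-- 'annotateRanges' below.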
-- | Annotate the bounds of a range within text using 《 and 》.
--
-- Useful for checking that computed ranges make sense against the source text.
--
-- >>> annotateRanges "one\ntwo\nthree\nfour" [ LSP.Range (LSP.Position 1 0) (LSP.Position 2 3) ]
-- "one\n\12298two\nthr\12299ee\nfour"
annotateRanges :: Text -> [LSP.Range] -> Text
annotateRanges txt ranges =
let replacements =
ranges
& foldMap
( \(LSP.Range start end) ->
let startPos = CV.lspToUPos start
endPos = CV.lspToUPos end
in [ TextReplacement "《" (U.Range startPos startPos),
TextReplacement "》" (U.Range endPos endPos)
]
)
in FormatFile.applyTextReplacements replacements txt

View File

@ -0,0 +1,67 @@
module Unison.Codebase.Editor.HandleInput.EditNamespace (handleEditNamespace) where
import Control.Monad.Reader
import Data.List.Extra qualified as List
import Data.Map qualified as Map
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as NamesUtils
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Codebase.Editor.HandleInput.ShowDefinition (showDefinitions)
import Unison.Codebase.Editor.Input (OutputLocation (..))
import Unison.Codebase.Path (Path)
import Unison.Codebase.Path qualified as Path
import Unison.Names qualified as Names
import Unison.Prelude
import Unison.Server.Backend qualified as Backend
import Unison.Util.Monoid (foldMapM)
handleEditNamespace :: OutputLocation -> [Path] -> Cli ()
handleEditNamespace outputLoc inputPaths = do
Cli.Env {codebase} <- ask
currentBranch <- Cli.getCurrentBranch0
ppe <- NamesUtils.currentPrettyPrintEnvDecl
let paths =
if null inputPaths
then [Path.empty]
else inputPaths
let allNamesToEdit =
(List.nubOrd paths) & foldMap \path ->
let b = Branch.withoutLib $ Branch.getAt0 path currentBranch
names = (Branch.toNames b)
prefixedNames = case Path.toName path of
Nothing -> names
Just pathPrefix -> Names.prefix0 pathPrefix names
in prefixedNames
let termRefs = Names.termReferences allNamesToEdit
-- We only need to (optionally) include cycles for type references, not term references,
-- because 'update' is smart enough to patch up cycles as expected for terms.
let typeRefsWithoutCycles = Names.typeReferences allNamesToEdit
typeRefs <- Cli.runTransaction $
case includeCycles of
Backend.IncludeCycles -> foldMapM Codebase.componentReferencesForReference typeRefsWithoutCycles
Backend.DontIncludeCycles -> pure typeRefsWithoutCycles
terms <-
termRefs
& foldMapM \ref ->
Map.singleton ref <$> Backend.displayTerm codebase ref
& Cli.runTransaction
types <-
typeRefs
& foldMapM \ref ->
Map.singleton ref <$> Backend.displayType codebase ref
& Cli.runTransaction
let misses = []
showDefinitions outputLoc ppe terms types misses
where
-- `view`: don't include cycles; `edit`: include cycles
includeCycles =
case outputLoc of
ConsoleLocation -> Backend.DontIncludeCycles
FileLocation _ -> Backend.IncludeCycles
LatestFileLocation -> Backend.IncludeCycles

View File

@ -99,7 +99,7 @@ lookupRewrite onErr prepare rule = do
Cli.Env {codebase} <- ask
currentBranch <- Cli.getCurrentBranch0
hqLength <- Cli.runTransaction Codebase.hashLength
fileNames <- Cli.getNamesFromLatestParsedFile
fileNames <- Cli.getNamesFromLatestFile
let currentNames = fileNames <> Branch.toNames currentBranch
let ppe = PPED.makePPED (PPE.hqNamer hqLength currentNames) (PPE.suffixifyByHash currentNames)
ot <- Cli.getTermFromLatestParsedFile rule

View File

@ -0,0 +1,281 @@
module Unison.Codebase.Editor.HandleInput.FormatFile
( formatFile,
applyTextReplacements,
TextReplacement (..),
)
where
import Control.Lens hiding (List)
import Control.Monad.State
import Data.IntervalMap.Interval qualified as Interval
import Data.List qualified as List
import Data.List.NonEmpty.Extra qualified as NEL
import Data.Map qualified as Map
import Data.Set qualified as Set
import Data.Text qualified as Text
import Text.Builder qualified as TB
import U.Core.ABT qualified as ABT
import Unison.Codebase.Path qualified as Path
import Unison.DataDeclaration qualified as Decl
import Unison.HashQualified qualified as HQ
import Unison.Lexer.Pos qualified as Pos
import Unison.Name qualified as Name
import Unison.Parser.Ann qualified as Ann
import Unison.Prelude
import Unison.PrettyPrintEnv.Util qualified as PPE
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.Reference qualified as Reference
import Unison.Symbol (Symbol)
import Unison.Syntax.DeclPrinter qualified as DeclPrinter
import Unison.Syntax.Name qualified as Name
import Unison.Syntax.TermPrinter qualified as TermPrinter
import Unison.Term qualified as Term
import Unison.UnisonFile (TypecheckedUnisonFile, UnisonFile)
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Summary qualified as FileSummary
import Unison.Util.Pretty qualified as Pretty
import Unison.Util.Range (Range (..))
import Unison.Var qualified as Var
-- | Format a file, returning a list of Text replacements to apply to the file.
formatFile ::
Monad m =>
(Maybe (UnisonFile Symbol Ann.Ann) -> Maybe (TypecheckedUnisonFile Symbol Ann.Ann) -> m PPED.PrettyPrintEnvDecl) ->
Int ->
Path.Absolute ->
Maybe (UnisonFile Symbol Ann.Ann) ->
Maybe (TypecheckedUnisonFile Symbol Ann.Ann) ->
Maybe (Set Range) ->
m (Maybe [TextReplacement])
formatFile makePPEDForFile formattingWidth currentPath inputParsedFile inputTypecheckedFile mayRangesToFormat = runMaybeT $ do
let (mayParsedFile, mayTypecheckedFile) = mkUnisonFilesDeterministic inputParsedFile inputTypecheckedFile
fileSummary <- hoistMaybe $ FileSummary.mkFileSummary mayParsedFile mayTypecheckedFile
filePPED <- lift $ makePPEDForFile mayParsedFile mayTypecheckedFile
parsedFile <- hoistMaybe mayParsedFile
-- Don't format anything unless the file typechecks.
-- The formatter mostly works on a parsed file, but we currently fail to print
-- '{{ .. }}'-style docs correctly if they don't typecheck.
_typecheckedFile <- hoistMaybe mayTypecheckedFile
formattedDecls <-
(FileSummary.allTypeDecls fileSummary)
& fmap
( \(ref, decl) ->
let tldAnn = either (Decl.annotation . Decl.toDataDecl) (Decl.annotation) decl
in (tldAnn, ref, decl)
)
& Map.filter (\(tldAnn, _, _) -> isInFormatRange tldAnn)
& itraverse \sym (tldAnn, ref, decl) -> do
symName <- hoistMaybe (Name.fromVar sym)
let declNameSegments = NEL.appendr (Path.toList (Path.unabsolute currentPath)) (Name.segments symName)
let declName = Name.fromSegments declNameSegments
let hqName = HQ.fromName symName
let biasedPPED = PPED.biasTo [declName] filePPED
-- If it's a unique type the parser will re-order constructors arbitrarily because
-- the random unique seed gets mixed in and then things are ordered by hash.
--
-- The constructor order will always be re-ordered on definition Add anyways, so we
-- just force alphabetical order for unique types for sanity reasons.
-- Doesn't work unless we alter it before building the pped
-- let deterministicDecl = decl & Decl.declAsDataDecl_ . Decl.constructors_ %~ sortOn (view _1)
pure $
(tldAnn, DeclPrinter.prettyDecl biasedPPED (Reference.DerivedId ref) hqName decl)
& over _2 Pretty.syntaxToColor
formattedTerms <-
(FileSummary.termsBySymbol fileSummary)
& Map.filter (\(tldAnn, _, trm, _) -> shouldFormatTerm tldAnn trm)
& itraverse \sym (tldAnn, mayRefId, trm, _typ) -> do
symName <- hoistMaybe (Name.fromVar sym)
let defNameSegments = NEL.appendr (Path.toList (Path.unabsolute currentPath)) (Name.segments symName)
let defName = Name.fromSegments defNameSegments
let hqName = HQ.NameOnly symName
let biasedPPED = PPED.biasTo [defName] filePPED
let definitionPPE = case mayRefId of
Just refId -> PPE.declarationPPE biasedPPED (Reference.DerivedId refId)
Nothing -> PPED.suffixifiedPPE biasedPPED
let formattedTerm = Pretty.syntaxToColor $ TermPrinter.prettyBinding definitionPPE hqName (removeGeneratedTypeAnnotations parsedFile sym trm)
-- TODO: format watch expressions and test watches
-- let formattedWatches =
-- allWatches fileSummary & map \(_tldAnn, maySym, _mayRef, trm, _mayType, mayWatchKind) -> do
-- case (mayWatchKind, maySym) of
-- (Just wk, Just (Symbol.Symbol _ (Var.User {}))) ->
-- -- Watch with binding
-- Pretty.syntaxToColor $ Pretty.string wk <> "> " <> TermPrinter.prettyBindingWithoutTypeSignature definitionPPE hqName (stripTypeAnnotation trm)
-- (Just wk, _) -> Pretty.string wk <> "> " <> TermPrinter.prettyBlock False definitionPPE (stripTypeAnnotation trm)
-- (Nothing, _) -> "> " <> TermPrinter.prettyBlock False definitionPPE (stripTypeAnnotation trm)
pure (tldAnn, formattedTerm)
-- Only keep definitions which are _actually_ in the file, skipping generated accessors
-- and such.
let nonGeneratedDefs =
(formattedTerms <> formattedDecls)
& mapMaybe
( \case
(Ann.Ann {start, end}, txt) -> Just ((start, end), txt)
_ -> Nothing
)
-- when (null filteredDefs) empty {- Don't format if we have no definitions or it wipes out the fold! -}
let textEdits =
nonGeneratedDefs & foldMap \((start, end), txt) -> do
range <- maybeToList $ annToRange (Ann.Ann start end)
pure $ (TextReplacement (Text.pack $ Pretty.toPlain (Pretty.Width formattingWidth) txt) range)
pure textEdits
where
isInFormatRange :: Ann.Ann -> Bool
isInFormatRange ann =
case mayRangesToFormat of
Nothing -> True
Just rangesToFormat -> any (annRangeOverlap ann) rangesToFormat
shouldFormatTerm :: Ann.Ann -> Term.Term Symbol Ann.Ann -> Bool
shouldFormatTerm ann trm =
isInFormatRange ann
&& not (isUntypecheckedDoc trm)
-- The lexer converts '{{ .. }}' into 'syntax.docUntitledSection (..)', but the pretty
-- printer doesn't print it back as '{{ .. }}' unless it typechecks, so
-- we just don't format docs that have un-resolved 'docUntitledSection' symbols.
isUntypecheckedDoc :: Term.Term Symbol Ann.Ann -> Bool
isUntypecheckedDoc trm =
ABT.freeVars trm
& Set.map Var.nameStr
& Set.member "syntax.docUntitledSection"
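    -- For example (hypothetical file): a '{{ .. }}' doc in a file that fails to typecheck
    -- still carries 'syntax.docUntitledSection' as a free variable, so this predicate
    -- returns True and the doc is left unformatted.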
-- Does the given range overlap with the given annotation?
annRangeOverlap :: Ann.Ann -> Range -> Bool
annRangeOverlap a r =
annToInterval a & \case
Nothing -> False
Just annI -> rangeToInterval r `Interval.overlaps` annI
-- Typechecking ALWAYS adds a type-signature, but we don't want to add ones that didn't
-- already exist in the source file.
removeGeneratedTypeAnnotations ::
UnisonFile Symbol a -> Symbol -> (Term.Term Symbol a) -> (Term.Term Symbol a)
removeGeneratedTypeAnnotations uf v = \case
Term.Ann' tm _annotation | not (hasUserTypeSignature uf v) -> tm
x -> x
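    -- Illustrative example (hypothetical binding): if the user wrote 'increment n = n + 1'
    -- with no signature, the typechecked term comes back wrapped in a 'Term.Ann'' node
    -- carrying the inferred type; 'hasUserTypeSignature' is False for 'increment', so the
    -- wrapper is stripped here and the formatter prints the binding as it was written.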
-- This is a bit of a hack.
-- The file parser uses a different unique ID for unique types on every parse,
-- that id changes hashes, and constructors are ordered by that hash.
-- This means that pretty-printing isn't deterministic and constructors will re-order
-- themselves on every save :|
--
-- It's difficult and a bad idea to change the parser to use a deterministic unique ID,
-- so instead we just re-sort the constructors by their source-file annotation AFTER
-- parsing. This is fine for pretty-printing, but don't use this for anything other than
-- formatting since the Decls it produces aren't technically valid.
mkUnisonFilesDeterministic :: Maybe (UnisonFile Symbol Ann.Ann) -> Maybe (TypecheckedUnisonFile Symbol Ann.Ann) -> (Maybe (UnisonFile Symbol Ann.Ann), Maybe (TypecheckedUnisonFile Symbol Ann.Ann))
mkUnisonFilesDeterministic mayUnisonFile mayTypecheckedFile =
let sortedUF =
mayUnisonFile
& _Just . #dataDeclarationsId . traversed . _2 %~ sortConstructors
& _Just . #effectDeclarationsId . traversed . _2 . Decl.asDataDecl_ %~ sortConstructors
sortedTF =
mayTypecheckedFile
& _Just . #dataDeclarationsId' . traversed . _2 %~ sortConstructors
& _Just . #effectDeclarationsId' . traversed . _2 . Decl.asDataDecl_ %~ sortConstructors
in (sortedUF, sortedTF)
-- ppedForFileHelper
sortConstructors :: Decl.DataDeclaration v Ann.Ann -> Decl.DataDeclaration v Ann.Ann
sortConstructors dd =
-- Sort by their Ann so we keep the order they were in the original file.
dd & Decl.constructors_ %~ sortOn @Ann.Ann (view _1)
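    -- Illustrative example (hypothetical decl): a 'unique type' whose constructors appear
    -- in the file as Foo (line 5) then Bar (line 7) may come back from the parser in hash
    -- order as [Bar, Foo]; sorting on each constructor's 'Ann' restores the source order.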
annToRange :: Ann.Ann -> Maybe Range
annToRange = \case
Ann.Intrinsic -> Nothing
Ann.External -> Nothing
Ann.GeneratedFrom a -> annToRange a
Ann.Ann start end -> Just $ Range start end
rangeToInterval :: Range -> Interval.Interval Pos.Pos
rangeToInterval (Range start end) =
Interval.ClosedInterval start end
annToInterval :: Ann.Ann -> Maybe (Interval.Interval Pos.Pos)
annToInterval ann = annToRange ann <&> rangeToInterval
-- | Returns 'True' if the given symbol is a term with a user provided type signature in the
-- parsed file, false otherwise.
hasUserTypeSignature :: Eq v => UnisonFile v a -> v -> Bool
hasUserTypeSignature parsedFile sym =
UF.terms parsedFile
& any (\(v, _, trm) -> v == sym && isJust (Term.getTypeAnnotation trm))
-- | A text replacement to apply to a file.
data TextReplacement = TextReplacement
{ -- The new text to replace the old text in the range with.
replacementText :: Text,
-- The range to replace, [start, end)
replacementRange :: Range
}
deriving (Eq, Show)
-- | Apply a list of range replacements to a text, returning the updated text.
--
-- >>> applyTextReplacements [TextReplacement "cakes" (Range (Pos.Pos 1 21) (Pos.Pos 1 28))] "my favorite food is oranges because\nthey are delicious and nutritious"
-- "my favorite food is cakes because\nthey are delicious and nutritious"
--
-- Multiple replacements.
-- >>> let txt = "my favorite food is oranges because\nthey are delicious and nutritious"
-- >>> let replacements = [TextReplacement "cakes" (Range (Pos.Pos 1 21) (Pos.Pos 1 28)), TextReplacement "decadent" (Range (Pos.Pos 2 10) (Pos.Pos 2 19)), TextReplacement "tasty" (Range (Pos.Pos 2 24) (Pos.Pos 2 34))]
-- >>> applyTextReplacements replacements txt
-- "my favorite food is cakes because\nthey are decadent and tasty"
--
-- Multi-line replacements.
-- >>> let txt = "mary had a little lamb\nwhose fleece was white as snow\nand everywhere that mary went\nthe lamb was sure to go"
-- >>> let replacements = [TextReplacement "lambo, which" (Range (Pos.Pos 1 19) (Pos.Pos 2 13)), TextReplacement " the people stared" (Range (Pos.Pos 3 99) (Pos.Pos 4 99))]
-- >>> applyTextReplacements replacements txt
-- "mary had a little lambo, which was white as snow\nand everywhere that mary went the people stared"
applyTextReplacements :: [TextReplacement] -> Text -> Text
applyTextReplacements replacements inputText = applyTextReplacementsHelper relativeOffsets (Text.lines inputText) & TB.run
where
tupleReplacements :: [(Int, Int, Maybe Text)]
tupleReplacements =
replacements
& foldMap
( \(TextReplacement txt (Range (Pos.Pos startLine startCol) (Pos.Pos endLine endCol))) ->
-- Convert from 1-based indexing to 0-based indexing
[(startLine - 1, startCol - 1, Nothing), (endLine - 1, endCol - 1, Just txt)]
)
relativeOffsets = relativizeOffsets (List.sortOn (\(line, col, _) -> (line, col)) tupleReplacements)
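    -- Illustrative example (values from the doctest above): TextReplacement "cakes"
    -- (Range (Pos.Pos 1 21) (Pos.Pos 1 28)) becomes the 0-based markers (0, 20, Nothing)
    -- and (0, 27, Just "cakes"); after relativization, the text between a 'Nothing' marker
    -- and the following 'Just' marker is the span that gets replaced by the payload.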
-- | Given a list of offsets, return a list of offsets where each offset is positioned relative to the previous offset.
-- I.e. if the first offset is at line 3 col 4, and the next is at line 5 col 6, we subtract 3
-- from the 5 but leave the column alone since it's on a different line, resulting in [(3,4), (2,6)]
--
-- If the first offset is at line 3 col 4, and the next is at line 3 col 6, we subtract 3 from
-- the line number AND subtract 4 from the column number since they're on the same line.
-- Resulting in [(3,4), (0,2)]
--
--
-- >>> relativizeOffsets [(0, 0, Nothing), (0, 4, Just "1"), (0, 10, Nothing), (1, 0, Just "2"), (1, 5, Nothing), (5, 10, Just "3")]
-- [(0,0,Nothing),(0,4,Just "1"),(0,6,Nothing),(1,0,Just "2"),(0,5,Nothing),(4,10,Just "3")]
relativizeOffsets :: [(Int, Int, Maybe Text)] -> [(Int, Int, Maybe Text)]
relativizeOffsets xs =
let grouped = List.groupBy (\(a, _, _) (b, _, _) -> a == b) xs
in grouped
& fmap (snd . List.mapAccumL (\acc (line, col, r) -> (col, (line, col - acc, r))) 0)
& List.concat
& snd . List.mapAccumL (\acc (line, col, r) -> (line, (line - acc, col, r))) 0
-- | Apply a list of range replacements to a list of lines, returning the result.
--
-- >>> applyTextReplacementsHelper [(0, 1, Nothing), (0, 4, Just "1"), (0, 2, Nothing), (1, 3, Just "2"), (0, 5, Nothing), (1, 10, Just "3")] ["abcdefghijk", "lmnopqrstuv", "wxyz", "1234567890"] & TB.run
-- "a1fg2opqrs3\n1234567890"
applyTextReplacementsHelper :: [(Int, Int, Maybe Text)] -> [Text] -> TB.Builder
applyTextReplacementsHelper [] ls = TB.intercalate "\n" (TB.text <$> ls)
applyTextReplacementsHelper _ [] = mempty
applyTextReplacementsHelper ((0, col, r) : rest) (l : ls) =
let (prefix, suffix) = Text.splitAt col l
in TB.text (fromMaybe prefix r) <> applyTextReplacementsHelper rest (suffix : ls)
applyTextReplacementsHelper ((line, col, r) : rest) ls =
case List.splitAt line ls of
(prefixLines, []) -> TB.intercalate "\n" (TB.text <$> prefixLines)
(prefixLines, (lastLine : restLines)) ->
let (prefixChars, suffixChars) = Text.splitAt col lastLine
segment =
(TB.intercalate "\n" $ fmap TB.text prefixLines)
<> TB.char '\n'
<> TB.text prefixChars
in maybe segment TB.text r <> applyTextReplacementsHelper rest (suffixChars : restLines)

View File

@ -15,28 +15,26 @@ import System.Environment (withArgs)
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.NamesUtils (displayNames, makeShadowedPrintNamesFromHQ)
import Unison.Cli.PrettyPrintUtils (prettyPrintEnvDecl)
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Cli.TypeCheck (computeTypecheckingEnvironment)
import Unison.Cli.UniqueTypeGuidLookup qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Codebase.Editor.HandleInput.RuntimeUtils qualified as RuntimeUtils
import Unison.Codebase.Editor.Output qualified as Output
import Unison.Codebase.Editor.Slurp qualified as Slurp
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Runtime qualified as Runtime
import Unison.FileParsers qualified as FileParsers
import Unison.Names (Names)
import Unison.Parser.Ann (Ann)
import Unison.Parser.Ann qualified as Ann
import Unison.Parsers qualified as Parsers
import Unison.Prelude
import Unison.PrettyPrintEnv qualified as PPE
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.PrettyPrintEnvDecl qualified as PPE
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.Reference qualified as Reference
import Unison.Result qualified as Result
import Unison.Server.Backend qualified as Backend
import Unison.Symbol (Symbol)
import Unison.Syntax.Parser qualified as Parser
import Unison.Term (Term)
@ -60,11 +58,11 @@ handleLoad maybePath = do
loadUnisonFile :: Text -> Text -> Cli ()
loadUnisonFile sourceName text = do
Cli.respond $ Output.LoadingFile sourceName
unisonFile <- withFile sourceName text
currentNames <- Branch.toNames <$> Cli.getCurrentBranch0
currentNames <- Cli.currentNames
unisonFile <- withFile currentNames sourceName text
let sr = Slurp.slurpFile unisonFile mempty Slurp.CheckOp currentNames
names <- displayNames unisonFile
pped <- prettyPrintEnvDecl names
let names = UF.addNamesFromTypeCheckedUnisonFile unisonFile currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
let ppe = PPE.suffixifiedPPE pped
Cli.respond $ Output.Typechecked sourceName ppe sr unisonFile
(bindings, e) <- evalUnisonFile Permissive ppe unisonFile []
@ -75,13 +73,12 @@ loadUnisonFile sourceName text = do
#latestTypecheckedFile .= Just (Right unisonFile)
where
withFile ::
Names ->
Text ->
Text ->
Cli (TypecheckedUnisonFile Symbol Ann)
withFile sourceName text = do
rootBranch <- Cli.getRootBranch
withFile names sourceName text = do
currentPath <- Cli.getCurrentPath
let parseNames = Backend.getCurrentParseNames (Backend.Within (Path.unabsolute currentPath)) rootBranch
State.modify' \loopState ->
loopState
& #latestFile .~ Just (Text.unpack sourceName, False)
@ -92,7 +89,7 @@ loadUnisonFile sourceName text = do
Parser.ParsingEnv
{ uniqueNames = uniqueName,
uniqueTypeGuid = Cli.loadUniqueTypeGuid currentPath,
names = parseNames
names
}
unisonFile <-
Cli.runTransaction (Parsers.parseFile (Text.unpack sourceName) (Text.unpack text) parsingEnv)
@ -104,8 +101,9 @@ loadUnisonFile sourceName text = do
computeTypecheckingEnvironment (FileParsers.ShouldUseTndr'Yes parsingEnv) codebase [] unisonFile
let Result.Result notes maybeTypecheckedUnisonFile = FileParsers.synthesizeFile typecheckingEnv unisonFile
maybeTypecheckedUnisonFile & onNothing do
ns <- makeShadowedPrintNamesFromHQ (UF.toNames unisonFile)
ppe <- Cli.runTransaction Codebase.hashLength <&> \hashLen -> PPE.makePPE (PPE.hqNamer hashLen ns) (PPE.suffixifyByHash ns)
let namesWithFileDefinitions = UF.addNamesFromUnisonFile unisonFile names
pped <- Cli.prettyPrintEnvDeclFromNames namesWithFileDefinitions
let suffixifiedPPE = PPED.suffixifiedPPE pped
let tes = [err | Result.TypeError err <- toList notes]
cbs =
[ bug
@ -114,9 +112,9 @@ loadUnisonFile sourceName text = do
]
when (not (null tes)) do
currentPath <- Cli.getCurrentPath
Cli.respond (Output.TypeErrors currentPath text ppe tes)
Cli.respond (Output.TypeErrors currentPath text suffixifiedPPE tes)
when (not (null cbs)) do
Cli.respond (Output.CompilerBugs text ppe cbs)
Cli.respond (Output.CompilerBugs text suffixifiedPPE cbs)
Cli.returnEarlyWithoutOutput
data EvalMode = Sandboxed | Permissive | Native

View File

@ -10,11 +10,12 @@ import Data.Map qualified as Map
import Data.Set qualified as Set
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils (currentPrettyPrintEnvDecl)
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch0)
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Codebase.Editor.Output qualified as Output
import Unison.Codebase.Path qualified as Path
import Unison.DataDeclaration qualified as DD
@ -22,11 +23,11 @@ import Unison.LabeledDependency (LabeledDependency)
import Unison.LabeledDependency qualified as LD
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.Names qualified as Names
import Unison.Prelude
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.Reference qualified as Reference
import Unison.Referent qualified as Referent
import Unison.Server.Backend qualified as Backend
import Unison.Sqlite qualified as Sqlite
import Unison.Symbol (Symbol)
import Unison.Term qualified as Term
@ -41,8 +42,13 @@ handleNamespaceDependencies namespacePath' = do
Cli.returnEarly (Output.BranchEmpty (Output.WhichBranchEmptyPath (Path.absoluteToPath' path)))
externalDependencies <-
Cli.runTransaction (namespaceDependencies codebase branch)
ppe <- PPED.unsuffixifiedPPE <$> currentPrettyPrintEnvDecl Backend.Within
Cli.respondNumbered $ Output.ListNamespaceDependencies ppe path externalDependencies
currentPPED <- Cli.currentPrettyPrintEnvDecl
globalNames <- Names.makeAbsolute . Branch.toNames <$> Cli.getRootBranch0
globalPPED <- Cli.prettyPrintEnvDeclFromNames globalNames
-- We explicitly include a global unsuffixified fallback on namespace dependencies since
-- the things we want names for are obviously outside of our scope.
let ppeWithFallback = PPED.unsuffixifiedPPE $ PPED.addFallback globalPPED currentPPED
Cli.respondNumbered $ Output.ListNamespaceDependencies ppeWithFallback path externalDependencies
-- | Check the dependencies of all types and terms in the current namespace,
-- returns a map of dependencies which do not have a name within the current namespace,

View File

@ -9,23 +9,21 @@ import Data.Map qualified as Map
import Unison.Builtin qualified as Builtin
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils (prettyPrintEnvDecl)
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch0 (..))
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Codebase.BranchDiff qualified as BranchDiff
import Unison.Codebase.Editor.Output.BranchDiff qualified as OBranchDiff
import Unison.Codebase.Path qualified as Path
import Unison.DataDeclaration qualified as DD
import Unison.Parser.Ann (Ann (..))
import Unison.Prelude
import Unison.PrettyPrintEnv qualified as PPE
import Unison.PrettyPrintEnvDecl qualified as PPE hiding (biasTo)
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.Reference (Reference)
import Unison.Reference qualified as Reference
import Unison.Server.Backend qualified as Backend
import Unison.Sqlite qualified as Sqlite
import Unison.Symbol (Symbol)
@ -36,13 +34,12 @@ diffHelper ::
diffHelper before after =
Cli.time "diffHelper" do
Cli.Env {codebase} <- ask
rootBranch <- Cli.getRootBranch
currentPath <- Cli.getCurrentPath
hqLength <- Cli.runTransaction Codebase.hashLength
diff <- liftIO (BranchDiff.diff0 before after)
let (_parseNames, prettyNames0, _local) = Backend.namesForBranch rootBranch (Backend.AllNames $ Path.unabsolute currentPath)
ppe <- PPE.suffixifiedPPE <$> prettyPrintEnvDecl prettyNames0
fmap (ppe,) do
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
let suffixifiedPPE = PPED.suffixifiedPPE pped
fmap (suffixifiedPPE,) do
OBranchDiff.toOutput
(Cli.runTransaction . Codebase.getTypeOfReferent codebase)
(Cli.runTransaction . declOrBuiltin codebase)

View File

@ -417,9 +417,14 @@ pushProjectBranchToProjectBranch'InferredProject force localProjectAndBranch loc
(ProjectAndBranch (remoteProjectId, remoteProjectName) remoteBranchName)
-- "push" with remote mapping for branch
Just (remoteBranchId, remoteBranchName) -> do
let remoteProjectBranchDoesntExist =
let remoteProjectBranchDoesntExist = do
Cli.runTransaction $
Queries.deleteBranchRemoteMapping
localProjectId
localBranchId
Share.hardCodedUri
Cli.returnEarly $
Output.RemoteProjectBranchDoesntExist
Output.RemoteProjectBranchDoesntExist'Push
Share.hardCodedUri
(ProjectAndBranch remoteProjectName remoteBranchName)
Share.getProjectBranchById Share.NoSquashedHead (ProjectAndBranch remoteProjectId remoteBranchId) >>= \case

View File

@ -12,7 +12,8 @@ import Unison.Builtin.Decls qualified as DD
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.NamesUtils (basicParseNames, displayNames, getBasicPrettyPrintNames)
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Editor.HandleInput.Load (EvalMode (Native, Permissive), evalUnisonFile)
import Unison.Codebase.Editor.Output qualified as Output
@ -22,7 +23,7 @@ import Unison.Hash qualified as Hash
import Unison.Parser.Ann (Ann (External))
import Unison.Prelude
import Unison.PrettyPrintEnv qualified as PPE
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.Reference qualified as Reference
import Unison.Result qualified as Result
import Unison.Symbol (Symbol)
@ -36,6 +37,7 @@ import Unison.Typechecker.TypeLookup (TypeLookup)
import Unison.Typechecker.TypeLookup qualified as TypeLookup
import Unison.UnisonFile (TypecheckedUnisonFile)
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Names qualified as UF
import Unison.Var qualified as Var
handleRun :: Bool -> String -> [String] -> Cli ()
@ -44,11 +46,12 @@ handleRun native main args = do
(sym, term, typ, otyp) <- getTerm main
uf <- createWatcherFile sym term typ
pure (uf, otyp)
ppe <- do
names <- displayNames unisonFile
Cli.runTransaction Codebase.hashLength <&> \hashLen -> PPE.makePPE (PPE.hqNamer hashLen names) (PPE.suffixifyByHash names)
names <- Cli.currentNames
let namesWithFileDefinitions = UF.addNamesFromTypeCheckedUnisonFile unisonFile names
pped <- Cli.prettyPrintEnvDeclFromNames namesWithFileDefinitions
let suffixifiedPPE = PPED.suffixifiedPPE pped
let mode | native = Native | otherwise = Permissive
(_, xs) <- evalUnisonFile mode ppe unisonFile args
(_, xs) <- evalUnisonFile mode suffixifiedPPE unisonFile args
mainRes :: Term Symbol () <-
case lookup magicMainWatcherString (map bonk (Map.toList xs)) of
Nothing ->
@ -59,7 +62,7 @@ handleRun native main args = do
)
Just x -> pure (stripUnisonFileReferences unisonFile x)
#lastRunResult .= Just (Term.amap (\() -> External) mainRes, mainResType, unisonFile)
Cli.respond (Output.RunResult ppe mainRes)
Cli.respond (Output.RunResult suffixifiedPPE mainRes)
where
bonk (_, (_ann, watchKind, _id, _term0, term1, _isCacheHit)) =
(watchKind, term1)
@ -77,29 +80,24 @@ getTerm main =
getTerm' main >>= \case
NoTermWithThatName -> do
mainType <- Runtime.mainType <$> view #runtime
ppe <- makePPE
Cli.returnEarly $ Output.NoMainFunction main ppe [mainType]
pped <- Cli.currentPrettyPrintEnvDecl
let suffixifiedPPE = PPED.suffixifiedPPE pped
Cli.returnEarly $ Output.NoMainFunction main suffixifiedPPE [mainType]
TermHasBadType ty -> do
mainType <- Runtime.mainType <$> view #runtime
ppe <- makePPE
Cli.returnEarly $ Output.BadMainFunction "run" main ty ppe [mainType]
pped <- Cli.currentPrettyPrintEnvDecl
let suffixifiedPPE = PPED.suffixifiedPPE pped
Cli.returnEarly $ Output.BadMainFunction "run" main ty suffixifiedPPE [mainType]
GetTermSuccess x -> pure x
where
makePPE :: Cli PPE.PrettyPrintEnv
makePPE = do
basicPrettyPrintNames <- getBasicPrettyPrintNames
Cli.runTransaction Codebase.hashLength <&> \hashLen ->
PPE.makePPE (PPE.hqNamer hashLen basicPrettyPrintNames) (PPE.suffixifyByHash basicPrettyPrintNames)
getTerm' :: String -> Cli GetTermResult
getTerm' mainName =
let getFromCodebase = do
Cli.Env {codebase, runtime} <- ask
parseNames <- basicParseNames
names <- Cli.currentNames
let loadTypeOfTerm ref = Cli.runTransaction (Codebase.getTypeOfTerm codebase ref)
mainToFile
=<< MainTerm.getMainTerm loadTypeOfTerm parseNames mainName (Runtime.mainType runtime)
=<< MainTerm.getMainTerm loadTypeOfTerm names mainName (Runtime.mainType runtime)
where
mainToFile (MainTerm.NotAFunctionName _) = pure NoTermWithThatName
mainToFile (MainTerm.NotFound _) = pure NoTermWithThatName

View File

@ -0,0 +1,86 @@
module Unison.Codebase.Editor.HandleInput.ShowDefinition (showDefinitions) where
import Control.Lens
import Control.Monad.Reader (ask)
import Control.Monad.State qualified as State
import Data.Map qualified as Map
import Data.Set qualified as Set
import Data.Text qualified as Text
import Unison.Builtin.Decls qualified as DD
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.Pretty qualified as Pretty
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Editor.DisplayObject (DisplayObject)
import Unison.Codebase.Editor.Input
import Unison.Codebase.Editor.Output
import Unison.DataDeclaration (Decl)
import Unison.HashQualified qualified as HQ
import Unison.Name (Name)
import Unison.Parser.Ann (Ann)
import Unison.Prelude
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.Reference qualified as Reference
import Unison.Symbol (Symbol)
import Unison.Term (Term)
import Unison.Type (Type)
import Unison.Util.Pretty qualified as Pretty
import Unison.Util.Set qualified as Set
-- | Show the provided definitions to console or scratch file.
-- The caller is responsible for ensuring that the definitions include cycles if that's
-- the desired behavior.
showDefinitions ::
OutputLocation ->
PPED.PrettyPrintEnvDecl ->
(Map Reference.Reference (DisplayObject (Type Symbol Ann) (Term Symbol Ann))) ->
( Map
Reference.Reference
(DisplayObject () (Decl Symbol Ann))
) ->
[HQ.HashQualified Name] ->
Cli ()
showDefinitions outputLoc pped terms types misses = do
Cli.Env {codebase, writeSource} <- ask
outputPath <- getOutputPath
case outputPath of
_ | null terms && null types -> pure ()
Nothing -> do
-- If we're writing to console we don't add test-watch syntax
let isTest _ = False
let isSourceFile = False
-- No filepath, render code to console.
let renderedCodePretty = renderCodePretty pped isSourceFile isTest terms types
Cli.respond $ DisplayDefinitions renderedCodePretty
Just fp -> do
-- We build an 'isTest' check to prepend "test>" to tests in a scratch file.
testRefs <- Cli.runTransaction (Codebase.filterTermsByReferenceIdHavingType codebase (DD.testResultType mempty) (Map.keysSet terms & Set.mapMaybe Reference.toId))
let isTest r = Set.member r testRefs
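    -- For example (illustrative): a cached test is written to the scratch file with a
    -- leading "test>" watch marker, whereas console output (above) renders it as a plain
    -- binding.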
let isSourceFile = True
let renderedCodePretty = renderCodePretty pped isSourceFile isTest terms types
let renderedCodeText = Text.pack $ Pretty.toPlain 80 renderedCodePretty
-- We set latestFile to be programmatically generated, if we
-- are writing these definitions to a file - this will skip the
-- next update for that file (which will happen immediately)
#latestFile ?= (fp, True)
liftIO $ writeSource (Text.pack fp) renderedCodeText
let numDefinitions = Map.size terms + Map.size types
Cli.respond $ LoadedDefinitionsToSourceFile fp numDefinitions
when (not (null misses)) (Cli.respond (SearchTermsNotFound misses))
where
-- Get the file path to send the definition(s) to. `Nothing` means the terminal.
getOutputPath :: Cli (Maybe FilePath)
getOutputPath =
case outputLoc of
ConsoleLocation -> pure Nothing
FileLocation path -> pure (Just path)
LatestFileLocation -> do
loopState <- State.get
pure case loopState ^. #latestFile of
Nothing -> Just "scratch.u"
Just (path, _) -> Just path
renderCodePretty pped isSourceFile isTest terms types =
Pretty.syntaxToColor . Pretty.sep "\n\n" $
Pretty.prettyTypeDisplayObjects pped types <> Pretty.prettyTermDisplayObjects pped isSourceFile isTest terms

View File

@ -14,7 +14,8 @@ import Data.Maybe (catMaybes, fromJust)
import Data.Set (fromList, toList)
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.NamesUtils (basicParseNames, basicPrettyPrintNamesA)
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Editor.Output (Output (..))
import Unison.Codebase.Path (hqSplitFromName')
@ -26,7 +27,7 @@ import Unison.Names (Names)
import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann)
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.Reference (Reference)
import Unison.Referent (Referent, pattern Con, pattern Ref)
import Unison.Symbol (Symbol)
@ -60,69 +61,66 @@ lookupTermRefWithType ::
HQ.HashQualified Name ->
Cli [(Reference, Type Symbol Ann)]
lookupTermRefWithType codebase name = do
nms <- basicParseNames
names <- Cli.currentNames
liftIO
. Codebase.runTransaction codebase
. fmap catMaybes
. traverse annot
. fst
$ lookupTermRefs name nms
$ lookupTermRefs name names
where
annot tm =
fmap ((,) tm) <$> Codebase.getTypeOfTerm codebase tm
resolveTerm :: HQ.HashQualified Name -> Cli Referent
resolveTerm name = do
hashLength <- Cli.runTransaction Codebase.hashLength
basicParseNames >>= \nms ->
case lookupTerm name nms of
[] -> Cli.returnEarly (TermNotFound $ fromJust parsed)
where
parsed = hqSplitFromName' =<< HQ.toName name
[rf] -> pure rf
rfs ->
Cli.returnEarly (TermAmbiguous ppe name (fromList rfs))
where
ppe = PPE.makePPE (PPE.hqNamer hashLength nms) (PPE.suffixifyByHash nms)
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
let suffixifiedPPE = PPED.suffixifiedPPE pped
case lookupTerm name names of
[] -> Cli.returnEarly (TermNotFound $ fromJust parsed)
where
parsed = hqSplitFromName' =<< HQ.toName name
[rf] -> pure rf
rfs ->
Cli.returnEarly (TermAmbiguous suffixifiedPPE name (fromList rfs))
resolveCon :: HQ.HashQualified Name -> Cli ConstructorReference
resolveCon name = do
hashLength <- Cli.runTransaction Codebase.hashLength
basicParseNames >>= \nms ->
case lookupCon name nms of
([], _) -> Cli.returnEarly (TermNotFound $ fromJust parsed)
where
parsed = hqSplitFromName' =<< HQ.toName name
([co], _) -> pure co
(_, rfts) ->
Cli.returnEarly (TermAmbiguous ppe name (fromList rfts))
where
ppe = PPE.makePPE (PPE.hqNamer hashLength nms) (PPE.suffixifyByHash nms)
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
let suffixifiedPPE = PPED.suffixifiedPPE pped
case lookupCon name names of
([], _) -> Cli.returnEarly (TermNotFound $ fromJust parsed)
where
parsed = hqSplitFromName' =<< HQ.toName name
([co], _) -> pure co
(_, rfts) ->
Cli.returnEarly (TermAmbiguous suffixifiedPPE name (fromList rfts))
resolveTermRef :: HQ.HashQualified Name -> Cli Reference
resolveTermRef name = do
hashLength <- Cli.runTransaction Codebase.hashLength
basicParseNames >>= \nms ->
case lookupTermRefs name nms of
([], _) -> Cli.returnEarly (TermNotFound $ fromJust parsed)
where
parsed = hqSplitFromName' =<< HQ.toName name
([rf], _) -> pure rf
(_, rfts) ->
Cli.returnEarly (TermAmbiguous ppe name (fromList rfts))
where
ppe = PPE.makePPE (PPE.hqNamer hashLength nms) (PPE.suffixifyByHash nms)
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
let suffixifiedPPE = PPED.suffixifiedPPE pped
case lookupTermRefs name names of
([], _) -> Cli.returnEarly (TermNotFound $ fromJust parsed)
where
parsed = hqSplitFromName' =<< HQ.toName name
([rf], _) -> pure rf
(_, rfts) ->
Cli.returnEarly (TermAmbiguous suffixifiedPPE name (fromList rfts))
resolveMainRef :: HQ.HashQualified Name -> Cli (Reference, PrettyPrintEnv)
resolveMainRef main = do
Cli.Env {codebase, runtime} <- ask
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
let suffixifiedPPE = PPED.suffixifiedPPE pped
let mainType = Runtime.mainType runtime
smain = HQ.toString main
parseNames <- basicPrettyPrintNamesA
k <- Cli.runTransaction Codebase.hashLength
let ppe = PPE.makePPE (PPE.hqNamer k parseNames) (PPE.suffixifyByHash parseNames)
lookupTermRefWithType codebase main >>= \case
[(rf, ty)]
| Typechecker.fitsScheme ty mainType -> pure (rf, ppe)
| otherwise -> Cli.returnEarly (BadMainFunction "main" smain ty ppe [mainType])
_ -> Cli.returnEarly (NoMainFunction smain ppe [mainType])
| Typechecker.fitsScheme ty mainType -> pure (rf, suffixifiedPPE)
| otherwise -> Cli.returnEarly (BadMainFunction "main" smain ty suffixifiedPPE [mainType])
_ -> Cli.returnEarly (NoMainFunction smain suffixifiedPPE [mainType])

View File

@ -20,7 +20,8 @@ import Unison.Builtin.Decls qualified as DD
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.NamesUtils (basicParseNames, makePrintNamesFromLabeled')
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Editor.HandleInput.RuntimeUtils qualified as RuntimeUtils
@ -36,7 +37,7 @@ import Unison.NamesWithHistory qualified as Names
import Unison.Parser.Ann (Ann)
import Unison.Prelude
import Unison.PrettyPrintEnv qualified as PPE
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.Reference qualified as Reference
import Unison.Referent qualified as Referent
import Unison.ShortHash qualified as SH
@ -53,10 +54,6 @@ import Unison.Util.Relation qualified as R
import Unison.Util.Set qualified as Set
import Unison.WatchKind qualified as WK
fqnPPE :: Names -> Cli PPE.PrettyPrintEnv
fqnPPE ns =
Cli.runTransaction Codebase.hashLength <&> \hashLen -> PPE.makePPE (PPE.hqNamer hashLen ns) PPE.dontSuffixify
-- | Handle a @test@ command.
-- Run pure tests in the current subnamespace.
handleTest :: TestInput -> Cli ()
@ -88,12 +85,13 @@ handleTest TestInput {includeLibNamespace, showFailures, showSuccesses} = do
| otherwise -> Nothing
_ -> Nothing
let stats = Output.CachedTests (Set.size testRefs) (Map.size cachedTests)
names <- makePrintNamesFromLabeled'
ppe <- fqnPPE names
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
let fqnPPE = PPED.unsuffixifiedPPE pped
Cli.respond $
TestResults
stats
ppe
fqnPPE
showSuccesses
showFailures
oks
@ -108,9 +106,9 @@ handleTest TestInput {includeLibNamespace, showFailures, showSuccesses} = do
Cli.respond (TermNotFound' . SH.shortenTo hqLength . Reference.toShortHash $ Reference.DerivedId r)
pure []
Just tm -> do
Cli.respond $ TestIncrementalOutputStart ppe (n, total) r
Cli.respond $ TestIncrementalOutputStart fqnPPE (n, total) r
-- v don't cache; test cache populated below
tm' <- RuntimeUtils.evalPureUnison ppe False tm
tm' <- RuntimeUtils.evalPureUnison fqnPPE False tm
case tm' of
Left e -> do
Cli.respond (EvaluationFailure e)
@ -118,28 +116,27 @@ handleTest TestInput {includeLibNamespace, showFailures, showSuccesses} = do
Right tm' -> do
-- After evaluation, cache the result of the test
Cli.runTransaction (Codebase.putWatch WK.TestWatch r tm')
Cli.respond $ TestIncrementalOutputEnd ppe (n, total) r (isTestOk tm')
Cli.respond $ TestIncrementalOutputEnd fqnPPE (n, total) r (isTestOk tm')
pure [(r, tm')]
let m = Map.fromList computedTests
(mOks, mFails) = passFails m
Cli.respond $ TestResults Output.NewlyComputed ppe showSuccesses showFailures mOks mFails
Cli.respond $ TestResults Output.NewlyComputed fqnPPE showSuccesses showFailures mOks mFails
handleIOTest :: HQ.HashQualified Name -> Cli ()
handleIOTest main = do
Cli.Env {runtime} <- ask
parseNames <- basicParseNames
ppe <-
Cli.runTransaction Codebase.hashLength <&> \hashLen ->
PPE.makePPE (PPE.hqNamer hashLen parseNames) (PPE.suffixifyByHash parseNames)
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
let suffixifiedPPE = PPED.suffixifiedPPE pped
let isIOTest typ = Foldable.any (Typechecker.isSubtype typ) $ Runtime.ioTestTypes runtime
refs <- resolveHQNames parseNames (Set.singleton main)
refs <- resolveHQNames names (Set.singleton main)
(fails, oks) <-
refs & foldMapM \(ref, typ) -> do
when (not $ isIOTest typ) do
Cli.returnEarly (BadMainFunction "io.test" (HQ.toString main) typ ppe (Foldable.toList $ Runtime.ioTestTypes runtime))
runIOTest ppe ref
Cli.respond $ TestResults Output.NewlyComputed ppe True True oks fails
Cli.returnEarly (BadMainFunction "io.test" (HQ.toString main) typ suffixifiedPPE (Foldable.toList $ Runtime.ioTestTypes runtime))
runIOTest suffixifiedPPE ref
Cli.respond $ TestResults Output.NewlyComputed suffixifiedPPE True True oks fails
findTermsOfTypes :: Codebase.Codebase m Symbol Ann -> Bool -> NESet (Type.Type Symbol Ann) -> Cli (Set Reference.Id)
findTermsOfTypes codebase includeLib filterTypes = do
@ -157,22 +154,21 @@ findTermsOfTypes codebase includeLib filterTypes = do
handleAllIOTests :: Cli ()
handleAllIOTests = do
Cli.Env {codebase, runtime} <- ask
parseNames <- basicParseNames
ppe <-
Cli.runTransaction Codebase.hashLength <&> \hashLen ->
PPE.makePPE (PPE.hqNamer hashLen parseNames) (PPE.suffixifyByHash parseNames)
names <- Cli.currentNames
pped <- Cli.prettyPrintEnvDeclFromNames names
let suffixifiedPPE = PPED.suffixifiedPPE pped
ioTestRefs <- findTermsOfTypes codebase False (Runtime.ioTestTypes runtime)
case NESet.nonEmptySet ioTestRefs of
Nothing -> Cli.respond $ TestResults Output.NewlyComputed ppe True True [] []
Nothing -> Cli.respond $ TestResults Output.NewlyComputed suffixifiedPPE True True [] []
Just neTestRefs -> do
let total = NESet.size neTestRefs
(fails, oks) <-
toList neTestRefs & zip [1 :: Int ..] & foldMapM \(n, r) -> do
Cli.respond $ TestIncrementalOutputStart ppe (n, total) r
(fails, oks) <- runIOTest ppe r
Cli.respond $ TestIncrementalOutputEnd ppe (n, total) r (null fails)
Cli.respond $ TestIncrementalOutputStart suffixifiedPPE (n, total) r
(fails, oks) <- runIOTest suffixifiedPPE r
Cli.respond $ TestIncrementalOutputEnd suffixifiedPPE (n, total) r (null fails)
pure (fails, oks)
Cli.respond $ TestResults Output.NewlyComputed ppe True True oks fails
Cli.respond $ TestResults Output.NewlyComputed suffixifiedPPE True True oks fails
resolveHQNames :: Names -> Set (HQ.HashQualified Name) -> Cli (Set (Reference.Id, Type.Type Symbol Ann))
resolveHQNames parseNames hqNames =

View File

@ -16,13 +16,12 @@ import Unison.ABT qualified as ABT
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.NamesUtils (displayNames)
import Unison.Cli.PrettyPrintUtils (prettyPrintEnvDecl)
import Unison.Cli.NamesUtils qualified as Cli
import Unison.Cli.PrettyPrintUtils qualified as Cli
import Unison.Cli.TypeCheck (computeTypecheckingEnvironment)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch0 (..))
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Codebase.BranchUtil qualified as BranchUtil
import Unison.Codebase.Editor.Input
import Unison.Codebase.Editor.Output
@ -81,8 +80,8 @@ handleUpdate input optionalPatch requestedNames = do
NoPatch -> Nothing
DefaultPatch -> Just Cli.defaultPatchPath
UsePatch p -> Just p
slurpCheckNames <- Branch.toNames <$> Cli.getCurrentBranch0
sr <- getSlurpResultForUpdate requestedNames slurpCheckNames
currentCodebaseNames <- Cli.currentNames
sr <- getSlurpResultForUpdate requestedNames currentCodebaseNames
let addsAndUpdates :: SlurpComponent
addsAndUpdates = Slurp.updates sr <> Slurp.adds sr
fileNames :: Names
@ -92,7 +91,7 @@ handleUpdate input optionalPatch requestedNames = do
typeEdits = do
v <- Set.toList (SC.types (updates sr))
let n = Name.unsafeFromVar v
let oldRefs0 = Names.typesNamed slurpCheckNames n
let oldRefs0 = Names.typesNamed currentCodebaseNames n
let newRefs = Names.typesNamed fileNames n
case (,) <$> NESet.nonEmptySet oldRefs0 <*> Set.asSingleton newRefs of
Nothing -> error (reportBug "E722145" ("bad (old,new) names: " ++ show (oldRefs0, newRefs)))
@ -107,7 +106,7 @@ handleUpdate input optionalPatch requestedNames = do
termEdits = do
v <- Set.toList (SC.terms (updates sr))
let n = Name.unsafeFromVar v
let oldRefs0 = Names.refTermsNamed slurpCheckNames n
let oldRefs0 = Names.refTermsNamed currentCodebaseNames n
let newRefs = Names.refTermsNamed fileNames n
case (,) <$> NESet.nonEmptySet oldRefs0 <*> Set.asSingleton newRefs of
Nothing -> error (reportBug "E936103" ("bad (old,new) names: " ++ show (oldRefs0, newRefs)))
@ -118,7 +117,7 @@ handleUpdate input optionalPatch requestedNames = do
termDeprecations =
[ (n, r)
| (_, oldTypeRef, _) <- typeEdits,
(n, r) <- Names.constructorsForType oldTypeRef slurpCheckNames
(n, r) <- Names.constructorsForType oldTypeRef currentCodebaseNames
]
patchOps <- for patchPath \patchPath -> do
ye'ol'Patch <- Cli.getPatchAt patchPath
@ -191,8 +190,10 @@ handleUpdate input optionalPatch requestedNames = do
. Codebase.addDefsToCodebase codebase
. Slurp.filterUnisonFile sr
$ Slurp.originalFile sr
ppe <- prettyPrintEnvDecl =<< displayNames (Slurp.originalFile sr)
Cli.respond $ SlurpOutput input (PPE.suffixifiedPPE ppe) sr
let codebaseAndFileNames = UF.addNamesFromTypeCheckedUnisonFile (Slurp.originalFile sr) currentCodebaseNames
pped <- Cli.prettyPrintEnvDeclFromNames codebaseAndFileNames
let suffixifiedPPE = PPE.suffixifiedPPE pped
Cli.respond $ SlurpOutput input suffixifiedPPE sr
whenJust patchOps \(updatedPatch, _, _) ->
void $ propagatePatchNoSync updatedPatch currentPath'
Cli.syncRoot case patchPath of

View File

@ -6,6 +6,7 @@ module Unison.Codebase.Editor.HandleInput.Update2
-- * Misc helpers to be organized later
addDefinitionsToUnisonFile,
findCtorNames,
findCtorNamesMaybe,
forwardCtorNames,
makeParsingEnv,
prettyParseTypecheck,
@ -142,7 +143,7 @@ handleUpdate2 = do
Cli.respond Output.UpdateTypecheckingSuccess
pure secondTuf
saveTuf (findCtorNames Output.UOUUpdate namesExcludingLibdeps ctorNames Nothing) secondTuf
saveTuf (findCtorNamesMaybe Output.UOUUpdate namesExcludingLibdeps ctorNames Nothing) secondTuf
Cli.respond Output.Success
-- TODO: find a better module for this function, as it's used in a couple places
@ -182,7 +183,7 @@ makeParsingEnv path names = do
}
-- save definitions and namespace
saveTuf :: (Name -> Either Output [Name]) -> TypecheckedUnisonFile Symbol Ann -> Cli ()
saveTuf :: (Name -> Either Output (Maybe [Name])) -> TypecheckedUnisonFile Symbol Ann -> Cli ()
saveTuf getConstructors tuf = do
Cli.Env {codebase} <- ask
currentPath <- Cli.getCurrentPath
@ -205,7 +206,10 @@ saveTuf getConstructors tuf = do
-- [ ("foo.bar", insert-term("baz",<#foo>)) ]
typecheckedUnisonFileToBranchUpdates ::
(forall void. Output -> Transaction void) ->
(Name -> Either Output [Name]) ->
-- | Returns 'Nothing' if the decl isn't in namesExcludingLibdeps,
-- in which case we know the decl is new and do not need to generate
-- delete actions for it.
(Name -> Either Output (Maybe [Name])) ->
TypecheckedUnisonFile Symbol Ann ->
Transaction [(Path, Branch0 m -> Branch0 m)]
typecheckedUnisonFileToBranchUpdates abort getConstructors tuf = do
@ -224,7 +228,7 @@ typecheckedUnisonFileToBranchUpdates abort getConstructors tuf = do
makeDeclUpdates (symbol, (typeRefId, decl)) = do
-- some decls will be deleted, we want to delete their
-- constructors as well
deleteConstructorActions <- case map (BranchUtil.makeAnnihilateTermName . Path.splitFromName) <$> getConstructors (Name.unsafeFromVar symbol) of
deleteConstructorActions <- case maybe [] (map (BranchUtil.makeAnnihilateTermName . Path.splitFromName)) <$> getConstructors (Name.unsafeFromVar symbol) of
Left err -> abort err
Right actions -> pure actions
let deleteTypeAction = BranchUtil.makeAnnihilateTypeName split
@ -362,9 +366,13 @@ forwardCtorNames names =
]
-- | given a decl name, find names for all of its constructors, in order.
--
-- Precondition: 'n' is an element of 'names'
findCtorNames :: Output.UpdateOrUpgrade -> Names -> Map ForwardName (Referent, Name) -> Maybe Int -> Name -> Either Output.Output [Name]
findCtorNames operation names forwardCtorNames ctorCount n =
let declRef = Set.findMin $ Relation.lookupDom n names.types
let declRef = case Set.lookupMin (Relation.lookupDom n names.types) of
Nothing -> error "[findCtorNames] precondition violation: n is not an element of names"
Just x -> x
f = ForwardName.fromName n
(_, centerRight) = Map.split f forwardCtorNames
(center, _) = Map.split (incrementLastSegmentChar f) centerRight
@ -384,6 +392,18 @@ findCtorNames operation names forwardCtorNames ctorCount n =
then Right $ Map.elems m
else Left $ Output.UpdateIncompleteConstructorSet operation n m ctorCount
findCtorNamesMaybe ::
Output.UpdateOrUpgrade ->
Names ->
Map ForwardName (Referent, Name) ->
Maybe Int ->
Name ->
Either Output.Output (Maybe [Name])
findCtorNamesMaybe operation names forwardCtorNames ctorCount name =
case Relation.memberDom name (Names.types names) of
True -> Just <$> findCtorNames operation names forwardCtorNames ctorCount name
False -> Right Nothing
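-- For example (hypothetical scenario): a decl that exists only in the scratch file has no
-- entry in 'names', so 'findCtorNamesMaybe' returns 'Right Nothing' and the caller skips
-- generating constructor delete actions for it; a decl that is already named in the
-- codebase goes through 'findCtorNames' and yields 'Just' its constructor names in order.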
-- Used by `findCtorNames` to filter `forwardCtorNames` to a narrow range which will be searched linearly.
-- >>> incrementLastSegmentChar $ ForwardName.fromName $ Name.unsafeFromText "foo.bar.quux"
-- ForwardName {toList = "foo" :| ["bar","quuy"]}

View File

@ -25,6 +25,7 @@ import Unison.Codebase.Editor.HandleInput.Branch qualified as HandleInput.Branch
import Unison.Codebase.Editor.HandleInput.Update2
( addDefinitionsToUnisonFile,
findCtorNames,
findCtorNamesMaybe,
forwardCtorNames,
getNamespaceDependentsOf,
makeComplicatedPPE,
@ -200,7 +201,7 @@ handleUpgrade oldDepName newDepName = do
Codebase.addDefsToCodebase codebase typecheckedUnisonFile
typecheckedUnisonFileToBranchUpdates
abort
(findCtorNames Output.UOUUpgrade namesExcludingLibdeps constructorNamesExcludingLibdeps Nothing)
(findCtorNamesMaybe Output.UOUUpgrade namesExcludingLibdeps constructorNamesExcludingLibdeps Nothing)
typecheckedUnisonFile
Cli.stepAt
textualDescriptionOfUpgrade

View File

@ -215,10 +215,14 @@ data Input
NamespaceDependenciesI (Maybe Path')
| DebugTabCompletionI [String] -- The raw arguments provided
| DebugFuzzyOptionsI String [String] -- cmd and arguments
| DebugFormatI
| DebugNumberedArgsI
| DebugTypecheckedUnisonFileI
| DebugDumpNamespacesI
| DebugDumpNamespaceSimpleI
| DebugTermI (Bool {- Verbose mode -}) (HQ.HashQualified Name)
| DebugTypeI (HQ.HashQualified Name)
| DebugLSPFoldRangesI
| DebugClearWatchI
| DebugDoctorI
| DebugNameDiffI ShortCausalHash ShortCausalHash
@ -241,6 +245,7 @@ data Input
| CloneI ProjectAndBranchNames (Maybe ProjectAndBranchNames)
| ReleaseDraftI Semver
| UpgradeI !NameSegment !NameSegment
| EditNamespaceI [Path.Path]
deriving (Eq, Show)
-- | The source of a `branch` command: what to make the new branch from.

View File

@ -47,6 +47,7 @@ import Unison.Codebase.ShortCausalHash (ShortCausalHash)
import Unison.Codebase.ShortCausalHash qualified as SCH
import Unison.Codebase.Type (GitError)
import Unison.CommandLine.InputPattern qualified as Input
import Unison.DataDeclaration qualified as DD
import Unison.DataDeclaration.ConstructorId (ConstructorId)
import Unison.HashQualified qualified as HQ
import Unison.HashQualified' qualified as HQ'
@ -324,6 +325,9 @@ data Output
| DisplayDebugCompletions [Completion.Completion]
| DebugDisplayFuzzyOptions Text [String {- arg description, options -}]
| DebugFuzzyOptionsNoResolver
| DebugTerm (Bool {- verbose mode -}) (Either (Text {- A builtin hash -}) (Term Symbol Ann))
| DebugDecl (Either (Text {- A builtin hash -}) (DD.Decl Symbol Ann)) (Maybe ConstructorId {- If 'Just' we're debugging a constructor of the given decl -})
| AnnotatedFoldRanges Text
| ClearScreen
| PulledEmptyBranch (ReadRemoteNamespace Share.RemoteProjectBranch)
| CreatedProject Bool {- randomly-generated name? -} ProjectName
@ -350,6 +354,7 @@ data Output
| LocalProjectNorProjectBranchExist ProjectName ProjectBranchName
| RemoteProjectDoesntExist URI ProjectName
| RemoteProjectBranchDoesntExist URI (ProjectAndBranch ProjectName ProjectBranchName)
| RemoteProjectBranchDoesntExist'Push URI (ProjectAndBranch ProjectName ProjectBranchName)
| RemoteProjectReleaseIsDeprecated URI (ProjectAndBranch ProjectName ProjectBranchName)
| RemoteProjectPublishedReleaseCannotBeChanged URI (ProjectAndBranch ProjectName ProjectBranchName)
| -- A remote project branch head wasn't in the expected state
@ -568,6 +573,9 @@ isFailure o = case o of
DisplayDebugCompletions {} -> False
DebugDisplayFuzzyOptions {} -> False
DebugFuzzyOptionsNoResolver {} -> True
DebugTerm {} -> False
DebugDecl {} -> False
AnnotatedFoldRanges {} -> False
DisplayDebugNameDiff {} -> False
ClearScreen -> False
PulledEmptyBranch {} -> False
@ -590,6 +598,7 @@ isFailure o = case o of
LocalProjectNorProjectBranchExist {} -> True
RemoteProjectDoesntExist {} -> True
RemoteProjectBranchDoesntExist {} -> True
RemoteProjectBranchDoesntExist'Push {} -> True
RemoteProjectReleaseIsDeprecated {} -> True
RemoteProjectPublishedReleaseCannotBeChanged {} -> True
RemoteProjectBranchHeadMismatch {} -> True

View File

@ -49,7 +49,6 @@ import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.ProjectUtils qualified as ProjectUtils
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch.Type qualified as Branch
import Unison.Codebase.Editor.HandleInput qualified as HandleInput
import Unison.Codebase.Editor.Input (Event (UnisonFileChanged), Input (..))
import Unison.Codebase.Editor.Output qualified as Output
@ -366,10 +365,8 @@ run verbosity dir stanzas codebase runtime sbRuntime nRuntime config ucmVersion
[] -> awaitInput
args -> do
liftIO (output ("\n" <> show p <> "\n"))
rootVar <- use #root
numberedArgs <- use #numberedArgs
let getRoot = fmap Branch.head . atomically $ readTMVar rootVar
liftIO (parseInput codebase getRoot curPath numberedArgs patternMap args) >>= \case
liftIO (parseInput codebase curPath numberedArgs patternMap args) >>= \case
-- invalid command is treated as a failure
Left msg -> do
liftIO $ writeIORef hasErrors True

View File

@ -42,6 +42,7 @@ import Data.Vector qualified as Vector
import System.FilePath (takeFileName)
import Text.Regex.TDFA ((=~))
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch0)
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Editor.Input (Event (..), Input (..))
@ -119,7 +120,6 @@ nothingTodo = emojiNote "😶"
parseInput ::
Codebase IO Symbol Ann ->
IO (Branch0 IO) ->
-- | Current path from root
Path.Absolute ->
-- | Numbered arguments
@ -131,11 +131,9 @@ parseInput ::
-- Returns either an error message or the fully expanded arguments list and parsed input.
-- If the output is `Nothing`, the user cancelled the input (e.g. ctrl-c)
IO (Either (P.Pretty CT.ColorText) (Maybe ([String], Input)))
parseInput codebase getRoot currentPath numberedArgs patterns segments = runExceptT do
parseInput codebase currentPath numberedArgs patterns segments = runExceptT do
let getCurrentBranch0 :: IO (Branch0 IO)
getCurrentBranch0 = do
rootBranch <- getRoot
pure $ Branch.getAt0 (Path.unabsolute currentPath) rootBranch
getCurrentBranch0 = Branch.head <$> Codebase.getBranchAtPath codebase currentPath
let projCtx = projectContextFromPath currentPath
case segments of

View File

@ -2,19 +2,25 @@ module Unison.CommandLine.BranchRelativePath
( BranchRelativePath (..),
parseBranchRelativePath,
branchRelativePathParser,
ResolvedBranchRelativePath (..),
parseIncrementalBranchRelativePath,
IncrementalBranchRelativePath (..),
)
where
import Data.Char (isSpace)
import Control.Lens (view)
import Data.Set qualified as Set
import Data.Text qualified as Text
import Data.These (These (..))
import Text.Builder qualified
import Text.Megaparsec qualified as Megaparsec
import Text.Megaparsec.Char qualified as Megaparsec
import U.Codebase.Sqlite.Project qualified as Sqlite
import U.Codebase.Sqlite.ProjectBranch qualified as Sqlite
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Path.Parse qualified as Path
import Unison.Prelude
import Unison.Project (ProjectBranchName, ProjectBranchSpecifier (..), ProjectName)
import Unison.Project (ProjectAndBranch (..), ProjectBranchName, ProjectName)
import Unison.Project qualified as Project
import Unison.Util.ColorText qualified as CT
import Unison.Util.Pretty qualified as P
@ -24,51 +30,221 @@ data BranchRelativePath
| LoosePath Path.Path'
deriving stock (Eq, Show)
-- | Strings without colons are parsed as loose code paths. A path with a colon may specify:
-- 1. A project and branch
-- 2. Only a branch, in which case the project is assumed to be the current project
-- 3. Only a path, in which case the path is rooted at the branch root
--
-- Specifying only a project is not allowed.
--
-- >>> parseBranchRelativePath "foo"
-- Right (LoosePath foo)
-- >>> parseBranchRelativePath "foo/bar:"
-- Right (BranchRelative (This (Right (UnsafeProjectName "foo",UnsafeProjectBranchName "bar"))))
-- >>> parseBranchRelativePath "foo/bar:some.path"
-- Right (BranchRelative (These (Right (UnsafeProjectName "foo",UnsafeProjectBranchName "bar")) some.path))
-- >>> parseBranchRelativePath "/bar:some.path"
-- Right (BranchRelative (These (Left (UnsafeProjectBranchName "bar")) some.path))
-- >>> parseBranchRelativePath ":some.path"
-- Right (BranchRelative (That some.path))
parseBranchRelativePath :: String -> Either (P.Pretty CT.ColorText) BranchRelativePath
parseBranchRelativePath str =
case Megaparsec.parse branchRelativePathParser "<none>" (Text.pack str) of
Left e -> Left (P.string (Megaparsec.errorBundlePretty e))
Right x -> Right x
branchRelativePathParser :: Megaparsec.Parsec Void Text BranchRelativePath
branchRelativePathParser =
instance From BranchRelativePath Text where
from = \case
BranchRelative brArg -> case brArg of
This eitherProj ->
Text.Builder.run
( Text.Builder.text (eitherProjToText eitherProj)
<> Text.Builder.char ':'
)
That path ->
Text.Builder.run
( Text.Builder.char ':'
<> Text.Builder.text (Path.convert path)
)
These eitherProj path ->
Text.Builder.run
( Text.Builder.text (eitherProjToText eitherProj)
<> Text.Builder.char ':'
<> Text.Builder.text (Path.convert path)
)
LoosePath path -> Path.toText' path
where
eitherProjToText = \case
Left branchName -> from @(These ProjectName ProjectBranchName) @Text (That branchName)
Right (projName, branchName) -> into @Text (These projName branchName)
data ResolvedBranchRelativePath
= ResolvedBranchRelative (ProjectAndBranch Sqlite.Project Sqlite.ProjectBranch) (Maybe Path.Relative)
| ResolvedLoosePath Path.Absolute
instance From ResolvedBranchRelativePath BranchRelativePath where
from = \case
ResolvedBranchRelative (ProjectAndBranch proj branch) mRel -> case mRel of
Nothing -> BranchRelative (This (Right (view #name proj, view #name branch)))
Just rel -> BranchRelative (These (Right (view #name proj, view #name branch)) rel)
ResolvedLoosePath p -> LoosePath (Path.absoluteToPath' p)
instance From ResolvedBranchRelativePath Text where
from = from . into @BranchRelativePath
data IncrementalBranchRelativePath
= -- | no dots, slashes, or colons
ProjectOrRelative Text Path.Path'
| -- | dots, no slashes or colons
LooseCode Path.Path'
| -- | valid project, no slash
IncompleteProject ProjectName
| -- | valid project/branch, slash, no colon
IncompleteBranch (Maybe ProjectName) (Maybe ProjectBranchName)
| -- | valid project/branch, with colon
IncompletePath (Either (ProjectAndBranch ProjectName ProjectBranchName) ProjectBranchName) (Maybe Path.Relative)
| PathRelativeToCurrentBranch Path.Relative
deriving stock (Show)
-- |
-- >>> parseIncrementalBranchRelativePath "foo"
-- Right (ProjectOrRelative "foo" foo)
--
-- >>> parseIncrementalBranchRelativePath "foo/bar:"
-- Right (IncompletePath (Left (ProjectAndBranch {project = UnsafeProjectName "foo", branch = UnsafeProjectBranchName "bar"})) Nothing)
--
-- >>> parseIncrementalBranchRelativePath "foo/bar:some.path"
-- Right (IncompletePath (Left (ProjectAndBranch {project = UnsafeProjectName "foo", branch = UnsafeProjectBranchName "bar"})) (Just some.path))
--
-- >>> parseIncrementalBranchRelativePath "/bar:some.path"
-- Right (IncompletePath (Right (UnsafeProjectBranchName "bar")) (Just some.path))
--
-- >>> parseIncrementalBranchRelativePath ":some.path"
-- Right (PathRelativeToCurrentBranch some.path)
--
-- >>> parseIncrementalBranchRelativePath "/branch"
-- Right (IncompleteBranch Nothing (Just (UnsafeProjectBranchName "branch")))
--
-- >>> parseIncrementalBranchRelativePath "/"
-- Right (IncompleteBranch Nothing Nothing)
parseIncrementalBranchRelativePath :: String -> Either (P.Pretty CT.ColorText) IncrementalBranchRelativePath
parseIncrementalBranchRelativePath str =
case Megaparsec.parse incrementalBranchRelativePathParser "<none>" (Text.pack str) of
Left e -> Left (P.string (Megaparsec.errorBundlePretty e))
Right x -> Right x
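-- A minimal sketch of how a caller such as tab-completion might dispatch on the
-- incremental result ('suggestFor' is hypothetical, not a real helper):
--
-- suggestFor :: String -> String
-- suggestFor input = case parseIncrementalBranchRelativePath input of
--   Left _ -> "no suggestions"
--   Right (ProjectOrRelative _ _) -> "suggest project names and namespaces"
--   Right (LooseCode _) -> "suggest namespaces"
--   Right (IncompleteProject _) -> "suggest branches of the named project"
--   Right (IncompleteBranch _ _) -> "suggest branch names"
--   Right (IncompletePath _ _) -> "suggest namespaces within the named branch"
--   Right (PathRelativeToCurrentBranch _) -> "suggest namespaces in the current branch"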
incrementalBranchRelativePathParser :: Megaparsec.Parsec Void Text IncrementalBranchRelativePath
incrementalBranchRelativePathParser =
asum
[ LoosePath <$> path',
BranchRelative <$> branchRelative
[ startingAtSlash Nothing,
pathRelativeToCurrentBranch,
projectName
]
where
branchRelative :: Megaparsec.Parsec Void Text (These (Either ProjectBranchName (ProjectName, ProjectBranchName)) Path.Relative)
branchRelative = asum [fullPath, currentBranchRootPath]
projectName = do
-- Attempt to parse a project name from the string prefix, or a
-- Path' consuming the entire string, and switch based on whether we
-- unambiguously parse one or the other.
parseThese Project.projectNameParser path' >>= \case
-- project name parser consumed the slash
This (_, (projectName, True)) ->
startingAtBranch (Just projectName)
-- project name parser did not consume a slash
--
-- Either we are at the end of input or the next character
-- is not a slash, so we have invalid input
This (_, (projectName, False)) ->
let end = do
Megaparsec.eof
pure (IncompleteProject projectName)
in end <|> startingAtSlash (Just projectName)
-- The string doesn't parse as a project name but does parse as a path
That (_, path) -> pure (LooseCode path)
-- The string parses both as a project name and a path
These _ (_, path) -> ProjectOrRelative <$> Megaparsec.takeRest <*> pure path
path' = Megaparsec.try do
offset <- Megaparsec.getOffset
pathStr <- Megaparsec.takeWhile1P (Just "path char") (not . isSpace)
case Path.parsePath' (Text.unpack pathStr) of
Left err -> failureAt offset err
Right x -> pure x
startingAtBranch :: Maybe ProjectName -> Megaparsec.Parsec Void Text IncrementalBranchRelativePath
startingAtBranch mproj =
optionalBranch >>= \case
Nothing -> pure (IncompleteBranch mproj Nothing)
Just branch ->
startingAtColon (maybe (Right branch) (\proj -> Left (ProjectAndBranch proj branch)) mproj)
<|> pure (IncompleteBranch mproj (Just branch))
startingAtSlash ::
Maybe ProjectName ->
Megaparsec.Parsec Void Text IncrementalBranchRelativePath
startingAtSlash mproj = Megaparsec.char '/' *> startingAtBranch mproj
startingAtColon ::
(Either (ProjectAndBranch ProjectName ProjectBranchName) ProjectBranchName) ->
Megaparsec.Parsec Void Text IncrementalBranchRelativePath
startingAtColon projStuff = do
_ <- Megaparsec.char ':'
p <- optionalEof relPath
pure (IncompletePath projStuff p)
pathRelativeToCurrentBranch :: Megaparsec.Parsec Void Text IncrementalBranchRelativePath
pathRelativeToCurrentBranch = do
_ <- Megaparsec.char ':'
p <- relPath
pure (PathRelativeToCurrentBranch p)
optionalEof :: Megaparsec.Parsec Void Text a -> Megaparsec.Parsec Void Text (Maybe a)
optionalEof pa = Just <$> pa <|> Nothing <$ Megaparsec.eof
optionalBranch :: Megaparsec.Parsec Void Text (Maybe ProjectBranchName)
optionalBranch = optionalEof branchNameParser
branchNameParser = Project.projectBranchNameParser False
relPath = do
offset <- Megaparsec.getOffset
path' >>= \(Path.Path' inner) -> case inner of
Left _ -> failureAt offset "Expected a relative path but found an absolute path"
Right x -> pure x
fullPath = do
projectAndBranchNames <- do
projectBranch <- Project.projectAndBranchNamesParser ProjectBranchSpecifier'Name
offset <- Megaparsec.getOffset
_ <- Megaparsec.char ':'
case projectBranch of
This _ -> failureAt offset "Expected a project and branch before the colon (e.g. project/branch:a.path)"
That pbn -> pure (Left pbn)
These pn pbn -> pure (Right (pn, pbn))
optional relPath <&> \case
Nothing -> This projectAndBranchNames
Just rp -> These projectAndBranchNames rp
currentBranchRootPath = do
_ <- Megaparsec.char ':'
That <$> relPath
path' = Megaparsec.try do
offset <- Megaparsec.getOffset
pathStr <- Megaparsec.takeRest
case Path.parsePath' (Text.unpack pathStr) of
Left err -> failureAt offset err
Right x -> pure x
failureAt :: forall a. Int -> String -> Megaparsec.Parsec Void Text a
failureAt offset str = Megaparsec.parseError (Megaparsec.FancyError offset (Set.singleton (Megaparsec.ErrorFail str)))
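-- Run both parsers via 'lookAhead' without committing to either. If exactly one
-- succeeds, consume exactly its matched input and return it ('This'/'That', paired
-- with the matched length); if both succeed, consume nothing and return 'These' so
-- the caller can decide which reading to commit to; if both fail, merge and rethrow
-- the parse errors.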
parseThese ::
forall a b.
Megaparsec.Parsec Void Text a ->
Megaparsec.Parsec Void Text b ->
Megaparsec.Parsec Void Text (These (Int, a) (Int, b))
parseThese pa pb = do
ea <- Megaparsec.observing $ Megaparsec.lookAhead $ Megaparsec.try $ first Text.length <$> Megaparsec.match pa
eb <- Megaparsec.observing $ Megaparsec.lookAhead $ Megaparsec.try $ first Text.length <$> Megaparsec.match pb
case (ea, eb) of
(Left aerr, Left berr) ->
Megaparsec.parseError (aerr <> berr)
(Left _, Right (blen, b)) -> do
Megaparsec.takeP Nothing blen
pure (That (blen, b))
(Right (alen, a), Left _) -> do
Megaparsec.takeP Nothing alen
pure (This (alen, a))
(Right a, Right b) -> pure (These a b)
branchRelativePathParser :: Megaparsec.Parsec Void Text BranchRelativePath
branchRelativePathParser =
incrementalBranchRelativePathParser >>= \case
ProjectOrRelative _txt path -> pure (LoosePath path)
LooseCode path -> pure (LoosePath path)
IncompleteProject _proj -> fail "Branch relative paths require a branch. Expected `/` here."
IncompleteBranch _mproj _mbranch -> fail "Branch relative paths require a colon. Expected `:` here."
PathRelativeToCurrentBranch p -> pure (BranchRelative (That p))
IncompletePath projStuff mpath ->
case projStuff of
Left (ProjectAndBranch projName branchName) -> case mpath of
Nothing -> pure (BranchRelative (This (Right (projName, branchName))))
Just path -> pure (BranchRelative (These (Right (projName, branchName)) path))
Right branch -> case mpath of
Nothing -> pure (BranchRelative (This (Left branch)))
Just path -> pure (BranchRelative (These (Left branch) path))

View File

@ -4,7 +4,7 @@
module Unison.CommandLine.InputPatterns where
import Control.Lens (preview, (^.))
import Control.Lens (preview, review, (^.))
import Control.Lens.Cons qualified as Cons
import Data.List (intercalate)
import Data.List.Extra qualified as List
@ -24,7 +24,7 @@ import U.Codebase.Sqlite.DbId (ProjectBranchId, ProjectId)
import U.Codebase.Sqlite.Project qualified as Sqlite
import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.Auth.HTTPClient (AuthenticatedHttpClient)
import Unison.Cli.Pretty (prettyProjectName, prettyProjectNameSlash, prettySlashProjectBranchName, prettyURI)
import Unison.Cli.Pretty (prettyProjectAndBranchName, prettyProjectName, prettyProjectNameSlash, prettySlashProjectBranchName, prettyURI)
import Unison.Cli.ProjectUtils qualified as ProjectUtils
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
@ -45,7 +45,8 @@ import Unison.Codebase.SyncMode qualified as SyncMode
import Unison.Codebase.Verbosity (Verbosity)
import Unison.Codebase.Verbosity qualified as Verbosity
import Unison.CommandLine
import Unison.CommandLine.BranchRelativePath (parseBranchRelativePath)
import Unison.CommandLine.BranchRelativePath (parseBranchRelativePath, parseIncrementalBranchRelativePath)
import Unison.CommandLine.BranchRelativePath qualified as BranchRelativePath
import Unison.CommandLine.Completion
import Unison.CommandLine.FZFResolvers qualified as Resolvers
import Unison.CommandLine.InputPattern (ArgumentType (..), InputPattern (InputPattern), IsOptional (..), unionSuggestions)
@ -1171,10 +1172,21 @@ forkLocal =
"fork"
["copy.namespace"]
I.Visible
[ ("namespace", Required, namespaceArg),
("new location", Required, newNameArg)
[ ("source location", Required, branchRelativePathArg),
("dest location", Required, branchRelativePathArg)
]
(makeExample forkLocal ["src", "dest"] <> "creates the namespace `dest` as a copy of `src`.")
( P.wrapColumn2
[ ( makeExample forkLocal ["src", "dest"],
"creates the namespace `dest` as a copy of `src`."
),
( makeExample forkLocal ["project0/branch0:a.path", "project1/branch1:foo"],
"creates the namespace `foo` in `branch1` of `project1` as a copy of `a.path` in `project0/branch0`."
),
( makeExample forkLocal ["srcproject/srcbranch", "dest"],
"creates the namespace `dest` as a copy of the branch `srcbranch` of `srcproject`."
)
]
)
( \case
[src, dest] -> do
src <- Input.parseBranchId2 src
@ -1419,6 +1431,23 @@ debugFuzzyOptions =
_ -> Left (I.help debugFuzzyOptions)
)
debugFormat :: InputPattern
debugFormat =
InputPattern
"debug.format"
[]
I.Hidden
[("source-file", Optional, filePathArg)]
( P.lines
[ P.wrap $ "This command can be used to test ucm's file formatter on the latest typechecked file.",
makeExample' debugFormat
]
)
( \case
[] -> Right Input.DebugFormatI
_ -> Left (I.help debugFormat)
)
push :: InputPattern
push =
InputPattern
@ -1851,6 +1880,21 @@ edit =
[] -> Left (I.help edit)
}
editNamespace :: InputPattern
editNamespace =
InputPattern
{ patternName = "edit.namespace",
aliases = [],
visibility = I.Visible,
args = [("namespace to load definitions from", ZeroPlus, namespaceArg)],
help =
P.lines
[ "`edit.namespace` will load all terms and types contained within the current namespace into your scratch file. This includes definitions in namespaces, but excludes libraries.",
"`edit.namespace ns1 ns2 ...` loads the terms and types contained within the provided namespaces."
],
parse = Right . Input.EditNamespaceI . fmap (Path.fromText . Text.pack)
}
topicNameArg :: ArgumentType
topicNameArg =
let topics = Map.keys helpTopicsMap
@ -2231,6 +2275,55 @@ debugDumpNamespaceSimple =
"Dump the namespace to a text file"
(const $ Right Input.DebugDumpNamespaceSimpleI)
debugTerm :: InputPattern
debugTerm =
InputPattern
"debug.term.abt"
[]
I.Hidden
[("term", Required, exactDefinitionTermQueryArg)]
"View debugging information for a given term."
( \case
[thing] -> fmap (Input.DebugTermI False) $ parseHashQualifiedName thing
_ -> Left (I.help debugTerm)
)
debugTermVerbose :: InputPattern
debugTermVerbose =
InputPattern
"debug.term.abt.verbose"
[]
I.Hidden
[("term", Required, exactDefinitionTermQueryArg)]
"View verbose debugging information for a given term."
( \case
[thing] -> fmap (Input.DebugTermI True) $ parseHashQualifiedName thing
_ -> Left (I.help debugTermVerbose)
)
debugType :: InputPattern
debugType =
InputPattern
"debug.type.abt"
[]
I.Hidden
[("type", Required, exactDefinitionTypeQueryArg)]
"View debugging information for a given type."
( \case
[thing] -> fmap (Input.DebugTypeI) $ parseHashQualifiedName thing
_ -> Left (I.help debugType)
)
debugLSPFoldRanges :: InputPattern
debugLSPFoldRanges =
InputPattern
"debug.lsp.fold-ranges"
[]
I.Hidden
[]
"Output the source from the most recently parsed file, but annotated with the computed fold ranges."
(const $ Right Input.DebugLSPFoldRangesI)
debugClearWatchCache :: InputPattern
debugClearWatchCache =
InputPattern
@ -2957,11 +3050,16 @@ validInputs =
debugDoctor,
debugDumpNamespace,
debugDumpNamespaceSimple,
debugTerm,
debugTermVerbose,
debugType,
debugLSPFoldRanges,
debugFileHashes,
debugNameDiff,
debugNumberedArgs,
debugTabCompletion,
debugFuzzyOptions,
debugFormat,
delete,
deleteBranch,
deleteProject,
@ -2985,6 +3083,7 @@ validInputs =
docs,
docsToHtml,
edit,
editNamespace,
execute,
fetchScheme,
find,
@ -3295,7 +3394,7 @@ projectAndOrBranchSuggestions config inputStr codebase _httpClient path = do
Nothing -> pure []
Just project -> do
let projectId = project ^. #projectId
fmap filterBranches do
fmap (filterBranches config path) do
Queries.loadAllProjectBranchesBeginningWith projectId Nothing
pure (map (projectBranchToCompletion projectName) branches)
-- This branch is probably dead due to intercepting inputs that begin with "/" above
@ -3308,13 +3407,13 @@ projectAndOrBranchSuggestions config inputStr codebase _httpClient path = do
Nothing -> pure []
Just project -> do
let projectId = project ^. #projectId
fmap filterBranches do
fmap (filterBranches config path) do
Queries.loadAllProjectBranchesBeginningWith projectId (Just $ into @Text branchName)
pure (map (projectBranchToCompletion projectName) branches)
where
input = Text.strip . Text.pack $ inputStr
(mayCurrentProjectId, mayCurrentBranchId) = case projectContextFromPath path of
(mayCurrentProjectId, _mayCurrentBranchId) = case projectContextFromPath path of
LooseCodePath {} -> (Nothing, Nothing)
ProjectBranchPath projectId branchId _ -> (Just projectId, Just branchId)
@ -3330,7 +3429,7 @@ projectAndOrBranchSuggestions config inputStr codebase _httpClient path = do
case mayCurrentProjectId of
Nothing -> pure []
Just currentProjectId ->
fmap filterBranches do
fmap (filterBranches config path) do
Queries.loadAllProjectBranchesBeginningWith currentProjectId (Just input)
projects <- case (projectInclusion config, mayCurrentProjectId) of
(OnlyWithinCurrentProject, Just currentProjectId) -> Queries.loadProject currentProjectId <&> maybeToList
@ -3415,17 +3514,10 @@ projectAndOrBranchSuggestions config inputStr codebase _httpClient path = do
Nothing -> pure []
Just (ProjectAndBranch currentProjectId _, _) ->
Codebase.runTransaction codebase do
fmap filterBranches do
fmap (filterBranches config path) do
Queries.loadAllProjectBranchesBeginningWith currentProjectId (Just branchName)
pure (map currentProjectBranchToCompletion branches)
filterBranches :: [(ProjectBranchId, a)] -> [(ProjectBranchId, a)]
filterBranches branches =
case (mayCurrentBranchId, branchInclusion config) of
(_, AllBranches) -> branches
(Nothing, _) -> branches
(Just currentBranchId, ExcludeCurrentBranch) -> branches & filter (\(branchId, _) -> branchId /= currentBranchId)
filterProjects :: [Sqlite.Project] -> [Sqlite.Project]
filterProjects projects =
case (mayCurrentProjectId, projectInclusion config) of
@ -3437,31 +3529,169 @@ projectAndOrBranchSuggestions config inputStr codebase _httpClient path = do
& List.find (\Sqlite.Project {projectId} -> projectId == currentBranchId)
& maybeToList
currentProjectBranchToCompletion :: (ProjectBranchId, ProjectBranchName) -> Completion
currentProjectBranchToCompletion (_, branchName) =
projectToCompletion :: Sqlite.Project -> Completion
projectToCompletion project =
Completion
{ replacement = stringProjectName,
display = P.toAnsiUnbroken (prettyProjectNameSlash (project ^. #name)),
isFinished = False
}
where
stringProjectName = Text.unpack (into @Text (project ^. #name) <> "/")
projectBranchToCompletion :: ProjectName -> (ProjectBranchId, ProjectBranchName) -> Completion
projectBranchToCompletion projectName (_, branchName) =
Completion
{ replacement = Text.unpack (into @Text (ProjectAndBranch projectName branchName)),
display = P.toAnsiUnbroken (prettySlashProjectBranchName branchName),
isFinished = False
}
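-- | Complete branch names within the project (if any) that the given absolute path
-- sits inside of, filtered according to the suggestion config.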
handleBranchesComplete ::
MonadIO m =>
ProjectBranchSuggestionsConfig ->
Text ->
Codebase m v a ->
Path.Absolute ->
m [Completion]
handleBranchesComplete config branchName codebase path = do
branches <-
case preview ProjectUtils.projectBranchPathPrism path of
Nothing -> pure []
Just (ProjectAndBranch currentProjectId _, _) ->
Codebase.runTransaction codebase do
fmap (filterBranches config path) do
Queries.loadAllProjectBranchesBeginningWith currentProjectId (Just branchName)
pure (map currentProjectBranchToCompletion branches)
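-- | Drop the branch the given path is currently on when the config asks to exclude
-- the current branch; otherwise pass the branch list through unchanged.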
filterBranches :: ProjectBranchSuggestionsConfig -> Path.Absolute -> [(ProjectBranchId, a)] -> [(ProjectBranchId, a)]
filterBranches config path branches =
case (mayCurrentBranchId, branchInclusion config) of
(_, AllBranches) -> branches
(Nothing, _) -> branches
(Just currentBranchId, ExcludeCurrentBranch) -> branches & filter (\(branchId, _) -> branchId /= currentBranchId)
where
(_mayCurrentProjectId, mayCurrentBranchId) = case projectContextFromPath path of
LooseCodePath {} -> (Nothing, Nothing)
ProjectBranchPath projectId branchId _ -> (Just projectId, Just branchId)
currentProjectBranchToCompletion :: (ProjectBranchId, ProjectBranchName) -> Completion
currentProjectBranchToCompletion (_, branchName) =
Completion
{ replacement = '/' : Text.unpack (into @Text branchName),
display = P.toAnsiUnbroken (prettySlashProjectBranchName branchName),
isFinished = False
}
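-- | Tab-completion for branch-relative paths: incrementally parse what has been typed
-- so far and, depending on how far the input has gotten, suggest project names, branch
-- names, namespaces within a branch, or a combination of these.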
branchRelativePathSuggestions ::
MonadIO m =>
ProjectBranchSuggestionsConfig ->
String ->
Codebase m v a ->
AuthenticatedHttpClient ->
Path.Absolute -> -- Current path
m [Line.Completion]
branchRelativePathSuggestions config inputStr codebase _httpClient currentPath = do
case parseIncrementalBranchRelativePath inputStr of
Left _ -> pure []
Right ibrp -> case ibrp of
BranchRelativePath.ProjectOrRelative _txt _path -> do
namespaceSuggestions <- Codebase.runTransaction codebase (prefixCompleteNamespace inputStr currentPath)
projectSuggestions <- projectNameSuggestions WithSlash inputStr codebase
pure (namespaceSuggestions ++ projectSuggestions)
BranchRelativePath.LooseCode _path ->
Codebase.runTransaction codebase (prefixCompleteNamespace inputStr currentPath)
BranchRelativePath.IncompleteProject _proj ->
projectNameSuggestions WithSlash inputStr codebase
BranchRelativePath.IncompleteBranch mproj mbranch -> case mproj of
Nothing -> map suffixPathSep <$> handleBranchesComplete config (maybe "" into mbranch) codebase currentPath
Just projectName -> do
branches <-
Codebase.runTransaction codebase do
Queries.loadProjectByName projectName >>= \case
Nothing -> pure []
Just project -> do
let projectId = project ^. #projectId
fmap (filterBranches config currentPath) do
Queries.loadAllProjectBranchesBeginningWith projectId (into @Text <$> mbranch)
pure (map (projectBranchToCompletionWithSep projectName) branches)
BranchRelativePath.PathRelativeToCurrentBranch relPath -> Codebase.runTransaction codebase do
mprojectBranch <- runMaybeT do
(projectId, branchId) <- MaybeT (pure $ (,) <$> mayCurrentProjectId <*> mayCurrentBranchId)
MaybeT (Queries.loadProjectBranch projectId branchId)
case mprojectBranch of
Nothing -> pure []
Just projectBranch -> do
let branchPath = review ProjectUtils.projectBranchPathPrism (projectAndBranch, mempty)
projectAndBranch = ProjectAndBranch (projectBranch ^. #projectId) (projectBranch ^. #branchId)
map prefixPathSep <$> prefixCompleteNamespace (Path.convert relPath) branchPath
BranchRelativePath.IncompletePath projStuff mpath -> do
Codebase.runTransaction codebase do
mprojectBranch <- runMaybeT do
case projStuff of
Left names@(ProjectAndBranch projectName branchName) -> do
(,Left names) <$> MaybeT (Queries.loadProjectBranchByNames projectName branchName)
Right branchName -> do
currentProjectId <- MaybeT (pure mayCurrentProjectId)
projectBranch <- MaybeT (Queries.loadProjectBranchByName currentProjectId branchName)
pure (projectBranch, Right (projectBranch ^. #name))
case mprojectBranch of
Nothing -> pure []
Just (projectBranch, prefix) -> do
let branchPath = review ProjectUtils.projectBranchPathPrism (projectAndBranch, mempty)
projectAndBranch = ProjectAndBranch (projectBranch ^. #projectId) (projectBranch ^. #branchId)
map (addBranchPrefix prefix) <$> prefixCompleteNamespace (maybe "" Path.convert mpath) branchPath
where
(mayCurrentProjectId, mayCurrentBranchId) = case projectContextFromPath currentPath of
LooseCodePath {} -> (Nothing, Nothing)
ProjectBranchPath projectId branchId _ -> (Just projectId, Just branchId)
projectBranchToCompletionWithSep :: ProjectName -> (ProjectBranchId, ProjectBranchName) -> Completion
projectBranchToCompletionWithSep projectName (_, branchName) =
Completion
{ replacement = '/' : Text.unpack (into @Text branchName),
display = P.toAnsiUnbroken (prettySlashProjectBranchName branchName),
{ replacement = Text.unpack (into @Text (ProjectAndBranch projectName branchName) <> branchPathSep),
display = P.toAnsiUnbroken (prettySlashProjectBranchName branchName <> branchPathSepPretty),
isFinished = False
}
projectBranchToCompletion :: ProjectName -> (ProjectBranchId, ProjectBranchName) -> Completion
projectBranchToCompletion projectName (_, branchName) =
Completion
{ replacement = Text.unpack (into @Text (ProjectAndBranch projectName branchName)),
display = P.toAnsiUnbroken (prettySlashProjectBranchName branchName),
isFinished = False
prefixPathSep :: Completion -> Completion
prefixPathSep c =
c
{ Line.replacement = branchPathSep <> Line.replacement c,
Line.display = P.toAnsiUnbroken branchPathSepPretty <> Line.display c
}
projectToCompletion :: Sqlite.Project -> Completion
projectToCompletion project =
Completion
{ replacement = stringProjectName,
display = P.toAnsiUnbroken (prettyProjectNameSlash (project ^. #name)),
isFinished = False
suffixPathSep :: Completion -> Completion
suffixPathSep c =
c
{ Line.replacement = Line.replacement c <> branchPathSep,
Line.display = Line.display c <> P.toAnsiUnbroken branchPathSepPretty
}
where
stringProjectName = Text.unpack (into @Text (project ^. #name) <> "/")
addBranchPrefix ::
Either (ProjectAndBranch ProjectName ProjectBranchName) ProjectBranchName ->
Completion ->
Completion
addBranchPrefix eproj =
let (prefixText, prefixPretty) = case eproj of
Left pb ->
( into @Text pb,
prettyProjectAndBranchName pb
)
Right branch ->
( "/" <> into @Text branch,
prettySlashProjectBranchName branch
)
in \c ->
c
{ Line.replacement = Text.unpack prefixText <> branchPathSep <> Line.replacement c,
Line.display = P.toAnsiUnbroken (prefixPretty <> branchPathSepPretty) <> Line.display c
}
branchPathSepPretty = P.hiBlack branchPathSep
branchPathSep :: IsString s => s
branchPathSep = ":"
-- | A project name, branch name, or both.
projectAndBranchNamesArg :: ProjectBranchSuggestionsConfig -> ArgumentType
@ -3490,26 +3720,60 @@ projectBranchNameWithOptionalProjectNameArg =
fzfResolver = Just Resolvers.projectBranchResolver
}
branchRelativePathArg :: ArgumentType
branchRelativePathArg =
ArgumentType
{ typeName = "branch-relative-path",
suggestions = branchRelativePathSuggestions config,
fzfResolver = Nothing
}
where
config =
ProjectBranchSuggestionsConfig
{ showProjectCompletions = True,
projectInclusion = AllProjects,
branchInclusion = AllBranches
}
-- | A project name.
projectNameArg :: ArgumentType
projectNameArg =
ArgumentType
{ typeName = "project-name",
suggestions = \(Text.strip . Text.pack -> input) codebase _httpClient _path -> do
projects <-
Codebase.runTransaction codebase do
Queries.loadAllProjectsBeginningWith (Just input)
pure $ map projectToCompletion projects,
suggestions = \input codebase _httpClient _path -> projectNameSuggestions NoSlash input codebase,
fzfResolver = Just $ Resolvers.multiResolver [Resolvers.projectNameOptions]
}
data OptionalSlash
= WithSlash
| NoSlash
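-- | Complete project names beginning with the given input, optionally rendering each
-- suggestion with a trailing slash so a branch name can be typed next.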
projectNameSuggestions ::
MonadIO m =>
OptionalSlash ->
String ->
Codebase m v a ->
m [Line.Completion]
projectNameSuggestions slash (Text.strip . Text.pack -> input) codebase = do
projects <-
Codebase.runTransaction codebase do
Queries.loadAllProjectsBeginningWith (Just input)
pure $ map projectToCompletion projects
where
projectToCompletion :: Sqlite.Project -> Completion
projectToCompletion project =
Completion
{ replacement = Text.unpack (into @Text (project ^. #name)),
display = P.toAnsiUnbroken (prettyProjectName (project ^. #name)),
isFinished = False
}
projectToCompletion =
let toPretty = case slash of
NoSlash -> prettyProjectName
WithSlash -> prettyProjectNameSlash
toText project = case slash of
NoSlash -> into @Text (project ^. #name)
WithSlash -> Text.snoc (into @Text (project ^. #name)) '/'
in \project ->
Completion
{ replacement = Text.unpack (toText project),
display = P.toAnsiUnbroken (toPretty (project ^. #name)),
isFinished = False
}
parsePullSource :: Text -> Maybe (ReadRemoteNamespace (These ProjectName ProjectBranchNameOrLatestRelease))
parsePullSource =

View File

@ -16,6 +16,7 @@ import Ki qualified
import System.Console.Haskeline qualified as Line
import System.IO (hGetEcho, hPutStrLn, hSetEcho, stderr, stdin)
import System.IO.Error (isDoesNotExistError)
import U.Codebase.HashTags (CausalHash)
import U.Codebase.Sqlite.Operations qualified as Operations
import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.Auth.CredentialManager (newCredentialManager)
@ -27,7 +28,6 @@ import Unison.Cli.Pretty (prettyProjectAndBranchName)
import Unison.Cli.ProjectUtils (projectBranchPathPrism)
import Unison.Codebase (Codebase)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch)
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Editor.HandleInput qualified as HandleInput
import Unison.Codebase.Editor.Input (Event, Input (..))
@ -58,11 +58,10 @@ import UnliftIO.STM
getUserInput ::
Codebase IO Symbol Ann ->
AuthenticatedHttpClient ->
IO (Branch IO) ->
Path.Absolute ->
[String] ->
IO Input
getUserInput codebase authHTTPClient getRoot currentPath numberedArgs =
getUserInput codebase authHTTPClient currentPath numberedArgs =
Line.runInputT
settings
(haskelineCtrlCHandling go)
@ -101,7 +100,7 @@ getUserInput codebase authHTTPClient getRoot currentPath numberedArgs =
Just l -> case words l of
[] -> go
ws -> do
liftIO (parseInput codebase (Branch.head <$> getRoot) currentPath numberedArgs IP.patternMap ws) >>= \case
liftIO (parseInput codebase currentPath numberedArgs IP.patternMap ws) >>= \case
Left msg -> do
liftIO $ putPrettyLn msg
go
@ -128,7 +127,7 @@ main ::
Codebase IO Symbol Ann ->
Maybe Server.BaseUrl ->
UCMVersion ->
(Branch IO -> STM ()) ->
(CausalHash -> STM ()) ->
(Path.Absolute -> STM ()) ->
ShouldWatchFiles ->
IO ()
@ -156,7 +155,7 @@ main dir welcome initialPath config initialInputs runtime sbRuntime nRuntime cod
currentRoot <- atomically do
currentRoot <- readTMVar rootVar
guard $ Just currentRoot /= lastRoot
notifyBranchChange currentRoot
notifyBranchChange (Branch.headHash currentRoot)
pure (Just currentRoot)
loop currentRoot
loop Nothing
@ -178,7 +177,6 @@ main dir welcome initialPath config initialInputs runtime sbRuntime nRuntime cod
getUserInput
codebase
authHTTPClient
(atomically . readTMVar $ loopState ^. #root)
(loopState ^. #currentPath)
(loopState ^. #numberedArgs)
let loadSourceFile :: Text -> IO Cli.LoadSourceResult

View File

@ -21,15 +21,18 @@ import Data.Set qualified as Set
import Data.Set.NonEmpty (NESet)
import Data.Text qualified as Text
import Data.Text.Encoding qualified as Text
import Data.Text.Lazy qualified as TL
import Data.Time (UTCTime, getCurrentTime)
import Data.Tuple (swap)
import Data.Tuple.Extra (dupe)
import Data.Void (absurd)
import Debug.RecoverRTTI qualified as RTTI
import Network.HTTP.Types qualified as Http
import Servant.Client qualified as Servant
import System.Console.ANSI qualified as ANSI
import System.Console.Haskeline.Completion qualified as Completion
import System.Directory (canonicalizePath, getHomeDirectory)
import Text.Pretty.Simple (pShowNoColor, pStringNoColor)
import U.Codebase.Branch (NamespaceStats (..))
import U.Codebase.Branch.Diff (NameChanges (..))
import U.Codebase.HashTags (CausalHash (..))
@ -156,6 +159,9 @@ import Unison.Var qualified as Var
import Unison.WatchKind qualified as WK
import Witch (unsafeFrom)
reportBugURL :: Pretty
reportBugURL = "https://github.com/unisonweb/unison/issues/new"
type Pretty = P.Pretty P.ColorText
shortenDirectory :: FilePath -> IO FilePath
@ -1120,11 +1126,11 @@ notifyUser dir = \case
]
ParseErrors src es ->
pure . P.sep "\n\n" $ prettyParseError (Text.unpack src) <$> es
TypeErrors curPath src ppenv notes -> do
TypeErrors _curPath src ppenv notes -> do
let showNote =
intercalateMap "\n\n" (printNoteWithSource ppenv (Text.unpack src) curPath)
intercalateMap "\n\n" (printNoteWithSource ppenv (Text.unpack src))
. map Result.TypeError
pure . showNote $ notes
pure $ showNote notes
CompilerBugs src env bugs -> pure $ intercalateMap "\n\n" bug bugs
where
bug = renderCompilerBug env (Text.unpack src)
@ -1772,6 +1778,22 @@ notifyUser dir = \case
IntegrityCheck result -> pure $ case result of
NoIntegrityErrors -> "🎉 No issues detected 🎉"
IntegrityErrorDetected ns -> prettyPrintIntegrityErrors ns
DebugTerm verbose builtinOrTerm -> pure $ case builtinOrTerm of
Left builtin -> "Builtin term: ##" <> P.text builtin
Right trm ->
if verbose
then P.text . TL.toStrict . pStringNoColor $ RTTI.anythingToString trm
else P.shown trm
DebugDecl typ mayConId -> do
let constructorMsg = case mayConId of
Nothing -> ""
Just conId -> "Constructor #" <> P.shown conId <> " of the following type:\n"
pure $
constructorMsg
<> case typ of
Left builtinTxt -> "Builtin type: ##" <> P.text builtinTxt
Right decl -> either (P.text . TL.toStrict . pShowNoColor) (P.text . TL.toStrict . pShowNoColor) decl
AnnotatedFoldRanges txt -> pure $ P.text txt
DisplayDebugNameDiff NameChanges {termNameAdds, termNameRemovals, typeNameAdds, typeNameRemovals} -> do
let referentText =
-- We don't use the constructor type in the actual output here, so there's no
@ -1933,6 +1955,17 @@ notifyUser dir = \case
RemoteProjectBranchDoesntExist host projectAndBranch ->
pure . P.wrap $
prettyProjectAndBranchName projectAndBranch <> "does not exist on" <> prettyURI host
RemoteProjectBranchDoesntExist'Push host projectAndBranch ->
let push = P.group . P.backticked . IP.patternName $ IP.push
in pure . P.wrap $
"The previous push target named"
<> prettyProjectAndBranchName projectAndBranch
<> "has been deleted from"
<> P.group (prettyURI host <> ".")
<> "I've deleted the invalid push target."
<> "Run the"
<> push
<> "command again to push to a new target."
RemoteProjectBranchHeadMismatch host projectAndBranch ->
pure . P.wrap $
prettyProjectAndBranchName projectAndBranch
@ -2286,13 +2319,51 @@ prettyUpdatePathError repoInfo = \case
prettyUploadEntitiesError :: Share.UploadEntitiesError -> Pretty
prettyUploadEntitiesError = \case
Share.UploadEntitiesError'HashMismatchForEntity _hashMismatch -> error "TODO: hash mismatch error message"
Share.UploadEntitiesError'EntityValidationFailure validationFailureErr -> prettyValidationFailure validationFailureErr
Share.UploadEntitiesError'HashMismatchForEntity (Share.HashMismatchForEntity {supplied, computed}) ->
hashMismatchFromShare supplied computed
Share.UploadEntitiesError'InvalidRepoInfo err repoInfo -> invalidRepoInfo err repoInfo
Share.UploadEntitiesError'NeedDependencies dependencies -> needDependencies dependencies
Share.UploadEntitiesError'NoWritePermission repoInfo -> noWritePermissionForRepo repoInfo
Share.UploadEntitiesError'ProjectNotFound project -> shareProjectNotFound project
Share.UploadEntitiesError'UserNotFound userHandle -> shareUserNotFound (Share.RepoInfo userHandle)
prettyValidationFailure :: Share.EntityValidationError -> Pretty
prettyValidationFailure = \case
Share.EntityHashMismatch entityType (Share.HashMismatchForEntity {supplied, computed}) ->
P.lines
[ P.wrap $ "The hash associated with the given " <> prettyEntityType entityType <> " entity is incorrect.",
"",
P.wrap $ "The associated hash is: " <> prettyHash32 supplied,
P.wrap $ "The computed hash is: " <> prettyHash32 computed
]
Share.UnsupportedEntityType hash32 entityType ->
P.lines
[ P.wrap $ "The entity with hash " <> prettyHash32 hash32 <> " of type " <> prettyEntityType entityType <> " is not supported by your version of ucm.",
P.wrap $ "Try upgrading to the latest version of ucm."
]
Share.InvalidByteEncoding hash32 entityType msg ->
P.lines
[ P.wrap $ "Failed to decode a " <> prettyEntityType entityType <> " entity with the hash " <> prettyHash32 hash32 <> ".",
"Please create an issue and report this to the Unison team",
"",
P.wrap $ "The error was: " <> P.text msg
]
Share.HashResolutionFailure hash32 ->
P.lines
[ P.wrap $ "Failed to resolve a referenced hash when validating the hash for " <> prettyHash32 hash32 <> ".",
"Please create an issue and report this to the Unison team"
]
where
prettyEntityType = \case
Share.TermComponentType -> "term component"
Share.DeclComponentType -> "type component"
Share.PatchType -> "patch"
Share.PatchDiffType -> "patch diff"
Share.NamespaceType -> "namespace"
Share.NamespaceDiffType -> "namespace diff"
Share.CausalType -> "causal"
prettyTransportError :: CodeserverTransportError -> Pretty
prettyTransportError = \case
DecodeFailure msg resp ->
@ -2380,6 +2451,18 @@ invalidRepoInfo err repoInfo =
P.text err
]
hashMismatchFromShare :: Hash32 -> Hash32 -> Pretty
hashMismatchFromShare supplied computed =
P.lines
[ P.wrap "Uh oh, Share double-checked the hash of something you're uploading and it didn't match.",
P.wrap "Don't worry, you didn't do anything wrong, this is a bug in UCM, please report it and we'll do our best to sort it out 🤞",
reportBugURL,
"",
"Please include the following information in your report:",
P.wrap $ "The hash provided by your UCM is: " <> prettyHash32 supplied,
P.wrap $ "The hash computed by Share is: " <> prettyHash32 computed
]
pushPublicNote :: InputPattern -> Text -> [Text] -> Pretty
pushPublicNote cmd uname ys =
let msg =

View File

@ -23,8 +23,8 @@ import Language.LSP.VFS
import Network.Simple.TCP qualified as TCP
import System.Environment (lookupEnv)
import System.IO (hPutStrLn)
import U.Codebase.HashTags
import Unison.Codebase
import Unison.Codebase.Branch (Branch)
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Runtime (Runtime)
import Unison.Debug qualified as Debug
@ -36,6 +36,7 @@ import Unison.LSP.Completion (completionHandler, completionItemResolveHandler)
import Unison.LSP.Configuration qualified as Config
import Unison.LSP.FileAnalysis qualified as Analysis
import Unison.LSP.FoldingRange (foldingRangeRequest)
import Unison.LSP.Formatting (formatDocRequest, formatRangeRequest)
import Unison.LSP.HandlerUtils qualified as Handlers
import Unison.LSP.Hover (hoverHandler)
import Unison.LSP.NotificationHandlers qualified as Notifications
@ -45,8 +46,6 @@ import Unison.LSP.UCMWorker (ucmWorker)
import Unison.LSP.VFS qualified as VFS
import Unison.Parser.Ann
import Unison.Prelude
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.Server.NameSearch.FromNames qualified as NameSearch
import Unison.Symbol
import UnliftIO
import UnliftIO.Foreign (Errno (..), eADDRINUSE)
@ -55,8 +54,8 @@ getLspPort :: IO String
getLspPort = fromMaybe "5757" <$> lookupEnv "UNISON_LSP_PORT"
-- | Spawn an LSP server on the configured port.
spawnLsp :: Codebase IO Symbol Ann -> Runtime Symbol -> STM (Branch IO) -> STM (Path.Absolute) -> IO ()
spawnLsp codebase runtime latestBranch latestPath =
spawnLsp :: Codebase IO Symbol Ann -> Runtime Symbol -> STM CausalHash -> STM (Path.Absolute) -> IO ()
spawnLsp codebase runtime latestRootHash latestPath =
ifEnabled . TCP.withSocketsDo $ do
lspPort <- getLspPort
UnliftIO.handleIO (handleFailure lspPort) $ do
@ -76,7 +75,7 @@ spawnLsp codebase runtime latestBranch latestPath =
-- different un-saved state for the same file.
initVFS $ \vfs -> do
vfsVar <- newMVar vfs
void $ runServerWith lspServerLogger lspClientLogger clientInput clientOutput (serverDefinition vfsVar codebase runtime scope latestBranch latestPath)
void $ runServerWith lspServerLogger lspClientLogger clientInput clientOutput (serverDefinition vfsVar codebase runtime scope latestRootHash latestPath)
where
handleFailure :: String -> IOException -> IO ()
handleFailure lspPort ioerr =
@ -106,16 +105,16 @@ serverDefinition ::
Codebase IO Symbol Ann ->
Runtime Symbol ->
Ki.Scope ->
STM (Branch IO) ->
STM CausalHash ->
STM (Path.Absolute) ->
ServerDefinition Config
serverDefinition vfsVar codebase runtime scope latestBranch latestPath =
serverDefinition vfsVar codebase runtime scope latestRootHash latestPath =
ServerDefinition
{ defaultConfig = defaultLSPConfig,
configSection = "unison",
parseConfig = Config.parseConfig,
onConfigChange = Config.updateConfig,
doInitialize = lspDoInitialize vfsVar codebase runtime scope latestBranch latestPath,
doInitialize = lspDoInitialize vfsVar codebase runtime scope latestRootHash latestPath,
staticHandlers = lspStaticHandlers,
interpretHandler = lspInterpretHandler,
options = lspOptions
@ -127,26 +126,31 @@ lspDoInitialize ::
Codebase IO Symbol Ann ->
Runtime Symbol ->
Ki.Scope ->
STM (Branch IO) ->
STM CausalHash ->
STM (Path.Absolute) ->
LanguageContextEnv Config ->
Msg.TMessage 'Msg.Method_Initialize ->
IO (Either Msg.ResponseError Env)
lspDoInitialize vfsVar codebase runtime scope latestBranch latestPath lspContext _initMsg = do
-- TODO: some of these should probably be MVars so that we correctly wait for names and
-- things to be generated before serving requests.
lspDoInitialize vfsVar codebase runtime scope latestRootHash latestPath lspContext _initMsg = do
checkedFilesVar <- newTVarIO mempty
dirtyFilesVar <- newTVarIO mempty
ppedCacheVar <- newTVarIO PPED.empty
parseNamesCacheVar <- newTVarIO mempty
currentPathCacheVar <- newTVarIO Path.absoluteEmpty
ppedCacheVar <- newEmptyTMVarIO
currentNamesCacheVar <- newEmptyTMVarIO
currentPathCacheVar <- newEmptyTMVarIO
cancellationMapVar <- newTVarIO mempty
completionsVar <- newTVarIO mempty
nameSearchCacheVar <- newTVarIO $ NameSearch.makeNameSearch 0 mempty
let env = Env {ppedCache = readTVarIO ppedCacheVar, parseNamesCache = readTVarIO parseNamesCacheVar, currentPathCache = readTVarIO currentPathCacheVar, nameSearchCache = readTVarIO nameSearchCacheVar, ..}
completionsVar <- newEmptyTMVarIO
nameSearchCacheVar <- newEmptyTMVarIO
let env =
Env
{ ppedCache = atomically $ readTMVar ppedCacheVar,
currentNamesCache = atomically $ readTMVar currentNamesCacheVar,
currentPathCache = atomically $ readTMVar currentPathCacheVar,
nameSearchCache = atomically $ readTMVar nameSearchCacheVar,
..
}
let lspToIO = flip runReaderT lspContext . unLspT . flip runReaderT env . runLspM
Ki.fork scope (lspToIO Analysis.fileAnalysisWorker)
Ki.fork scope (lspToIO $ ucmWorker ppedCacheVar parseNamesCacheVar nameSearchCacheVar latestBranch latestPath)
Ki.fork scope (lspToIO $ ucmWorker ppedCacheVar currentNamesCacheVar nameSearchCacheVar currentPathCacheVar latestRootHash latestPath)
pure $ Right $ env
-- | LSP request handlers that don't register/unregister dynamically
@ -168,6 +172,8 @@ lspRequestHandlers =
& SMM.insert Msg.SMethod_TextDocumentFoldingRange (mkHandler foldingRangeRequest)
& SMM.insert Msg.SMethod_TextDocumentCompletion (mkHandler completionHandler)
& SMM.insert Msg.SMethod_CompletionItemResolve (mkHandler completionItemResolveHandler)
& SMM.insert Msg.SMethod_TextDocumentFormatting (mkHandler formatDocRequest)
& SMM.insert Msg.SMethod_TextDocumentRangeFormatting (mkHandler formatRangeRequest)
where
defaultTimeout = 10_000 -- 10s
mkHandler ::

View File

@ -53,7 +53,7 @@ completionHandler m respond =
respond . maybe (Right $ InL mempty) (Right . InR . InL) =<< runMaybeT do
let fileUri = (m ^. params . textDocument . uri)
(range, prefix) <- VFS.completionPrefix (m ^. params . textDocument . uri) (m ^. params . position)
ppe <- PPED.suffixifiedPPE <$> lift globalPPED
ppe <- PPED.suffixifiedPPE <$> lift currentPPED
codebaseCompletions <- lift getCodebaseCompletions
Config {maxCompletions} <- lift getConfig
let defMatches = matchCompletions codebaseCompletions prefix

View File

@ -16,24 +16,36 @@ rangeToInterval (Range start end) =
annToInterval :: Ann -> Maybe (Interval.Interval Position)
annToInterval ann = annToRange ann <&> rangeToInterval
-- | Convert a Unison file-position where the first char is 1 and line is 1, to an LSP `Position`
-- where the first char is 0 and line is 0.
uToLspPos :: Lex.Pos -> Position
uToLspPos uPos =
Position
{ _line = fromIntegral $ Lex.line uPos - 1, -- 1 indexed vs 0 indexed
_character = fromIntegral $ Lex.column uPos - 1
{ _line = fromIntegral $ max 0 (Lex.line uPos - 1),
_character = fromIntegral $ max 0 (Lex.column uPos - 1)
}
-- | Convert an LSP `Position` where the first char is 0 and line is 0, to a Unison file-position
-- where the first char is 1 and line is 1.
lspToUPos :: Position -> Lex.Pos
lspToUPos Position {_line = line, _character = char} =
Lex.Pos
(fromIntegral $ line + 1) -- 1 indexed vs 0 indexed
(fromIntegral $ line + 1)
(fromIntegral $ char + 1)
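-- Illustrative sketch, assuming a derived 'Eq' on 'Lex.Pos': for positions inside a
-- file the two conversions should be inverses ('uToLspPos' additionally clamps at
-- zero), e.g.
--
-- >>> lspToUPos (uToLspPos (Lex.Pos 3 7)) == Lex.Pos 3 7
-- True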
-- | Convert a Unison `Range` where the first char is 1 and line is 1, to an LSP `Range`
-- where the first char is 0 and line is 0.
uToLspRange :: Range.Range -> Range
uToLspRange (Range.Range start end) = Range (uToLspPos start) (uToLspPos end)
-- | Convert an LSP `Range` where the first char is 0 and line is 0, to a Unison `Range`
-- where the first char is 1 and line is 1.
lspToURange :: Range -> Range.Range
lspToURange (Range start end) = Range.Range (lspToUPos start) (lspToUPos end)
annToRange :: Ann -> Maybe Range
annToRange = \case
Ann.Intrinsic -> Nothing
Ann.External -> Nothing
Ann.GeneratedFrom a -> annToRange a
Ann.Ann start end -> Just $ Range (uToLspPos start) (uToLspPos end)

View File

@ -1,8 +1,5 @@
module Unison.LSP.Diagnostics
( annToRange,
uToLspPos,
uToLspRange,
reportDiagnostics,
( reportDiagnostics,
mkDiagnostic,
DiagnosticSeverity (..),
)
@ -11,27 +8,7 @@ where
import Language.LSP.Protocol.Message qualified as Msg
import Language.LSP.Protocol.Types
import Unison.LSP.Types
import Unison.Parser.Ann (Ann)
import Unison.Parser.Ann qualified as Ann
import Unison.Prelude
import Unison.Syntax.Lexer qualified as Lex
import Unison.Util.Range qualified as Range
annToRange :: Ann -> Maybe Range
annToRange = \case
Ann.Intrinsic -> Nothing
Ann.External -> Nothing
Ann.Ann start end -> Just $ Range (uToLspPos start) (uToLspPos end)
uToLspPos :: Lex.Pos -> Position
uToLspPos uPos =
Position
{ _line = fromIntegral $ Lex.line uPos - 1, -- 1 indexed vs 0 indexed
_character = fromIntegral $ Lex.column uPos - 1 -- 1 indexed vs 0 indexed
}
uToLspRange :: Range.Range -> Range
uToLspRange (Range.Range start end) = Range (uToLspPos start) (uToLspPos end)
reportDiagnostics ::
(Foldable f) =>

View File

@ -27,7 +27,6 @@ import Unison.ABT qualified as ABT
import Unison.Cli.TypeCheck (computeTypecheckingEnvironment)
import Unison.Cli.UniqueTypeGuidLookup qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Path qualified as Path
import Unison.DataDeclaration qualified as DD
import Unison.Debug qualified as Debug
import Unison.FileParsers (ShouldUseTndr (..))
@ -41,6 +40,7 @@ import Unison.LSP.Types
import Unison.LSP.Types qualified as LSP
import Unison.LSP.VFS qualified as VFS
import Unison.Name (Name)
import Unison.Names (Names)
import Unison.Names qualified as Names
import Unison.Parser.Ann (Ann)
import Unison.Parsers qualified as Parsers
@ -56,23 +56,22 @@ import Unison.Referent qualified as Referent
import Unison.Result (Note)
import Unison.Result qualified as Result
import Unison.Symbol (Symbol)
import Unison.Symbol qualified as Symbol
import Unison.Syntax.HashQualified' qualified as HQ' (toText)
import Unison.Syntax.Lexer qualified as L
import Unison.Syntax.Name qualified as Name
import Unison.Syntax.Parser qualified as Parser
import Unison.Syntax.TypePrinter qualified as TypePrinter
import Unison.Term qualified as Term
import Unison.Type (Type)
import Unison.Typechecker.Context qualified as Context
import Unison.Typechecker.TypeError qualified as TypeError
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Names qualified as UF
import Unison.UnisonFile.Summary (FileSummary (..), fileDefLocations)
import Unison.UnisonFile.Summary qualified as FileSummary
import Unison.Util.Monoid (foldMapM)
import Unison.Util.Pretty qualified as Pretty
import Unison.Util.Relation qualified as R1
import Unison.Var qualified as Var
import Unison.WatchKind (pattern TestWatch)
import UnliftIO.STM
import Witherable
@ -82,7 +81,7 @@ checkFile doc = runMaybeT do
currentPath <- lift getCurrentPath
let fileUri = doc ^. uri
(fileVersion, contents) <- VFS.getFileContents fileUri
parseNames <- lift getParseNames
parseNames <- lift getCurrentNames
let sourceName = getUri $ doc ^. uri
let lexedSource@(srcText, tokens) = (contents, L.lexer (Text.unpack sourceName) (Text.unpack contents))
let ambientAbilities = []
@ -111,7 +110,7 @@ checkFile doc = runMaybeT do
& foldMap (\(RangedCodeAction {_codeActionRanges, _codeAction}) -> (,_codeAction) <$> _codeActionRanges)
& toRangeMap
let typeSignatureHints = fromMaybe mempty (mkTypeSignatureHints <$> parsedFile <*> typecheckedFile)
let fileSummary = mkFileSummary parsedFile typecheckedFile
let fileSummary = FileSummary.mkFileSummary parsedFile typecheckedFile
let tokenMap = getTokenMap tokens
conflictWarningDiagnostics <-
fold <$> for fileSummary \fs ->
@ -123,106 +122,11 @@ checkFile doc = runMaybeT do
let fileAnalysis = FileAnalysis {diagnostics = diagnosticRanges, codeActions = codeActionRanges, fileSummary, typeSignatureHints, ..}
pure fileAnalysis
-- | If a symbol is a 'User' symbol, return (Just sym), otherwise return Nothing.
assertUserSym :: Symbol -> Maybe Symbol
assertUserSym sym = case sym of
Symbol.Symbol _ (Var.User {}) -> Just sym
_ -> Nothing
-- | Summarize the information available to us from the current state of the file.
-- See 'FileSummary' for more information.
mkFileSummary :: Maybe (UF.UnisonFile Symbol Ann) -> Maybe (UF.TypecheckedUnisonFile Symbol Ann) -> Maybe FileSummary
mkFileSummary parsed typechecked = case (parsed, typechecked) of
(Nothing, Nothing) -> Nothing
(_, Just tf@(UF.TypecheckedUnisonFileId {dataDeclarationsId', effectDeclarationsId', hashTermsId})) ->
let (trms, testWatches, exprWatches) =
hashTermsId & ifoldMap \sym (ann, ref, wk, trm, typ) ->
case wk of
Nothing -> (Map.singleton sym (ann, Just ref, trm, getUserTypeAnnotation sym <|> Just typ), mempty, mempty)
Just TestWatch -> (mempty, [(ann, assertUserSym sym, Just ref, trm, getUserTypeAnnotation sym <|> Just typ)], mempty)
Just _ -> (mempty, mempty, [(ann, assertUserSym sym, Just ref, trm, getUserTypeAnnotation sym <|> Just typ)])
in Just $
FileSummary
{ dataDeclsBySymbol = dataDeclarationsId',
dataDeclsByReference = declsRefMap dataDeclarationsId',
effectDeclsBySymbol = effectDeclarationsId',
effectDeclsByReference = declsRefMap effectDeclarationsId',
termsBySymbol = trms,
termsByReference = termsRefMap trms,
testWatchSummary = testWatches,
exprWatchSummary = exprWatches,
fileNames = UF.typecheckedToNames tf
}
(Just uf@(UF.UnisonFileId {dataDeclarationsId, effectDeclarationsId, terms, watches}), _) ->
let trms =
terms & foldMap \(sym, ann, trm) ->
(Map.singleton sym (ann, Nothing, trm, Nothing))
(testWatches, exprWatches) =
watches & ifoldMap \wk tms ->
tms & foldMap \(v, ann, trm) ->
case wk of
TestWatch -> ([(ann, assertUserSym v, Nothing, trm, Nothing)], mempty)
_ -> (mempty, [(ann, assertUserSym v, Nothing, trm, Nothing)])
in Just $
FileSummary
{ dataDeclsBySymbol = dataDeclarationsId,
dataDeclsByReference = declsRefMap dataDeclarationsId,
effectDeclsBySymbol = effectDeclarationsId,
effectDeclsByReference = declsRefMap effectDeclarationsId,
termsBySymbol = trms,
termsByReference = termsRefMap trms,
testWatchSummary = testWatches,
exprWatchSummary = exprWatches,
fileNames = UF.toNames uf
}
where
declsRefMap :: (Ord v, Ord r) => Map v (r, a) -> Map r (Map v a)
declsRefMap m =
m
& Map.toList
& fmap (\(v, (r, a)) -> (r, Map.singleton v a))
& Map.fromListWith (<>)
termsRefMap :: (Ord v, Ord r) => Map v (ann, r, a, b) -> Map r (Map v (ann, a, b))
termsRefMap m =
m
& Map.toList
& fmap (\(v, (ann, r, a, b)) -> (r, Map.singleton v (ann, a, b)))
& Map.fromListWith (<>)
-- Gets the user provided type annotation for a term if there is one.
-- This type sig will have Ann's within the file if it exists.
getUserTypeAnnotation :: Symbol -> Maybe (Type Symbol Ann)
getUserTypeAnnotation v = do
UF.UnisonFileId {terms, watches} <- parsed
trm <- (terms <> fold watches) ^? folded . filteredBy (_1 . only v) . _3
typ <- Term.getTypeAnnotation trm
pure typ
-- | Get the location of user defined definitions within the file
getFileDefLocations :: Uri -> MaybeT Lsp (Map Symbol (Set Ann))
getFileDefLocations uri = do
fileDefLocations <$> getFileSummary uri
-- | Compute the location of user defined definitions within the file
fileDefLocations :: FileSummary -> Map Symbol (Set Ann)
fileDefLocations FileSummary {dataDeclsBySymbol, effectDeclsBySymbol, testWatchSummary, exprWatchSummary, termsBySymbol} =
fold
[ dataDeclsBySymbol <&> \(_, decl) ->
decl
& DD.annotation
& Set.singleton,
effectDeclsBySymbol <&> \(_, decl) ->
decl
& DD.toDataDecl
& DD.annotation
& Set.singleton,
(testWatchSummary <> exprWatchSummary)
& foldMap \(ann, maySym, _id, _trm, _typ) ->
case maySym of
Nothing -> mempty
Just sym -> Map.singleton sym (Set.singleton ann),
termsBySymbol <&> \(ann, _id, _trm, _typ) -> Set.singleton ann
]
fileAnalysisWorker :: Lsp ()
fileAnalysisWorker = forever do
dirtyFilesV <- asks dirtyFilesVar
@ -253,7 +157,7 @@ fileAnalysisWorker = forever do
analyseFile :: (Foldable f) => Uri -> Text -> f (Note Symbol Ann) -> Lsp ([Diagnostic], [RangedCodeAction])
analyseFile fileUri srcText notes = do
pped <- PPED.suffixifiedPPE <$> LSP.globalPPED
pped <- PPED.suffixifiedPPE <$> LSP.currentPPED
(noteDiags, noteActions) <- analyseNotes fileUri pped (Text.unpack srcText) notes
pure (noteDiags, noteActions)
@ -262,7 +166,7 @@ analyseFile fileUri srcText notes = do
computeConflictWarningDiagnostics :: Uri -> FileSummary -> Lsp [Diagnostic]
computeConflictWarningDiagnostics fileUri fileSummary@FileSummary {fileNames} = do
let defLocations = fileDefLocations fileSummary
conflictedNames <- Names.conflicts <$> getParseNames
conflictedNames <- Names.conflicts <$> getCurrentNames
let locationForName :: Name -> Set Ann
locationForName name = fold $ Map.lookup (Name.toVar name) defLocations
let conflictedTermLocations =
@ -303,7 +207,6 @@ getTokenMap tokens =
analyseNotes :: (Foldable f) => Uri -> PrettyPrintEnv -> String -> f (Note Symbol Ann) -> Lsp ([Diagnostic], [RangedCodeAction])
analyseNotes fileUri ppe src notes = do
currentPath <- getCurrentPath
flip foldMapM notes \note -> case note of
Result.TypeError errNote@(Context.ErrorNote {cause}) -> do
let typeErr = TypeError.typeErrorFromNote errNote
@ -357,7 +260,7 @@ analyseNotes fileUri ppe src notes = do
shouldHaveBeenHandled e = do
Debug.debugM Debug.LSP "This diagnostic should have been handled by a previous case but was not" e
empty
diags = noteDiagnostic currentPath note ranges
diags = noteDiagnostic note ranges
-- Sort on match accuracy first, then name.
codeActions <- case cause of
Context.UnknownTerm _ v suggestions typ -> do
@ -369,7 +272,7 @@ analyseNotes fileUri ppe src notes = do
pure (diags, codeActions)
Result.NameResolutionFailures {} -> do
-- TODO: diagnostics/code actions for resolution failures
pure (noteDiagnostic currentPath note todoAnnotation, [])
pure (noteDiagnostic note todoAnnotation, [])
Result.Parsing err -> do
let diags = do
(errMsg, ranges) <- PrintError.renderParseErrors src err
@ -379,7 +282,7 @@ analyseNotes fileUri ppe src notes = do
-- TODO: Some parsing errors likely have reasonable code actions
pure (diags, [])
Result.UnknownSymbol _ loc ->
pure (noteDiagnostic currentPath note (singleRange loc), [])
pure (noteDiagnostic note (singleRange loc), [])
Result.TypeInfo {} ->
-- No relevant diagnostics from type info.
pure ([], [])
@ -402,7 +305,7 @@ analyseNotes fileUri ppe src notes = do
Context.UnknownExistentialVariable _sym _con -> todoAnnotation
Context.IllegalContextExtension _con _el _s -> todoAnnotation
Context.OtherBug _s -> todoAnnotation
pure (noteDiagnostic currentPath note ranges, [])
pure (noteDiagnostic note ranges, [])
where
-- Diagnostics with this return value haven't been properly configured yet.
todoAnnotation = []
@ -420,7 +323,6 @@ analyseNotes fileUri ppe src notes = do
withNeighbours (a : as) = (a, as) : (second (a :) <$> withNeighbours as)
-- Builds diagnostics for a note, one diagnostic per range.
noteDiagnostic ::
Path.Absolute ->
Note Symbol Ann ->
-- All ranges affected by this note, each range may have references to 'related'
-- ranges.
@ -428,8 +330,8 @@ analyseNotes fileUri ppe src notes = do
-- other conflicted name locations.
[(Range, [(Text, Range)])] ->
[Diagnostic]
noteDiagnostic currentPath note ranges =
let msg = Text.pack $ Pretty.toPlain 80 $ PrintError.printNoteWithSource ppe src currentPath note
noteDiagnostic note ranges =
let msg = Text.pack $ Pretty.toPlain 80 $ PrintError.printNoteWithSource ppe src note
in do
(range, references) <- ranges
pure $ mkDiagnostic fileUri range DiagnosticSeverity_Error msg references
@ -455,7 +357,7 @@ analyseNotes fileUri ppe src notes = do
| not (isUserBlank v) = pure []
| otherwise = do
Env {codebase} <- ask
ppe <- PPED.suffixifiedPPE <$> globalPPED
ppe <- PPED.suffixifiedPPE <$> currentPPED
let cleanedTyp = Context.generalizeAndUnTypeVar typ -- TODO: is this right?
refs <- liftIO . Codebase.runTransaction codebase $ Codebase.termsOfType codebase cleanedTyp
forMaybe (toList refs) $ \ref -> runMaybeT $ do
@ -490,7 +392,25 @@ getFileAnalysis uri = do
writeTVar checkedFilesV $ Map.insert uri mvar checkedFiles
pure mvar
Just mvar -> pure mvar
atomically (readTMVar tmvar)
Debug.debugM Debug.LSP "Waiting on file analysis" uri
r <- atomically (readTMVar tmvar)
Debug.debugM Debug.LSP "Got file analysis" uri
pure r
-- | Build a Names from a file if it's parseable.
--
-- If the file typechecks, generate names from that,
-- otherwise, generate names from the 'parsed' file. Note that the
-- names for a parsed file contains only names for parts of decls, since
-- we don't know references within terms before typechecking due to TDNR.
-- This should be fine though, since those references will all be kept in the
-- ABT as symbols anyways.
--
-- See UF.toNames and UF.typecheckedToNames for more info.
getFileNames :: Uri -> MaybeT Lsp Names
getFileNames fileUri = do
FileAnalysis {typecheckedFile = tf, parsedFile = pf} <- getFileAnalysis fileUri
hoistMaybe (fmap UF.typecheckedToNames tf <|> fmap UF.toNames pf)
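-- A hypothetical caller sketch ('namesForUri' is not a real helper): prefer file-local
-- names, falling back to the codebase-wide names when the file doesn't parse at all:
--
-- namesForUri :: Uri -> Lsp Names
-- namesForUri uri = do
--   codebaseNames <- getCurrentNames
--   fileNames <- runMaybeT (getFileNames uri)
--   pure (fromMaybe codebaseNames fileNames)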
getFileSummary :: Uri -> MaybeT Lsp FileSummary
getFileSummary uri = do
@ -507,7 +427,7 @@ ppedForFile fileUri = do
ppedForFileHelper :: Maybe (UF.UnisonFile Symbol a) -> Maybe (UF.TypecheckedUnisonFile Symbol a) -> Lsp PPED.PrettyPrintEnvDecl
ppedForFileHelper uf tf = do
codebasePPED <- globalPPED
codebasePPED <- currentPPED
hashLen <- asks codebase >>= \codebase -> liftIO (Codebase.runTransaction codebase Codebase.hashLength)
pure $ case (uf, tf) of
(Nothing, Nothing) -> codebasePPED

View File

@ -1,50 +1,71 @@
{-# LANGUAGE DataKinds #-}
module Unison.LSP.FoldingRange where
module Unison.LSP.FoldingRange
( foldingRangeRequest,
foldingRangesForFile,
)
where
import Control.Lens hiding (List)
import Data.Map qualified as Map
import Data.Text qualified as Text
import Language.LSP.Protocol.Lens hiding (id, to)
import Language.LSP.Protocol.Message qualified as Msg
import Language.LSP.Protocol.Types
import Unison.ABT qualified as ABT
import Unison.DataDeclaration qualified as DD
import Unison.Debug qualified as Debug
import Unison.LSP.Conversions (annToRange)
import Unison.LSP.FileAnalysis (getFileAnalysis)
import Unison.LSP.Types
import Unison.Parser.Ann (Ann)
import Unison.Prelude
import Unison.Symbol (Symbol)
import Unison.UnisonFile (UnisonFile (..))
import Unison.UnisonFile qualified as UF
import Unison.Var qualified as Var
foldingRangeRequest :: Msg.TRequestMessage 'Msg.Method_TextDocumentFoldingRange -> (Either Msg.ResponseError (Msg.MessageResult 'Msg.Method_TextDocumentFoldingRange) -> Lsp ()) -> Lsp ()
foldingRangeRequest m respond = do
foldRanges <- foldingRangesForFile (m ^. params . textDocument . uri)
Debug.debugM Debug.LSP "Folding Ranges" foldRanges
let fileUri = m ^. params . textDocument . uri
foldRanges <-
fromMaybe [] <$> runMaybeT do
FileAnalysis {parsedFile = mayParsedFile} <- getFileAnalysis fileUri
parsedFile <- hoistMaybe mayParsedFile
pure $ foldingRangesForFile parsedFile
respond . Right . InL $ foldRanges
-- | Return a folding range for each top-level definition
foldingRangesForFile :: Uri -> Lsp [FoldingRange]
foldingRangesForFile fileUri =
fromMaybe []
<$> runMaybeT do
FileAnalysis {parsedFile} <- getFileAnalysis fileUri
UnisonFileId {dataDeclarationsId, effectDeclarationsId, terms} <- MaybeT $ pure parsedFile
let dataFolds = dataDeclarationsId ^.. folded . _2 . to dataDeclSpan
let abilityFolds = effectDeclarationsId ^.. folded . _2 . to DD.toDataDecl . to dataDeclSpan
let termFolds = terms ^.. folded . _3 . to ABT.annotation
let folds = dataFolds <> abilityFolds <> termFolds
let ranges = mapMaybe annToRange folds
pure $
ranges <&> \r ->
FoldingRange
{ _startLine = r ^. start . line,
_startCharacter = Just (r ^. start . character),
_endLine = r ^. end . line,
_endCharacter = Just (r ^. end . character),
_kind = Just FoldingRangeKind_Region,
_collapsedText = Nothing
}
where
dataDeclSpan dd =
-- We don't have a proper Annotation for data decls so we take the span of all the
-- constructors using their monoid instance.
DD.annotation dd <> DD.constructors' dd ^. folded . to (\(a, _v, typ) -> a <> ABT.annotation typ)
foldingRangesForFile :: UF.UnisonFile Symbol Ann -> [FoldingRange]
foldingRangesForFile UnisonFileId {dataDeclarationsId, effectDeclarationsId, terms, watches} =
let dataFolds =
dataDeclarationsId
& Map.toList
& map \(sym, (_typ, decl)) -> (Just sym, DD.annotation decl)
abilityFolds =
effectDeclarationsId
& Map.toList
& map \(sym, (_typ, decl)) -> (Just sym, DD.annotation . DD.toDataDecl $ decl)
termFolds = terms & fmap \(sym, ann, _trm) -> (Just sym, ann)
watchFolds =
watches
& fold
& fmap
( \(_sym, ann, _trm) ->
-- We don't use the symbol here because watch symbols are often auto-generated
-- and ugly.
(Nothing, ann)
)
folds =
dataFolds <> abilityFolds <> termFolds <> watchFolds
ranges =
folds
& mapMaybe \(sym, range) ->
(Text.pack . Var.nameStr <$> sym,) <$> annToRange range
in ranges <&> \(maySym, r) ->
FoldingRange
{ _startLine = r ^. start . line,
_startCharacter = Just (r ^. start . character),
_endLine = r ^. end . line,
_endCharacter = Just (r ^. end . character),
_kind = Just FoldingRangeKind_Region,
_collapsedText = maySym
}
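-- An illustrative value (not part of this module): roughly what the code above yields
-- for a hypothetical ten-line definition named List.map. The positions are made up,
-- and the Text literal assumes OverloadedStrings.
exampleFold :: FoldingRange
exampleFold =
  FoldingRange
    { _startLine = 3,
      _startCharacter = Just 0,
      _endLine = 10,
      _endCharacter = Just 42,
      _kind = Just FoldingRangeKind_Region,
      _collapsedText = Just "List.map"
    }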

View File

@ -0,0 +1,40 @@
{-# LANGUAGE DataKinds #-}
module Unison.LSP.Formatting where
import Control.Lens hiding (List)
import Data.Set qualified as Set
import Language.LSP.Protocol.Lens
import Language.LSP.Protocol.Message qualified as Msg
import Language.LSP.Protocol.Types
import Unison.Codebase.Editor.HandleInput.FormatFile qualified as Formatting
import Unison.LSP.Conversions (lspToURange, uToLspRange)
import Unison.LSP.FileAnalysis (getFileAnalysis)
import Unison.LSP.FileAnalysis qualified as FileAnalysis
import Unison.LSP.Types
import Unison.Prelude
formatDocRequest :: Msg.TRequestMessage 'Msg.Method_TextDocumentFormatting -> (Either Msg.ResponseError (Msg.MessageResult 'Msg.Method_TextDocumentFormatting) -> Lsp ()) -> Lsp ()
formatDocRequest m respond = do
edits <- formatDefs (m ^. params . textDocument . uri) Nothing
respond . Right . InL $ edits
formatRangeRequest :: Msg.TRequestMessage 'Msg.Method_TextDocumentRangeFormatting -> (Either Msg.ResponseError (Msg.MessageResult 'Msg.Method_TextDocumentRangeFormatting) -> Lsp ()) -> Lsp ()
formatRangeRequest m respond = do
let p = m ^. params
edits <- formatDefs (p ^. textDocument . uri) (Just . Set.singleton $ p ^. range)
respond . Right . InL $ edits
-- | Format all definitions in a file.
formatDefs :: Uri -> Maybe (Set Range {- the ranges to format, if Nothing then format the whole file. -}) -> Lsp [TextEdit]
formatDefs fileUri mayRangesToFormat =
fromMaybe [] <$> runMaybeT do
FileAnalysis {parsedFile = mayParsedFile, typecheckedFile = mayTypecheckedFile} <- getFileAnalysis fileUri
currentPath <- lift getCurrentPath
Config {formattingWidth} <- lift getConfig
MaybeT $
Formatting.formatFile (\uf tf -> FileAnalysis.ppedForFileHelper uf tf) formattingWidth currentPath mayParsedFile mayTypecheckedFile (Set.map lspToURange <$> mayRangesToFormat)
<&> (fmap . fmap) uTextReplacementToLSP
where
uTextReplacementToLSP :: Formatting.TextReplacement -> TextEdit
uTextReplacementToLSP (Formatting.TextReplacement newText range) = TextEdit (uToLspRange range) newText

View File

@ -55,6 +55,7 @@ import Unison.Term (MatchCase (MatchCase), Term)
import Unison.Term qualified as Term
import Unison.Type (Type)
import Unison.Type qualified as Type
import Unison.UnisonFile.Summary (FileSummary (..))
import Unison.Util.Pretty qualified as Pretty
-- | Returns a reference to whatever the symbol at the given position refers to.
@ -360,6 +361,7 @@ annIsFilePosition = \case
Ann.Intrinsic -> False
Ann.External -> False
Ann.Ann {} -> True
Ann.GeneratedFrom ann -> annIsFilePosition ann
-- | Okay, so currently during synthesis in typechecking the typechecker adds `Ann` nodes
-- to the term specifying types of subterms. This is a problem because we the types in these

View File

@ -26,7 +26,6 @@ import Language.LSP.VFS
import Unison.Codebase
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Runtime (Runtime)
import Unison.DataDeclaration qualified as DD
import Unison.Debug qualified as Debug
import Unison.LSP.Orphans ()
import Unison.LabeledDependency (LabeledDependency)
@ -36,7 +35,6 @@ import Unison.Names (Names)
import Unison.Parser.Ann
import Unison.Prelude
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl)
import Unison.Reference qualified as Reference
import Unison.Referent (Referent)
import Unison.Result (Note)
import Unison.Server.Backend qualified as Backend
@ -44,9 +42,9 @@ import Unison.Server.NameSearch (NameSearch)
import Unison.Sqlite qualified as Sqlite
import Unison.Symbol
import Unison.Syntax.Lexer qualified as Lexer
import Unison.Term (Term)
import Unison.Type (Type)
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Summary (FileSummary (..))
import UnliftIO
-- | A custom LSP monad wrapper so we can provide our own environment.
@ -71,7 +69,7 @@ data Env = Env
{ -- contains handlers for talking to the client.
lspContext :: LanguageContextEnv Config,
codebase :: Codebase IO Symbol Ann,
parseNamesCache :: IO Names,
currentNamesCache :: IO Names,
ppedCache :: IO PrettyPrintEnvDecl,
nameSearchCache :: IO (NameSearch Sqlite.Transaction),
currentPathCache :: IO Path.Absolute,
@ -85,7 +83,7 @@ data Env = Env
-- A map of request IDs to an action which kills that request.
cancellationMapVar :: TVar (Map (Int32 |? Text) (IO ())),
-- A lazily computed map of all valid completion suffixes from the current path.
completionsVar :: TVar CompletionTree,
completionsVar :: TMVar CompletionTree,
scope :: Ki.Scope
}
@ -131,42 +129,24 @@ data FileAnalysis = FileAnalysis
}
deriving stock (Show)
-- | A file that parses might not always type-check, but often we just want to get as much
-- information as we have available. This provides a type where we can summarize the
-- information available in a Unison file.
--
-- If the file typechecked then all the Ref Ids and types will be filled in, otherwise
-- they will be Nothing.
data FileSummary = FileSummary
{ dataDeclsBySymbol :: Map Symbol (Reference.Id, DD.DataDeclaration Symbol Ann),
dataDeclsByReference :: Map Reference.Id (Map Symbol (DD.DataDeclaration Symbol Ann)),
effectDeclsBySymbol :: Map Symbol (Reference.Id, DD.EffectDeclaration Symbol Ann),
effectDeclsByReference :: Map Reference.Id (Map Symbol (DD.EffectDeclaration Symbol Ann)),
termsBySymbol :: Map Symbol (Ann, Maybe Reference.Id, Term Symbol Ann, Maybe (Type Symbol Ann)),
termsByReference :: Map (Maybe Reference.Id) (Map Symbol (Ann, Term Symbol Ann, Maybe (Type Symbol Ann))),
testWatchSummary :: [(Ann, Maybe Symbol, Maybe Reference.Id, Term Symbol Ann, Maybe (Type Symbol Ann))],
exprWatchSummary :: [(Ann, Maybe Symbol, Maybe Reference.Id, Term Symbol Ann, Maybe (Type Symbol Ann))],
fileNames :: Names
}
deriving stock (Show)
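-- An illustrative consumer of this summary (hypothetical helper, not part of this
-- module; assumes Data.Map is imported qualified as Map): since the Reference.Id and
-- Type are Maybes, callers must handle the "parsed but not typechecked" case.
typeOfSymbol :: FileSummary -> Symbol -> Maybe (Type Symbol Ann)
typeOfSymbol summary sym = do
  (_ann, _mayRefId, _term, mayType) <- Map.lookup sym (termsBySymbol summary)
  mayType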
getCurrentPath :: Lsp Path.Absolute
getCurrentPath = asks currentPathCache >>= liftIO
getCodebaseCompletions :: Lsp CompletionTree
getCodebaseCompletions = asks completionsVar >>= readTVarIO
getCodebaseCompletions = asks completionsVar >>= atomically . readTMVar
globalPPED :: Lsp PrettyPrintEnvDecl
globalPPED = asks ppedCache >>= liftIO
currentPPED :: Lsp PrettyPrintEnvDecl
currentPPED = asks ppedCache >>= liftIO
getNameSearch :: Lsp (NameSearch Sqlite.Transaction)
getNameSearch = asks nameSearchCache >>= liftIO
getParseNames :: Lsp Names
getParseNames = asks parseNamesCache >>= liftIO
getCurrentNames :: Lsp Names
getCurrentNames = asks currentNamesCache >>= liftIO
data Config = Config
{ -- 'Nothing' will load ALL available completions, which is slower, but may provide a better
{ formattingWidth :: Int,
-- 'Nothing' will load ALL available completions, which is slower, but may provide a better
-- solution for some users.
--
-- 'Just n' will only fetch the first 'n' completions and will prompt the client to ask for
@ -179,17 +159,20 @@ instance Aeson.FromJSON Config where
parseJSON = Aeson.withObject "Config" \obj -> do
maxCompletions <- obj Aeson..:! "maxCompletions" Aeson..!= maxCompletions defaultLSPConfig
Debug.debugM Debug.LSP "Config" $ "maxCompletions: " <> show maxCompletions
formattingWidth <- obj Aeson..:? "formattingWidth" Aeson..!= formattingWidth defaultLSPConfig
pure Config {..}
instance Aeson.ToJSON Config where
toJSON (Config maxCompletions) =
toJSON (Config formattingWidth maxCompletions) =
Aeson.object
[ "maxCompletions" Aeson..= maxCompletions
[ "formattingWidth" Aeson..= formattingWidth,
"maxCompletions" Aeson..= maxCompletions
]
defaultLSPConfig :: Config
defaultLSPConfig = Config {..}
where
formattingWidth = 80
maxCompletions = Just 100
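-- An illustrative decode using the instances above (values are made up; assumes
-- OverloadedStrings for the ByteString literal). Both fields are optional and fall
-- back to defaultLSPConfig when omitted.
exampleConfig :: Maybe Config
exampleConfig =
  Aeson.decode "{\"formattingWidth\": 100, \"maxCompletions\": 50}"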
-- | Lift a backend computation into the Lsp monad.

View File

@ -1,8 +1,10 @@
module Unison.LSP.UCMWorker where
import Control.Monad.Reader
import U.Codebase.HashTags
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch)
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Codebase.Path qualified as Path
import Unison.Debug qualified as Debug
import Unison.LSP.Completion
@ -12,7 +14,6 @@ import Unison.Names (Names)
import Unison.PrettyPrintEnv.Names qualified as PPE
import Unison.PrettyPrintEnvDecl
import Unison.PrettyPrintEnvDecl.Names qualified as PPED
import Unison.Server.Backend qualified as Backend
import Unison.Server.NameSearch (NameSearch)
import Unison.Server.NameSearch.FromNames qualified as NameSearch
import Unison.Sqlite qualified as Sqlite
@ -20,36 +21,48 @@ import UnliftIO.STM
-- | Watches for state changes in UCM and updates cached LSP state accordingly
ucmWorker ::
TVar PrettyPrintEnvDecl ->
TVar Names ->
TVar (NameSearch Sqlite.Transaction) ->
STM (Branch IO) ->
TMVar PrettyPrintEnvDecl ->
TMVar Names ->
TMVar (NameSearch Sqlite.Transaction) ->
TMVar Path.Absolute ->
STM CausalHash ->
STM Path.Absolute ->
Lsp ()
ucmWorker ppeVar parseNamesVar nameSearchCacheVar getLatestRoot getLatestPath = do
ucmWorker ppedVar parseNamesVar nameSearchCacheVar currentPathVar getLatestRoot getLatestPath = do
Env {codebase, completionsVar} <- ask
let loop :: (Branch IO, Path.Absolute) -> Lsp a
let loop :: (CausalHash, Path.Absolute) -> Lsp a
loop (currentRoot, currentPath) = do
Debug.debugM Debug.LSP "LSP path: " currentPath
let parseNames = Backend.getCurrentParseNames (Backend.Within (Path.unabsolute currentPath)) currentRoot
currentBranch0 <- fmap Branch.head . liftIO $ (Codebase.getBranchAtPath codebase currentPath)
let parseNames = Branch.toNames currentBranch0
hl <- liftIO $ Codebase.runTransaction codebase Codebase.hashLength
let ppe = PPED.makePPED (PPE.hqNamer hl parseNames) (PPE.suffixifyByHash parseNames)
let pped = PPED.makePPED (PPE.hqNamer hl parseNames) (PPE.suffixifyByHash parseNames)
atomically $ do
writeTVar parseNamesVar parseNames
writeTVar ppeVar ppe
writeTVar nameSearchCacheVar (NameSearch.makeNameSearch hl parseNames)
writeTMVar currentPathVar currentPath
writeTMVar parseNamesVar parseNames
writeTMVar ppedVar pped
writeTMVar nameSearchCacheVar (NameSearch.makeNameSearch hl parseNames)
-- Re-check everything with the new names and ppe
VFS.markAllFilesDirty
atomically do
writeTVar completionsVar (namesToCompletionTree parseNames)
writeTMVar completionsVar (namesToCompletionTree parseNames)
Debug.debugLogM Debug.LSP "LSP Initialized"
latest <- atomically $ do
latestRoot <- getLatestRoot
latestPath <- getLatestPath
guard $ (currentRoot /= latestRoot || currentPath /= latestPath)
pure (latestRoot, latestPath)
Debug.debugLogM Debug.LSP "LSP Change detected"
loop latest
-- Bootstrap manually from codebase just in case we're in headless mode and don't get any
-- updates from UCM
rootBranch <- liftIO $ Codebase.getRootBranch codebase
loop (rootBranch, Path.absoluteEmpty)
(rootBranch, currentPath) <- atomically $ do
rootBranch <- getLatestRoot
currentPath <- getLatestPath
pure (rootBranch, currentPath)
loop (rootBranch, currentPath)
where
-- This is added in stm-2.5.1, remove this if we upgrade.
writeTMVar :: TMVar a -> a -> STM ()
writeTMVar var a =
tryReadTMVar var >>= \case
Nothing -> putTMVar var a
Just _ -> void $ swapTMVar var a
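-- A standalone sketch of the blocking pattern the loop above relies on (plain IO/STM,
-- outside the Lsp monad; names are illustrative): the STM transaction retries until
-- the observed value differs from the one we already hold.
waitForChange :: (Eq a) => a -> STM a -> IO a
waitForChange current getLatest =
  atomically $ do
    latest <- getLatest
    guard (latest /= current) -- `guard` hits STM's Alternative instance, i.e. `retry`
    pure latest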

View File

@ -22,8 +22,6 @@ where
import Control.Concurrent.STM
import Control.Lens
import GHC.IO (unsafePerformIO)
import System.Environment (lookupEnv)
import Control.Monad.Except
import Control.Monad.Reader (ask)
import Control.Monad.Trans.Reader (ReaderT, runReaderT)
@ -43,12 +41,14 @@ import Data.Set.NonEmpty (NESet)
import Data.Set.NonEmpty qualified as NESet
import Data.Text.Lazy qualified as Text.Lazy
import Data.Text.Lazy.Encoding qualified as Text.Lazy
import GHC.IO (unsafePerformIO)
import Ki qualified
import Network.HTTP.Client qualified as Http.Client
import Network.HTTP.Types qualified as HTTP
import Servant.API qualified as Servant ((:<|>) (..), (:>))
import Servant.Client (BaseUrl)
import Servant.Client qualified as Servant
import System.Environment (lookupEnv)
import U.Codebase.HashTags (CausalHash)
import U.Codebase.Sqlite.Queries qualified as Q
import U.Codebase.Sqlite.V2.HashHandle (v2HashHandle)
@ -77,6 +77,8 @@ maxSimultaneousPullDownloaders :: Int
maxSimultaneousPullDownloaders = 5
-- | The maximum number of push workers at a time. Each push worker reads from the database and uploads entities.
-- Share currently parallelizes on its own in the backend, and any more than one push worker
-- just results in serialization conflicts that slow things down.
maxSimultaneousPushWorkers :: Int
maxSimultaneousPushWorkers = 5
@ -481,7 +483,6 @@ shouldValidateEntities = unsafePerformIO $ do
_ -> False
{-# NOINLINE shouldValidateEntities #-}
type WorkerCount =
TVar Int
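-- The push-worker comment above explains why concurrency is capped; a WorkerCount is
-- the shared counter such a cap can hang off of. A generic sketch of that pattern
-- (illustrative only, not necessarily how this module uses WorkerCount; assumes
-- Control.Exception.bracket_ or an UnliftIO equivalent is in scope):
withWorkerSlot :: WorkerCount -> Int -> IO a -> IO a
withWorkerSlot workers limit action =
  bracket_
    ( atomically $ do
        n <- readTVar workers
        check (n < limit) -- blocks (retries) until a slot frees up
        writeTVar workers (n + 1)
    )
    (atomically (modifyTVar' workers (subtract 1)))
    action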

View File

@ -50,9 +50,13 @@ library
Unison.Codebase.Editor.HandleInput.Branch
Unison.Codebase.Editor.HandleInput.Branches
Unison.Codebase.Editor.HandleInput.BranchRename
Unison.Codebase.Editor.HandleInput.DebugDefinition
Unison.Codebase.Editor.HandleInput.DebugFoldRanges
Unison.Codebase.Editor.HandleInput.DeleteBranch
Unison.Codebase.Editor.HandleInput.DeleteProject
Unison.Codebase.Editor.HandleInput.EditNamespace
Unison.Codebase.Editor.HandleInput.FindAndReplace
Unison.Codebase.Editor.HandleInput.FormatFile
Unison.Codebase.Editor.HandleInput.Load
Unison.Codebase.Editor.HandleInput.MoveAll
Unison.Codebase.Editor.HandleInput.MoveBranch
@ -70,6 +74,7 @@ library
Unison.Codebase.Editor.HandleInput.ReleaseDraft
Unison.Codebase.Editor.HandleInput.Run
Unison.Codebase.Editor.HandleInput.RuntimeUtils
Unison.Codebase.Editor.HandleInput.ShowDefinition
Unison.Codebase.Editor.HandleInput.TermResolution
Unison.Codebase.Editor.HandleInput.Tests
Unison.Codebase.Editor.HandleInput.UI
@ -114,6 +119,7 @@ library
Unison.LSP.Diagnostics
Unison.LSP.FileAnalysis
Unison.LSP.FoldingRange
Unison.LSP.Formatting
Unison.LSP.HandlerUtils
Unison.LSP.Hover
Unison.LSP.NotificationHandlers

View File

@ -50,10 +50,10 @@ import System.IO.CodePage (withCP65001)
import System.IO.Error (catchIOError)
import System.IO.Temp qualified as Temp
import System.Path qualified as Path
import U.Codebase.HashTags (CausalHash)
import U.Codebase.Sqlite.Queries qualified as Queries
import Unison.Codebase (Codebase, CodebasePath)
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch (Branch)
import Unison.Codebase.Editor.Input qualified as Input
import Unison.Codebase.Execute (execute)
import Unison.Codebase.Init (CodebaseInitOptions (..), InitError (..), InitResult (..), SpecifiedCodebase (..))
@ -280,14 +280,12 @@ main = do
segments <- Codebase.runTransaction theCodebase Queries.expectMostRecentNamespace
pure (Path.Absolute (Path.fromList (map NameSegment.NameSegment segments)))
Headless -> pure $ fromMaybe defaultInitialPath mayStartingPath
rootVar <- newEmptyTMVarIO
rootCausalHash <- Codebase.runTransaction theCodebase (Queries.expectNamespaceRoot >>= Queries.expectCausalHash)
rootCausalHashVar <- newTVarIO rootCausalHash
pathVar <- newTVarIO startingPath
let notifyOnRootChanges :: Branch IO -> STM ()
let notifyOnRootChanges :: CausalHash -> STM ()
notifyOnRootChanges b = do
isEmpty <- isEmptyTMVar rootVar
if isEmpty
then putTMVar rootVar b
else void $ swapTMVar rootVar b
writeTVar rootCausalHashVar b
let notifyOnPathChanges :: Path.Absolute -> STM ()
notifyOnPathChanges = writeTVar pathVar
-- Unfortunately, the windows IO manager on GHC 8.* is prone to just hanging forever
@ -295,7 +293,7 @@ main = do
-- prevent UCM from shutting down properly. Hopefully we can re-enable LSP on
-- Windows when we move to GHC 9.*
-- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/1224
void . Ki.fork scope $ LSP.spawnLsp theCodebase runtime (readTMVar rootVar) (readTVar pathVar)
void . Ki.fork scope $ LSP.spawnLsp theCodebase runtime (readTVar rootCausalHashVar) (readTVar pathVar)
Server.startServer (Backend.BackendEnv {Backend.useNamesIndex = False}) codebaseServerOpts sbRuntime theCodebase $ \baseUrl -> do
case exitOption of
DoNotExit -> do
@ -503,7 +501,7 @@ launch ::
Maybe Server.BaseUrl ->
Maybe Path.Absolute ->
InitResult ->
(Branch IO -> STM ()) ->
(CausalHash -> STM ()) ->
(Path.Absolute -> STM ()) ->
CommandLine.ShouldWatchFiles ->
IO ()

View File

@ -24,5 +24,16 @@ data Recorded loc
loc
deriving (Show, Eq, Ord, Functor, Generic)
data Blank loc = Blank | Recorded (Recorded loc)
-- - Blank is just a dummy annotation.
-- - Recorded indicates that we want to remember the variable's solution
-- for some kind of
data Blank loc
= -- | just a dummy annotation
Blank
| -- | indicates that we want to remember the variable's solution for
-- some reason
Recorded (Recorded loc)
| -- | indicates that we want to prefer keeping the variable in the
-- context to better refine the above recorded solutions
Retain
deriving (Show, Eq, Ord, Functor, Generic)
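-- An illustrative case analysis over the new shape (hypothetical helper, not part of
-- this module): only Recorded carries a location worth extracting; Blank and Retain
-- do not.
recordedLoc :: Blank loc -> Maybe (Recorded loc)
recordedLoc blank = case blank of
  Blank -> Nothing
  Recorded r -> Just r
  Retain -> Nothing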

View File

@ -147,23 +147,25 @@ withEffectDeclM f = fmap EffectDeclaration . f . toDataDecl
-- propose to move this code to some very feature-specific module —AI
generateRecordAccessors ::
(Semigroup a, Var v) =>
(a -> a) ->
[(v, a)] ->
v ->
Reference ->
[(v, a, Term v a)]
generateRecordAccessors fields typename typ =
generateRecordAccessors generatedAnn fields typename typ =
join [tm t i | (t, i) <- fields `zip` [(0 :: Int) ..]]
where
argname = Var.uncapitalize typename
tm (fname, ann) i =
tm (fname, fieldAnn) i =
[ (Var.namespaced [typename, fname], ann, get),
(Var.namespaced [typename, fname, Var.named "set"], ann, set),
(Var.namespaced [typename, fname, Var.named "modify"], ann, modify)
]
where
ann = generatedAnn fieldAnn
-- example: `point -> case point of Point x _ -> x`
get =
Term.lam ann argname $
Term.lam (generatedAnn fieldAnn) argname $
Term.match
ann
(Term.var ann argname)
@ -177,7 +179,7 @@ generateRecordAccessors fields typename typ =
rhs = ABT.abs' ann fname (Term.var ann fname)
-- example: `x point -> case point of Point _ y -> Point x y`
set =
Term.lam' ann [fname', argname] $
Term.lam' (generatedAnn ann) [fname', argname] $
Term.match
ann
(Term.var ann argname)
@ -202,7 +204,7 @@ generateRecordAccessors fields typename typ =
]
-- example: `f point -> case point of Point x y -> Point (f x) y`
modify =
Term.lam' ann [fname', argname] $
Term.lam' (generatedAnn ann) [fname', argname] $
Term.match
ann
(Term.var ann argname)

View File

@ -2,8 +2,8 @@
module Unison.DataDeclaration.Names (bindNames, dataDeclToNames', effectDeclToNames') where
import Data.Set qualified as Set
import Data.Map qualified as Map
import Data.Set qualified as Set
import Unison.ABT qualified as ABT
import Unison.ConstructorReference (GConstructorReference (..))
import Unison.ConstructorType qualified as CT
@ -57,4 +57,4 @@ bindNames varToName localNames names (DataDeclaration m a bound constructors) =
pure $ DataDeclaration m a bound constructors
where
keepFree = Set.fromList (Map.elems localNames)
subs = Map.toList $ Map.map (Type.var ()) localNames
subs = Map.toList $ Map.map (Type.var ()) localNames

View File

@ -79,6 +79,11 @@ fromNamedReference n r = HashQualified n (Reference.toShortHash r)
fromName :: n -> HashQualified n
fromName = NameOnly
fromNameHash :: n -> Maybe ShortHash -> HashQualified n
fromNameHash name = \case
Nothing -> NameOnly name
Just hash -> HashQualified name hash
matchesNamedReferent :: (Eq n) => n -> Referent -> HashQualified n -> Bool
matchesNamedReferent n r = \case
NameOnly n' -> n' == n

View File

@ -5,6 +5,7 @@ module Unison.Name
-- * Basic construction
cons,
snoc,
joinDot,
fromSegment,
fromSegments,
@ -56,13 +57,13 @@ module Unison.Name
)
where
import Data.Monoid (Sum(..))
import Control.Lens (mapped, over, _1, _2)
import Data.List qualified as List
import Data.List.Extra qualified as List
import Data.List.NonEmpty (NonEmpty ((:|)))
import Data.List.NonEmpty qualified as List.NonEmpty
import Data.Map qualified as Map
import Data.Monoid (Sum (..))
import Data.RFC5051 qualified as RFC5051
import Data.Set qualified as Set
import Unison.Name.Internal
@ -120,6 +121,13 @@ cons x name =
("cannot cons " ++ show x ++ " onto absolute name" ++ show name)
Name Relative (y :| ys) -> Name Relative (y :| ys ++ [x])
-- | Snoc a name segment onto the end of a name.
--
-- /O(1)/.
snoc :: Name -> NameSegment -> Name
snoc (Name pos (s1 :| ss)) s0 =
Name pos (s0 :| s1 : ss)
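-- Note on the /O(1)/ claim (inferred from the reversed segment order that `cons`
-- above also relies on): segments are stored last-first, so `foo.bar` is held as
-- ["bar", "foo"]; snoc-ing "baz" conses it onto the front, giving
-- ["baz", "bar", "foo"], i.e. foo.bar.baz.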
-- | Return the number of name segments in a name.
--
-- /O(n)/, where /n/ is the number of name segments.

Some files were not shown because too many files have changed in this diff.