Merge remote-tracking branch 'origin/trunk' into topic/projects

This commit is contained in:
Chris Penner 2023-03-29 14:19:06 -06:00
commit 02095670c3
70 changed files with 2827 additions and 543 deletions

View File

@ -7,6 +7,7 @@ import qualified Data.Text as Text
import Unison.Prelude
import Unison.Sqlite
-- | E.g. ("map" :| ["List", "base"])
type ReversedSegments = NonEmpty Text
data ConstructorType
@ -49,11 +50,6 @@ instance (FromRow ref) => FromRow (NamedRef ref) where
ref <- fromRow
pure (NamedRef {reversedSegments, ref})
toRowWithNamespace :: (ToRow ref) => NamedRef ref -> [SQLData]
toRowWithNamespace nr = toRow nr <> [SQLText namespace]
where
namespace = Text.intercalate "." . reverse . NEL.tail . reversedSegments $ nr
-- | The new 'scoped' name lookup format is different from the old version.
--
-- Specifically, the scoped format adds the 'lastNameSegment' as well as adding a trailing '.' to the db format

View File

@ -69,8 +69,16 @@ module U.Codebase.Sqlite.Operations
-- ** name lookup index
namesByPath,
NamesByPath (..),
termNamesForRefWithinNamespace,
typeNamesForRefWithinNamespace,
termNamesBySuffix,
typeNamesBySuffix,
termRefsForExactName,
typeRefsForExactName,
checkBranchHashNameLookupExists,
buildNameLookupForBranchHash,
longestMatchingTermNameForSuffixification,
longestMatchingTypeNameForSuffixification,
-- * reflog
getReflog,
@ -212,9 +220,7 @@ loadRootCausalHash =
lift . Q.expectCausalHash =<< MaybeT Q.loadNamespaceRoot
-- | Load the causal hash at the given path from the root.
--
-- FIXME should we move some Path type here?
loadCausalHashAtPath :: [Text] -> Transaction (Maybe CausalHash)
loadCausalHashAtPath :: Q.TextPathSegments -> Transaction (Maybe CausalHash)
loadCausalHashAtPath =
let go :: Db.CausalHashId -> [Text] -> MaybeT Transaction CausalHash
go hashId = \case
@ -229,9 +235,7 @@ loadCausalHashAtPath =
runMaybeT (go hashId path)
-- | Expect the causal hash at the given path from the root.
--
-- FIXME should we move some Path type here?
expectCausalHashAtPath :: [Text] -> Transaction CausalHash
expectCausalHashAtPath :: Q.TextPathSegments -> Transaction CausalHash
expectCausalHashAtPath =
let go :: Db.CausalHashId -> [Text] -> Transaction CausalHash
go hashId = \case
@ -735,7 +739,7 @@ expectBranchByBranchHashId bhId = do
expectBranchByBranchHash :: BranchHash -> Transaction (C.Branch.Branch Transaction)
expectBranchByBranchHash bh = do
bhId <- Q.saveBranchHash bh
bhId <- Q.expectBranchHashId bh
expectBranchByBranchHashId bhId
-- | Expect a branch value given its causal hash id.
@ -1086,13 +1090,13 @@ buildNameLookupForBranchHash ::
([S.NamedRef C.Reference], [S.NamedRef C.Reference]) ->
Transaction ()
buildNameLookupForBranchHash mayExistingBranchIndex newBranchHash (newTermNames, removedTermNames) (newTypeNames, removedTypeNames) = do
newBranchHashId <- Q.saveBranchHash newBranchHash
newBranchHashId <- Q.expectBranchHashId newBranchHash
Q.trackNewBranchHashNameLookup newBranchHashId
case mayExistingBranchIndex of
Nothing -> pure ()
Just existingBranchIndex -> do
unlessM (checkBranchHashNameLookupExists existingBranchIndex) $ error "buildNameLookupForBranchHash: existingBranchIndex was provided, but no index was found for that branch hash."
existingBranchHashId <- Q.saveBranchHash existingBranchIndex
existingBranchHashId <- Q.expectBranchHashId existingBranchIndex
Q.copyScopedNameLookup existingBranchHashId newBranchHashId
Q.removeScopedTermNames newBranchHashId ((fmap c2sTextReferent <$> removedTermNames))
Q.removeScopedTypeNames newBranchHashId ((fmap c2sTextReference <$> removedTypeNames))
@ -1102,7 +1106,7 @@ buildNameLookupForBranchHash mayExistingBranchIndex newBranchHash (newTermNames,
-- | Check whether we've already got an index for a given branch hash.
checkBranchHashNameLookupExists :: BranchHash -> Transaction Bool
checkBranchHashNameLookupExists bh = do
bhId <- Q.saveBranchHash bh
bhId <- Q.expectBranchHashId bh
Q.checkBranchHashNameLookupExists bhId
data NamesByPath = NamesByPath
@ -1131,11 +1135,71 @@ namesByPath bh path = do
convertTerms = fmap (bimap s2cTextReferent (fmap s2cConstructorType))
convertTypes = fmap s2cTextReference
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- Get the list of names for a given Referent within the given namespace,
-- optionally restricted to names ending in the provided (reversed) suffix.
termNamesForRefWithinNamespace :: BranchHash -> Q.NamespaceText -> C.Referent -> Maybe S.ReversedSegments -> Transaction [S.ReversedSegments]
termNamesForRefWithinNamespace bh namespace ref maySuffix = do
-- Resolve the branch hash to its id, then delegate to the Queries-layer lookup.
bhId <- Q.expectBranchHashId bh
Q.termNamesForRefWithinNamespace bhId namespace (c2sTextReferent ref) maySuffix
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- Get the list of names for a given Reference, with an optional required (reversed) suffix.
typeNamesForRefWithinNamespace :: BranchHash -> Q.NamespaceText -> C.Reference -> Maybe S.ReversedSegments -> Transaction [S.ReversedSegments]
typeNamesForRefWithinNamespace bh namespace ref maySuffix = do
-- Resolve the branch hash to its id, then delegate to the Queries-layer lookup.
bhId <- Q.expectBranchHashId bh
Q.typeNamesForRefWithinNamespace bhId namespace (c2sTextReference ref) maySuffix
-- | Find term names within the given namespace which end in the given (reversed) suffix,
-- converting the stored text referents back into codebase referents.
termNamesBySuffix :: BranchHash -> Q.NamespaceText -> S.ReversedSegments -> Transaction [S.NamedRef (C.Referent, Maybe C.ConstructorType)]
termNamesBySuffix bh namespace suffix = do
  bhId <- Q.expectBranchHashId bh
  rows <- Q.termNamesBySuffix bhId namespace suffix
  pure (fmap (fmap (bimap s2cTextReferent (fmap s2cConstructorType))) rows)
-- | Find type names within the given namespace which end in the given (reversed) suffix,
-- converting the stored text references back into codebase references.
typeNamesBySuffix :: BranchHash -> Q.NamespaceText -> S.ReversedSegments -> Transaction [S.NamedRef C.Reference]
typeNamesBySuffix bh namespace suffix = do
  bhId <- Q.expectBranchHashId bh
  rows <- Q.typeNamesBySuffix bhId namespace suffix
  pure (fmap (fmap s2cTextReference) rows)
-- | Look up all term refs (with their constructor types) bound to the given exact
-- (reversed) name.
termRefsForExactName :: BranchHash -> S.ReversedSegments -> Transaction [S.NamedRef (C.Referent, Maybe C.ConstructorType)]
termRefsForExactName bh reversedName = do
  bhId <- Q.expectBranchHashId bh
  rows <- Q.termRefsForExactName bhId reversedName
  pure (fmap (fmap (bimap s2cTextReferent (fmap s2cConstructorType))) rows)
-- | Look up all type refs bound to the given exact (reversed) name.
typeRefsForExactName :: BranchHash -> S.ReversedSegments -> Transaction [S.NamedRef C.Reference]
typeRefsForExactName bh reversedName = do
  bhId <- Q.expectBranchHashId bh
  rows <- Q.typeRefsForExactName bhId reversedName
  pure (fmap (fmap s2cTextReference) rows)
-- | Get the name within the provided namespace that has the longest matching suffix
-- with the provided name, but a different ref.
-- This is a bit of a hack but allows us to shortcut suffixification.
-- We can clean this up if we make a custom PPE type just for sqlite pretty printing, but
-- for now this works fine.
longestMatchingTermNameForSuffixification :: BranchHash -> Q.NamespaceText -> S.NamedRef C.Referent -> Transaction (Maybe (S.NamedRef (C.Referent, Maybe C.ConstructorType)))
longestMatchingTermNameForSuffixification bh namespace namedRef = do
  bhId <- Q.expectBranchHashId bh
  result <- Q.longestMatchingTermNameForSuffixification bhId namespace (c2sTextReferent <$> namedRef)
  pure (fmap (fmap (bimap s2cTextReferent (fmap s2cConstructorType))) result)
-- | Get the name within the provided namespace that has the longest matching suffix
-- with the provided name, but a different ref.
-- This is a bit of a hack but allows us to shortcut suffixification.
-- We can clean this up if we make a custom PPE type just for sqlite pretty printing, but
-- for now this works fine.
longestMatchingTypeNameForSuffixification :: BranchHash -> Q.NamespaceText -> S.NamedRef C.Reference -> Transaction (Maybe (S.NamedRef C.Reference))
longestMatchingTypeNameForSuffixification bh namespace namedRef = do
  bhId <- Q.expectBranchHashId bh
  result <- Q.longestMatchingTypeNameForSuffixification bhId namespace (c2sTextReference <$> namedRef)
  pure (fmap (fmap s2cTextReference) result)
-- | Looks up statistics for a given branch, if none exist, we compute them and save them
-- then return them.
expectNamespaceStatsByHash :: BranchHash -> Transaction C.Branch.NamespaceStats
expectNamespaceStatsByHash bh = do
bhId <- Q.saveBranchHash bh
bhId <- Q.expectBranchHashId bh
expectNamespaceStatsByHashId bhId
-- | Looks up statistics for a given branch, if none exist, we compute them and save them

View File

@ -168,8 +168,16 @@ module U.Codebase.Sqlite.Queries
removeScopedTypeNames,
termNamesWithinNamespace,
typeNamesWithinNamespace,
termNamesForRefWithinNamespace,
typeNamesForRefWithinNamespace,
termRefsForExactName,
typeRefsForExactName,
checkBranchHashNameLookupExists,
trackNewBranchHashNameLookup,
termNamesBySuffix,
typeNamesBySuffix,
longestMatchingTermNameForSuffixification,
longestMatchingTypeNameForSuffixification,
-- * Reflog
appendReflog,
@ -228,6 +236,10 @@ module U.Codebase.Sqlite.Queries
x2cTType,
x2cTerm,
checkBranchExistsForCausalHash,
-- * Types
NamespaceText,
TextPathSegments,
)
where
@ -241,8 +253,11 @@ import Data.Bifunctor (Bifunctor (bimap))
import Data.Bitraversable (bitraverse)
import Data.Bytes.Put (runPutS)
import qualified Data.Foldable as Foldable
import qualified Data.List.NonEmpty as List (NonEmpty)
import qualified Data.List as List
import qualified Data.List.Extra as List
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as Nel
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map as Map
import Data.Map.NonEmpty (NEMap)
import qualified Data.Map.NonEmpty as NEMap
@ -252,6 +267,7 @@ import qualified Data.Set as Set
import Data.String.Here.Uninterpolated (here, hereFile)
import qualified Data.Text as Text
import qualified Data.Vector as Vector
import GHC.Stack (callStack)
import NeatInterpolation (trimming)
import Network.URI (URI)
import U.Codebase.Branch.Type (NamespaceStats (..))
@ -294,7 +310,7 @@ import U.Codebase.Sqlite.LocalIds
LocalTextId (..),
)
import qualified U.Codebase.Sqlite.LocalIds as LocalIds
import U.Codebase.Sqlite.NamedRef (NamedRef)
import U.Codebase.Sqlite.NamedRef (NamedRef, ReversedSegments)
import qualified U.Codebase.Sqlite.NamedRef as NamedRef
import U.Codebase.Sqlite.ObjectType (ObjectType (DeclComponent, Namespace, Patch, TermComponent))
import qualified U.Codebase.Sqlite.ObjectType as ObjectType
@ -325,6 +341,7 @@ import qualified U.Core.ABT as ABT
import qualified U.Util.Serialization as S
import qualified U.Util.Term as TermUtil
import Unison.Core.Project (ProjectBranchName, ProjectName)
import qualified Unison.Debug as Debug
import Unison.Hash (Hash)
import qualified Unison.Hash as Hash
import Unison.Hash32 (Hash32)
@ -336,6 +353,12 @@ import qualified Unison.Sqlite as Sqlite
import qualified Unison.Util.Alternative as Alternative
import qualified Unison.Util.Lens as Lens
-- | A namespace rendered as a path, no leading '.'
-- E.g. "base.data"
type NamespaceText = Text
type TextPathSegments = [Text]
-- * main squeeze
currentSchemaVersion :: SchemaVersion
@ -764,7 +787,7 @@ expectPrimaryHash32ByObjectId oId = queryOneCol sql (Only oId)
WHERE object.id = ?
|]
expectHashIdsForObject :: ObjectId -> Transaction (List.NonEmpty HashId)
expectHashIdsForObject :: ObjectId -> Transaction (NonEmpty HashId)
expectHashIdsForObject oId = do
primaryHashId <- queryOneCol sql1 (Only oId)
hashIds <- queryListCol sql2 (Only oId)
@ -1781,7 +1804,7 @@ globEscape =
-- We can't use Text.replace, since we'd end up replacing either "[" or "]" multiple
-- times.
Text.concatMap \case
'*' -> "*"
'*' -> "[*]"
'?' -> "[?]"
'[' -> "[[]"
']' -> "[]]"
@ -1818,7 +1841,7 @@ termNamesWithinNamespace :: BranchHashId -> Maybe Text -> Transaction [NamedRef
termNamesWithinNamespace bhId mayNamespace = do
let namespaceGlob = case mayNamespace of
Nothing -> "*"
Just namespace -> globEscape namespace <> ".*"
Just namespace -> toNamespaceGlob namespace
results :: [NamedRef (Referent.TextReferent :. Only (Maybe NamedRef.ConstructorType))] <- queryListRow sql (bhId, namespaceGlob)
pure (fmap unRow <$> results)
where
@ -1831,12 +1854,15 @@ termNamesWithinNamespace bhId mayNamespace = do
AND namespace GLOB ?
|]
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- Get the list of type names in the root namespace according to the name lookup index
typeNamesWithinNamespace :: BranchHashId -> Maybe Text -> Transaction [NamedRef Reference.TextReference]
typeNamesWithinNamespace bhId mayNamespace = do
let namespaceGlob = case mayNamespace of
Nothing -> "*"
Just namespace -> globEscape namespace <> ".*"
Just namespace -> toNamespaceGlob namespace
results :: [NamedRef Reference.TextReference] <- queryListRow sql (bhId, namespaceGlob)
pure results
where
@ -1848,6 +1874,247 @@ typeNamesWithinNamespace bhId mayNamespace = do
AND namespace GLOB ?
|]
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- Get the list of term names within a given namespace which have the given suffix.
termNamesBySuffix :: BranchHashId -> NamespaceText -> ReversedSegments -> Transaction [NamedRef (Referent.TextReferent, Maybe NamedRef.ConstructorType)]
termNamesBySuffix bhId namespaceRoot suffix = do
-- Debug trace of the query inputs (Debug.Server channel).
Debug.debugM Debug.Server "termNamesBySuffix" (namespaceRoot, suffix)
let namespaceGlob = toNamespaceGlob namespaceRoot
-- The first reversed segment is the last segment of the actual name.
let lastSegment = NonEmpty.head suffix
results :: [NamedRef (Referent.TextReferent :. Only (Maybe NamedRef.ConstructorType))] <- queryListRow sql (bhId, lastSegment, namespaceGlob, toSuffixGlob suffix)
pure (fmap unRow <$> results)
where
-- Collapse the extra constructor-type column into a tuple.
unRow (a :. Only b) = (a, b)
-- Note: It may seem strange that we do a last_name_segment constraint AND a reversed_name
-- GLOB, but this helps improve query performance.
-- The SQLite query optimizer is smart enough to do a prefix-search on globs, but will
-- ONLY do a single prefix-search, meaning we use the index for `namespace`, but not for
-- `reversed_name`. By adding the `last_name_segment` constraint, we can cull a ton of
-- names which couldn't possibly match before we then manually filter the remaining names
-- using the `reversed_name` glob which can't be optimized with an index.
sql =
[here|
SELECT reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type FROM scoped_term_name_lookup
WHERE root_branch_hash_id = ?
AND last_name_segment IS ?
AND namespace GLOB ?
AND reversed_name GLOB ?
|]
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- Get the list of type names within a given namespace which have the given suffix.
typeNamesBySuffix :: BranchHashId -> NamespaceText -> ReversedSegments -> Transaction [NamedRef Reference.TextReference]
typeNamesBySuffix bhId namespaceRoot suffix = do
-- Debug trace of the query inputs (Debug.Server channel).
Debug.debugM Debug.Server "typeNamesBySuffix" (namespaceRoot, suffix)
let namespaceGlob = toNamespaceGlob namespaceRoot
-- The first reversed segment is the last segment of the actual name.
let lastNameSegment = NonEmpty.head suffix
-- Rows already match the return type, so no post-processing is needed.
queryListRow sql (bhId, lastNameSegment, namespaceGlob, toSuffixGlob suffix)
where
sql =
-- Note: It may seem strange that we do a last_name_segment constraint AND a reversed_name
-- GLOB, but this helps improve query performance.
-- The SQLite query optimizer is smart enough to do a prefix-search on globs, but will
-- ONLY do a single prefix-search, meaning we use the index for `namespace`, but not for
-- `reversed_name`. By adding the `last_name_segment` constraint, we can cull a ton of
-- names which couldn't possibly match before we then manually filter the remaining names
-- using the `reversed_name` glob which can't be optimized with an index.
[here|
SELECT reversed_name, reference_builtin, reference_component_hash, reference_component_index FROM scoped_type_name_lookup
WHERE root_branch_hash_id = ?
AND last_name_segment IS ?
AND namespace GLOB ?
AND reversed_name GLOB ?
|]
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- Get the set of refs for an exact name.
termRefsForExactName :: BranchHashId -> ReversedSegments -> Transaction [NamedRef (Referent.TextReferent, Maybe NamedRef.ConstructorType)]
termRefsForExactName bhId reversedSegments = do
-- Render the segments into the DB's reversed_name format for an exact equality match.
let reversedName = toReversedName reversedSegments
results :: [NamedRef (Referent.TextReferent :. Only (Maybe NamedRef.ConstructorType))] <- queryListRow sql (bhId, reversedName)
pure (fmap unRow <$> results)
where
-- Collapse the extra constructor-type column into a tuple.
unRow (a :. Only b) = (a, b)
sql =
[here|
SELECT reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type FROM scoped_term_name_lookup
WHERE root_branch_hash_id = ?
AND reversed_name = ?
|]
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- Get the set of refs for an exact name.
typeRefsForExactName :: BranchHashId -> ReversedSegments -> Transaction [NamedRef Reference.TextReference]
typeRefsForExactName bhId reversedSegments = do
-- Render the segments into the DB's reversed_name format for an exact equality match.
let reversedName = toReversedName reversedSegments
queryListRow sql (bhId, reversedName)
where
sql =
[here|
SELECT reversed_name, reference_builtin, reference_component_hash, reference_component_index FROM scoped_type_name_lookup
WHERE root_branch_hash_id = ?
AND reversed_name = ?
|]
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- Get the list of term names for a given Referent within a given namespace.
termNamesForRefWithinNamespace :: BranchHashId -> NamespaceText -> Referent.TextReferent -> Maybe ReversedSegments -> Transaction [ReversedSegments]
termNamesForRefWithinNamespace bhId namespaceRoot ref maySuffix = do
let namespaceGlob = toNamespaceGlob namespaceRoot
let suffixGlob = case maySuffix of
Just suffix -> toSuffixGlob suffix
-- With no required suffix, match any reversed_name.
Nothing -> "*"
-- Each reversed_name column is parsed back into segments; an empty stored name
-- produces an EmptyName error from reversedNameToReversedSegments.
queryListColCheck sql (Only bhId :. ref :. Only namespaceGlob :. Only suffixGlob) \reversedNames ->
for reversedNames reversedNameToReversedSegments
where
sql =
[here|
SELECT reversed_name FROM scoped_term_name_lookup
WHERE root_branch_hash_id = ?
AND referent_builtin IS ? AND referent_component_hash IS ? AND referent_component_index IS ? AND referent_constructor_index IS ?
AND namespace GLOB ?
AND reversed_name GLOB ?
|]
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- Get the list of type names for a given Reference within a given namespace.
typeNamesForRefWithinNamespace :: BranchHashId -> NamespaceText -> Reference.TextReference -> Maybe ReversedSegments -> Transaction [ReversedSegments]
typeNamesForRefWithinNamespace bhId namespaceRoot ref maySuffix = do
let namespaceGlob = toNamespaceGlob namespaceRoot
let suffixGlob = case maySuffix of
Just suffix -> toSuffixGlob suffix
-- With no required suffix, match any reversed_name.
Nothing -> "*"
-- Each reversed_name column is parsed back into segments; an empty stored name
-- produces an EmptyName error from reversedNameToReversedSegments.
queryListColCheck sql (Only bhId :. ref :. Only namespaceGlob :. Only suffixGlob) \reversedNames ->
for reversedNames reversedNameToReversedSegments
where
sql =
[here|
SELECT reversed_name FROM scoped_type_name_lookup
WHERE root_branch_hash_id = ?
AND reference_builtin IS ? AND reference_component_hash IS ? AND reference_component_index IS ?
AND namespace GLOB ?
AND reversed_name GLOB ?
|]
-- | NOTE: requires that the codebase has an up-to-date name lookup index. As of writing, this
-- is only true on Share.
--
-- The goal of this query is to search the codebase for the single name which has a different
-- hash from the provided name, but shares the longest matching suffix with that name.
--
-- Including this name in the pretty-printer object causes it to suffixify the name so that it
-- is unambiguous from other names in scope.
--
-- Sqlite doesn't provide enough functionality to do this query in a single query, so we do
-- it iteratively, querying for longer and longer suffixes until we no longer find matches.
-- Then we return the name with the longest matching suffix.
--
-- This is still relatively efficient because we can use an index and LIMIT 1 to make each
-- individual query fast, and in the common case we'll only need two or three queries to find
-- the longest matching suffix.
longestMatchingTermNameForSuffixification :: BranchHashId -> NamespaceText -> NamedRef Referent.TextReferent -> Transaction (Maybe (NamedRef (Referent.TextReferent, Maybe NamedRef.ConstructorType)))
longestMatchingTermNameForSuffixification bhId namespaceRoot (NamedRef.NamedRef {reversedSegments = revSuffix@(lastSegment NonEmpty.:| _), ref}) = do
  -- Use the shared helper so the namespace glob is escaped and built consistently
  -- with the other queries in this module.
  let namespaceGlob = toNamespaceGlob namespaceRoot
  let loop :: [Text] -> MaybeT Transaction (NamedRef (Referent.TextReferent, Maybe NamedRef.ConstructorType))
      loop [] = empty
      loop (suffGlob : rest) = do
        result :: Maybe (NamedRef (Referent.TextReferent :. Only (Maybe NamedRef.ConstructorType))) <-
          lift $ queryMaybeRow sql ((bhId, lastSegment, namespaceGlob, suffGlob) :. ref)
        case result of
          Just namedRef ->
            -- We want to find matches for the _longest_ possible suffix, so we keep going until we
            -- don't find any more matches.
            pure (unRow <$> namedRef) <|> loop rest
          Nothing ->
            -- If we don't find a match for a suffix, there's no way we could match on an even
            -- longer suffix, so we bail.
            empty
  -- Globs for each successively longer suffix of the name, shortest first.
  let suffixes =
        revSuffix
          & toList
          & List.inits
          & mapMaybe NonEmpty.nonEmpty
          & map toSuffixGlob
  runMaybeT $ loop suffixes
  where
    -- Collapse the extra constructor-type column into a tuple.
    unRow (a :. Only b) = (a, b)
    sql =
      -- Note: It may seem strange that we do a last_name_segment constraint AND a reversed_name
      -- GLOB, but this helps improve query performance.
      -- The SQLite query optimizer is smart enough to do a prefix-search on globs, but will
      -- ONLY do a single prefix-search, meaning we use the index for `namespace`, but not for
      -- `reversed_name`. By adding the `last_name_segment` constraint, we can cull a ton of
      -- names which couldn't possibly match before we then manually filter the remaining names
      -- using the `reversed_name` glob which can't be optimized with an index.
      [here|
        SELECT reversed_name, referent_builtin, referent_component_hash, referent_component_index, referent_constructor_index, referent_constructor_type FROM scoped_term_name_lookup
        WHERE root_branch_hash_id = ?
        AND last_name_segment IS ?
        AND namespace GLOB ?
        AND reversed_name GLOB ?
        -- We don't need to consider names for the same definition when suffixifying, so
        -- we filter those out. Importantly this also avoids matching the name we're trying to suffixify.
        AND NOT (referent_builtin IS ? AND referent_component_hash IS ? AND referent_component_index IS ? AND referent_constructor_index IS ?)
        LIMIT 1
      |]
-- | Type-side analogue of the term suffixification lookup: find the name within the
-- provided namespace with the longest suffix matching the provided name but a
-- different ref, querying iteratively over longer and longer suffixes until no more
-- matches are found.
longestMatchingTypeNameForSuffixification :: BranchHashId -> NamespaceText -> NamedRef Reference.TextReference -> Transaction (Maybe (NamedRef Reference.TextReference))
longestMatchingTypeNameForSuffixification bhId namespaceRoot (NamedRef.NamedRef {reversedSegments = revSuffix@(lastSegment NonEmpty.:| _), ref}) = do
  -- Use the shared helper so the namespace glob is escaped and built consistently
  -- with the other queries in this module.
  let namespaceGlob = toNamespaceGlob namespaceRoot
  let loop :: [Text] -> MaybeT Transaction (NamedRef Reference.TextReference)
      loop [] = empty
      loop (suffGlob : rest) = do
        result :: Maybe (NamedRef (Reference.TextReference)) <-
          lift $ queryMaybeRow sql ((bhId, lastSegment, namespaceGlob, suffGlob) :. ref)
        case result of
          Just namedRef ->
            -- We want to find matches for the _longest_ possible suffix, so we keep going until we
            -- don't find any more matches.
            pure namedRef <|> loop rest
          Nothing ->
            -- If we don't find a match for a suffix, there's no way we could match on an even
            -- longer suffix, so we bail.
            empty
  -- Globs for each successively longer suffix of the name, shortest first.
  let suffixes =
        revSuffix
          & toList
          & List.inits
          & mapMaybe NonEmpty.nonEmpty
          & map toSuffixGlob
  runMaybeT $ loop suffixes
  where
    sql =
      -- Note: It may seem strange that we do a last_name_segment constraint AND a reversed_name
      -- GLOB, but this helps improve query performance.
      -- The SQLite query optimizer is smart enough to do a prefix-search on globs, but will
      -- ONLY do a single prefix-search, meaning we use the index for `namespace`, but not for
      -- `reversed_name`. By adding the `last_name_segment` constraint, we can cull a ton of
      -- names which couldn't possibly match before we then manually filter the remaining names
      -- using the `reversed_name` glob which can't be optimized with an index.
      [here|
        SELECT reversed_name, reference_builtin, reference_component_hash, reference_component_index FROM scoped_type_name_lookup
        WHERE root_branch_hash_id = ?
        AND last_name_segment IS ?
        AND namespace GLOB ?
        AND reversed_name GLOB ?
        -- We don't need to consider names for the same definition when suffixifying, so
        -- we filter those out. Importantly this also avoids matching the name we're trying to suffixify.
        AND NOT (reference_builtin IS ? AND reference_component_hash IS ? AND reference_component_index IS ?)
        LIMIT 1
      |]
-- | @before x y@ returns whether or not @x@ occurred before @y@, i.e. @x@ is an ancestor of @y@.
before :: CausalHashId -> CausalHashId -> Transaction Bool
before chId1 chId2 = queryOneCol sql (chId2, chId1)
@ -2037,7 +2304,7 @@ elaborateHashes hashes =
hashesValues :: Values (Only Hash32)
hashesValues =
Values (coerce @(List.NonEmpty Hash32) @(List.NonEmpty (Only Hash32)) hashes)
Values (coerce @(NonEmpty Hash32) @(NonEmpty (Only Hash32)) hashes)
moveTempEntityToMain ::
HashHandle ->
@ -2944,3 +3211,43 @@ ensureBranchRemoteMapping pid bid rpid host rbid =
DO NOTHING
|]
(pid, bid, rpid, rbid, host)
-- | Build a glob matching any reversed_name that ends in the given (reversed) suffix.
--
-- >>> toSuffixGlob ("foo" NonEmpty.:| ["bar"])
-- "foo.bar.*"
toSuffixGlob :: ReversedSegments -> Text
toSuffixGlob revSegs =
  let dotted = Text.intercalate "." (toList revSegs)
   in globEscape dotted <> ".*"
-- | Render reversed segments in the DB's reversed_name format: each segment followed
-- by a dot, which yields a dot-separated name with a trailing dot.
--
-- >>> toReversedName (NonEmpty.fromList ["foo", "bar"])
-- "foo.bar."
toReversedName :: ReversedSegments -> Text
toReversedName = foldMap (<> ".")
-- | Convert a namespace into the appropriate glob for searching within that namespace
--
-- >>> toNamespaceGlob "foo.bar"
-- "foo.bar.*"
toNamespaceGlob :: Text -> Text
toNamespaceGlob ns = Text.concat [globEscape ns, ".*"]
-- | Thrown if we try to get the segments of an empty name, shouldn't ever happen since empty names
-- are invalid.
-- The 'String' payload is a rendered call stack identifying where the empty name was encountered.
data EmptyName = EmptyName String
deriving stock (Eq, Show)
deriving anyclass (SqliteExceptionReason)
-- | Convert a reversed name into reversed segments.
--
-- >>> reversedNameToReversedSegments "foo.bar."
-- Right ("foo" :| ["bar"])
reversedNameToReversedSegments :: (HasCallStack) => Text -> Either EmptyName ReversedSegments
reversedNameToReversedSegments txt =
  -- Names carry a trailing dot, so the final (empty) split segment is dropped
  -- before checking that anything remains.
  case NonEmpty.nonEmpty (List.dropEnd1 (Text.splitOn "." txt)) of
    Nothing -> Left (EmptyName (show callStack))
    Just segments -> Right segments

View File

@ -67,10 +67,65 @@ Configuration for [coc-nvim](https://github.com/neoclide/coc.nvim), enter the fo
}
```
For [lspconfig](https://github.com/neovim/nvim-lspconfig), you can use the following setup function:
For [lspconfig](https://github.com/neovim/nvim-lspconfig), optionally with autocompletion via [nvim-cmp](https://github.com/hrsh7th/nvim-cmp)
and its LSP source [cmp-nvim-lsp](https://github.com/hrsh7th/cmp-nvim-lsp), you can use the following setup function(s):
```lua
require('lspconfig').unison.setup({})
-- This function is for configuring a buffer when an LSP is attached
local on_attach = function(client, bufnr)
-- Always show the signcolumn, otherwise it would shift the text each time
-- diagnostics appear/become resolved
vim.o.signcolumn = 'yes'
-- Update the cursor hover location every 1/4 of a second
vim.o.updatetime = 250
-- Disable appending of the error text at the offending line
vim.diagnostic.config({virtual_text=false})
-- Enable a floating window containing the error text when hovering over an error
vim.api.nvim_create_autocmd("CursorHold", {
buffer = bufnr,
callback = function()
local opts = {
focusable = false,
close_events = { "BufLeave", "CursorMoved", "InsertEnter", "FocusLost" },
border = 'rounded',
source = 'always',
prefix = ' ',
scope = 'cursor',
}
vim.diagnostic.open_float(nil, opts)
end
})
-- This setting is to display hover information about the symbol under the cursor
vim.keymap.set('n', 'K', vim.lsp.buf.hover)
end
-- Setup the Unison LSP
require('lspconfig')['unison'].setup{
on_attach = on_attach,
}
```
```lua
-- This is NVim Autocompletion support
local cmp = require 'cmp'
-- This function sets up autocompletion
cmp.setup {
-- This mapping affects the autocompletion choices menu
mapping = cmp.mapping.preset.insert(),
-- This table names the sources for autocompletion
sources = {
{ name = 'nvim_lsp' },
},
}
```
Note that you'll need to start UCM _before_ you try connecting to it in your editor or your editor might give up.

View File

@ -5,10 +5,10 @@ If you are a newcomer to the Haskell ecosystem trying to set up your dev environ
Here is a working set of versions you can use to build the Unison executable:
GHC version: 8.10.7
Stack version: 2.9.1
Cabal version 3.6.2.0
Haskell language server version: 1.7.0.0
- GHC version: 8.10.7
- Stack version: 2.9.1
- Cabal version 3.6.2.0
- Haskell language server version: 1.7.0.0
The GHC version for the project can be confirmed by looking at the `resolver` key in this project's `stack.yaml`.

View File

@ -17,11 +17,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1674781052,
"narHash": "sha256-nseKFXRvmZ+BDAeWQtsiad+5MnvI/M2Ak9iAWzooWBw=",
"lastModified": 1678703398,
"narHash": "sha256-Y1mW3dBsoWLHpYm+UIHb5VZ7rx024NNHaF16oZBx++o=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "cc4bb87f5457ba06af9ae57ee4328a49ce674b1b",
"rev": "67f26c1cfc5d5783628231e776a81c1ade623e0b",
"type": "github"
},
"original": {

166
flake.nix
View File

@ -8,70 +8,23 @@
outputs = { self, flake-utils, nixpkgs }:
let
ghc-version = "8107";
systemAttrs = flake-utils.lib.eachDefaultSystem (system:
let
pkgs = nixpkgs.legacyPackages."${system}".extend self.overlay;
mystack = pkgs.symlinkJoin {
name = "stack";
paths = [ pkgs.stack ];
buildInputs = [ pkgs.makeWrapper ];
postBuild = let
flags = [ "--no-nix" "--system-ghc" "--no-install-ghc" ];
add-flags =
"--add-flags '${pkgs.lib.concatStringsSep " " flags}'";
in ''
wrapProgram "$out/bin/stack" ${add-flags}
'';
};
ghc-version = "8107";
ghc = pkgs.haskell.packages."ghc${ghc-version}";
make-ormolu = p:
p.callHackageDirect {
pkg = "ormolu";
ver = "0.4.0.0";
sha256 = "0r8jb8lpaxx7wxnvxiynx2dkrfibfl8nxnjl5n4vwy0az166bbnd";
} {
ghc-lib-parser =
pkgs.haskellPackages.ghc-lib-parser_9_2_5_20221107;
Cabal = pkgs.haskellPackages.Cabal_3_6_3_0;
};
myhls = let
hp = pkgs.haskellPackages.extend hp-override;
hp-override = final: prev: {
hls-floskell-plugin =
pkgs.haskell.lib.dontCheck prev.hls-floskell-plugin;
hls-rename-plugin =
pkgs.haskell.lib.dontCheck prev.hls-rename-plugin;
haskell-language-server =
pkgs.haskell.lib.overrideCabal prev.haskell-language-server
(drv: {
configureFlags = drv.configureFlags ++ [
"-f-brittany"
"-f-fourmolu"
"-f-floskell"
"-f-stylishhaskell"
"-f-hlint"
];
});
ormolu = make-ormolu final;
};
in pkgs.haskell-language-server.override {
haskellPackages = hp;
dynamic = true;
supportedGhcVersions = [ ghc-version ];
};
myormolu = make-ormolu pkgs.haskellPackages;
nativePackages = pkgs.lib.optionals pkgs.stdenv.isDarwin (with pkgs.darwin.apple_sdk.frameworks; [ Cocoa ]);
nativePackages = pkgs.lib.optionals pkgs.stdenv.isDarwin
(with pkgs.darwin.apple_sdk.frameworks; [ Cocoa ]);
unison-env = pkgs.mkShell {
packages = with pkgs; [
mystack
(haskell.compiler."ghc${ghc-version}".override {
useLLVM = pkgs.stdenv.isAarch64;
})
myormolu
myhls
packages = let exports = self.packages."${system}";
in with pkgs;
[
exports.stack
exports.hls
exports.ormolu
exports.ghc
pkg-config
zlib
] ++ nativePackages;
@ -94,12 +47,103 @@
pkgs = pkgs;
devShell = unison-env;
devShells.default = unison-env;
packages = { };
packages = {
hls = pkgs.unison-hls;
hls-call-hierarchy-plugin = ghc.hls-call-hierarchy-plugin;
ormolu = pkgs.ormolu;
ghc = pkgs.haskell.compiler."ghc${ghc-version}".override {
useLLVM = pkgs.stdenv.isAarch64;
};
stack = pkgs.unison-stack;
devShell = self.devShells."${system}".default;
defaultPackage = self.packages."${system}".unison-env;
};
defaultPackage = self.packages."${system}".devShell;
});
topLevelAttrs = { overlay = final: prev: { }; };
topLevelAttrs = {
overlay = final: prev: {
ormolu = prev.haskell.lib.justStaticExecutables
final.haskell.packages."ghc${ghc-version}".ormolu;
haskell = with prev.haskell.lib;
prev.haskell // {
packages = prev.haskell.packages // {
"ghc${ghc-version}" = prev.haskell.packages.ghc8107.extend
(hfinal: hprev: {
mkDerivation = drv:
hprev.mkDerivation (drv // {
doCheck = false;
doHaddock = false;
doBenchmark = false;
enableLibraryProfiling = false;
enableExecutableProfiling = false;
});
aeson = hfinal.aeson_2_1_1_0;
lens-aeson = hfinal.lens-aeson_1_2_2;
Cabal = hfinal.Cabal_3_6_3_0;
ormolu = hfinal.ormolu_0_5_0_1;
ghc-lib-parser = hfinal.ghc-lib-parser_9_2_5_20221107;
# avoid deprecated version https://github.com/Avi-D-coder/implicit-hie/issues/50
implicit-hie = hfinal.callHackageDirect {
pkg = "implicit-hie";
ver = "0.1.4.0";
sha256 =
"15qy9vwm8vbnyv47vh6kd50m09vc4vhqbbrhf8gdifrvlxhad69l";
} { };
haskell-language-server = let
p = prev.haskell.lib.overrideCabal
hprev.haskell-language-server (drv: {
# undo terrible nixpkgs hacks
buildDepends =
prev.lib.filter (x: x != hprev.hls-brittany-plugin)
drv.buildDepends;
configureFlags = drv.configureFlags ++ [
"-f-brittany"
"-f-fourmolu"
"-f-floskell"
"-f-stylishhaskell"
"-f-hlint"
];
});
in p.overrideScope (lfinal: lprev: {
# undo all of the horrible overrideScope in
# nixpkgs configuration files
ormolu = hfinal.ormolu;
ghc-lib-parser = hfinal.ghc-lib-parser;
ghc-lib-parser-ex = hfinal.ghc-lib-parser-ex;
ghc-paths = hfinal.ghc-paths;
aeson = hfinal.aeson;
lsp-types = hfinal.lsp-types;
# null out some dependencies that we drop with cabal flags
hls-fourmolu-plugin = null;
hls-floskell-plugin = null;
hls-brittany-plugin = hfinal.hls-brittany-plugin;
hls-stylish-haskell-plugin = null;
hls-hlint-plugin = null;
});
});
};
};
unison-hls = final.haskell-language-server.override {
haskellPackages = final.haskell.packages."ghc${ghc-version}";
dynamic = true;
supportedGhcVersions = [ ghc-version ];
};
unison-stack = prev.symlinkJoin {
name = "stack";
paths = [ final.stack ];
buildInputs = [ final.makeWrapper ];
postBuild = let
flags = [ "--no-nix" "--system-ghc" "--no-install-ghc" ];
add-flags =
"--add-flags '${prev.lib.concatStringsSep " " flags}'";
in ''
wrapProgram "$out/bin/stack" ${add-flags}
'';
};
};
};
in systemAttrs // topLevelAttrs;
}

View File

@ -39,6 +39,8 @@ data DebugFlag
Temp
| -- | Shows Annotations when printing terms
Annotations
| -- | Debug endpoints of the local UI (or Share) server
Server
| PatternCoverage
| PatternCoverageConstraintSolver
deriving (Eq, Ord, Show, Bounded, Enum)
@ -63,6 +65,7 @@ debugFlags = case (unsafePerformIO (lookupEnv "UNISON_DEBUG")) of
"TIMING" -> pure Timing
"TEMP" -> pure Temp
"ANNOTATIONS" -> pure Annotations
"SERVER" -> pure Server
"PATTERN_COVERAGE" -> pure PatternCoverage
"PATTERN_COVERAGE_CONSTRAINT_SOLVER" -> pure PatternCoverageConstraintSolver
_ -> empty
@ -112,6 +115,10 @@ debugAnnotations :: Bool
debugAnnotations = Annotations `Set.member` debugFlags
{-# NOINLINE debugAnnotations #-}
debugServer :: Bool
debugServer = Server `Set.member` debugFlags
{-# NOINLINE debugServer #-}
debugPatternCoverage :: Bool
debugPatternCoverage = PatternCoverage `Set.member` debugFlags
{-# NOINLINE debugPatternCoverage #-}
@ -171,5 +178,6 @@ shouldDebug = \case
Timing -> debugTiming
Temp -> debugTemp
Annotations -> debugAnnotations
Server -> debugServer
PatternCoverage -> debugPatternCoverage
PatternCoverageConstraintSolver -> debugPatternCoverageConstraintSolver

View File

@ -6,9 +6,11 @@ module Unison.Util.Set
Unison.Util.Set.traverse,
flatMap,
filterM,
forMaybe,
)
where
import Data.Function ((&))
import Data.Functor ((<&>))
import qualified Data.Maybe as Maybe
import Data.Set (Set)
@ -33,6 +35,13 @@ symmetricDifference a b = (a `Set.difference` b) `Set.union` (b `Set.difference`
mapMaybe :: (Ord b) => (a -> Maybe b) -> Set a -> Set b
mapMaybe f = Set.fromList . Maybe.mapMaybe f . Set.toList
-- | Effectful 'Data.Maybe.mapMaybe' over a 'Set': run @f@ on each element,
-- keep the 'Just' results, and collect them back into a 'Set'.
forMaybe :: (Ord b, Applicative f) => Set a -> (a -> f (Maybe b)) -> f (Set b)
forMaybe xs f =
  Set.fromList . Maybe.catMaybes <$> Prelude.traverse f (Set.toList xs)
traverse :: (Applicative f, Ord b) => (a -> f b) -> Set a -> f (Set b)
traverse f = fmap Set.fromList . Prelude.traverse f . Set.toList

View File

@ -2,18 +2,28 @@
module Unison.Codebase.Editor.DisplayObject where
import Data.Bifoldable
import Data.Bifunctor
import Data.Bitraversable
import Unison.Prelude
import Unison.ShortHash
data DisplayObject b a = BuiltinObject b | MissingObject ShortHash | UserObject a
deriving (Eq, Ord, Show, Functor, Generic)
deriving (Eq, Ord, Show, Functor, Generic, Foldable, Traversable)
-- | Map over the builtin ('BuiltinObject') and user ('UserObject') payloads;
-- 'MissingObject' carries only a 'ShortHash' and is passed through unchanged.
instance Bifunctor DisplayObject where
  bimap _ _ (MissingObject sh) = MissingObject sh
  bimap f _ (BuiltinObject b) = BuiltinObject (f b)
  bimap _ f (UserObject a) = UserObject (f a)

-- | Effectful traversal of both payloads; 'MissingObject' needs no effect,
-- so it is rebuilt with 'pure'.
instance Bitraversable DisplayObject where
  bitraverse f _ (BuiltinObject b) = BuiltinObject <$> f b
  bitraverse _ _ (MissingObject sh) = pure (MissingObject sh)
  bitraverse _ g (UserObject a) = UserObject <$> g a

-- | Folding is derived from the 'Bitraversable' instance via
-- 'bifoldMapDefault', keeping the two instances trivially consistent.
instance Bifoldable DisplayObject where
  bifoldMap = bifoldMapDefault
toMaybe :: DisplayObject b a -> Maybe a
toMaybe = \case
UserObject a -> Just a

View File

@ -263,7 +263,11 @@ splitFromName name =
case Name.reverseSegments name of
(seg :| pathSegments) -> (fromList $ reverse pathSegments, seg)
-- | Remove a path prefix from a name.
-- Returns 'Nothing' if there are no remaining segments to construct the name from.
--
-- >>> unprefixName (Absolute $ fromList ["base", "List"]) (Name.unsafeFromText "base.List.map")
-- Just (Name Relative (NameSegment {toText = "map"} :| []))
unprefixName :: Absolute -> Name -> Maybe Name
unprefixName prefix = toName . unprefix prefix . fromName'

View File

@ -309,6 +309,12 @@ referent2to1 lookupCT = \case
V2.Ref r -> pure $ V1.Ref (reference2to1 r)
V2.Con r i -> V1.Con (V1.ConstructorReference (reference2to1 r) (fromIntegral i)) <$> lookupCT r
-- | Like 'referent2to1', but takes the constructor type as a plain argument
-- instead of looking it up through an effectful callback, so no monadic
-- context is required. The @ct@ argument is only consulted for 'V2.Con'.
referent2to1UsingCT :: V2.ConstructorType -> V2.Referent -> V1.Referent
referent2to1UsingCT ct referent =
  case referent of
    V2.Ref r -> V1.Ref (reference2to1 r)
    V2.Con r conId ->
      V1.Con
        (V1.ConstructorReference (reference2to1 r) (fromIntegral conId))
        (constructorType2to1 ct)
referent1to2 :: V1.Referent -> V2.Referent
referent1to2 = \case
V1.Ref r -> V2.Ref $ reference1to2 r

View File

@ -605,7 +605,7 @@ namesAtPath bh namesRootPath relativeToPath = do
(Name.fromReverseSegments (coerce reversedSegments), Cv.reference2to1 ref)
convertTerms names =
names <&> \(S.NamedRef {reversedSegments, ref = (ref, ct)}) ->
let v1ref = runIdentity $ Cv.referent2to1 (const . pure . Cv.constructorType2to1 . fromMaybe (error "Required constructor type for constructor but it was null") $ ct) ref
let v1ref = Cv.referent2to1UsingCT (fromMaybe (error "Required constructor type for constructor but it was null") ct) ref
in (Name.fromReverseSegments (coerce reversedSegments), v1ref)
-- If the given prefix matches the given name, the prefix is stripped and it's collected

View File

@ -16,12 +16,14 @@ import qualified Data.Set as Set
import Data.Text (unpack)
import qualified Unison.ABT as ABT
import qualified Unison.Blank as Blank
import qualified Unison.Builtin as Builtin
import qualified Unison.Name as Name
import qualified Unison.Names as Names
import qualified Unison.NamesWithHistory as NamesWithHistory
import Unison.Parser.Ann (Ann)
import qualified Unison.Parsers as Parsers
import Unison.Prelude
import qualified Unison.PrettyPrintEnv.Names as PPE
import Unison.Reference (Reference)
import qualified Unison.Referent as Referent
import Unison.Result (CompilerBug (..), Note (..), Result, ResultT, pattern Result)
@ -155,8 +157,13 @@ synthesizeFile ambient tl fqnsByShortName uf term = do
let -- substitute Blanks for any remaining free vars in UF body
tdnrTerm = Term.prepareTDNR term
env0 = Typechecker.Env ambient tl fqnsByShortName
unisonFilePPE =
( PPE.fromNames
10
(NamesWithHistory.shadowing (UF.toNames uf) Builtin.names)
)
Result notes mayType =
evalStateT (Typechecker.synthesizeAndResolve env0) tdnrTerm
evalStateT (Typechecker.synthesizeAndResolve unisonFilePPE env0) tdnrTerm
-- If typechecking succeeded, reapply the TDNR decisions to user's term:
Result (convertNotes notes) mayType >>= \_typ -> do
let infos = Foldable.toList $ Typechecker.infos notes

View File

@ -68,15 +68,17 @@ checkMatch matchLocation scrutineeType cases = do
uncoveredExpanded <- concat . fmap Set.toList <$> traverse (expandSolution v0) (Set.toList uncovered)
let sols = map (generateInhabitants v0) uncoveredExpanded
let (_accessible, inaccessible, redundant) = classify grdtree1
ppe <- getPrettyPrintEnv
let debugOutput =
P.sep
"\n"
[ P.hang "desugared:" (prettyGrdTree prettyPmGrd (\_ -> "<loc>") grdtree0),
P.hang "annotated:" (prettyGrdTree NC.prettyDnf (NC.prettyDnf . fst) grdtree1),
P.hang "uncovered:" (NC.prettyDnf uncovered),
P.hang "uncovered expanded:" (NC.prettyDnf (Set.fromList uncoveredExpanded))
"\n\n"
[ P.hang (title "desugared:") (prettyGrdTree (prettyPmGrd ppe) (\_ -> "<loc>") grdtree0),
P.hang (title "annotated:") (prettyGrdTree (NC.prettyDnf ppe) (NC.prettyDnf ppe . fst) grdtree1),
P.hang (title "uncovered:") (NC.prettyDnf ppe uncovered),
P.hang (title "uncovered expanded:") (NC.prettyDnf ppe (Set.fromList uncoveredExpanded))
]
title = P.bold
doDebug = case shouldDebug PatternCoverage of
True -> trace (P.toPlainUnbroken debugOutput)
True -> trace (P.toAnsiUnbroken debugOutput)
False -> id
doDebug (pure (redundant, inaccessible, sols))

View File

@ -10,6 +10,7 @@ where
import Control.Monad.Fix (MonadFix)
import Unison.ConstructorReference (ConstructorReference)
import Unison.PatternMatchCoverage.ListPat (ListPat)
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.Type (Type)
import Unison.Var (Var)
@ -25,6 +26,8 @@ class (Ord loc, Var vt, Var v, MonadFix m) => Pmc vt v loc m | m -> vt v loc whe
-- | Get a fresh variable
fresh :: m v
getPrettyPrintEnv :: m PrettyPrintEnv
data EnumeratedConstructors vt v loc
= ConstructorType [(v, ConstructorReference, Type vt loc)]
| SequenceType [(ListPat, [Type vt loc])]

View File

@ -7,7 +7,8 @@ where
import Unison.ConstructorReference (ConstructorReference)
import Unison.PatternMatchCoverage.IntervalSet (IntervalSet)
import Unison.PatternMatchCoverage.PmLit
import qualified Unison.PrettyPrintEnv as PPE
import Unison.PatternMatchCoverage.Pretty
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import qualified Unison.Syntax.TypePrinter as TypePrinter
import Unison.Type (Type)
import Unison.Util.Pretty
@ -53,20 +54,21 @@ data Constraint vt v loc
Eq v v
deriving stock (Eq, Ord)
prettyConstraint :: (Var vt, Var v) => Constraint vt v loc -> Pretty ColorText
prettyConstraint = \case
prettyConstraint :: forall vt v loc. (Var vt, Var v) => PrettyPrintEnv -> Constraint vt v loc -> Pretty ColorText
prettyConstraint ppe = \case
PosCon var con convars ->
let xs = pc con : fmap (\(trm, typ) -> sep " " [pv trm, ":", TypePrinter.pretty PPE.empty typ]) convars ++ ["<-", pv var]
let xs = pc con : fmap (\(trm, typ) -> sep " " ["(" <> prettyVar trm, ":", TypePrinter.pretty ppe typ <> ")"]) convars ++ ["<-", prettyVar var]
in sep " " xs
NegCon var con -> sep " " [pv var, "", pc con]
PosLit var lit -> sep " " [prettyPmLit lit, "<-", pv var]
NegLit var lit -> sep " " [pv var, "", prettyPmLit lit]
PosListHead root n el -> sep " " [pv el, "<-", "head", pc n, pv root]
PosListTail root n el -> sep " " [pv el, "<-", "tail", pc n, pv root]
NegListInterval var x -> sep " " [pv var, "", string (show x)]
Effectful var -> "!" <> pv var
Eq v0 v1 -> sep " " [pv v0, "=", pv v1]
NegCon var con -> sep " " [prettyVar var, "", pc con]
PosLit var lit -> sep " " [prettyPmLit lit, "<-", prettyVar var]
NegLit var lit -> sep " " [prettyVar var, "", prettyPmLit lit]
PosListHead root n el -> sep " " [prettyVar el, "<-", "head", pany n, prettyVar root]
PosListTail root n el -> sep " " [prettyVar el, "<-", "tail", pany n, prettyVar root]
NegListInterval var x -> sep " " [prettyVar var, "", string (show x)]
Effectful var -> "!" <> prettyVar var
Eq v0 v1 -> sep " " [prettyVar v0, "=", prettyVar v1]
where
pv = string . show
pc :: forall a. (Show a) => a -> Pretty ColorText
pc = string . show
pany :: (Show a) => a -> Pretty ColorText
pany = string . show
pc = prettyConstructorReference ppe

View File

@ -15,7 +15,6 @@ module Unison.PatternMatchCoverage.NormalizedConstraints
where
import Data.Functor.Compose
import Data.List (intersperse)
import Data.Sequence (pattern Empty)
import qualified Data.Set as Set
import Unison.ConstructorReference (ConstructorReference)
@ -23,6 +22,7 @@ import Unison.PatternMatchCoverage.Constraint
import Unison.PatternMatchCoverage.IntervalSet (IntervalSet)
import qualified Unison.PatternMatchCoverage.IntervalSet as IntervalSet
import qualified Unison.PatternMatchCoverage.PmLit as PmLit
import Unison.PatternMatchCoverage.Pretty
import Unison.PatternMatchCoverage.UFMap (UFMap)
import qualified Unison.PatternMatchCoverage.UFMap as UFMap
import Unison.Prelude
@ -226,12 +226,12 @@ data EffectInfo
| IsNotEffectful
deriving stock (Show, Eq, Ord)
prettyNormalizedConstraints :: forall vt v loc. (Var v, Var vt) => NormalizedConstraints vt v loc -> Pretty ColorText
prettyNormalizedConstraints (NormalizedConstraints {constraintMap}) = sep " " ["", pconstraints, ""]
prettyNormalizedConstraints :: forall vt v loc. (Var v, Var vt) => PPE.PrettyPrintEnv -> NormalizedConstraints vt v loc -> Pretty ColorText
prettyNormalizedConstraints ppe (NormalizedConstraints {constraintMap}) = sep " " ["", pconstraints, ""]
where
cls = UFMap.toClasses constraintMap
pconstraints = sep " " (intersperse "," $ prettyCon <$> cls)
pconstraints = sep ", " (prettyCon <$> cls)
prettyCon (kcanon, ks, vi) =
let posCon = fromMaybe [] $ case vi_con vi of
Vc'Constructor pos _neg ->
@ -267,12 +267,13 @@ prettyNormalizedConstraints (NormalizedConstraints {constraintMap}) = sep " " ["
IsNotEffectful -> []
IsEffectful -> [Effectful kcanon]
in sep " " $
pv kcanon
: fmap pv (Set.toList $ Set.delete kcanon ks)
++ [":", TypePrinter.pretty PPE.empty (vi_typ vi)]
prettyVar kcanon
: fmap prettyVar (Set.toList $ Set.delete kcanon ks)
++ [":", TypePrinter.pretty ppe (vi_typ vi)]
++ ["|"]
++ [sep ", " $ fmap prettyConstraint (posCon ++ negCon ++ botCon)]
pv = string . show
++ case posCon ++ negCon ++ botCon of
[] -> [""]
_ -> [sep ", " $ fmap (prettyConstraint ppe) (posCon ++ negCon ++ botCon)]
prettyDnf :: (Var v, Var vt) => Set (NormalizedConstraints vt v loc) -> Pretty ColorText
prettyDnf xs = sep " " ("{" : intersperse "," (prettyNormalizedConstraints <$> Set.toList xs) ++ ["}"])
prettyDnf :: (Var v, Var vt) => PPE.PrettyPrintEnv -> Set (NormalizedConstraints vt v loc) -> Pretty ColorText
prettyDnf ppe xs = sep " " ("{" : sep ", " (prettyNormalizedConstraints ppe <$> Set.toList xs) : ["}"])

View File

@ -2,6 +2,7 @@ module Unison.PatternMatchCoverage.PmGrd where
import Unison.ConstructorReference (ConstructorReference)
import Unison.PatternMatchCoverage.PmLit (PmLit, prettyPmLit)
import Unison.PatternMatchCoverage.Pretty
import qualified Unison.PrettyPrintEnv as PPE
import qualified Unison.Syntax.TypePrinter as TypePrinter
import Unison.Term (Term')
@ -50,15 +51,16 @@ data
PmLet v (Term' vt v loc) (Type vt loc)
deriving stock (Show)
prettyPmGrd :: (Var vt, Var v) => PmGrd vt v loc -> Pretty ColorText
prettyPmGrd = \case
prettyPmGrd :: (Var vt, Var v) => PPE.PrettyPrintEnv -> PmGrd vt v loc -> Pretty ColorText
prettyPmGrd ppe = \case
PmCon var con convars ->
let xs = string (show con) : (formatConVar <$> convars) ++ ["<-", string (show var)]
formatConVar (v, t) = sep " " ["(", string (show v), ":", TypePrinter.pretty PPE.empty t, ")"]
let xs = pc con : fmap (\(trm, typ) -> sep " " ["(" <> prettyVar trm, ":", TypePrinter.pretty ppe typ <> ")"]) convars ++ ["<-", prettyVar var]
in sep " " xs
PmListHead var n el _ -> sep " " ["Cons", string (show n), string (show el), "<-", string (show var)]
PmListTail var n el _ -> sep " " ["Snoc", string (show n), string (show el), "<-", string (show var)]
PmListInterval var minLen maxLen -> sep " " ["Interval", string (show (minLen, maxLen)), "<-", string (show var)]
PmLit var lit -> sep " " [prettyPmLit lit, "<-", string (show var)]
PmBang v -> "!" <> string (show v)
PmLet v _expr _ -> sep " " ["let", string (show v), "=", "<expr>"]
PmListHead var n el _ -> sep " " ["Cons", string (show n), prettyVar el, "<-", prettyVar var]
PmListTail var n el _ -> sep " " ["Snoc", string (show n), prettyVar el, "<-", prettyVar var]
PmListInterval var minLen maxLen -> sep " " ["Interval", string (show (minLen, maxLen)), "<-", prettyVar var]
PmLit var lit -> sep " " [prettyPmLit lit, "<-", prettyVar var]
PmBang v -> "!" <> prettyVar v
PmLet v _expr _ -> sep " " ["let", prettyVar v, "=", "<expr>"]
where
pc = prettyConstructorReference ppe

View File

@ -0,0 +1,27 @@
module Unison.PatternMatchCoverage.Pretty where
import Data.Char
import Unison.ConstructorReference (ConstructorReference)
import Unison.PrettyPrintEnv
import Unison.Symbol
import qualified Unison.Syntax.TermPrinter as TermPrinter
import qualified Unison.Term as Term
import Unison.Util.Pretty
import qualified Unison.Util.Pretty as P
import Unison.Var
-- | Render a pattern-match-coverage variable as a short bold letter name
-- derived from its 'freshId' in a base-26 letter encoding
-- (0 -> \"a\", 1 -> \"b\", ...), rather than showing the raw variable.
prettyVar :: (Var v) => v -> Pretty ColorText
prettyVar v = P.bold (string (go (freshId v)))
  where
    go x =
      let (q, r) = divMod x 26
          letter = chr (ord 'a' + fromIntegral r)
       in letter : if q == 0 then "" else go q
-- | Pretty-print a constructor reference using the names in the given
-- 'PrettyPrintEnv'. Works by wrapping the reference in a throwaway
-- constructor 'Term.Term' and reusing the term printer, so the output
-- matches how the constructor appears in printed code.
prettyConstructorReference :: PrettyPrintEnv -> ConstructorReference -> Pretty ColorText
prettyConstructorReference ppe cr =
  let con :: Term.Term Symbol ()
      con = Term.constructor () cr
   in TermPrinter.pretty ppe con

View File

@ -457,8 +457,9 @@ addConstraint ::
Constraint vt v loc ->
NormalizedConstraints vt v loc ->
m (Maybe (NormalizedConstraints vt v loc))
addConstraint con0 nc =
debugConstraint <$> case con0 of
addConstraint con0 nc = do
ppe <- getPrettyPrintEnv
debugConstraint ppe <$> case con0 of
C.PosLit var pmlit ->
let updateLiteral pos neg lit
| Just lit1 <- pos,
@ -568,13 +569,13 @@ addConstraint con0 nc =
| otherwise -> pure $ Just $ insertVarInfo var vi {vi_eff = IsEffectful} nc
C.Eq x y -> union x y nc
where
debugConstraint x =
debugConstraint ppe x =
let debugOutput =
P.sep
"\n"
[ P.hang (P.red "input constraints: ") (prettyNormalizedConstraints nc),
P.hang (P.yellow "additional constraint: ") (C.prettyConstraint con0),
P.hang (P.green "resulting constraint: ") (maybe "contradiction" prettyNormalizedConstraints x),
[ P.hang (P.red "input constraints: ") (prettyNormalizedConstraints ppe nc),
P.hang (P.yellow "additional constraint: ") (C.prettyConstraint ppe con0),
P.hang (P.green "resulting constraint: ") (maybe "contradiction" (prettyNormalizedConstraints ppe) x),
""
]
in if shouldDebug PatternCoverageConstraintSolver then trace (P.toAnsiUnbroken debugOutput) x else x

View File

@ -0,0 +1,63 @@
module Unison.PrettyPrintEnvDecl.Sqlite where
import U.Codebase.HashTags (BranchHash)
import U.Codebase.Sqlite.NamedRef (NamedRef (..))
import qualified U.Codebase.Sqlite.Operations as Ops
import qualified Unison.Codebase as Codebase
import Unison.Codebase.Path
import qualified Unison.Codebase.Path as Path
import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv
import Unison.LabeledDependency (LabeledDependency)
import qualified Unison.LabeledDependency as LD
import Unison.Name (Name)
import qualified Unison.Name as Name
import Unison.NameSegment (NameSegment (..))
import qualified Unison.Names as Names
import qualified Unison.NamesWithHistory as NamesWithHistory
import Unison.Prelude
import qualified Unison.PrettyPrintEnvDecl as PPED
import qualified Unison.PrettyPrintEnvDecl.Names as PPED
import Unison.Reference (Reference)
import Unison.Referent (Referent)
import qualified Unison.Sqlite as Sqlite
import Unison.Util.Monoid (foldMapM)
-- | Given a set of references, return a PPE which contains names for only those references.
-- Names are limited to those within the provided perspective
--
-- Looks up names directly in the SQLite name-lookup index for the given
-- branch hash, then augments them with the longest matching names needed
-- for suffixification so the printer can shorten names unambiguously.
ppedForReferences :: BranchHash -> Path -> Set LabeledDependency -> Sqlite.Transaction PPED.PrettyPrintEnvDecl
ppedForReferences rootHash perspective refs = do
  hashLen <- Codebase.hashLength
  -- Collect (name, ref) pairs for every requested term/type reference.
  (termNames, typeNames) <-
    refs & foldMapM \ref ->
      namesForReference ref
  -- Ideally we'd only suffixify the name we're actually going to use, but due to name biasing
  -- we won't know that until we actually call the pretty-printer, so
  -- we add suffixifications for every name we have for each reference.
  longestTermSuffixMatches <- forMaybe termNames \(name, ref) -> do
    result <-
      Ops.longestMatchingTermNameForSuffixification rootHash pathText (NamedRef {reversedSegments = coerce $ Name.reverseSegments name, ref = Cv.referent1to2 ref})
        <&> fmap \(NamedRef {reversedSegments, ref = (ref, mayCt)}) ->
          -- Constructors must carry a constructor type in the index; a null
          -- here indicates a corrupt name-lookup table.
          let ct = fromMaybe (error "ppedForReferences: Required constructor type for constructor but it was null") mayCt
           in (Name.fromReverseSegments (coerce reversedSegments), Cv.referent2to1UsingCT ct ref)
    pure result
  longestTypeSuffixMatches <- forMaybe typeNames \(name, ref) -> do
    result <-
      Ops.longestMatchingTypeNameForSuffixification rootHash pathText (NamedRef {reversedSegments = coerce $ Name.reverseSegments name, ref = Cv.reference1to2 ref})
        <&> fmap \(NamedRef {reversedSegments, ref}) ->
          (Name.fromReverseSegments (coerce reversedSegments), Cv.reference2to1 ref)
    pure result
  -- Direct names plus their suffixification candidates feed the PPE.
  let allTermNamesToConsider = termNames <> longestTermSuffixMatches
  let allTypeNamesToConsider = typeNames <> longestTypeSuffixMatches
  pure . PPED.fromNamesDecl hashLen . NamesWithHistory.fromCurrentNames $ Names.fromTermsAndTypes allTermNamesToConsider allTypeNamesToConsider
  where
    -- The perspective path rendered the way the name-lookup queries expect.
    pathText :: Text
    pathText = Path.toText perspective
    -- Fetch every name for one reference, split by term vs. type.
    namesForReference :: LabeledDependency -> Sqlite.Transaction ([(Name, Referent)], [(Name, Reference)])
    namesForReference = \case
      LD.TermReferent ref -> do
        termNames <- fmap (Name.fromReverseSegments . coerce) <$> Ops.termNamesForRefWithinNamespace rootHash pathText (Cv.referent1to2 ref) Nothing
        pure ((,ref) <$> termNames, [])
      LD.TypeReference ref -> do
        typeNames <- fmap (Name.fromReverseSegments . coerce) <$> Ops.typeNamesForRefWithinNamespace rootHash pathText (Cv.reference1to2 ref) Nothing
        pure ([], (,ref) <$> typeNames)

View File

@ -128,7 +128,8 @@ prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd =
field (fname, typ) =
P.group $
styleHashQualified'' (fmt (S.TypeReference r)) fname
<> fmt S.TypeAscriptionColon " :" `P.hang` runPretty suffixifiedPPE (TypePrinter.prettyRaw Map.empty (-1) typ)
<> fmt S.TypeAscriptionColon " :"
`P.hang` runPretty suffixifiedPPE (TypePrinter.prettyRaw Map.empty (-1) typ)
header = prettyDataHeader name dd <> fmt S.DelimiterChar (" = " `P.orElse` "\n = ")
-- Comes up with field names for a data declaration which has the form of a
@ -176,7 +177,7 @@ fieldNames env r name dd = do
}
accessorsWithTypes :: [(v, Term.Term v (), Type.Type v ())] <-
for accessors \(v, trm) ->
case Result.result (Typechecker.synthesize typecheckingEnv trm) of
case Result.result (Typechecker.synthesize env typecheckingEnv trm) of
Nothing -> Nothing
Just typ -> Just (v, trm, typ)
let hashes = Hashing.hashTermComponents (Map.fromList . fmap (\(v, trm, typ) -> (v, (trm, typ))) $ accessorsWithTypes)

View File

@ -24,6 +24,7 @@ import qualified Unison.ABT as ABT
import qualified Unison.Blank as B
import qualified Unison.Name as Name
import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.Referent (Referent)
import Unison.Result
( Result,
@ -90,13 +91,15 @@ makeLenses ''Env
-- contained in that term.
synthesize ::
(Monad f, Var v, Ord loc) =>
PrettyPrintEnv ->
Env v loc ->
Term v loc ->
ResultT (Notes v loc) f (Type v loc)
synthesize env t =
synthesize ppe env t =
let result =
convertResult $
Context.synthesizeClosed
ppe
(TypeVar.liftType <$> view ambientAbilities env)
(view typeLookup env)
(TypeVar.liftTerm t)
@ -150,11 +153,11 @@ data Resolution v loc = Resolution
-- | Infer the type of a 'Unison.Term', using type-directed name resolution
-- to attempt to resolve unknown symbols.
synthesizeAndResolve ::
(Monad f, Var v, Monoid loc, Ord loc) => Env v loc -> TDNR f v loc (Type v loc)
synthesizeAndResolve env = do
(Monad f, Var v, Monoid loc, Ord loc) => PrettyPrintEnv -> Env v loc -> TDNR f v loc (Type v loc)
synthesizeAndResolve ppe env = do
tm <- get
(tp, notes) <- listen . lift $ synthesize env tm
typeDirectedNameResolution notes tp env
(tp, notes) <- listen . lift $ synthesize ppe env tm
typeDirectedNameResolution ppe notes tp env
compilerBug :: Context.CompilerBug v loc -> Result (Notes v loc) ()
compilerBug bug = do
@ -185,11 +188,12 @@ liftResult = lift . MaybeT . WriterT . pure . runIdentity . runResultT
typeDirectedNameResolution ::
forall v loc f.
(Monad f, Var v, Ord loc, Monoid loc) =>
PrettyPrintEnv ->
Notes v loc ->
Type v loc ->
Env v loc ->
TDNR f v loc (Type v loc)
typeDirectedNameResolution oldNotes oldType env = do
typeDirectedNameResolution ppe oldNotes oldType env = do
-- Add typed components (local definitions) to the TDNR environment.
let tdnrEnv = execState (traverse_ addTypedComponent $ infos oldNotes) env
-- Resolve blanks in the notes and generate some resolutions
@ -205,7 +209,7 @@ typeDirectedNameResolution oldNotes oldType env = do
in if goAgain
then do
traverse_ substSuggestion rs
synthesizeAndResolve tdnrEnv
synthesizeAndResolve ppe tdnrEnv
else do
-- The type hasn't changed
liftResult $ suggest rs
@ -299,11 +303,12 @@ typeDirectedNameResolution oldNotes oldType env = do
-- and a note about typechecking failure otherwise.
check ::
(Monad f, Var v, Ord loc) =>
PrettyPrintEnv ->
Env v loc ->
Term v loc ->
Type v loc ->
ResultT (Notes v loc) f (Type v loc)
check env term typ = synthesize env (Term.ann (ABT.annotation term) term typ)
check ppe env term typ = synthesize ppe env (Term.ann (ABT.annotation term) term typ)
-- | `checkAdmissible' e t` tests that `(f : t -> r) e` is well-typed.
-- If `t` has quantifiers, these are moved outside, so if `t : forall a . a`,
@ -315,8 +320,8 @@ check env term typ = synthesize env (Term.ann (ABT.annotation term) term typ)
-- tweak (Type.ForallNamed' v body) = Type.forall() v (tweak body)
-- tweak t = Type.arrow() t t
-- | Returns `True` if the expression is well-typed, `False` otherwise
wellTyped :: (Monad f, Var v, Ord loc) => Env v loc -> Term v loc -> f Bool
wellTyped env term = go <$> runResultT (synthesize env term)
wellTyped :: (Monad f, Var v, Ord loc) => PrettyPrintEnv -> Env v loc -> Term v loc -> f Bool
wellTyped ppe env term = go <$> runResultT (synthesize ppe env term)
where
go (may, _) = isJust may

View File

@ -59,6 +59,7 @@ import Data.Bifunctor
)
import qualified Data.Foldable as Foldable
import Data.Function (on)
import Data.Functor.Compose
import Data.List
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as Nel
@ -85,9 +86,11 @@ import Unison.DataDeclaration.ConstructorId (ConstructorId)
import Unison.Pattern (Pattern)
import qualified Unison.Pattern as Pattern
import Unison.PatternMatchCoverage (checkMatch)
import Unison.PatternMatchCoverage.Class (EnumeratedConstructors (..), Pmc (..), traverseConstructors)
import Unison.PatternMatchCoverage.Class (EnumeratedConstructors (..), Pmc, traverseConstructors)
import qualified Unison.PatternMatchCoverage.Class as Pmc
import qualified Unison.PatternMatchCoverage.ListPat as ListPat
import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import qualified Unison.PrettyPrintEnv as PPE
import Unison.Reference (Reference)
import qualified Unison.Reference as Reference
@ -217,6 +220,8 @@ mapErrors f r = case r of
newtype MT v loc f a = MT
{ runM ::
-- for debug output
PrettyPrintEnv ->
-- Data declarations in scope
DataDeclarations v loc ->
-- Effect declarations in scope
@ -234,10 +239,10 @@ type M v loc = MT v loc (Result v loc)
type TotalM v loc = MT v loc (Either (CompilerBug v loc))
liftResult :: Result v loc a -> M v loc a
liftResult r = MT (\_ _ env -> (,env) <$> r)
liftResult r = MT (\_ _ _ env -> (,env) <$> r)
liftTotalM :: TotalM v loc a -> M v loc a
liftTotalM (MT m) = MT $ \datas effects env -> case m datas effects env of
liftTotalM (MT m) = MT $ \ppe datas effects env -> case m ppe datas effects env of
Left bug -> CompilerBug bug mempty mempty
Right a -> Success mempty a
@ -251,7 +256,7 @@ modEnv :: (Env v loc -> Env v loc) -> M v loc ()
modEnv f = modEnv' $ ((),) . f
modEnv' :: (Env v loc -> (a, Env v loc)) -> M v loc a
modEnv' f = MT (\_ _ env -> pure . f $ env)
modEnv' f = MT (\_ _ _ env -> pure . f $ env)
data Unknown = Data | Effect deriving (Show)
@ -414,7 +419,7 @@ scope' p (ErrorNote cause path) = ErrorNote cause (path `mappend` pure p)
-- Add `p` onto the end of the `path` of any `ErrorNote`s emitted by the action
scope :: PathElement v loc -> M v loc a -> M v loc a
scope p (MT m) = MT \datas effects env -> mapErrors (scope' p) (m datas effects env)
scope p (MT m) = MT \ppe datas effects env -> mapErrors (scope' p) (m ppe datas effects env)
newtype Context v loc = Context [(Element v loc, Info v loc)]
@ -725,7 +730,7 @@ extendN ctx es = foldM (flip extend) ctx es
orElse :: M v loc a -> M v loc a -> M v loc a
orElse m1 m2 = MT go
where
go datas effects env = runM m1 datas effects env <|> runM m2 datas effects env
go ppe datas effects env = runM m1 ppe datas effects env <|> runM m2 ppe datas effects env
s@(Success _ _) <|> _ = s
TypeError _ _ <|> r = r
CompilerBug _ _ _ <|> r = r -- swallowing bugs for now: when checking whether a type annotation
@ -738,11 +743,14 @@ orElse m1 m2 = MT go
-- hoistMaybe :: (Maybe a -> Maybe b) -> Result v loc a -> Result v loc b
-- hoistMaybe f (Result es is a) = Result es is (f a)
getPrettyPrintEnv :: M v loc PrettyPrintEnv
getPrettyPrintEnv = MT \ppe _ _ env -> pure (ppe, env)
getDataDeclarations :: M v loc (DataDeclarations v loc)
getDataDeclarations = MT \datas _ env -> pure (datas, env)
getDataDeclarations = MT \_ datas _ env -> pure (datas, env)
getEffectDeclarations :: M v loc (EffectDeclarations v loc)
getEffectDeclarations = MT \_ effects env -> pure (effects, env)
getEffectDeclarations = MT \_ _ effects env -> pure (effects, env)
compilerCrash :: CompilerBug v loc -> M v loc a
compilerCrash bug = liftResult $ compilerBug bug
@ -1272,10 +1280,23 @@ getDataConstructorsAtType t0 = do
equate t0 lastT
applyM t
instance (Ord loc, Var v) => Pmc (TypeVar v loc) v loc (StateT (Set v) (M v loc)) where
getConstructors = lift . getDataConstructorsAtType
data PmcState vt v loc = PmcState
{ variables :: !(Set v),
constructorCache :: !(Map (Type v loc) (EnumeratedConstructors vt v loc))
}
instance (Ord loc, Var v) => Pmc (TypeVar v loc) v loc (StateT (PmcState (TypeVar v loc) v loc) (M v loc)) where
getPrettyPrintEnv = lift getPrettyPrintEnv
getConstructors typ = do
st@PmcState {constructorCache} <- get
let f = \case
Nothing -> Compose $ (\t -> (t, Just t)) <$> lift (getDataConstructorsAtType typ)
Just t -> Compose $ pure (t, Just t)
(result, newCache) <- getCompose (Map.alterF f typ constructorCache)
put st {constructorCache = newCache}
pure result
getConstructorVarTypes t cref@(ConstructorReference _r cid) = do
getConstructors t >>= \case
Pmc.getConstructors t >>= \case
ConstructorType cs -> case drop (fromIntegral cid) cs of
[] -> error $ show cref <> " not found in constructor list: " <> show cs
(_, _, consArgs) : _ -> case consArgs of
@ -1285,9 +1306,9 @@ instance (Ord loc, Var v) => Pmc (TypeVar v loc) v loc (StateT (Set v) (M v loc)
OtherType -> pure []
SequenceType {} -> pure []
fresh = do
vs <- get
let v = Var.freshIn vs (Var.typed Var.Pattern)
put (Set.insert v vs)
st@PmcState {variables} <- get
let v = Var.freshIn variables (Var.typed Var.Pattern)
put (st {variables = Set.insert v variables})
pure v
ensurePatternCoverage ::
@ -1305,7 +1326,12 @@ ensurePatternCoverage wholeMatch _scrutinee scrutineeType cases = do
-- Don't check coverage on ability handlers yet
Type.Apps' (Type.Ref' r) _args | r == Type.effectRef -> pure ()
_ -> do
(redundant, _inaccessible, uncovered) <- flip evalStateT (ABT.freeVars wholeMatch) do
let pmcState :: PmcState (TypeVar v loc) v loc =
PmcState
{ variables = ABT.freeVars wholeMatch,
constructorCache = mempty
}
(redundant, _inaccessible, uncovered) <- flip evalStateT pmcState do
checkMatch matchLoc scrutineeType cases
let checkUncovered = case Nel.nonEmpty uncovered of
Nothing -> pure ()
@ -2964,18 +2990,19 @@ verifyDataDeclarations decls = forM_ (Map.toList decls) $ \(_ref, decl) -> do
-- | public interface to the typechecker
synthesizeClosed ::
(Var v, Ord loc) =>
PrettyPrintEnv ->
[Type v loc] ->
TL.TypeLookup v loc ->
Term v loc ->
Result v loc (Type v loc)
synthesizeClosed abilities lookupType term0 =
synthesizeClosed ppe abilities lookupType term0 =
let datas = TL.dataDecls lookupType
effects = TL.effectDecls lookupType
term = annotateRefs (TL.typeOfTerm' lookupType) term0
in case term of
Left missingRef ->
compilerCrashResult (UnknownTermReference missingRef)
Right term -> run datas effects $ do
Right term -> run ppe datas effects $ do
liftResult $
verifyDataDeclarations datas
*> verifyDataDeclarations (DD.toDataDecl <$> effects)
@ -3014,13 +3041,14 @@ annotateRefs synth = ABT.visit f
run ::
(Var v, Ord loc, Functor f) =>
PrettyPrintEnv ->
DataDeclarations v loc ->
EffectDeclarations v loc ->
MT v loc f a ->
f a
run datas effects m =
run ppe datas effects m =
fmap fst
. runM m datas effects
. runM m ppe datas effects
$ Env 1 context0
synthesizeClosed' ::
@ -3044,8 +3072,8 @@ synthesizeClosed' abilities term = do
-- Check if the given typechecking action succeeds.
succeeds :: M v loc a -> TotalM v loc Bool
succeeds m =
MT \datas effects env ->
case runM m datas effects env of
MT \ppe datas effects env ->
case runM m ppe datas effects env of
Success _ _ -> Right (True, env)
TypeError _ _ -> Right (False, env)
CompilerBug bug _ _ -> Left bug
@ -3060,7 +3088,7 @@ isSubtype' type1 type2 = succeeds $ do
-- See documentation at 'Unison.Typechecker.fitsScheme'
fitsScheme :: (Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool
fitsScheme type1 type2 = run Map.empty Map.empty $
fitsScheme type1 type2 = run PPE.empty Map.empty Map.empty $
succeeds $ do
let vars = Set.toList $ Set.union (ABT.freeVars type1) (ABT.freeVars type2)
reserveAll (TypeVar.underlying <$> vars)
@ -3101,7 +3129,7 @@ isRedundant userType0 inferredType0 = do
isSubtype ::
(Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool
isSubtype t1 t2 =
run Map.empty Map.empty (isSubtype' t1 t2)
run PPE.empty Map.empty Map.empty (isSubtype' t1 t2)
isEqual ::
(Var v, Ord loc) => Type v loc -> Type v loc -> Either (CompilerBug v loc) Bool
@ -3131,22 +3159,22 @@ instance (Ord loc, Var v) => Show (Context v loc) where
instance (Monad f) => Monad (MT v loc f) where
return = pure
m >>= f = MT \datas effects env0 -> do
(a, env1) <- runM m datas effects env0
runM (f a) datas effects $! env1
m >>= f = MT \ppe datas effects env0 -> do
(a, env1) <- runM m ppe datas effects env0
runM (f a) ppe datas effects $! env1
instance (Monad f) => MonadFail.MonadFail (MT v loc f) where
fail = error
instance (Monad f) => Applicative (MT v loc f) where
pure a = MT (\_ _ env -> pure (a, env))
pure a = MT (\_ _ _ env -> pure (a, env))
(<*>) = ap
instance (Monad f) => MonadState (Env v loc) (MT v loc f) where
get = MT \_ _ env -> pure (env, env)
put env = MT \_ _ _ -> pure ((), env)
get = MT \_ _ _ env -> pure (env, env)
put env = MT \_ _ _ _ -> pure ((), env)
instance (MonadFix f) => MonadFix (MT v loc f) where
mfix f = MT \a b c ->
let res = mfix (\ ~(wubble, _finalenv) -> runM (f wubble) a b c)
mfix f = MT \ppe a b c ->
let res = mfix (\ ~(wubble, _finalenv) -> runM (f wubble) ppe a b c)
in res

View File

@ -4,6 +4,7 @@ module Unison.Test.Typechecker.Context (test) where
import Data.Foldable (for_)
import EasyTest
import qualified Unison.PrettyPrintEnv as PPE
import Unison.Symbol (Symbol)
import qualified Unison.Term as Term
import qualified Unison.Type as Type
@ -33,7 +34,7 @@ verifyClosedTermTest =
()
(Term.ann () (Term.var () a) (Type.var () a'))
(Term.ann () (Term.var () b) (Type.var () b'))
res = Context.synthesizeClosed [] mempty t
res = Context.synthesizeClosed PPE.empty [] mempty t
errors = Context.typeErrors res
expectUnknownSymbol (Context.ErrorNote cause _) = case cause of
Context.UnknownSymbol _ _ -> ok

View File

@ -111,6 +111,7 @@ library
Unison.PatternMatchCoverage.NormalizedConstraints
Unison.PatternMatchCoverage.PmGrd
Unison.PatternMatchCoverage.PmLit
Unison.PatternMatchCoverage.Pretty
Unison.PatternMatchCoverage.Solve
Unison.PatternMatchCoverage.UFMap
Unison.PrettyPrintEnv
@ -120,6 +121,7 @@ library
Unison.PrettyPrintEnv.Util
Unison.PrettyPrintEnvDecl
Unison.PrettyPrintEnvDecl.Names
Unison.PrettyPrintEnvDecl.Sqlite
Unison.PrintError
Unison.Result
Unison.Runtime.ANF

View File

@ -99,6 +99,7 @@
unison-POp-CMPU
unison-POp-COMN
unison-POp-CONS
unison-POp-DBTX
unison-POp-DECI
unison-POp-DIVN
unison-POp-DRPB
@ -166,14 +167,27 @@
unison-FOp-IO.serverSocket.impl.v3
unison-FOp-IO.socketAccept.impl.v3
unison-FOp-IO.listen.impl.v3
unison-FOp-Tls.ClientConfig.default
unison-FOp-Tls.ClientConfig.certificates.set
unison-FOp-Tls.decodeCert.impl.v3
unison-FOp-Tls.newServer.impl.v3
unison-FOp-Tls.decodePrivateKey
unison-FOp-Tls.ServerConfig.default
unison-FOp-Tls.handshake.impl.v3
unison-FOp-Tls.newClient.impl.v3
unison-FOp-Tls.receive.impl.v3
unison-FOp-Tls.send.impl.v3
unison-FOp-Tls.terminate.impl.v3
)
(import (rnrs)
(only (srfi :28) format)
(unison core)
(unison data)
(unison string)
(unison crypto)
(unison data)
(unison tls)
(unison tcp)
(unison bytevector)
(unison vector)
@ -243,6 +257,8 @@
(define (unison-POp-SUBN m n) (fx- m n))
(define (unison-POp-TAKS n s) (list-head s n))
(define (unison-POp-TAKT n t) (istring-take n t))
(define (unison-POp-DBTX x)
(format "~a" x))
(define (unison-POp-TRCE s x)
(display s)
(display "\n")

View File

@ -19,3 +19,5 @@ You can then run the tests with
$ raco test scheme-libs/racket/unison/crypto.rkt
```
On success, it has no output.
You'll also need to install `x509-lib` with `raco pkg install x509-lib`

View File

@ -82,10 +82,12 @@
(pointwise (cdr ll) (cdr lr)))])))
(cond
[(eq? l r) 1]
[(equal? l r) 1]
[(and (data? l) (data? r))
(and
(eqv? (data-tag l) (data-tag r))
(pointwise (data-fields l) (data-fields r)))]))
(pointwise (data-fields l) (data-fields r)))]
[#t #f]))
(define exception->string exn->string)

View File

@ -1,4 +1,4 @@
; TLS primitives! Supplied by openssl (libssl)
; TCP primitives!
#lang racket/base
(require racket/exn
racket/match
@ -6,6 +6,8 @@
unison/data)
(provide
socket-pair-input
socket-pair-output
(prefix-out
unison-FOp-IO.
(combine-out
@ -18,46 +20,54 @@
socketAccept.impl.v3
socketSend.impl.v3)))
(define (input socket) (car socket))
(define (output socket) (car (cdr socket)))
(struct socket-pair (input output))
(define (closeSocket.impl.v3 socket)
(if (pair? socket)
(begin
(close-input-port (input socket))
(close-output-port (output socket)))
(tcp-close socket))
(right none))
(define (clientSocket.impl.v3 host port)
(define (handle-errors fn)
(with-handlers
[[exn:fail:network? (lambda (e) (exception "IOFailure" (exn->string e) '()))]
[exn:fail:contract? (lambda (e) (exception "InvalidArguments" (exn->string e) '()))]
[(lambda _ #t) (lambda (e) (exception "MiscFailure" "Unknown exception" e))] ]
[(lambda _ #t) (lambda (e) (exception "MiscFailure" (format "Unknown exception ~a" (exn->string e)) e))] ]
(fn)))
(let-values ([(input output) (tcp-connect host (string->number port))])
(right (list input output)))))
(define (closeSocket.impl.v3 socket)
(handle-errors
(lambda ()
(if (socket-pair? socket)
(begin
(close-input-port (socket-pair-input socket))
(close-output-port (socket-pair-output socket)))
(tcp-close socket))
(right none))))
(define (clientSocket.impl.v3 host port)
(handle-errors
(lambda ()
(let-values ([(input output) (tcp-connect host (string->number port))])
(right (socket-pair input output))))))
(define (socketSend.impl.v3 socket data)
(if (not (pair? socket))
(exception "InvalidArguments" "Cannot send on a server socket")
(if (not (socket-pair? socket))
(exception "InvalidArguments" "Cannot send on a server socket" '())
(begin
(write-bytes data (output socket))
(flush-output (output socket))
(right none))))
(write-bytes data (socket-pair-output socket))
(flush-output (socket-pair-output socket))
(right none)))); )
(define (socketReceive.impl.v3 socket amt)
(if (not (pair? socket))
(if (not (socket-pair? socket))
(exception "InvalidArguments" "Cannot receive on a server socket")
(begin
(let ([buffer (make-bytes amt)])
(read-bytes-avail! buffer (input socket))
(right buffer)))))
(handle-errors
(lambda ()
(begin
(let* ([buffer (make-bytes amt)]
[read (read-bytes-avail! buffer (socket-pair-input socket))])
(right (subbytes buffer 0 read))))))))
; A "connected" socket is represented as a list of (list input-port output-port),
; while a "listening" socket is just the tcp-listener itself.
(define (socketPort.impl.v3 socket)
(let-values ([(_ local-port __ ___) (tcp-addresses (if (pair? socket) (input socket) socket) #t)])
(let-values ([(_ local-port __ ___) (tcp-addresses
(if (socket-pair? socket)
(socket-pair-input socket)
socket) #t)])
(right local-port)))
(define serverSocket.impl.v3
@ -84,8 +94,8 @@
(right none))
(define (socketAccept.impl.v3 listener)
(if (pair? listener)
(if (socket-pair? listener)
(exception "InvalidArguments" "Cannot accept on a non-server socket")
(begin
(let-values ([(input output) (tcp-accept listener)])
(right (list input output))))))
(right (socket-pair input output))))))

View File

@ -0,0 +1,156 @@
; TLS primitives! Supplied by openssl (libssl)
#lang racket/base
(require racket/exn
racket/string
racket/file
compatibility/mlist
unison/data
unison/tcp
x509
openssl)
(provide
(prefix-out
unison-FOp-Tls.
(combine-out
ClientConfig.default
ClientConfig.certificates.set
ServerConfig.default
decodeCert.impl.v3
decodePrivateKey
handshake.impl.v3
newServer.impl.v3
newClient.impl.v3
receive.impl.v3
send.impl.v3
terminate.impl.v3)))
(define (decodePrivateKey bytes) ; bytes -> list tlsPrivateKey
(let* ([tmp (make-temporary-file* #"unison" #".pem")]
[ctx (ssl-make-server-context)]
[of (open-output-file tmp #:exists 'replace)])
(write-bytes bytes of)
(flush-output of)
(close-output-port of)
(with-handlers
[[exn:fail? (lambda (e) (mlist))]]
(ssl-load-private-key! ctx tmp)
(mlist tmp))))
(define (decodeCert.impl.v3 bytes) ; bytes -> either failure tlsSignedCert
(let ([certs (read-pem-certificates (open-input-bytes bytes))])
(if (= 1 (length certs))
(right bytes)
(exception "Wrong number of certs" "nope" certs))))
(struct server-config (certs key))
(define (ServerConfig.default certs key) ; list tlsSignedCert tlsPrivateKey -> tlsServerConfig
(server-config certs key))
(struct client-config (host certs))
(struct tls (config input output))
(define (newServer.impl.v3 config sockets) ; tlsServerConfig socket -> {io} tls
(handle-errors
(lambda ()
(let* ([input (socket-pair-input sockets)]
[output (socket-pair-output sockets)]
[certs (server-config-certs config)]
[key (server-config-key config)]
[tmp (make-temporary-file* #"unison" #".pem")]
[of (open-output-file tmp #:exists 'replace)])
(write-bytes (mcar certs) of)
(flush-output of)
(close-output-port of)
(let*-values ([(ctx) (ssl-make-server-context
#:private-key (list 'pem key)
#:certificate-chain tmp)]
[(in out) (ports->ssl-ports
input output
#:mode 'accept
#:context ctx
#:close-original? #t
)])
(right (tls config in out)))))))
(define (ClientConfig.default host service-identification-suffix)
(if (= 0 (bytes-length service-identification-suffix))
(client-config host (mlist))
(error 'NotImplemented "service-identification-suffix not supported")))
(define (ClientConfig.certificates.set certs config) ; list tlsSignedCert tlsClientConfig -> tlsClientConfig
(client-config (client-config-host config) certs))
(define (handle-errors fn)
(with-handlers
[[exn:fail:network? (lambda (e) (exception "IOFailure" (exn->string e) '()))]
[exn:fail:contract? (lambda (e) (exception "InvalidArguments" (exn->string e) '()))]
[(lambda err
(string-contains? (exn->string err) "not valid for hostname"))
(lambda (e) (exception "IOFailure" "NameMismatch" '()))]
[(lambda _ #t) (lambda (e) (exception "MiscFailure" (format "Unknown exception ~a" (exn->string e)) e))] ]
(fn)))
(define (newClient.impl.v3 config socket)
(handle-errors
(lambda ()
(let ([input (socket-pair-input socket)]
[output (socket-pair-output socket)]
[hostname (client-config-host config)]
[ctx (ssl-make-client-context)])
(ssl-set-verify-hostname! ctx #t)
(let-values ([(in out) (ports->ssl-ports
input output
#:mode 'connect
#:context ctx
#:hostname hostname
#:close-original? #t
)])
(right (tls config in out)))))))
(define (handshake.impl.v3 tls)
(handle-errors
(lambda ()
(ssl-set-verify! (tls-input tls) #t)
(right none))))
(define (send.impl.v3 tls data)
(handle-errors
(lambda ()
(let* ([output (tls-output tls)])
(write-bytes data output)
(flush-output output)
(right none)))))
(define (read-more n port)
(let* ([buffer (make-bytes n)]
[read (read-bytes-avail! buffer port)])
(if (< read n)
(subbytes buffer 0 read)
(bytes-append buffer (read-more (* 2 n) port)))))
(define (read-all n port)
(let* ([buffer (make-bytes n)]
[read (read-bytes-avail! buffer port)])
(if (= n read)
(bytes-append buffer (read-more (* 2 n) port))
(subbytes buffer 0 read))))
(define (receive.impl.v3 tls)
(handle-errors
(lambda ()
(right (read-all 4096 (tls-input tls))))))
(define (terminate.impl.v3 tls)
; NOTE: This actually does more than the unison impl,
; which only sends the `close_notify` message, and doesn't
; mark the port as no longer usable in the runtime.
; Not sure if this is an important difference.
; Racket's openssl lib doesn't expose a way to *just* call
; SSL_Shutdown on a port without also closing it.
(handle-errors
(lambda ()
(ssl-abandon-port (tls-input tls))
(ssl-abandon-port (tls-output tls))
(right none))))

View File

@ -156,7 +156,7 @@ import qualified Unison.PrettyPrintEnv as PPE
import qualified Unison.PrettyPrintEnv.Names as PPE
import qualified Unison.PrettyPrintEnvDecl as PPE hiding (biasTo, empty)
import qualified Unison.PrettyPrintEnvDecl as PPED
import qualified Unison.PrettyPrintEnvDecl.Names as PPE
import qualified Unison.PrettyPrintEnvDecl.Names as PPED
import Unison.Reference (Reference (..), TermReference)
import qualified Unison.Reference as Reference
import Unison.Referent (Referent)
@ -166,6 +166,9 @@ import qualified Unison.Runtime.IOSource as IOSource
import Unison.Server.Backend (ShallowListEntry (..))
import qualified Unison.Server.Backend as Backend
import qualified Unison.Server.CodebaseServer as Server
import qualified Unison.Server.Doc.Markdown.Render as Md
import qualified Unison.Server.Doc.Markdown.Types as Md
import qualified Unison.Server.NameSearch.FromNames as NameSearch
import Unison.Server.QueryResult
import Unison.Server.SearchResult (SearchResult)
import qualified Unison.Server.SearchResult as SR
@ -176,7 +179,7 @@ import qualified Unison.Sqlite as Sqlite
import Unison.Symbol (Symbol)
import qualified Unison.Syntax.HashQualified as HQ (fromString, toString, toText, unsafeFromString)
import qualified Unison.Syntax.Lexer as L
import qualified Unison.Syntax.Name as Name (toString, toVar, unsafeFromString, unsafeFromVar)
import qualified Unison.Syntax.Name as Name (toString, toText, toVar, unsafeFromString, unsafeFromVar)
import qualified Unison.Syntax.Parser as Parser
import qualified Unison.Syntax.TermPrinter as TP
import Unison.Term (Term)
@ -193,6 +196,7 @@ import qualified Unison.Util.Find as Find
import Unison.Util.List (uniqueBy)
import qualified Unison.Util.Monoid as Monoid
import qualified Unison.Util.Pretty as P
import qualified Unison.Util.Pretty as Pretty
import qualified Unison.Util.Relation as R
import qualified Unison.Util.Relation as Relation
import qualified Unison.Util.Relation4 as R4
@ -525,6 +529,19 @@ loop e = do
whenJust serverBaseUrl \url -> do
_success <- liftIO (openBrowser (Server.urlFor Server.UI url))
pure ()
DocToMarkdownI docName -> do
basicPrettyPrintNames <- getBasicPrettyPrintNames
hqLength <- Cli.runTransaction Codebase.hashLength
let pped = PPED.fromNamesDecl hqLength (NamesWithHistory.NamesWithHistory basicPrettyPrintNames mempty)
basicPrettyPrintNames <- basicParseNames
let nameSearch = NameSearch.makeNameSearch hqLength (NamesWithHistory.fromCurrentNames basicPrettyPrintNames)
Cli.Env {codebase, runtime} <- ask
mdText <- liftIO $ do
docRefs <- Backend.docsForDefinitionName codebase nameSearch docName
for docRefs $ \docRef -> do
Identity (_, _, doc) <- Backend.renderDocRefs pped (Pretty.Width 80) codebase runtime (Identity docRef)
pure . Md.toText $ Md.toMarkdown doc
Cli.respond $ Output.MarkdownOut (Text.intercalate "\n---\n" mdText)
DocsToHtmlI namespacePath' sourceDirectory -> do
Cli.Env {codebase, sandboxedRuntime} <- ask
rootBranch <- Cli.getRootBranch
@ -665,7 +682,7 @@ loop e = do
let root0 = Branch.head root
let names = NamesWithHistory.fromCurrentNames . Names.makeAbsolute $ Branch.toNames root0
-- Use an absolutely qualified ppe for view.global
let pped = PPE.fromNamesDecl hqLength names
let pped = PPED.fromNamesDecl hqLength names
pure (names, pped)
else do
currentBranch <- Cli.getCurrentBranch0
@ -1559,6 +1576,8 @@ inputDescription input =
UiI -> wat
UpI {} -> wat
VersionI -> wat
DebugTabCompletionI _input -> wat
DocToMarkdownI name -> pure ("debug.doc-to-markdown " <> Name.toText name)
where
hp' :: Either SCH.ShortCausalHash Path' -> Cli Text
hp' = either (pure . Text.pack . show) p'
@ -1849,7 +1868,7 @@ handleShowDefinition outputLoc showDefinitionScope inputQuery = do
(_, ShowDefinitionGlobal) -> do
let names = NamesWithHistory.fromCurrentNames . Names.makeAbsolute $ Branch.toNames root0
-- Use an absolutely qualified ppe for view.global
let ppe = PPE.fromNamesDecl hqLength names
let ppe = PPED.fromNamesDecl hqLength names
pure (names, ppe)
(_, ShowDefinitionLocal) -> do
currentBranch <- Cli.getCurrentBranch0
@ -1857,7 +1876,7 @@ handleShowDefinition outputLoc showDefinitionScope inputQuery = do
let ppe = Backend.getCurrentPrettyNames hqLength (Backend.Within currentPath') root
pure (currentNames, ppe)
Backend.DefinitionResults terms types misses <- do
let nameSearch = Backend.makeNameSearch hqLength names
let nameSearch = NameSearch.makeNameSearch hqLength names
Cli.runTransaction (Backend.definitionsBySuffixes codebase nameSearch includeCycles query)
outputPath <- getOutputPath
when (not (null types && null terms)) do
@ -2937,7 +2956,7 @@ hqNameQuery query = do
Cli.runTransaction do
hqLength <- Codebase.hashLength
let parseNames = Backend.parseNamesForBranch root' (Backend.AllNames (Path.unabsolute currentPath))
let nameSearch = Backend.makeNameSearch hqLength (NamesWithHistory.fromCurrentNames parseNames)
let nameSearch = NameSearch.makeNameSearch hqLength (NamesWithHistory.fromCurrentNames parseNames)
Backend.hqNameQuery codebase nameSearch query
-- | Select a definition from the given branch.
@ -2993,7 +3012,7 @@ synthesizeForce typeOfFunc = do
TypeLookup.dataDecls = Map.empty,
TypeLookup.effectDecls = Map.empty
}
case Result.runResultT (Typechecker.synthesize env (DD.forceTerm External External term)) of
case Result.runResultT (Typechecker.synthesize PPE.empty env (DD.forceTerm External External term)) of
Identity (Nothing, notes) ->
error
( unlines

View File

@ -42,6 +42,7 @@ import qualified Unison.Codebase.Editor.Output as Output
import qualified Unison.Debug as Debug
import Unison.Prelude
import Unison.Share.Types
import qualified UnliftIO
import qualified Web.Browser as Web
ucmOAuthClientID :: ByteString
@ -98,9 +99,8 @@ authLogin host = do
let redirectURI = "http://localhost:" <> show port <> "/redirect"
liftIO (putMVar redirectURIVar redirectURI)
let authorizationKickoff = authURI authorizationEndpoint redirectURI state challenge
void . liftIO $ Web.openBrowser (show authorizationKickoff)
Cli.respond . Output.InitiateAuthFlow $ authorizationKickoff
bailOnFailure (readMVar authResultVar)
bailOnFailure . liftIO $ UnliftIO.withAsync (Web.openBrowser (show authorizationKickoff)) \_ -> readMVar authResultVar
userInfo <- bailOnFailure (getUserInfo doc accessToken)
let codeserverId = codeserverIdFromCodeserverURI host
let creds = codeserverCredentials discoveryURI tokens userInfo

View File

@ -214,6 +214,7 @@ data Input
| QuitI
| ApiI
| UiI
| DocToMarkdownI Name
| DocsToHtmlI Path' FilePath
| GistI GistInput
| AuthLoginI

View File

@ -314,6 +314,7 @@ data Output
RemoteProjectBranchHeadMismatch URI (ProjectAndBranch ProjectName ProjectBranchName)
| Unauthorized Text
| ServantClientError Servant.ClientError
| MarkdownOut Text
data DisplayDefinitionsOutput = DisplayDefinitionsOutput
{ isTest :: TermReference -> Bool,
@ -492,6 +493,7 @@ isFailure o = case o of
RemoteProjectBranchHeadMismatch {} -> True
Unauthorized {} -> True
ServantClientError {} -> False
MarkdownOut {} -> False
isNumberedFailure :: NumberedOutput -> Bool
isNumberedFailure = \case

View File

@ -46,7 +46,7 @@ import qualified Unison.NameSegment as NameSegment
import Unison.Prelude
import Unison.Project (ProjectBranchName, ProjectName)
import qualified Unison.Syntax.HashQualified as HQ (fromString)
import qualified Unison.Syntax.Name as Name (unsafeFromString)
import qualified Unison.Syntax.Name as Name (fromText, unsafeFromString)
import qualified Unison.Util.ColorText as CT
import Unison.Util.Monoid (intercalateMap)
import qualified Unison.Util.Pretty as P
@ -2044,6 +2044,26 @@ docsToHtml =
_ -> Left $ showPatternHelp docsToHtml
)
docToMarkdown :: InputPattern
docToMarkdown =
InputPattern
"debug.doc-to-markdown"
[]
I.Visible
[]
( P.wrapColumn2
[ ( "`debug.doc-to-markdown term.doc`",
"Render a doc to markdown."
)
]
)
( \case
[docNameText] -> first fromString $ do
docName <- maybeToEither "Invalid name" . Name.fromText . Text.pack $ docNameText
pure $ Input.DocToMarkdownI docName
_ -> Left $ showPatternHelp docToMarkdown
)
execute :: InputPattern
execute =
InputPattern
@ -2422,6 +2442,7 @@ validInputs =
api,
ui,
docs,
docToMarkdown,
docsToHtml,
findPatch,
viewPatch,

View File

@ -47,6 +47,7 @@ import Unison.Parser.Ann (Ann)
import Unison.Prelude
import Unison.PrettyTerminal
import Unison.Project (ProjectAndBranch (..))
import qualified Unison.Runtime.IOSource as IOSource
import qualified Unison.Server.CodebaseServer as Server
import Unison.Symbol (Symbol)
import qualified Unison.Syntax.Parser as Parser
@ -126,10 +127,12 @@ main dir welcome initialPath config initialInputs runtime sbRuntime codebase ser
-- Try putting the root, but if someone else as already written over the root, don't
-- overwrite it.
void $ tryPutTMVar rootVar root
-- Start forcing the thunk in a background thread.
-- Start forcing thunks in a background thread.
-- This might be overly aggressive, maybe we should just evaluate the top level but avoid
-- recursive "deep*" things.
void $ UnliftIO.evaluate root
UnliftIO.concurrently_
(UnliftIO.evaluate root)
(UnliftIO.evaluate IOSource.typecheckedFile) -- IOSource takes a while to compile, we should start compiling it on startup
let initialState = Cli.loopState0 initialRootCausalHash rootVar initialPath
Ki.fork_ scope $ do
let loop lastRoot = do

View File

@ -2067,6 +2067,7 @@ notifyUser dir = \case
<> P.newline
<> P.newline
<> P.indentN 2 (P.pshown response)
MarkdownOut md -> pure $ P.text md
where
_nameChange _cmd _pastTenseCmd _oldName _newName _r = error "todo"
expectedEmptyPushDest :: WriteRemoteNamespace Void -> Pretty
@ -2580,7 +2581,7 @@ renderEditConflicts ppe Patch {..} = do
then "deprecated and also replaced with"
else "replaced with"
)
`P.hang` P.lines replacements
`P.hang` P.lines replacements
formatTermEdits ::
(Reference.TermReference, Set TermEdit.TermEdit) ->
Numbered Pretty
@ -2595,7 +2596,7 @@ renderEditConflicts ppe Patch {..} = do
then "deprecated and also replaced with"
else "replaced with"
)
`P.hang` P.lines replacements
`P.hang` P.lines replacements
formatConflict ::
Either
(Reference, Set TypeEdit.TypeEdit)

View File

@ -43,6 +43,7 @@ import qualified Unison.LSP.VFS as VFS
import Unison.Parser.Ann
import Unison.Prelude
import qualified Unison.PrettyPrintEnvDecl as PPED
import qualified Unison.Server.NameSearch.FromNames as NameSearch
import Unison.Symbol
import UnliftIO
import UnliftIO.Foreign (Errno (..), eADDRINUSE)
@ -136,10 +137,11 @@ lspDoInitialize vfsVar codebase runtime scope latestBranch latestPath lspContext
currentPathCacheVar <- newTVarIO Path.absoluteEmpty
cancellationMapVar <- newTVarIO mempty
completionsVar <- newTVarIO mempty
let env = Env {ppedCache = readTVarIO ppedCacheVar, parseNamesCache = readTVarIO parseNamesCacheVar, currentPathCache = readTVarIO currentPathCacheVar, ..}
nameSearchCacheVar <- newTVarIO $ NameSearch.makeNameSearch 0 mempty
let env = Env {ppedCache = readTVarIO ppedCacheVar, parseNamesCache = readTVarIO parseNamesCacheVar, currentPathCache = readTVarIO currentPathCacheVar, nameSearchCache = readTVarIO nameSearchCacheVar, ..}
let lspToIO = flip runReaderT lspContext . unLspT . flip runReaderT env . runLspM
Ki.fork scope (lspToIO Analysis.fileAnalysisWorker)
Ki.fork scope (lspToIO $ ucmWorker ppedCacheVar parseNamesCacheVar latestBranch latestPath)
Ki.fork scope (lspToIO $ ucmWorker ppedCacheVar parseNamesCacheVar nameSearchCacheVar latestBranch latestPath)
pure $ Right $ env
-- | LSP request handlers that don't register/unregister dynamically

View File

@ -38,6 +38,7 @@ import qualified Unison.PrettyPrintEnv as PPE
import qualified Unison.PrettyPrintEnvDecl as PPED
import qualified Unison.Reference as Reference
import qualified Unison.Referent as Referent
import qualified Unison.Runtime.IOSource as IOSource
import qualified Unison.Syntax.DeclPrinter as DeclPrinter
import qualified Unison.Syntax.HashQualified' as HQ' (toText)
import qualified Unison.Syntax.Name as Name (fromText, toText)
@ -45,6 +46,7 @@ import qualified Unison.Syntax.TypePrinter as TypePrinter
import qualified Unison.Util.Monoid as Monoid
import qualified Unison.Util.Pretty as Pretty
import qualified Unison.Util.Relation as Relation
import qualified UnliftIO
completionHandler :: RequestMessage 'TextDocumentCompletion -> (Either ResponseError (ResponseResult 'TextDocumentCompletion) -> Lsp ()) -> Lsp ()
completionHandler m respond =
@ -262,26 +264,51 @@ completionItemResolveHandler message respond = do
case Aeson.fromJSON <$> (completion ^. xdata) of
Just (Aeson.Success (CompletionItemDetails {dep, fullyQualifiedName, relativeName, fileUri})) -> do
pped <- lift $ ppedForFile fileUri
builtinsAsync <- liftIO . UnliftIO.async $ UnliftIO.evaluate IOSource.typecheckedFile
checkBuiltinsReady <- liftIO do
pure
( UnliftIO.poll builtinsAsync
<&> ( \case
Nothing -> False
Just (Left {}) -> False
Just (Right {}) -> True
)
)
renderedDocs <-
-- We don't want to block the type signature hover info if the docs are taking a long time to render;
-- We know it's also possible to write docs that eval forever, so the timeout helps
-- protect against that.
lift (UnliftIO.timeout 2_000_000 (LSPQ.markdownDocsForFQN fileUri (HQ.NameOnly fullyQualifiedName)))
>>= ( \case
Nothing ->
checkBuiltinsReady >>= \case
False -> pure ["\n---\n🔜 Doc renderer is initializing, try again in a few seconds."]
True -> pure ["\n---\n⏳ Timeout evaluating docs"]
Just [] -> pure []
-- Add some space from the type signature
Just xs@(_ : _) -> pure ("\n---\n" : xs)
)
case dep of
LD.TermReferent ref -> do
typ <- LSPQ.getTypeOfReferent fileUri ref
let renderedType = ": " <> (Text.pack $ TypePrinter.prettyStr (Just typeWidth) (PPED.suffixifiedPPE pped) typ)
let doc = CompletionDocMarkup (toUnisonMarkup (Name.toText fullyQualifiedName))
let doc = CompletionDocMarkup $ toMarkup (Text.unlines $ ["```unison", Name.toText fullyQualifiedName, "```"] ++ renderedDocs)
pure $ (completion {_detail = Just renderedType, _documentation = Just doc} :: CompletionItem)
LD.TypeReference ref ->
case ref of
Reference.Builtin {} -> do
let renderedBuiltin = ": <builtin>"
let doc = CompletionDocMarkup (toUnisonMarkup (Name.toText fullyQualifiedName))
let doc = CompletionDocMarkup $ toMarkup (Text.unlines $ ["```unison", Name.toText fullyQualifiedName, "```"] ++ renderedDocs)
pure $ (completion {_detail = Just renderedBuiltin, _documentation = Just doc} :: CompletionItem)
Reference.DerivedId refId -> do
decl <- LSPQ.getTypeDeclaration fileUri refId
let renderedDecl = ": " <> (Text.pack . Pretty.toPlain typeWidth . Pretty.syntaxToColor $ DeclPrinter.prettyDecl pped ref (HQ.NameOnly relativeName) decl)
let doc = CompletionDocMarkup (toUnisonMarkup (Name.toText fullyQualifiedName))
let doc = CompletionDocMarkup $ toMarkup (Text.unlines $ ["```unison", Name.toText fullyQualifiedName, "```"] ++ renderedDocs)
pure $ (completion {_detail = Just renderedDecl, _documentation = Just doc} :: CompletionItem)
_ -> empty
where
toUnisonMarkup txt = MarkupContent {_kind = MkMarkdown, _value = Text.unlines ["```unison", txt, "```"]}
toMarkup txt = MarkupContent {_kind = MkMarkdown, _value = txt}
-- Completion windows can be very small, so this seems like a good default
typeWidth = Pretty.Width 20

View File

@ -11,7 +11,6 @@ import Data.IntervalMap.Lazy (IntervalMap)
import qualified Data.IntervalMap.Lazy as IM
import qualified Data.Map as Map
import qualified Data.Text as Text
import Debug.RecoverRTTI (anythingToString)
import Language.LSP.Types
( Diagnostic,
DiagnosticSeverity (DsError),
@ -185,7 +184,7 @@ fileAnalysisWorker = forever do
Map.fromList <$> forMaybe (toList dirtyFileIDs) \docUri -> runMaybeT do
fileInfo <- MaybeT (checkFile $ TextDocumentIdentifier docUri)
pure (docUri, fileInfo)
Debug.debugM Debug.LSP "Freshly Typechecked " (anythingToString (Map.toList freshlyCheckedFiles))
Debug.debugM Debug.LSP "Freshly Typechecked " (Map.toList freshlyCheckedFiles)
-- Overwrite any files we successfully checked
atomically $ modifyTVar' checkedFilesV (Map.union freshlyCheckedFiles)
for freshlyCheckedFiles \(FileAnalysis {fileUri, fileVersion, diagnostics}) -> do
@ -240,9 +239,25 @@ analyseNotes fileUri ppe src notes = do
-- still have valid diagnostics.
TypeError.Other e@(Context.ErrorNote {cause}) -> case cause of
Context.PatternArityMismatch loc _typ _numArgs -> singleRange loc
_ -> do
Debug.debugM Debug.LSP "No Diagnostic configured for type error: " e
empty
Context.HandlerOfUnexpectedType loc _typ -> singleRange loc
Context.TypeMismatch {} -> shouldHaveBeenHandled e
Context.IllFormedType {} -> shouldHaveBeenHandled e
Context.UnknownSymbol loc _ -> singleRange loc
Context.UnknownTerm loc _ _ _ -> singleRange loc
Context.AbilityCheckFailure {} -> shouldHaveBeenHandled e
Context.AbilityEqFailure {} -> shouldHaveBeenHandled e
Context.EffectConstructorWrongArgCount {} -> shouldHaveBeenHandled e
Context.MalformedEffectBind {} -> shouldHaveBeenHandled e
Context.DuplicateDefinitions {} -> shouldHaveBeenHandled e
Context.UnguardedLetRecCycle {} -> shouldHaveBeenHandled e
Context.ConcatPatternWithoutConstantLength loc _ -> singleRange loc
Context.DataEffectMismatch _ _ decl -> singleRange $ DD.annotation decl
Context.UncoveredPatterns loc _ -> singleRange loc
Context.RedundantPattern loc -> singleRange loc
Context.InaccessiblePattern loc -> singleRange loc
shouldHaveBeenHandled e = do
Debug.debugM Debug.LSP "This diagnostic should have been handled by a previous case but was not" e
empty
diags = noteDiagnostic currentPath note ranges
-- Sort on match accuracy first, then name.
codeActions <- case cause of

View File

@ -18,14 +18,17 @@ import qualified Unison.LabeledDependency as LD
import Unison.Parser.Ann (Ann)
import qualified Unison.Pattern as Pattern
import Unison.Prelude
import qualified Unison.PrettyPrintEnv as PPE
import qualified Unison.PrettyPrintEnvDecl as PPED
import qualified Unison.Reference as Reference
import qualified Unison.Runtime.IOSource as IOSource
import Unison.Symbol (Symbol)
import qualified Unison.Syntax.DeclPrinter as DeclPrinter
import qualified Unison.Syntax.Name as Name
import qualified Unison.Syntax.TypePrinter as TypePrinter
import qualified Unison.Term as Term
import qualified Unison.Util.Pretty as Pretty
import qualified UnliftIO
-- | Hover help handler
--
@ -45,7 +48,7 @@ hoverHandler m respond =
hoverInfo :: Uri -> Position -> MaybeT Lsp Text
hoverInfo uri pos =
markdownify <$> (hoverInfoForRef <|> hoverInfoForLiteral)
(hoverInfoForRef <|> hoverInfoForLiteral)
where
markdownify :: Text -> Text
markdownify rendered = Text.unlines ["```unison", rendered, "```"]
@ -56,27 +59,60 @@ hoverInfo uri pos =
symAtCursor <- VFS.identifierAtPosition uri pos
ref <- LSPQ.refAtPosition uri pos
pped <- lift $ ppedForFile uri
case ref of
LD.TypeReference (Reference.Builtin {}) -> pure (symAtCursor <> " : <builtin>")
LD.TypeReference ref@(Reference.DerivedId refId) -> do
nameAtCursor <- MaybeT . pure $ Name.fromText symAtCursor
decl <- LSPQ.getTypeDeclaration uri refId
let typ = Text.pack . Pretty.toPlain prettyWidth . Pretty.syntaxToColor $ DeclPrinter.prettyDecl pped ref (HQ.NameOnly nameAtCursor) decl
pure typ
LD.TermReferent ref -> do
typ <- LSPQ.getTypeOfReferent uri ref
let renderedType = Text.pack $ TypePrinter.prettyStr (Just prettyWidth) (PPED.suffixifiedPPE pped) typ
pure (symAtCursor <> " : " <> renderedType)
let unsuffixifiedPPE = PPED.unsuffixifiedPPE pped
let fqn = case ref of
LD.TypeReference ref -> PPE.typeName unsuffixifiedPPE ref
LD.TermReferent ref -> PPE.termName unsuffixifiedPPE ref
builtinsAsync <- liftIO . UnliftIO.async $ UnliftIO.evaluate IOSource.typecheckedFile
checkBuiltinsReady <- liftIO do
pure
( UnliftIO.poll builtinsAsync
<&> ( \case
Nothing -> False
Just (Left {}) -> False
Just (Right {}) -> True
)
)
renderedDocs <-
-- We don't want to block the type signature hover info if the docs are taking a long time to render;
-- We know it's also possible to write docs that eval forever, so the timeout helps
-- protect against that.
lift (UnliftIO.timeout 2_000_000 (LSPQ.markdownDocsForFQN uri fqn))
>>= ( \case
Nothing ->
checkBuiltinsReady >>= \case
False -> pure ["\n---\n🔜 Doc renderer is initializing, try again in a few seconds."]
True -> pure ["\n---\n⏳ Timeout evaluating docs"]
Just [] -> pure []
-- Add some space from the type signature
Just xs@(_ : _) -> pure ("\n---\n" : xs)
)
typeSig <-
case ref of
LD.TypeReference (Reference.Builtin {}) -> do
pure (symAtCursor <> " : <builtin>")
LD.TypeReference ref@(Reference.DerivedId refId) -> do
nameAtCursor <- MaybeT . pure $ Name.fromText symAtCursor
decl <- LSPQ.getTypeDeclaration uri refId
let typ = Text.pack . Pretty.toPlain prettyWidth . Pretty.syntaxToColor $ DeclPrinter.prettyDecl pped ref (HQ.NameOnly nameAtCursor) decl
pure typ
LD.TermReferent ref -> do
typ <- LSPQ.getTypeOfReferent uri ref
let renderedType = Text.pack $ TypePrinter.prettyStr (Just prettyWidth) (PPED.suffixifiedPPE pped) typ
pure (symAtCursor <> " : " <> renderedType)
pure . Text.unlines $ [markdownify typeSig] <> renderedDocs
hoverInfoForLiteral :: MaybeT Lsp Text
hoverInfoForLiteral = do
LSPQ.nodeAtPosition uri pos >>= \case
LSPQ.TermNode term -> do
typ <- hoistMaybe $ builtinTypeForTermLiterals term
pure (": " <> typ)
LSPQ.TypeNode {} -> empty
LSPQ.PatternNode pat -> do
typ <- hoistMaybe $ builtinTypeForPatternLiterals pat
pure (": " <> typ)
hoverInfoForLiteral =
markdownify <$> do
LSPQ.nodeAtPosition uri pos >>= \case
LSPQ.TermNode term -> do
typ <- hoistMaybe $ builtinTypeForTermLiterals term
pure (": " <> typ)
LSPQ.TypeNode {} -> empty
LSPQ.PatternNode pat -> do
typ <- hoistMaybe $ builtinTypeForPatternLiterals pat
pure (": " <> typ)
hoistMaybe :: Maybe a -> MaybeT Lsp a
hoistMaybe = MaybeT . pure

View File

@ -2,7 +2,8 @@
-- | Rewrites of some codebase queries, but which check the scratch file for info first.
module Unison.LSP.Queries
( getTypeOfReferent,
( markdownDocsForFQN,
getTypeOfReferent,
getTypeDeclaration,
refAtPosition,
nodeAtPosition,
@ -27,13 +28,15 @@ import Unison.ConstructorReference (GConstructorReference (..))
import qualified Unison.ConstructorType as CT
import Unison.DataDeclaration (Decl)
import qualified Unison.DataDeclaration as DD
import qualified Unison.HashQualified as HQ
import Unison.LSP.Conversions (lspToUPos)
import Unison.LSP.FileAnalysis (getFileSummary)
import Unison.LSP.FileAnalysis (getFileSummary, ppedForFile)
import Unison.LSP.Orphans ()
import Unison.LSP.Types
import Unison.LabeledDependency
import qualified Unison.LabeledDependency as LD
import Unison.Lexer.Pos (Pos (..))
import Unison.Name (Name)
import Unison.Parser.Ann (Ann)
import qualified Unison.Parser.Ann as Ann
import qualified Unison.Pattern as Pattern
@ -42,12 +45,16 @@ import Unison.Reference (TypeReference)
import qualified Unison.Reference as Reference
import Unison.Referent (Referent)
import qualified Unison.Referent as Referent
import qualified Unison.Server.Backend as Backend
import qualified Unison.Server.Doc.Markdown.Render as Md
import qualified Unison.Server.Doc.Markdown.Types as Md
import Unison.Symbol (Symbol)
import Unison.Syntax.Parser (ann)
import Unison.Term (MatchCase (MatchCase), Term)
import qualified Unison.Term as Term
import Unison.Type (Type)
import qualified Unison.Type as Type
import qualified Unison.Util.Pretty as Pretty
-- | Returns a reference to whatever the symbol at the given position refers to.
refAtPosition :: Uri -> Position -> MaybeT Lsp LabeledDependency
@ -369,3 +376,17 @@ removeInferredTypeAnnotations =
-- If the type's annotation is identical to the term's annotation, then this must be an inferred type
| ABT.annotation typ == ABT.annotation trm -> trm
t -> t
-- | Renders all docs for a given FQN to markdown.
markdownDocsForFQN :: Uri -> HQ.HashQualified Name -> Lsp [Text]
markdownDocsForFQN fileUri fqn =
fromMaybe [] <$> runMaybeT do
pped <- lift $ ppedForFile fileUri
name <- MaybeT . pure $ HQ.toName fqn
nameSearch <- lift $ getNameSearch
Env {codebase, runtime} <- ask
liftIO $ do
docRefs <- Backend.docsForDefinitionName codebase nameSearch name
for docRefs $ \docRef -> do
Identity (_, _, doc) <- Backend.renderDocRefs pped (Pretty.Width 80) codebase runtime (Identity docRef)
pure . Md.toText $ Md.toMarkdown doc

View File

@ -43,6 +43,8 @@ import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl)
import qualified Unison.Reference as Reference
import Unison.Result (Note)
import qualified Unison.Server.Backend as Backend
import Unison.Server.NameSearch (NameSearch)
import qualified Unison.Sqlite as Sqlite
import Unison.Symbol
import qualified Unison.Syntax.Lexer as Lexer
import Unison.Term (Term)
@ -74,6 +76,7 @@ data Env = Env
codebase :: Codebase IO Symbol Ann,
parseNamesCache :: IO NamesWithHistory,
ppedCache :: IO PrettyPrintEnvDecl,
nameSearchCache :: IO (NameSearch Sqlite.Transaction),
currentPathCache :: IO Path.Absolute,
vfsVar :: MVar VFS,
runtime :: Runtime Symbol,
@ -119,6 +122,7 @@ data FileAnalysis = FileAnalysis
codeActions :: IntervalMap Position [CodeAction],
fileSummary :: Maybe FileSummary
}
deriving stock (Show)
-- | A file that parses might not always type-check, but often we just want to get as much
-- information as we have available. This provides a type where we can summarize the
@ -148,6 +152,9 @@ getCodebaseCompletions = asks completionsVar >>= readTVarIO
globalPPED :: Lsp PrettyPrintEnvDecl
globalPPED = asks ppedCache >>= liftIO
getNameSearch :: Lsp (NameSearch Sqlite.Transaction)
getNameSearch = asks nameSearchCache >>= liftIO
getParseNames :: Lsp NamesWithHistory
getParseNames = asks parseNamesCache >>= liftIO

View File

@ -13,16 +13,20 @@ import qualified Unison.NamesWithHistory as NamesWithHistory
import Unison.PrettyPrintEnvDecl
import qualified Unison.PrettyPrintEnvDecl.Names as PPE
import qualified Unison.Server.Backend as Backend
import Unison.Server.NameSearch (NameSearch)
import qualified Unison.Server.NameSearch.FromNames as NameSearch
import qualified Unison.Sqlite as Sqlite
import UnliftIO.STM
-- | Watches for state changes in UCM and updates cached LSP state accordingly
ucmWorker ::
TVar PrettyPrintEnvDecl ->
TVar NamesWithHistory ->
TVar (NameSearch Sqlite.Transaction) ->
STM (Branch IO) ->
STM Path.Absolute ->
Lsp ()
ucmWorker ppeVar parseNamesVar getLatestRoot getLatestPath = do
ucmWorker ppeVar parseNamesVar nameSearchCacheVar getLatestRoot getLatestPath = do
Env {codebase, completionsVar} <- ask
let loop :: (Branch IO, Path.Absolute) -> Lsp a
loop (currentRoot, currentPath) = do
@ -33,6 +37,7 @@ ucmWorker ppeVar parseNamesVar getLatestRoot getLatestPath = do
atomically $ do
writeTVar parseNamesVar parseNames
writeTVar ppeVar ppe
writeTVar nameSearchCacheVar (NameSearch.makeNameSearch hl parseNames)
-- Re-check everything with the new names and ppe
VFS.markAllFilesDirty
atomically do

View File

@ -7,7 +7,7 @@ import Data.Functor (void)
import qualified Data.Sequence as Seq
import Data.Text (Text)
import qualified Data.Text as Text
import Data.These (These(..))
import Data.These (These (..))
import EasyTest
import qualified Text.Megaparsec as P
import Unison.Codebase.Editor.RemoteRepo (ReadGitRepo (..), ReadRemoteNamespace (..), ShareCodeserver (..), ShareUserHandle (..), pattern ReadGitRemoteNamespace, pattern ReadShareLooseCode)
@ -15,8 +15,8 @@ import qualified Unison.Codebase.Editor.UriParser as UriParser
import Unison.Codebase.Path (Path (..))
import qualified Unison.Codebase.Path as Path
import Unison.Codebase.ShortCausalHash (ShortCausalHash (..))
import Unison.NameSegment (NameSegment (..))
import Unison.Core.Project (ProjectBranchName, ProjectName)
import Unison.NameSegment (NameSegment (..))
test :: Test ()
test = scope "uriparser" . tests $ [testShare, testGit]

View File

@ -157,6 +157,7 @@ codebaseServerOptsFromEnv :: IO CodebaseServerOpts
codebaseServerOptsFromEnv = do
token <- lookupEnv Server.ucmTokenVar
host <- lookupEnv Server.ucmHostVar
allowCorsHost <- lookupEnv Server.ucmAllowCorsHost
port <- lookupEnv Server.ucmPortVar <&> (>>= readMaybe)
codebaseUIPath <- lookupEnv Server.ucmUIVar
pure $ CodebaseServerOpts {..}
@ -312,12 +313,14 @@ codebaseServerOptsParser envOpts = do
cliToken <- tokenFlag <|> pure (token envOpts)
cliHost <- hostFlag <|> pure (host envOpts)
cliPort <- portFlag <|> pure (port envOpts)
cliAllowCorsHost <- allowCorsHostFlag <|> pure (allowCorsHost envOpts)
cliCodebaseUIPath <- codebaseUIPathFlag <|> pure (codebaseUIPath envOpts)
pure
CodebaseServerOpts
{ token = cliToken <|> token envOpts,
host = cliHost <|> host envOpts,
port = cliPort <|> port envOpts,
allowCorsHost = cliAllowCorsHost <|> allowCorsHost envOpts,
codebaseUIPath = cliCodebaseUIPath <|> codebaseUIPath envOpts
}
where
@ -339,6 +342,12 @@ codebaseServerOptsParser envOpts = do
<> metavar "NUMBER"
<> help "Codebase server port"
<> noGlobal
allowCorsHostFlag =
optional . strOption $
long "allow-cors-host"
<> metavar "STRING"
<> help "Host that should be allowed to access api (cors)"
<> noGlobal
codebaseUIPathFlag =
optional . strOption $
long "ui"

View File

@ -18,6 +18,7 @@ module Unison.DataDeclaration
declConstructorReferents,
declDependencies,
labeledDeclDependencies,
labeledDeclDependenciesIncludingSelf,
declFields,
dependencies,
labeledDependencies,
@ -34,7 +35,7 @@ module Unison.DataDeclaration
)
where
import Control.Lens (Iso', Lens', iso, lens, over, _3)
import Control.Lens (Iso', Lens', imap, iso, lens, over, _3)
import Control.Monad.State (evalState)
import Data.Bifunctor (bimap, first, second)
import qualified Data.Map as Map
@ -76,6 +77,21 @@ declDependencies = either (dependencies . toDataDecl) dependencies
labeledDeclDependencies :: (Ord v) => Decl v a -> Set LD.LabeledDependency
labeledDeclDependencies = Set.map LD.TypeReference . declDependencies
-- | Compute the dependencies of a data declaration,
-- including the type itself and references for each of its constructors.
labeledDeclDependenciesIncludingSelf :: (Ord v) => Reference.TypeReference -> Decl v a -> Set LD.LabeledDependency
labeledDeclDependenciesIncludingSelf selfRef decl =
labeledDeclDependencies decl <> (Set.singleton $ LD.TypeReference selfRef) <> labeledConstructorRefs
where
labeledConstructorRefs :: Set LD.LabeledDependency
labeledConstructorRefs =
case selfRef of
Reference.Builtin {} -> mempty
Reference.DerivedId selfRefId ->
declConstructorReferents selfRefId decl
& fmap (LD.TermReferent . fmap Reference.DerivedId)
& Set.fromList
constructorType :: Decl v a -> CT.ConstructorType
constructorType = \case
Left {} -> CT.Effect
@ -229,8 +245,10 @@ declConstructorReferents rid decl =
where
ct = constructorType decl
-- | The constructor ids for the given data declaration.
constructorIds :: DataDeclaration v a -> [ConstructorId]
constructorIds dd = [0 .. fromIntegral $ length (constructors dd) - 1]
constructorIds dd =
imap (\i _ -> fromIntegral i) (constructorTypes dd)
-- | All variables mentioned in the given data declaration.
-- Includes both term and type variables, both free and bound.

View File

@ -21,6 +21,7 @@ module Unison.Name
reverseSegments,
segments,
suffixes,
lastSegment,
-- * Basic manipulation
makeAbsolute,
@ -274,6 +275,13 @@ reverseSegments :: Name -> NonEmpty NameSegment
reverseSegments (Name _ ss) =
ss
-- | Return the final segment of a name.
--
-- >>> lastSegment (fromSegments ("base" :| ["List", "map"]))
-- NameSegment {toText = "map"}
lastSegment :: Name -> NameSegment
lastSegment = List.NonEmpty.head . reverseSegments
-- If there's no exact matches for `suffix` in `rel`, find all
-- `r` in `rel` whose corresponding name `suffix` as a suffix.
-- For example, `searchBySuffix List.map {(base.List.map, r1)}`

View File

@ -47,6 +47,7 @@ module Unison.Names
isEmpty,
hashQualifyTypesRelation,
hashQualifyTermsRelation,
fromTermsAndTypes,
)
where
@ -366,6 +367,10 @@ fromTerms ts = Names (R.fromList ts) mempty
fromTypes :: [(Name, TypeReference)] -> Names
fromTypes ts = Names mempty (R.fromList ts)
fromTermsAndTypes :: [(Name, Referent)] -> [(Name, TypeReference)] -> Names
fromTermsAndTypes terms types =
fromTerms terms <> fromTypes types
-- | Map over each name in a 'Names'.
mapNames :: (Name -> Name) -> Names -> Names
mapNames f Names {terms, types} =

View File

@ -60,6 +60,7 @@ dependencies:
- utf8-string
- vector
- wai
- wai-cors
- warp
- yaml

View File

@ -8,10 +8,89 @@
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE ViewPatterns #-}
module Unison.Server.Backend where
module Unison.Server.Backend
( -- * Types
BackendError (..),
Backend (..),
ShallowListEntry (..),
BackendEnv (..),
TypeEntry (..),
FoundRef (..),
NameScoping (..),
IncludeCycles (..),
DefinitionResults (..),
-- * Endpoints
prettyDefinitionsForHQName,
fuzzyFind,
-- * Utilities
basicSuffixifiedNames,
bestNameForTerm,
bestNameForType,
definitionsBySuffixes,
displayType,
docsInBranchToHtmlFiles,
expandShortCausalHash,
findShallowReadmeInBranchAndRender,
formatSuffixedType,
getCurrentParseNames,
getCurrentPrettyNames,
getShallowCausalAtPathFromRootHash,
getTermTag,
getTypeTag,
hoistBackend,
hqNameQuery,
loadReferentType,
loadSearchResults,
lsAtPath,
lsBranch,
mungeSyntaxText,
namesForBranch,
parseNamesForBranch,
prettyNamesForBranch,
resolveCausalHashV2,
resolveRootBranchHashV2,
scopedNamesForBranchHash,
termEntryDisplayName,
termEntryHQName,
termEntryToNamedTerm,
termEntryType,
termListEntry,
termReferentsByShortHash,
typeDeclHeader,
typeEntryDisplayName,
typeEntryHQName,
typeEntryToNamedType,
typeListEntry,
typeReferencesByShortHash,
typeToSyntaxHeader,
renderDocRefs,
docsForDefinitionName,
normaliseRootCausalHash,
-- * Unused, could remove?
resolveRootBranchHash,
shallowPPE,
isTestResultList,
toAllNames,
fixupNamesRelative,
-- * Re-exported for Share Server
termsToSyntax,
typesToSyntax,
definitionResultsDependencies,
termEntryTag,
evalDocRef,
relocateToProjectRoot,
mkTermDefinition,
mkTypeDefinition,
)
where
import Control.Error.Util (hush)
import Control.Lens hiding ((??))
import qualified Control.Lens.Cons as Cons
import Control.Monad.Except
import Control.Monad.Reader
import Data.Bifunctor (Bifunctor (..), first)
@ -61,6 +140,7 @@ import qualified Unison.DataDeclaration as DD
import qualified Unison.HashQualified as HQ
import qualified Unison.HashQualified' as HQ'
import qualified Unison.Hashing.V2.Convert as Hashing
import qualified Unison.LabeledDependency as LD
import Unison.Name (Name)
import qualified Unison.Name as Name
import Unison.NameSegment (NameSegment (..))
@ -83,6 +163,9 @@ import qualified Unison.Referent as Referent
import qualified Unison.Runtime.IOSource as DD
import qualified Unison.Server.Doc as Doc
import qualified Unison.Server.Doc.AsHtml as DocHtml
import Unison.Server.NameSearch (NameSearch (..), Search (..), applySearch)
import Unison.Server.NameSearch.FromNames (makeNameSearch)
import Unison.Server.NameSearch.Sqlite (termReferentsByShortHash, typeReferencesByShortHash)
import Unison.Server.QueryResult
import qualified Unison.Server.SearchResult as SR
import qualified Unison.Server.SearchResult' as SR'
@ -107,6 +190,7 @@ import qualified Unison.Typechecker as Typechecker
import Unison.Util.AnnotatedText (AnnotatedText)
import Unison.Util.List (uniqueBy)
import qualified Unison.Util.Map as Map
import Unison.Util.Monoid (foldMapM)
import qualified Unison.Util.Monoid as Monoid
import Unison.Util.Pretty (Width)
import qualified Unison.Util.Pretty as Pretty
@ -149,6 +233,10 @@ data BackendError
| NoSuchDefinition (HQ.HashQualified Name)
| -- We needed a name lookup index we didn't have.
ExpectedNameLookup BranchHash
| -- The inferred project root for a given perspective is neither a parent nor child
-- of the perspective. This shouldn't happen and indicates a bug.
-- (perspective, project root)
DisjointProjectAndPerspective Path Path
deriving stock (Show)
newtype BackendEnv = BackendEnv
@ -359,10 +447,10 @@ findShallowReadmeInBranchAndRender ::
PPED.PrettyPrintEnvDecl ->
V2Branch.Branch m ->
Backend IO (Maybe Doc.Doc)
findShallowReadmeInBranchAndRender width runtime codebase ppe namespaceBranch =
findShallowReadmeInBranchAndRender _width runtime codebase ppe namespaceBranch =
let renderReadme :: PPED.PrettyPrintEnvDecl -> Reference -> IO Doc.Doc
renderReadme ppe docReference = do
(_, _, doc) <- renderDoc ppe width runtime codebase docReference
doc <- evalDocRef runtime codebase docReference <&> Doc.renderDoc ppe
pure doc
-- choose the first term (among conflicted terms) matching any of these names, in this order.
@ -588,36 +676,6 @@ lsBranch codebase b0 = do
++ branchEntries
++ patchEntries
-- | Look up types in the codebase by short hash, and include builtins.
typeReferencesByShortHash :: ShortHash -> Sqlite.Transaction (Set Reference)
typeReferencesByShortHash sh = do
fromCodebase <- Codebase.typeReferencesByPrefix sh
let fromBuiltins =
Set.filter
(\r -> sh == Reference.toShortHash r)
B.intrinsicTypeReferences
pure (fromBuiltins <> Set.map Reference.DerivedId fromCodebase)
termReferencesByShortHash :: ShortHash -> Sqlite.Transaction (Set Reference)
termReferencesByShortHash sh = do
fromCodebase <- Codebase.termReferencesByPrefix sh
let fromBuiltins =
Set.filter
(\r -> sh == Reference.toShortHash r)
B.intrinsicTermReferences
pure (fromBuiltins <> Set.mapMonotonic Reference.DerivedId fromCodebase)
-- | Look up terms in the codebase by short hash, and include builtins.
termReferentsByShortHash :: Codebase m v a -> ShortHash -> Sqlite.Transaction (Set Referent)
termReferentsByShortHash codebase sh = do
fromCodebase <- Codebase.termReferentsByPrefix codebase sh
let fromBuiltins =
Set.map Referent.Ref $
Set.filter
(\r -> sh == Reference.toShortHash r)
B.intrinsicTermReferences
pure (fromBuiltins <> Set.mapMonotonic (over Referent.reference_ Reference.DerivedId) fromCodebase)
-- currentPathNames :: Path -> Names
-- currentPathNames = Branch.toNames . Branch.head . Branch.getAt
@ -665,72 +723,9 @@ fixupNamesRelative root names =
then n
else fromMaybe (Name.makeAbsolute n) (Name.stripNamePrefix prefix n)
-- | A @Search r@ is a small bag of functions that is used to power a search for @r@s.
--
-- Construct a 'Search' with 'makeTypeSearch' or 'makeTermSearch', and eliminate it with 'applySearch'.
data Search r = Search
{ lookupNames :: r -> Set (HQ'.HashQualified Name),
lookupRelativeHQRefs' :: HQ'.HashQualified Name -> Set r,
makeResult :: HQ.HashQualified Name -> r -> Set (HQ'.HashQualified Name) -> SR.SearchResult,
matchesNamedRef :: Name -> r -> HQ'.HashQualified Name -> Bool
}
data NameSearch = NameSearch
{ typeSearch :: Search Reference,
termSearch :: Search Referent
}
-- | Make a type search, given a short hash length and names to search in.
makeTypeSearch :: Int -> NamesWithHistory -> Search Reference
makeTypeSearch len names =
Search
{ lookupNames = \ref -> NamesWithHistory.typeName len ref names,
lookupRelativeHQRefs' = (`NamesWithHistory.lookupRelativeHQType'` names),
matchesNamedRef = HQ'.matchesNamedReference,
makeResult = SR.typeResult
}
-- | Make a term search, given a short hash length and names to search in.
makeTermSearch :: Int -> NamesWithHistory -> Search Referent
makeTermSearch len names =
Search
{ lookupNames = \ref -> NamesWithHistory.termName len ref names,
lookupRelativeHQRefs' = (`NamesWithHistory.lookupRelativeHQTerm'` names),
matchesNamedRef = HQ'.matchesNamedReferent,
makeResult = SR.termResult
}
makeNameSearch :: Int -> NamesWithHistory -> NameSearch
makeNameSearch hashLength names =
NameSearch
{ typeSearch = makeTypeSearch hashLength names,
termSearch = makeTermSearch hashLength names
}
-- | Interpret a 'Search' as a function from name to search results.
applySearch :: (Show r) => Search r -> HQ'.HashQualified Name -> [SR.SearchResult]
applySearch Search {lookupNames, lookupRelativeHQRefs', makeResult, matchesNamedRef} query = do
-- a bunch of references will match a HQ ref.
toList (lookupRelativeHQRefs' query) <&> \ref ->
let -- Precondition: the input set is non-empty
prioritize :: Set (HQ'.HashQualified Name) -> (HQ'.HashQualified Name, Set (HQ'.HashQualified Name))
prioritize =
Set.toList
>>> sortOn (\n -> matchesNamedRef (HQ'.toName n) ref query)
>>> List.uncons
>>> fromMaybe (error (reportBug "E839404" ("query = " ++ show query ++ ", ref = " ++ show ref)))
>>> over _2 Set.fromList
names = lookupNames ref
(primaryName, aliases) =
-- The precondition of `prioritize` should hold here because we are passing in the set of names that are
-- related to this ref, which is itself one of the refs that the query name was related to! (Hence it should
-- be non-empty).
prioritize names
in makeResult (HQ'.toHQ primaryName) ref aliases
hqNameQuery ::
Codebase m v Ann ->
NameSearch ->
NameSearch Sqlite.Transaction ->
[HQ.HashQualified Name] ->
Sqlite.Transaction QueryResult
hqNameQuery codebase NameSearch {typeSearch, termSearch} hqs = do
@ -756,9 +751,10 @@ hqNameQuery codebase NameSearch {typeSearch, termSearch} hqs = do
(\(sh, tms) -> mkTermResult sh <$> toList tms) <$> termRefs
typeResults =
(\(sh, tps) -> mkTypeResult sh <$> toList tps) <$> typeRefs
-- Now do the actual name query
resultss = map (\name -> applySearch typeSearch name <> applySearch termSearch name) hqnames
(misses, hits) =
-- Now do the actual name query
resultss <- for hqnames (\name -> liftA2 (<>) (applySearch typeSearch name) (applySearch termSearch name))
let (misses, hits) =
zipWith
( \hqname results ->
(if null results then Left hqname else Right results)
@ -790,6 +786,27 @@ data DefinitionResults = DefinitionResults
typeResults :: Map Reference (DisplayObject () (DD.Decl Symbol Ann)),
noResults :: [HQ.HashQualified Name]
}
deriving stock (Show)
-- | Finds ALL direct references contained within a 'DefinitionResults' so we can
-- build a pretty printer for them.
definitionResultsDependencies :: DefinitionResults -> Set LD.LabeledDependency
definitionResultsDependencies (DefinitionResults {termResults, typeResults}) =
let topLevelTerms = Set.fromList . fmap LD.TermReference $ Map.keys termResults
topLevelTypes = Set.fromList . fmap LD.TypeReference $ Map.keys typeResults
termDeps =
termResults
& foldOf
( folded
. beside
(to Type.labeledDependencies)
(to Term.labeledDependencies)
)
typeDeps =
typeResults
& ifoldMap \typeRef ddObj ->
foldMap (DD.labeledDeclDependenciesIncludingSelf typeRef) ddObj
in termDeps <> typeDeps <> topLevelTerms <> topLevelTypes
expandShortCausalHash :: ShortCausalHash -> Backend Sqlite.Transaction CausalHash
expandShortCausalHash hash = do
@ -848,117 +865,45 @@ prettyDefinitionsForHQName ::
-- | The name, hash, or both, of the definition to display.
HQ.HashQualified Name ->
Backend IO DefinitionDisplayResults
prettyDefinitionsForHQName path shallowRoot renderWidth suffixifyBindings rt codebase query = do
(shallowRoot, hqLength) <-
(lift . Codebase.runTransaction codebase) do
hqLength <- Codebase.hashLength
pure (shallowRoot, hqLength)
(localNamesOnly, unbiasedPPE) <- scopedNamesForBranchHash codebase (Just shallowRoot) path
prettyDefinitionsForHQName perspective shallowRoot renderWidth suffixifyBindings rt codebase perspectiveQuery = do
result <- liftIO . Codebase.runTransaction codebase $ do
shallowBranch <- V2Causal.value shallowRoot
relocateToProjectRoot perspective perspectiveQuery shallowBranch >>= \case
Left err -> pure $ Left err
Right (namesRoot, locatedQuery) -> pure $ Right (shallowRoot, namesRoot, locatedQuery)
(shallowRoot, namesRoot, query) <- either throwError pure result
-- Bias towards both relative and absolute path to queries,
-- This allows us to still bias towards definitions outside our perspective but within the
-- same tree;
-- e.g. if the query is `map` and we're in `base.trunk.List`,
-- we bias towards `map` and `.base.trunk.List.map` which ensures we still prefer names in
-- `trunk` over those in other releases.
let biases = maybeToList $ HQ.toName query
let pped = PPED.biasTo biases unbiasedPPE
-- ppe which returns names fully qualified to the current perspective, not to the codebase root.
let fqnPPE :: PPE.PrettyPrintEnv
fqnPPE = PPED.unsuffixifiedPPE pped
let nameSearch :: NameSearch
nameSearch = makeNameSearch hqLength (NamesWithHistory.fromCurrentNames localNamesOnly)
(DefinitionResults terms types misses, branchAtPath) <-
(lift . Codebase.runTransaction codebase) do
results <- definitionsBySuffixes codebase nameSearch DontIncludeCycles [query]
branchAtPath <- do
causalAtPath <- Codebase.getShallowCausalAtPath path (Just shallowRoot)
V2Causal.value causalAtPath
pure (results, branchAtPath)
let biases = maybeToList $ HQ.toName query
hqLength <- liftIO $ Codebase.runTransaction codebase $ Codebase.hashLength
(localNamesOnly, unbiasedPPED) <- scopedNamesForBranchHash codebase (Just shallowRoot) perspective
let pped = PPED.biasTo biases unbiasedPPED
let nameSearch = makeNameSearch hqLength (NamesWithHistory.fromCurrentNames localNamesOnly)
(DefinitionResults terms types misses) <- liftIO $ Codebase.runTransaction codebase do
definitionsBySuffixes codebase nameSearch DontIncludeCycles [query]
let width = mayDefaultWidth renderWidth
-- Return only references which refer to docs.
filterForDocs :: [Referent] -> Sqlite.Transaction [TermReference]
filterForDocs rs = do
rts <- fmap join . for rs $ \case
Referent.Ref r ->
maybe [] (pure . (r,)) <$> Codebase.getTypeOfTerm codebase r
_ -> pure []
pure [r | (r, t) <- rts, Typechecker.isSubtype t (Type.ref mempty DD.doc2Ref)]
let docResults :: Name -> IO [(HashQualifiedName, UnisonHash, Doc.Doc)]
docResults name = do
docRefs <- docsForDefinitionName codebase nameSearch name
renderDocRefs pped width codebase rt docRefs
docResults :: Reference -> HQ.HashQualified Name -> IO [(HashQualifiedName, UnisonHash, Doc.Doc)]
docResults ref hqName = do
let docRefs = case HQ.toName hqName of
Nothing -> mempty
Just name ->
let docName = name :> "doc"
in Names.termsNamed localNamesOnly docName
let selfRef = Referent.Ref ref
-- It's possible the user is loading a doc directly, in which case we should render it as a doc
-- too.
let allPotentialDocRefs = Set.insert selfRef docRefs
-- lookup the type of each, make sure it's a doc
docs <- Codebase.runTransaction codebase (filterForDocs (toList allPotentialDocRefs))
-- render all the docs
traverse (renderDoc pped width rt codebase) docs
mkTermDefinition ::
Reference ->
DisplayObject
(AnnotatedText (UST.Element Reference))
(AnnotatedText (UST.Element Reference)) ->
Backend IO TermDefinition
mkTermDefinition r tm = do
let referent = Referent.Ref r
ts <- liftIO (Codebase.runTransaction codebase (Codebase.getTypeOfTerm codebase r))
let hqTermName = PPE.termNameOrHashOnly fqnPPE referent
let bn = bestNameForTerm @Symbol (PPED.suffixifiedPPE pped) width (Referent.Ref r)
tag <-
lift
( termEntryTag
<$> termListEntry codebase branchAtPath (ExactName (NameSegment bn) (Cv.referent1to2 referent))
)
docs <- lift (docResults r hqTermName)
mk docs ts bn tag
where
mk _ Nothing _ _ = throwError $ MissingSignatureForTerm r
mk docs (Just typeSig) bn tag = do
-- We don't ever display individual constructors (they're shown as part of their
-- type), so term references are never constructors.
let referent = Referent.Ref r
pure $
TermDefinition
(HQ'.toText <$> PPE.allTermNames fqnPPE referent)
bn
tag
(bimap mungeSyntaxText mungeSyntaxText tm)
(formatSuffixedType pped width typeSig)
docs
mkTypeDefinition ::
( Reference ->
DisplayObject
(AnnotatedText (UST.Element Reference))
(AnnotatedText (UST.Element Reference)) ->
Backend IO TypeDefinition
)
mkTypeDefinition r tp = lift do
let hqTypeName = PPE.typeNameOrHashOnly fqnPPE r
let bn = bestNameForType @Symbol (PPED.suffixifiedPPE pped) width r
tag <-
Codebase.runTransaction codebase do
typeEntryTag <$> typeListEntry codebase branchAtPath (ExactName (NameSegment bn) r)
docs <- docResults r hqTypeName
pure $
TypeDefinition
(HQ'.toText <$> PPE.allTypeNames fqnPPE r)
bn
tag
(bimap mungeSyntaxText mungeSyntaxText tp)
docs
let fqnPPE = PPED.unsuffixifiedPPE pped
typeDefinitions <-
Map.traverseWithKey mkTypeDefinition $
typesToSyntax suffixifyBindings width pped types
ifor (typesToSyntax suffixifyBindings width pped types) \ref tp -> do
let hqTypeName = PPE.typeNameOrHashOnly fqnPPE ref
docs <- liftIO $ (maybe (pure []) docResults (HQ.toName hqTypeName))
mkTypeDefinition codebase pped namesRoot shallowRoot width ref docs tp
termDefinitions <-
Map.traverseWithKey mkTermDefinition $
termsToSyntax suffixifyBindings width pped terms
ifor (termsToSyntax suffixifyBindings width pped terms) \reference trm -> do
let referent = Referent.Ref reference
let hqTermName = PPE.termNameOrHashOnly fqnPPE referent
docs <- liftIO $ (maybe (pure []) docResults (HQ.toName hqTermName))
mkTermDefinition codebase pped namesRoot shallowRoot width reference docs trm
let renderedDisplayTerms = Map.mapKeys Reference.toText termDefinitions
renderedDisplayTypes = Map.mapKeys Reference.toText typeDefinitions
renderedMisses = fmap HQ.toText misses
@ -968,25 +913,113 @@ prettyDefinitionsForHQName path shallowRoot renderWidth suffixifyBindings rt cod
renderedDisplayTypes
renderedMisses
renderDoc ::
-- | Build the server's 'TypeDefinition' payload for a single type 'Reference':
-- resolve its best (suffixified) display name, look up the type's tag from the
-- branch at @namesRoot@, and package the rendered syntax and docs.
mkTypeDefinition ::
  Codebase IO Symbol Ann ->
  PPED.PrettyPrintEnvDecl ->
  -- | Perspective whose branch is consulted for the type's list-entry tag.
  Path.Path ->
  -- | Root causal from which @namesRoot@ is resolved.
  V2Branch.CausalBranch Sqlite.Transaction ->
  Width ->
  Reference ->
  -- | Already evaluated-and-rendered docs for this type.
  [(HashQualifiedName, UnisonHash, Doc.Doc)] ->
  DisplayObject
    (AnnotatedText (UST.Element Reference))
    (AnnotatedText (UST.Element Reference)) ->
  Backend IO TypeDefinition
mkTypeDefinition codebase pped namesRoot rootCausal width r docs tp = do
  -- Suffixified PPE yields the shortest unambiguous display name.
  let bn = bestNameForType @Symbol (PPED.suffixifiedPPE pped) width r
  tag <-
    liftIO $ Codebase.runTransaction codebase do
      causalAtPath <- Codebase.getShallowCausalAtPath namesRoot (Just rootCausal)
      branchAtPath <- V2Causal.value causalAtPath
      typeEntryTag <$> typeListEntry codebase branchAtPath (ExactName (NameSegment bn) r)
  pure $
    TypeDefinition
      -- Aliases use the unsuffixified (fully-qualified) PPE.
      (HQ'.toText <$> PPE.allTypeNames fqnPPE r)
      bn
      tag
      (bimap mungeSyntaxText mungeSyntaxText tp)
      docs
  where
    fqnPPE = PPED.unsuffixifiedPPE pped
-- | Build the server's 'TermDefinition' payload for a single term 'Reference':
-- fetch its type signature and tag, resolve its best display name, and package
-- the rendered syntax and docs. Fails with 'MissingSignatureForTerm' when the
-- term has no stored type.
mkTermDefinition ::
  Codebase IO Symbol Ann ->
  PPED.PrettyPrintEnvDecl ->
  -- | Perspective whose branch is consulted for the term's list-entry tag.
  Path.Path ->
  -- | Root causal from which @namesRoot@ is resolved.
  V2Branch.CausalBranch Sqlite.Transaction ->
  Width ->
  Reference ->
  -- | Already evaluated-and-rendered docs for this term.
  [(HashQualifiedName, UnisonHash, Doc.Doc)] ->
  DisplayObject
    (AnnotatedText (UST.Element Reference))
    (AnnotatedText (UST.Element Reference)) ->
  Backend IO TermDefinition
mkTermDefinition codebase termPPED namesRoot rootCausal width r docs tm = do
  let referent = Referent.Ref r
  -- Single transaction: both the type signature and the branch at the
  -- perspective are fetched together.
  (ts, branchAtPath) <- liftIO $ Codebase.runTransaction codebase do
    ts <- Codebase.getTypeOfTerm codebase r
    causalAtPath <- Codebase.getShallowCausalAtPath namesRoot (Just rootCausal)
    branchAtPath <- V2Causal.value causalAtPath
    pure (ts, branchAtPath)
  let bn = bestNameForTerm @Symbol (PPED.suffixifiedPPE termPPED) width (Referent.Ref r)
  tag <-
    lift
      ( termEntryTag
          <$> termListEntry codebase branchAtPath (ExactName (NameSegment bn) (Cv.referent1to2 referent))
      )
  mk ts bn tag
  where
    fqnTermPPE = PPED.unsuffixifiedPPE termPPED
    -- No type signature stored for the term is a hard error for this endpoint.
    mk Nothing _ _ = throwError $ MissingSignatureForTerm r
    mk (Just typeSig) bn tag = do
      -- We don't ever display individual constructors (they're shown as part of their
      -- type), so term references are never constructors.
      let referent = Referent.Ref r
      pure $
        TermDefinition
          (HQ'.toText <$> PPE.allTermNames fqnTermPPE referent)
          bn
          tag
          (bimap mungeSyntaxText mungeSyntaxText tm)
          (formatSuffixedType termPPED width typeSig)
          docs
-- | Given an arbitrary query and perspective, find the project root the query belongs in,
-- then return that root and the query relocated to that project root.
--
-- Returns 'Left' only when the perspective and inferred project root are
-- disjoint paths, which should be impossible in practice.
relocateToProjectRoot :: Path -> HQ.HashQualified Name -> V2Branch.Branch Sqlite.Transaction -> Sqlite.Transaction (Either BackendError (Path, HQ.HashQualified Name))
relocateToProjectRoot perspective query rootBranch = do
  -- Hash-only queries have no name; they stay anchored at the perspective.
  let queryLocation = HQ.toName query & maybe perspective \name -> perspective <> Path.fromName name
  -- Names should be found from the project root of the queried name
  (Projects.inferNamesRoot queryLocation rootBranch) >>= \case
    Nothing -> do
      -- No project root found; keep the original perspective and query.
      pure $ Right (perspective, query)
    Just projectRoot ->
      case Path.longestPathPrefix perspective projectRoot of
        -- The perspective is equal to the project root
        (_sharedPrefix, Path.Empty, Path.Empty) -> do
          pure $ Right (perspective, query)
        -- The perspective is _outside_ of the project containing the query
        (_sharedPrefix, Path.Empty, remainder) -> do
          -- Since the project root is lower down we need to strip the part of the prefix
          -- which is now redundant.
          pure . Right $ (projectRoot, query <&> \n -> fromMaybe n $ Path.unprefixName (Path.Absolute remainder) n)
        -- The perspective is _inside_ of the project containing the query
        (_sharedPrefix, remainder, Path.Empty) -> do
          -- Since the project is higher up, we need to prefix the query
          -- with the remainder of the path
          pure . Right $ (projectRoot, query <&> Path.prefixName (Path.Absolute remainder))
        -- The perspective and project root are disjoint, this shouldn't ever happen.
        (_, _, _) -> pure $ Left (DisjointProjectAndPerspective perspective projectRoot)
-- | Evaluate the doc at the given reference and return its evaluated-but-not-rendered form.
evalDocRef ::
Rt.Runtime Symbol ->
Codebase IO Symbol Ann ->
TermReference ->
IO (HashQualifiedName, UnisonHash, Doc.Doc)
renderDoc ppe width rt codebase r = do
let name = bestNameForTerm @Symbol (PPED.suffixifiedPPE ppe) width (Referent.Ref r)
let hash = Reference.toText r
(name,hash,)
<$> let tm = Term.ref () r
in Doc.evalAndRenderDoc
ppe
terms
typeOf
eval
decls
tm
IO (Doc.EvaluatedDoc Symbol)
evalDocRef rt codebase r = do
let tm = Term.ref () r
Doc.evalDoc terms typeOf eval decls tm
where
terms r@(Reference.Builtin _) = pure (Just (Term.ref () r))
terms (Reference.DerivedId r) =
@ -994,10 +1027,11 @@ renderDoc ppe width rt codebase r = do
typeOf r = fmap void <$> Codebase.runTransaction codebase (Codebase.getTypeOfReferent codebase r)
eval (Term.amap (const mempty) -> tm) = do
let ppes = PPED.suffixifiedPPE ppe
-- We use an empty ppe for evalutation, it's only used for adding additional context to errors.
let evalPPE = PPE.empty
let codeLookup = Codebase.toCodeLookup codebase
let cache r = fmap Term.unannotate <$> Codebase.runTransaction codebase (Codebase.lookupWatchCache codebase r)
r <- fmap hush . liftIO $ Rt.evaluateTerm' codeLookup cache ppes rt tm
r <- fmap hush . liftIO $ Rt.evaluateTerm' codeLookup cache evalPPE rt tm
case r of
Just tmr ->
Codebase.runTransaction codebase do
@ -1012,6 +1046,47 @@ renderDoc ppe width rt codebase r = do
fmap (DD.amap (const ())) <$> Codebase.runTransaction codebase (Codebase.getTypeDeclaration codebase r)
decls _ = pure Nothing
-- | Fetch the docs associated with the given name.
-- Returns all references with a Doc type which are at the name provided, or at '<name>.doc'.
docsForDefinitionName ::
  Codebase IO Symbol Ann ->
  NameSearch Sqlite.Transaction ->
  Name ->
  IO [TermReference]
docsForDefinitionName codebase (NameSearch {termSearch}) name = do
  -- Check both the name itself and the conventional '<name>.doc' location.
  let potentialDocNames = [name, name Cons.:> "doc"]
  Codebase.runTransaction codebase do
    refs <-
      potentialDocNames & foldMapM \name ->
        -- TODO: Should replace this with an exact name lookup.
        lookupRelativeHQRefs' termSearch (HQ'.NameOnly name)
    filterForDocs (toList refs)
  where
    -- Keep only plain term refs whose type is a subtype of Doc2; constructors
    -- and terms without a stored type are dropped.
    filterForDocs :: [Referent] -> Sqlite.Transaction [TermReference]
    filterForDocs rs = do
      rts <- fmap join . for rs $ \case
        Referent.Ref r ->
          maybe [] (pure . (r,)) <$> Codebase.getTypeOfTerm codebase r
        _ -> pure []
      pure [r | (r, t) <- rts, Typechecker.isSubtype t (Type.ref mempty DD.doc2Ref)]
-- | Evaluate and render the given docs, pairing each with its best display
-- name and its reference's hash text.
renderDocRefs ::
  (Traversable t) =>
  PPED.PrettyPrintEnvDecl ->
  Width ->
  Codebase IO Symbol Ann ->
  Rt.Runtime Symbol ->
  t TermReference ->
  IO (t (HashQualifiedName, UnisonHash, Doc.Doc))
renderDocRefs pped width codebase rt docRefs =
  traverse renderOne docRefs
  where
    -- Evaluate a single doc ref, then render it purely with the given PPE.
    renderOne docRef = do
      evaluated <- evalDocRef rt codebase docRef
      let displayName = bestNameForTerm @Symbol (PPED.suffixifiedPPE pped) width (Referent.Ref docRef)
          hashText = Reference.toText docRef
      pure (displayName, hashText, Doc.renderDoc pped evaluated)
docsInBranchToHtmlFiles ::
Rt.Runtime Symbol ->
Codebase IO Symbol Ann ->
@ -1036,8 +1111,10 @@ docsInBranchToHtmlFiles runtime codebase root currentPath directory = do
docs <- for docTermsWithNames (renderDoc' ppe runtime codebase)
liftIO $ traverse_ (renderDocToHtmlFile docNamesByRef directory) docs
where
renderDoc' ppe runtime codebase (ref, name) = do
(_, hash, doc) <- renderDoc ppe defaultWidth runtime codebase (Referent.toReference ref)
renderDoc' ppe runtime codebase (docReferent, name) = do
let docReference = Referent.toReference docReferent
doc <- evalDocRef runtime codebase docReference <&> Doc.renderDoc ppe
let hash = Reference.toText docReference
pure (name, hash, doc)
cleanPath :: FilePath -> FilePath
@ -1213,7 +1290,7 @@ data IncludeCycles
definitionsBySuffixes ::
Codebase m Symbol Ann ->
NameSearch ->
NameSearch Sqlite.Transaction ->
IncludeCycles ->
[HQ.HashQualified Name] ->
Sqlite.Transaction DefinitionResults

View File

@ -31,7 +31,7 @@ import Network.HTTP.Media ((//), (/:))
import Network.HTTP.Types (HeaderName)
import Network.HTTP.Types.Status (ok200)
import qualified Network.URI.Encode as URI
import Network.Wai (responseLBS)
import Network.Wai (Middleware, responseLBS)
import Network.Wai.Handler.Warp
( Port,
defaultSettings,
@ -41,6 +41,7 @@ import Network.Wai.Handler.Warp
setPort,
withApplicationSettings,
)
import Network.Wai.Middleware.Cors (cors, corsMethods, corsOrigins, simpleCorsResourcePolicy)
import Servant
( Handler,
HasServer,
@ -207,9 +208,10 @@ app ::
Codebase IO Symbol Ann ->
FilePath ->
Strict.ByteString ->
Maybe String ->
Application
app env rt codebase uiPath expectedToken =
serve appAPI $ server env rt codebase uiPath expectedToken
app env rt codebase uiPath expectedToken allowCorsHost =
corsPolicy allowCorsHost $ serve appAPI $ server env rt codebase uiPath expectedToken
-- | The Token is used to help prevent multiple users on a machine gain access to
-- each others codebases.
@ -242,6 +244,9 @@ ucmPortVar = "UCM_PORT"
ucmHostVar :: String
ucmHostVar = "UCM_HOST"
-- | Environment variable used to configure an allowed CORS origin host for
-- the codebase server (see 'corsPolicy').
ucmAllowCorsHost :: String
ucmAllowCorsHost = "UCM_ALLOW_CORS_HOST"
ucmTokenVar :: String
ucmTokenVar = "UCM_TOKEN"
@ -249,6 +254,7 @@ data CodebaseServerOpts = CodebaseServerOpts
{ token :: Maybe String,
host :: Maybe String,
port :: Maybe Int,
allowCorsHost :: Maybe String,
codebaseUIPath :: Maybe FilePath
}
deriving (Show, Eq)
@ -259,6 +265,7 @@ defaultCodebaseServerOpts =
{ token = Nothing,
host = Nothing,
port = Nothing,
allowCorsHost = Nothing,
codebaseUIPath = Nothing
}
@ -282,7 +289,7 @@ startServer env opts rt codebase onStart = do
defaultSettings
& maybe id setPort (port opts)
& maybe id (setHost . fromString) (host opts)
let a = app env rt codebase envUI token
let a = app env rt codebase envUI token (allowCorsHost opts)
case port opts of
Nothing -> withApplicationSettings settings (pure a) (onStart . baseUrl)
Just p -> do
@ -318,6 +325,17 @@ serveIndex path = do
serveUI :: FilePath -> Server WebUI
serveUI path _ = serveIndex path
-- | Wrap the application in a CORS middleware when an allowed origin host is
-- configured; otherwise leave the application untouched.
corsPolicy :: Maybe String -> Middleware
corsPolicy mayAllowedHost =
  case mayAllowedHost of
    Nothing -> id
    Just allowedHost ->
      let policy =
            simpleCorsResourcePolicy
              { corsMethods = ["GET", "OPTIONS"],
                corsOrigins = Just ([C8.pack allowedHost], True)
              }
       in cors (const (Just policy))
server ::
BackendEnv ->
Rt.Runtime Symbol ->

View File

@ -0,0 +1,253 @@
-- | Render Unison.Server.Doc as plain markdown, used in the LSP
module Unison.Server.Doc.Markdown.Render (toMarkdown) where
import Control.Monad.Reader
import Data.Foldable
import qualified Data.Text as Text
import Unison.Codebase.Editor.DisplayObject (DisplayObject (..))
import Unison.Prelude
import Unison.Server.Doc
import qualified Unison.Server.Doc as Doc
import qualified Unison.Server.Doc.Markdown.Types as Md
import Unison.Server.Syntax (SyntaxText)
import qualified Unison.Server.Syntax as Syntax
import Unison.Util.Monoid (foldMapM)
-- | Source extracted from a doc reference: either user-written source
-- (summary + full details) or a builtin (summary only).
data EmbeddedSource
  = EmbeddedSource SyntaxText SyntaxText
  | Builtin SyntaxText
-- | Extract the embedded source from a term or type ref, if its display
-- object is present (missing objects yield 'Nothing').
embeddedSource :: Ref (UnisonHash, DisplayObject SyntaxText Src) -> Maybe EmbeddedSource
embeddedSource = \case
  Term payload -> fromDisplayObject payload
  Type payload -> fromDisplayObject payload
  where
    fromDisplayObject (_hash, displayObj) =
      case displayObj of
        BuiltinObject s -> Just (Builtin s)
        UserObject (Src summary details) -> Just (EmbeddedSource summary details)
        MissingObject _ -> Nothing
-- | Turn a doc link target into Markdown, given the already-rendered label.
-- Word/Join targets become real links; unsupported targets fall back to the
-- bare label.
normalizeHref :: [Md.Markdown] -> Doc -> MarkdownM [Md.Markdown]
normalizeHref label = \case
  Word w -> pure [Md.Link label w]
  Group d ->
    normalizeHref label d
  j@Join {} -> do
    -- A Join is flattened to raw text and used directly as the URI.
    let uri = toRawText j
    pure [Md.Link label uri]
  Special (Link {}) -> do
    -- We don't support cross-doc links in Markdown (yet)
    pure label
  _ -> pure label
-- | Render embedded source as Markdown code blocks; builtins get an
-- explanatory note in place of source.
embeddedSourceToMarkdown :: EmbeddedSource -> [Md.Markdown]
embeddedSourceToMarkdown (Builtin summary) =
  [ Md.CodeBlock "unison" (Syntax.toPlainText summary),
    Md.Txt "Built-in provided by the Unison runtime"
  ]
embeddedSourceToMarkdown (EmbeddedSource _summary details) =
  [Md.CodeBlock "unison" (Syntax.toPlainText details)]
-- | Used when a contained block is expected to be raw text. E.g. inside a CodeBlock.
-- Other renderers may need to handle links and things in code blocks, but for Markdown we don't.
toRawText :: Doc -> Text
toRawText doc =
  case doc of
    Paragraph ds -> listToText ds <> "\n"
    Group d -> toRawText d
    Join ds -> listToText ds
    Bold d -> "**" <> toRawText d <> "** "
    Italic d -> "_" <> toRawText d <> "_ "
    Strikethrough d -> "~~" <> toRawText d <> "~~ "
    Blockquote d -> ">" <> toRawText d <> " "
    Section d ds ->
      Text.unlines
        [ "#" <> toRawText d,
          listToText ds
        ]
    UntitledSection ds -> listToText ds
    Column ds -> listToText ds
    Word w -> w <> " "
    Code code -> "`" <> toRawText code <> "` "
    CodeBlock lang code ->
      Text.unlines
        [ "```" <> lang,
          toRawText code,
          "```\n"
        ]
    -- Purely presentational constructs carry no raw text.
    Style {} -> ""
    Anchor {} -> ""
    Blankline -> "\n\n"
    Linebreak -> "\n"
    SectionBreak -> "---\n"
    Tooltip {} -> ""
    Aside {} -> ""
    Callout {} -> ""
    -- Most other things shouldn't appear anywhere inside links and such
    _ -> ""
  where
    -- Render each child, drop empty results, and join with spaces.
    listToText xs =
      xs
        & fmap toRawText
        & filter (not . Text.null)
        & Text.unwords
-- | Environment threaded through rendering; tracks the current section
-- (heading) level.
data MarkdownEnv = MarkdownEnv
  { section :: Word64
  }

-- | Rendering monad: a reader over the current section level.
type MarkdownM = Reader MarkdownEnv
-- | Renders a Doc to a list of Markdown blocks, starting at heading level 1.
toMarkdown :: Doc -> [Md.Markdown]
toMarkdown doc =
  runReader (toMarkdown_ doc) (MarkdownEnv {section = 1})
-- | Worker for 'toMarkdown': renders one 'Doc' node to Markdown blocks,
-- tracking the current heading level through the Reader environment.
toMarkdown_ :: Doc -> MarkdownM [Md.Markdown]
toMarkdown_ doc =
  case doc of
    Tooltip {} ->
      -- We don't render tooltips in markdown for now
      pure mempty
    Word word -> do
      pure [Md.Txt word]
    -- Single-word code avoids a round trip through 'toRawText'.
    Code (Word txt) -> do
      pure [Md.InlineCode txt]
    Code contents -> do
      pure [Md.InlineCode (toRawText contents)]
    CodeBlock lang (Word txt) -> do
      pure [Md.CodeBlock lang txt]
    CodeBlock lang contents -> do
      pure [Md.CodeBlock lang (toRawText contents)]
    Bold d -> do
      result <- toMarkdown_ d
      pure [Md.Strong result]
    Italic d -> do
      result <- toMarkdown_ d
      pure [Md.Italics result]
    Strikethrough d -> do
      result <- toMarkdown_ d
      pure [Md.Strikethrough result]
    Style {} -> pure mempty
    Anchor uri d -> do
      label <- toMarkdown_ d
      pure [Md.Link label uri]
    Blockquote d -> do
      contents <- toMarkdown_ d
      pure [Md.BlockQuote contents]
    Blankline ->
      pure [Md.Linebreak, Md.Linebreak]
    Linebreak ->
      pure [Md.Linebreak]
    SectionBreak -> do
      pure [Md.ThematicBreak]
    Aside d -> do
      contents <- toMarkdown_ d
      pure [Md.BlockQuote contents]
    -- Callouts become blockquotes with the (optional) icon on its own line.
    Callout icon content -> do
      contents <- toMarkdown_ content
      pure [Md.BlockQuote $ [Md.Txt ico, Md.Linebreak] <> contents]
      where
        (ico :: Text) =
          case icon of
            Just emoji ->
              ( toRawText $ emoji
              )
            Nothing -> ("")
    Table rows -> do
      renderedRows <- traverse (traverse toMarkdown_) rows
      pure [Md.Table Nothing renderedRows]
    Folded _isFolded _summary details -> do
      -- We don't fold anything in Markdown
      toMarkdown_ details
    Paragraph docs -> do
      rendered <- for docs toMarkdown_
      pure $ fold rendered <> [Md.Linebreak]
    BulletedList items -> do
      rendered <- for items toMarkdown_
      pure [Md.UnorderedList rendered]
    NumberedList startNum items -> do
      rendered <- for items toMarkdown_
      pure [Md.OrderedList (fromIntegral startNum) rendered]
    -- Sections increment the heading level for their children.
    Section title docs -> do
      sectionLevel <- asks section
      renderedTitle <- toMarkdown_ title
      body <- local (\env -> env {section = section env + 1}) $ foldMapM toMarkdown_ docs
      pure $ [Md.Heading (fromIntegral sectionLevel) renderedTitle] <> body
    NamedLink label url -> do
      renderedLabel <- toMarkdown_ label
      normalizeHref renderedLabel url
    Image altText src caption -> do
      renderedAltText <- toMarkdown_ altText
      renderedCaption <- traverse toMarkdown_ caption
      let srcText = toRawText src
      pure $ [Md.Image renderedAltText srcText] <> (fromMaybe mempty renderedCaption)
    Special specialForm -> do
      case specialForm of
        Source sources -> do
          pure $ foldMap (foldMap embeddedSourceToMarkdown . embeddedSource) sources
        FoldedSource sources -> do
          -- We can't fold in markdown
          pure $ foldMap (foldMap embeddedSourceToMarkdown . embeddedSource) sources
        Example syntax -> do
          pure [Md.InlineCode (Syntax.toPlainText syntax)]
        ExampleBlock syntax -> do
          pure [Md.CodeBlock "unison" (Syntax.toPlainText syntax)]
        Link syntax -> do
          pure [Md.InlineCode (Syntax.toPlainText syntax)]
        Signature signatures -> do
          signatures
            & foldMap (pure @[] . Md.CodeBlock "unison" . Syntax.toPlainText)
            & pure
        SignatureInline sig -> do
          pure [Md.InlineCode $ Syntax.toPlainText sig]
        -- Watch-expression result: source and result in one code block.
        Eval source result -> do
          pure
            [ Md.CodeBlock
                "unison"
                ( Text.unlines
                    [ Syntax.toPlainText source,
                      "",
                      Syntax.toPlainText result
                    ]
                )
            ]
        EvalInline source result -> do
          -- I'm not sure of a good way to express this 'inline' in markdown
          pure
            [ Md.CodeBlock "unison" $
                Text.unlines
                  [ Syntax.toPlainText source,
                    "",
                    Syntax.toPlainText result
                  ]
            ]
        -- Markdown can't embed video; fall back to an image of the first source.
        Video sources _attrs -> do
          case sources of
            [] -> pure mempty
            (MediaSource src _ : _) -> do
              pure [Md.Image mempty src]
        Doc.FrontMatter {} -> pure mempty
        LaTeXInline latex -> do
          pure [Md.CodeBlock "latex" latex]
        Svg {} -> do pure [Md.Txt "{inline svg}"]
        Embed syntax -> do
          pure [Md.CodeBlock "unison" (Syntax.toPlainText syntax)]
        EmbedInline syntax -> do
          pure [Md.InlineCode (Syntax.toPlainText syntax)]
        RenderError (InvalidTerm err) -> do
          pure [Md.Txt $ Syntax.toPlainText err]
    Join docs -> do
      foldMapM toMarkdown_ docs
    UntitledSection docs -> do
      foldMapM toMarkdown_ docs
    Column docs -> do
      foldMapM toMarkdown_ docs
    Group content -> do
      toMarkdown_ content

View File

@ -0,0 +1,101 @@
module Unison.Server.Doc.Markdown.Types where
import Control.Lens (imap)
import qualified Data.Char as Char
import qualified Data.Char as Text
import qualified Data.Text as Text
import qualified Unison.Debug as Debug
import Unison.Prelude
-- | Custom type for converting Docs into Markdown.
-- I tried using the existing cmark-gfm library for this, but we have too many edge-cases
-- for it to work well.
data Markdown
  = -- | E.g. '---'
    ThematicBreak
  | Paragraph [Markdown]
  | BlockQuote [Markdown]
  | -- | Fenced code block: lang, contents
    CodeBlock Text Text
  | -- | Heading level (1 = @#@), contents
    Heading Int [Markdown]
  | -- | Starting number, items
    OrderedList Int [[Markdown]]
  | UnorderedList [[Markdown]]
  | -- | Plain inline text
    Txt Text
  | Linebreak
  | InlineCode Text
  | Italics [Markdown]
  | Strong [Markdown]
  | Strikethrough [Markdown]
  | -- | label, uri
    Link [Markdown] Text
  | -- | label, uri
    Image [Markdown] Text
  | -- | Optional header row, then body cells
    Table (Maybe [[Markdown]]) [[[Markdown]]]
  deriving (Show)
-- | Render the markdown datatype to markdown text.
--
-- The whole input list is treated as one top-level 'Paragraph'. Rendering is
-- pure; a leftover @Debug.debug Debug.Temp \"Markdown\"@ trace (temporary
-- debugging instrumentation that logged every rendered element) has been
-- removed.
toText :: [Markdown] -> Text
toText = toText' . Paragraph
  where
    toText' :: Markdown -> Text
    toText' = \case
      ThematicBreak -> "\n---"
      Paragraph m -> flattenParagraph m
      BlockQuote m -> "> " <> flattenParagraph m
      CodeBlock lang contents ->
        "```"
          <> lang
          <> "\n"
          <> contents
          <> "\n```\n\n"
      Heading n contents ->
        (Text.replicate n "#" <> " " <> (flattenInline contents)) <> "\n\n"
      -- TODO: Nested lists
      OrderedList startNum items ->
        items
          & imap
            ( \n item ->
                tShow (n + startNum) <> ". " <> flattenInline item
            )
          & Text.unlines
          & (<> "\n")
      UnorderedList items ->
        items
          & fmap
            ( \item ->
                "- " <> flattenInline item
            )
          & Text.unlines
          & (<> "\n")
      Txt txt -> txt
      Linebreak -> "\n\n"
      InlineCode txt -> "`" <> txt <> "`"
      Italics md -> "_" <> flattenInline md <> "_"
      Strong md -> "**" <> flattenInline md <> "**"
      Strikethrough md -> "~~" <> flattenInline md <> "~~"
      Link label uri ->
        "[" <> (flattenInline label) <> "](" <> uri <> ")"
      Image label uri -> "![" <> flattenInline label <> "](" <> uri <> ")"
      Table _headers _rows -> mempty -- TODO
      where
        -- Join inline fragments with spaces, dropping whitespace-only pieces.
        flattenInline :: [Markdown] -> Text
        flattenInline m =
          (toText' <$> m)
            & filter (Text.any (not . Text.isSpace))
            & Text.unwords
        -- Join block fragments, inserting a space between words but not
        -- doubling whitespace at fragment boundaries.
        flattenParagraph :: [Markdown] -> Text
        flattenParagraph m =
          let go :: Maybe Text -> Text -> Maybe Text
              go Nothing next = Just next
              go (Just acc) next = case (Text.unsnoc acc, Text.uncons next) of
                (Nothing, _) -> Just $ "\n" <> next
                (_, Nothing) -> Just $ acc <> "\n"
                (Just (_, lastChar), Just (firstChar, _))
                  | Char.isSpace lastChar || Char.isSpace firstChar -> Just $ acc <> next
                  | otherwise -> Just $ Text.unwords [acc, next]
           in case foldl' go Nothing (toText' <$> m) of
                Nothing -> ""
                Just x -> x <> "\n"

View File

@ -53,6 +53,7 @@ backendError = \case
Backend.NoSuchDefinition hqName -> noSuchDefinition hqName
Backend.AmbiguousHashForDefinition shorthash -> ambiguousHashForDefinition shorthash
Backend.ExpectedNameLookup branchHash -> expectedNameLookup branchHash
Backend.DisjointProjectAndPerspective perspective projectRoot -> disjointProjectAndPerspective perspective projectRoot
badNamespace :: String -> String -> ServerError
badNamespace err namespace =
@ -119,3 +120,14 @@ expectedNameLookup branchHash =
{ errBody =
"Name lookup index required for branch hash: " <> BSC.pack (show branchHash)
}
-- | 500 error for the impossible case where the inferred project root and the
-- requested perspective do not share a prefix.
disjointProjectAndPerspective :: Path.Path -> Path.Path -> ServerError
disjointProjectAndPerspective perspective projectRoot =
  err500 {errBody = body}
  where
    body =
      mconcat
        [ "The project root ",
          munge (Path.toText projectRoot),
          " is disjoint with the perspective ",
          munge (Path.toText perspective),
          ". This is a bug, please report it."
        ]

View File

@ -0,0 +1,54 @@
module Unison.Server.NameSearch where
import Control.Lens
import qualified Data.List as List
import qualified Data.Set as Set
import qualified Unison.HashQualified as HQ
import qualified Unison.HashQualified' as HQ'
import Unison.Name (Name)
import Unison.Prelude
import Unison.Reference (Reference)
import Unison.Referent (Referent)
import qualified Unison.Server.SearchResult as SR
-- | A @Search r@ is a small bag of functions that is used to power a search for @r@s.
--
-- There are currently two implementations of this interface:
--
-- * 'NameSearch.FromNames' which builds a name search from a 'Names' object
-- * 'NameSearch.Sqlite which builds a name search that uses a sqlite name lookup index.
--
-- You can use the individual methods of a name search or can use 'applySearch'.
data Search m r = Search
  { -- | All hash-qualified names for a given ref.
    lookupNames :: r -> m (Set (HQ'.HashQualified Name)),
    -- | All refs matching a (possibly hash-qualified) name, relative to the search's scope.
    lookupRelativeHQRefs' :: HQ'.HashQualified Name -> m (Set r),
    -- | Assemble a 'SearchResult' from a primary name, its ref, and alias names.
    makeResult :: HQ.HashQualified Name -> r -> Set (HQ'.HashQualified Name) -> m SR.SearchResult,
    -- | Whether a name/ref pair matches a hash-qualified query.
    matchesNamedRef :: Name -> r -> HQ'.HashQualified Name -> Bool
  }

-- | A pair of searches: one over type references, one over term referents.
data NameSearch m = NameSearch
  { typeSearch :: Search m Reference,
    termSearch :: Search m Referent
  }
-- | Interpret a 'Search' as a function from name to search results.
applySearch :: (Show r, Monad m) => Search m r -> HQ'.HashQualified Name -> m [SR.SearchResult]
applySearch Search {lookupNames, lookupRelativeHQRefs', makeResult, matchesNamedRef} query = do
  refs <- (lookupRelativeHQRefs' query)
  -- a bunch of references will match a HQ ref.
  for (toList refs) \ref -> do
    let -- Precondition: the input set is non-empty
        prioritize :: Set (HQ'.HashQualified Name) -> (HQ'.HashQualified Name, Set (HQ'.HashQualified Name))
        prioritize =
          Set.toList
            -- NOTE(review): 'sortOn' over a Bool sorts 'False' before 'True',
            -- so names for which 'matchesNamedRef' holds end up LAST while the
            -- head is taken as the primary name — confirm this ordering is the
            -- intended prioritization.
            >>> sortOn (\n -> matchesNamedRef (HQ'.toName n) ref query)
            >>> List.uncons
            >>> fromMaybe (error (reportBug "E839404" ("query = " ++ show query ++ ", ref = " ++ show ref)))
            >>> over _2 Set.fromList
    names <- lookupNames ref
    let (primaryName, aliases) =
          -- The precondition of `prioritize` should hold here because we are passing in the set of names that are
          -- related to this ref, which is itself one of the refs that the query name was related to! (Hence it should
          -- be non-empty).
          prioritize names
    makeResult (HQ'.toHQ primaryName) ref aliases

View File

@ -0,0 +1,36 @@
module Unison.Server.NameSearch.FromNames where
import qualified Unison.HashQualified' as HQ'
import Unison.NamesWithHistory (NamesWithHistory)
import qualified Unison.NamesWithHistory as NamesWithHistory
import Unison.Reference (Reference)
import Unison.Referent (Referent)
import Unison.Server.NameSearch
import qualified Unison.Server.SearchResult as SR
-- | Make a type search, given a short hash length and names to search in.
-- Pure: all lookups go through the in-memory 'NamesWithHistory'.
makeTypeSearch :: (Applicative m) => Int -> NamesWithHistory -> Search m Reference
makeTypeSearch len names =
  Search
    { lookupNames = \ref -> pure $ NamesWithHistory.typeName len ref names,
      lookupRelativeHQRefs' = pure . (`NamesWithHistory.lookupRelativeHQType'` names),
      matchesNamedRef = HQ'.matchesNamedReference,
      makeResult = \hqname r names -> pure $ SR.typeResult hqname r names
    }
-- | Make a term search, given a short hash length and names to search in.
-- Pure: all lookups go through the in-memory 'NamesWithHistory'.
makeTermSearch :: (Applicative m) => Int -> NamesWithHistory -> Search m Referent
makeTermSearch len names =
  Search
    { lookupNames = \ref -> pure $ NamesWithHistory.termName len ref names,
      lookupRelativeHQRefs' = pure . (`NamesWithHistory.lookupRelativeHQTerm'` names),
      matchesNamedRef = HQ'.matchesNamedReferent,
      makeResult = \hqname r names -> pure $ SR.termResult hqname r names
    }
-- | Bundle a type search and a term search over the same 'NamesWithHistory'
-- into a single 'NameSearch'.
makeNameSearch :: (Applicative m) => Int -> NamesWithHistory -> NameSearch m
makeNameSearch hashLength names =
  NameSearch
    { typeSearch = makeTypeSearch hashLength names,
      termSearch = makeTermSearch hashLength names
    }

View File

@ -0,0 +1,182 @@
module Unison.Server.NameSearch.Sqlite
( resolveShortHash,
typeReferencesByShortHash,
termReferentsByShortHash,
NameSearch (..),
scopedNameSearch,
)
where
import Control.Lens
import qualified Data.Set as Set
import U.Codebase.HashTags (BranchHash)
import qualified U.Codebase.Sqlite.NamedRef as NamedRef
import qualified U.Codebase.Sqlite.Operations as Ops
import qualified Unison.Builtin as Builtin
import Unison.Codebase (Codebase)
import qualified Unison.Codebase as Codebase
import Unison.Codebase.Path
import qualified Unison.Codebase.Path as Path
import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv
import qualified Unison.HashQualified' as HQ'
import qualified Unison.LabeledDependency as LD
import Unison.Name (Name)
import qualified Unison.Name as Name
import Unison.NameSegment (NameSegment (..))
import Unison.Prelude
import Unison.Reference (Reference)
import qualified Unison.Reference as Reference
import Unison.Referent (Referent)
import qualified Unison.Referent as Referent
import Unison.Server.NameSearch (NameSearch (..), Search (..))
import qualified Unison.Server.SearchResult as SR
import qualified Unison.ShortHash as SH
import qualified Unison.Sqlite as Sqlite
import qualified Unison.Util.Set as Set
-- | Whether a query must match a name exactly or may match any name suffix.
data SearchStrategy
  = ExactMatch
  | SuffixMatch
  deriving (Show, Eq)
-- | Build a 'NameSearch' backed by the sqlite name-lookup index, scoped to the
-- given root branch hash and path. Names are resolved relative to @path@.
scopedNameSearch :: Codebase m v a -> BranchHash -> Path -> NameSearch Sqlite.Transaction
scopedNameSearch codebase rootHash path =
  NameSearch {typeSearch, termSearch}
  where
    typeSearch =
      Search
        { lookupNames = lookupNamesForTypes,
          lookupRelativeHQRefs' = lookupRelativeHQRefsForTypes,
          makeResult = \hqname r names -> pure $ SR.typeResult hqname r names,
          matchesNamedRef = HQ'.matchesNamedReference
        }
    termSearch =
      Search
        { lookupNames = lookupNamesForTerms,
          lookupRelativeHQRefs' = lookupRelativeHQRefsForTerms,
          makeResult = \hqname r names -> pure $ SR.termResult hqname r names,
          matchesNamedRef = HQ'.matchesNamedReferent
        }
    -- Rendered once; passed to every Ops query as the namespace scope.
    pathText :: Text
    pathText = Path.toText path
    lookupNamesForTypes :: Reference -> Sqlite.Transaction (Set (HQ'.HashQualified Name))
    lookupNamesForTypes ref = do
      names <- Ops.typeNamesForRefWithinNamespace rootHash pathText (Cv.reference1to2 ref) Nothing
      names
        & fmap (\segments -> HQ'.HashQualified (reversedSegmentsToName segments) (Reference.toShortHash ref))
        & Set.fromList
        & pure
    lookupNamesForTerms :: Referent -> Sqlite.Transaction (Set (HQ'.HashQualified Name))
    lookupNamesForTerms ref = do
      names <- Ops.termNamesForRefWithinNamespace rootHash pathText (Cv.referent1to2 ref) Nothing
      names
        & fmap (\segments -> HQ'.HashQualified (reversedSegmentsToName segments) (Referent.toShortHash ref))
        & Set.fromList
        & pure
    -- This is a bit messy, but the existing 'lookupRelativeHQRefs' semantics
    -- will return ONLY exact matches if any exist, otherwise it falls back on
    -- suffix search, so we maintain that behaviour here. It would probably be better
    -- to have separate functions in the Search type for each of these, and be more explicit
    -- about desired behaviour at the call-site.
    lookupRelativeHQRefsForTerms :: HQ'.HashQualified Name -> Sqlite.Transaction (Set Referent)
    lookupRelativeHQRefsForTerms hqName = do
      exact <- hqTermSearch ExactMatch hqName
      if Set.null exact
        then do
          hqTermSearch SuffixMatch hqName
        else do
          pure exact
    lookupRelativeHQRefsForTypes :: HQ'.HashQualified Name -> Sqlite.Transaction (Set Reference)
    lookupRelativeHQRefsForTypes hqName = do
      exact <- hqTypeSearch ExactMatch hqName
      if Set.null exact
        then do
          hqTypeSearch SuffixMatch hqName
        else do
          pure exact
    -- Search the codebase for matches to the given hq name.
    -- Supports either an exact match or a suffix match.
    hqTermSearch :: SearchStrategy -> HQ'.HashQualified Name -> Sqlite.Transaction (Set Referent)
    hqTermSearch searchStrat hqName = do
      case hqName of
        HQ'.NameOnly name -> do
          let fqn = Path.prefixName (Path.Absolute path) name
          namedRefs <-
            case searchStrat of
              ExactMatch -> Ops.termRefsForExactName rootHash (coerce $ Name.reverseSegments fqn)
              SuffixMatch -> Ops.termNamesBySuffix rootHash pathText (coerce $ Name.reverseSegments name)
          namedRefs
            & fmap
              ( \(NamedRef.ref -> (ref, mayCT)) ->
                  -- The index stores a constructor type for constructor referents;
                  -- its absence for one is an index invariant violation.
                  Cv.referent2to1UsingCT (fromMaybe (error "Required constructor type for constructor but it was null") mayCT) ref
              )
            & Set.fromList
            & pure
        HQ'.HashQualified name sh -> do
          let fqn = Path.prefixName (Path.Absolute path) name
          -- Hash-qualified: start from the hash, then filter by name.
          termRefs <- termReferentsByShortHash codebase sh
          Set.forMaybe termRefs \termRef -> do
            matches <- Ops.termNamesForRefWithinNamespace rootHash pathText (Cv.referent1to2 termRef) (Just . coerce $ Name.reverseSegments name)
            -- Return a valid ref if at least one match was found. Require that it be an exact
            -- match if specified.
            if any (\n -> coerce (Name.reverseSegments fqn) == n || searchStrat /= ExactMatch) matches
              then pure (Just termRef)
              else pure Nothing
    -- Search the codebase for matches to the given hq name.
    -- Supports either an exact match or a suffix match.
    hqTypeSearch :: SearchStrategy -> HQ'.HashQualified Name -> Sqlite.Transaction (Set Reference)
    hqTypeSearch searchStrat hqName = do
      case hqName of
        HQ'.NameOnly name -> do
          let fqn = Path.prefixName (Path.Absolute path) name
          namedRefs <-
            case searchStrat of
              ExactMatch -> Ops.typeRefsForExactName rootHash (coerce $ Name.reverseSegments fqn)
              SuffixMatch -> Ops.typeNamesBySuffix rootHash pathText (coerce $ Name.reverseSegments name)
          namedRefs
            & fmap (Cv.reference2to1 . NamedRef.ref)
            & Set.fromList
            & pure
        HQ'.HashQualified name sh -> do
          let fqn = Path.prefixName (Path.Absolute path) name
          -- Hash-qualified: start from the hash, then filter by name.
          typeRefs <- typeReferencesByShortHash sh
          Set.forMaybe typeRefs \typeRef -> do
            matches <- Ops.typeNamesForRefWithinNamespace rootHash pathText (Cv.reference1to2 typeRef) (Just . coerce $ Name.reverseSegments name)
            -- Return a valid ref if at least one match was found. Require that it be an exact
            -- match if specified.
            if any (\n -> coerce (Name.reverseSegments fqn) == n || searchStrat /= ExactMatch) matches
              then pure (Just typeRef)
              else pure Nothing
    reversedSegmentsToName :: NamedRef.ReversedSegments -> Name
    reversedSegmentsToName = Name.fromReverseSegments . coerce
-- | Look up types in the codebase by short hash, and include builtins.
typeReferencesByShortHash :: SH.ShortHash -> Sqlite.Transaction (Set Reference)
typeReferencesByShortHash sh = do
  fromCodebase <- Codebase.typeReferencesByPrefix sh
  -- Builtins aren't in sqlite, so they are matched by filtering in memory.
  let fromBuiltins =
        Set.filter
          (\r -> sh == Reference.toShortHash r)
          Builtin.intrinsicTypeReferences
  pure (fromBuiltins <> Set.map Reference.DerivedId fromCodebase)
-- | Look up terms in the codebase by short hash, and include builtins.
termReferentsByShortHash :: Codebase m v a -> SH.ShortHash -> Sqlite.Transaction (Set Referent)
termReferentsByShortHash codebase sh = do
  fromCodebase <- Codebase.termReferentsByPrefix codebase sh
  -- Builtins aren't in sqlite, so they are matched by filtering in memory.
  let fromBuiltins =
        Set.map Referent.Ref $
          Set.filter
            (\r -> sh == Reference.toShortHash r)
            Builtin.intrinsicTermReferences
  pure (fromBuiltins <> Set.mapMonotonic (over Referent.reference_ Reference.DerivedId) fromCodebase)
-- | Resolves a shorthash into any possible matches, across both terms and
-- types (builtins included).
resolveShortHash :: Codebase m v a -> SH.ShortHash -> Sqlite.Transaction (Set LD.LabeledDependency)
resolveShortHash codebase sh = do
  termMatches <- termReferentsByShortHash codebase sh
  typeMatches <- typeReferencesByShortHash sh
  pure (Set.map LD.TermReferent termMatches <> Set.map LD.TypeReference typeMatches)

View File

@ -0,0 +1,126 @@
-- | This module contains implementations of Backend methods which are specialized for Share.
-- We should likely move them to the Share repository eventually, but for now it's much easier
-- to ensure they're resilient to refactors and changes in the Backend API if they live here.
--
-- Perhaps we'll move them when the backing implementation switches to postgres.
module Unison.Server.Share.Definitions (definitionForHQName) where
import Control.Lens hiding ((??))
import Control.Monad.Except
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified U.Codebase.Causal as V2Causal
import U.Codebase.HashTags (CausalHash (..))
import Unison.Codebase (Codebase)
import qualified Unison.Codebase as Codebase
import Unison.Codebase.Path (Path)
import qualified Unison.Codebase.Runtime as Rt
import qualified Unison.Debug as Debug
import qualified Unison.HashQualified as HQ
import qualified Unison.LabeledDependency as LD
import Unison.Name (Name)
import Unison.Parser.Ann (Ann)
import Unison.Prelude
import qualified Unison.PrettyPrintEnv as PPE
import qualified Unison.PrettyPrintEnvDecl as PPED
import qualified Unison.PrettyPrintEnvDecl.Sqlite as PPESqlite
import Unison.Reference (TermReference)
import qualified Unison.Reference as Reference
import qualified Unison.Referent as Referent
import Unison.Server.Backend hiding (renderDocRefs)
import qualified Unison.Server.Backend as Backend
import qualified Unison.Server.Doc as Doc
import qualified Unison.Server.NameSearch.Sqlite as SqliteNameSearch
import Unison.Server.Types
import Unison.Symbol (Symbol)
import qualified Unison.Syntax.HashQualified as HQ (toText)
import Unison.Util.Pretty (Width)
-- | Renders a definition for the given name or hash alongside its documentation.
--
-- Phases: (1) relocate the query and perspective to the project root,
-- (2) find and render matching term/type definitions via a scoped name search,
-- (3) evaluate and render any docs attached to each result.
definitionForHQName ::
  -- | The path representing the user's current namesRoot.
  -- Searches will be limited to definitions within this path, and names will be relative to
  -- this path.
  Path ->
  -- | The root branch to use
  CausalHash ->
  Maybe Width ->
  -- | Whether to suffixify bindings in the rendered syntax
  Suffixify ->
  -- | Runtime used to evaluate docs. This should be sandboxed if run on the server.
  Rt.Runtime Symbol ->
  Codebase IO Symbol Ann ->
  -- | The name, hash, or both, of the definition to display.
  HQ.HashQualified Name ->
  Backend IO DefinitionDisplayResults
definitionForHQName perspective rootHash renderWidth suffixifyBindings rt codebase perspectiveQuery = do
  -- Resolve the root causal and re-anchor the perspective/query at the project root.
  result <- liftIO . Codebase.runTransaction codebase $ do
    shallowRoot <- resolveCausalHashV2 (Just rootHash)
    shallowBranch <- V2Causal.value shallowRoot
    Backend.relocateToProjectRoot perspective perspectiveQuery shallowBranch >>= \case
      Left err -> pure $ Left err
      Right (namesRoot, locatedQuery) -> pure $ Right (shallowRoot, namesRoot, locatedQuery)
  (shallowRoot, namesRoot, query) <- either throwError pure result
  Debug.debugM Debug.Server "definitionForHQName: (namesRoot, query)" (namesRoot, query)
  -- Bias towards both relative and absolute path to queries,
  -- This allows us to still bias towards definitions outside our namesRoot but within the
  -- same tree;
  -- e.g. if the query is `map` and we're in `base.trunk.List`,
  -- we bias towards `map` and `.base.trunk.List.map` which ensures we still prefer names in
  -- `trunk` over those in other releases.
  -- ppe which returns names fully qualified to the current namesRoot, not to the codebase root.
  let biases = maybeToList $ HQ.toName query
  let rootBranchHash = V2Causal.valueHash shallowRoot
  -- Builds a pretty-print env for a dependency set, biased towards the queried name.
  let ppedBuilder deps = fmap (PPED.biasTo biases) . liftIO . Codebase.runTransaction codebase $ PPESqlite.ppedForReferences rootBranchHash namesRoot deps
  let nameSearch = SqliteNameSearch.scopedNameSearch codebase rootBranchHash namesRoot
  dr@(DefinitionResults terms types misses) <- liftIO $ Codebase.runTransaction codebase do
    definitionsBySuffixes codebase nameSearch DontIncludeCycles [query]
  Debug.debugM Debug.Server "definitionForHQName: found definitions" dr
  let width = mayDefaultWidth renderWidth
  -- Fetch and render the docs attached to a definition's name.
  let docResults :: Name -> Backend IO [(HashQualifiedName, UnisonHash, Doc.Doc)]
      docResults name = do
        docRefs <- liftIO $ docsForDefinitionName codebase nameSearch name
        renderDocRefs ppedBuilder width codebase rt docRefs
  let drDeps = definitionResultsDependencies dr
  termAndTypePPED <- ppedBuilder drDeps
  let fqnTermAndTypePPE = PPED.unsuffixifiedPPE termAndTypePPED
  -- Render each found type alongside its docs.
  typeDefinitions <-
    ifor (typesToSyntax suffixifyBindings width termAndTypePPED types) \ref tp -> do
      let hqTypeName = PPE.typeNameOrHashOnly fqnTermAndTypePPE ref
      docs <- maybe (pure []) docResults (HQ.toName hqTypeName)
      mkTypeDefinition codebase termAndTypePPED namesRoot shallowRoot width ref docs tp
  -- Render each found term alongside its docs.
  termDefinitions <-
    ifor (termsToSyntax suffixifyBindings width termAndTypePPED terms) \reference trm -> do
      let referent = Referent.Ref reference
      let hqTermName = PPE.termNameOrHashOnly fqnTermAndTypePPE referent
      docs <- maybe (pure []) docResults (HQ.toName hqTermName)
      mkTermDefinition codebase termAndTypePPED namesRoot shallowRoot width reference docs trm
  -- Key results by reference text; misses are echoed back as query text.
  let renderedDisplayTerms = Map.mapKeys Reference.toText termDefinitions
      renderedDisplayTypes = Map.mapKeys Reference.toText typeDefinitions
      renderedMisses = fmap HQ.toText misses
  pure $
    DefinitionDisplayResults
      renderedDisplayTerms
      renderedDisplayTypes
      renderedMisses
-- | Evaluate and render a batch of doc references, returning for each doc its
-- best display name, its hash, and the rendered doc body.
renderDocRefs ::
  PPEDBuilder ->
  Width ->
  Codebase IO Symbol Ann ->
  Rt.Runtime Symbol ->
  [TermReference] ->
  Backend IO [(HashQualifiedName, UnisonHash, Doc.Doc)]
-- Nothing to evaluate; avoid building a pretty-print env entirely.
renderDocRefs _ppedBuilder _width _codebase _rt [] = pure []
renderDocRefs ppedBuilder width codebase rt docRefs = do
  -- Evaluate every doc, keeping its reference alongside the result.
  evaluatedDocs <- traverse (\ref -> (ref,) <$> liftIO (Backend.evalDocRef rt codebase ref)) docRefs
  -- The pretty-print env must cover both the docs' dependencies and the doc refs themselves.
  let allDeps = foldMap (Doc.dependencies . snd) evaluatedDocs <> Set.fromList (map LD.TermReference docRefs)
  pped <- ppedBuilder allDeps
  let renderOne (ref, doc) =
        ( bestNameForTerm @Symbol (PPED.suffixifiedPPE pped) width (Referent.Ref ref),
          Reference.toText ref,
          Doc.renderDoc pped doc
        )
  pure (map renderOne evaluatedDocs)

-- | Builds a pretty-print env decl covering a given set of dependencies.
type PPEDBuilder = Set LD.LabeledDependency -> Backend IO PPED.PrettyPrintEnvDecl

View File

@ -173,6 +173,9 @@ reference (Segment _ el) =
-- | Flatten syntax text to a plain 'String', discarding all annotations.
toPlain :: SyntaxText -> String
toPlain (AnnotatedText at) = concatMap segment (toList at)
-- | Flatten syntax text to plain 'Text', discarding all annotations.
toPlainText :: SyntaxText -> Text
toPlainText st = Text.pack (toPlain st)
-- HTML -----------------------------------------------------------------------
toHtml :: SyntaxText -> Html ()

View File

@ -21,6 +21,8 @@ library
Unison.Server.CodebaseServer
Unison.Server.Doc
Unison.Server.Doc.AsHtml
Unison.Server.Doc.Markdown.Render
Unison.Server.Doc.Markdown.Types
Unison.Server.Endpoints.DefinitionSummary
Unison.Server.Endpoints.FuzzyFind
Unison.Server.Endpoints.GetDefinitions
@ -28,10 +30,14 @@ library
Unison.Server.Endpoints.NamespaceListing
Unison.Server.Endpoints.Projects
Unison.Server.Errors
Unison.Server.NameSearch
Unison.Server.NameSearch.FromNames
Unison.Server.NameSearch.Sqlite
Unison.Server.Orphans
Unison.Server.QueryResult
Unison.Server.SearchResult
Unison.Server.SearchResult'
Unison.Server.Share.Definitions
Unison.Server.Syntax
Unison.Server.Types
Unison.Sync.API
@ -121,6 +127,7 @@ library
, utf8-string
, vector
, wai
, wai-cors
, warp
, yaml
default-language: Haskell2010

View File

@ -15,6 +15,13 @@ to `Tests.check` and `Tests.checkEqual`).
.> add
```
```ucm:hide
.> load unison-src/builtin-tests/tcp-tests.u
.> add
.> load unison-src/builtin-tests/tls-tests.u
.> add
```
```ucm:hide
.> load unison-src/builtin-tests/tests.u
.> add

View File

@ -17,6 +17,13 @@ to `Tests.check` and `Tests.checkEqual`).
.> add
```
```ucm:hide
.> load unison-src/builtin-tests/tcp-tests.u
.> add
.> load unison-src/builtin-tests/tls-tests.u
.> add
```
```ucm:hide
.> load unison-src/builtin-tests/tests.u
.> add

View File

@ -0,0 +1,48 @@
-- True when forcing `fn` raises (catchAll yields Left), false when it succeeds.
shouldFail fn =
  result = catchAll fn
  isLeft result
-- TCP socket test suite: basic client behavior against example.com plus
-- misuse cases (bad port, send-after-close, send on a listener), then the
-- local server/client round-trip.
tcp.tests = do
  check "connects to example.com" do
    socket = Socket.client (HostName "example.com") (Port "80")
    Socket.send socket (toUtf8 "GET /index.html HTTP/1.0\r\n\r\n")
    response = Socket.receive socket
    Socket.close socket
    contains "HTTP/1.0 200 OK" (base.Text.fromUtf8 response)
  check "rejects invalid port" do shouldFail do Socket.client (HostName "example.com") (Port "what")
  check "no send after close" do shouldFail do
    socket = Socket.client (HostName "example.com") (Port "80")
    Socket.close socket
    Socket.send socket (toUtf8 "GET /index.html HTTP/1.0\r\n\r\n")
  check "no send on listener" do shouldFail do
    match Socket.server None (Port "0") with
      BoundServerSocket socket -> Socket.send socket (toUtf8 "what")
  !testServerAndClient
-- Binds a server on an ephemeral port, forks a thread to serve one
-- connection, then connects a client over loopback and checks that each
-- side received exactly what the other sent.
testServerAndClient = do
  setup = catchAll do
    socket = Socket.listen (server None (Port "0"))
    -- Port "0" means the OS picks an ephemeral port; recover it here.
    port = match socket with ListeningServerSocket sock -> Socket.port sock
    (socket, port)
  match setup with
    Left exn ->
      Tests.fail "Unable to bind and listen on a socket" ""
    Right (socket, port) ->
      -- Accept a single connection, echo back a fixed reply, and return
      -- whatever the client sent.
      serve = do
        sock = Socket.accept socket
        data = Socket.receive sock
        Socket.send sock (toUtf8 "from server")
        base.Text.fromUtf8 data
      serveResult = !Promise.new
      _ = fork do Promise.write serveResult (catchAll serve)
      data = catchAll do
        clientSocket = Socket.client (HostName "localhost") (Port (Nat.toText port))
        Socket.send clientSocket (toUtf8 "from client")
        base.Text.fromUtf8 (Socket.receive clientSocket)
      checkEqual "Server received data" (Promise.read serveResult) (Right "from client")
      checkEqual "Client received data" data (Right "from server")

View File

@ -1,59 +1,13 @@
-- True when forcing `fn` raises (catchAll yields Left), false when it succeeds.
shouldFail fn = isLeft <| catchAll fn
-- Entry point: runs every builtin test suite in sequence under Tests.main.
tests : '{IO,Exception} ()
tests = Tests.main do
  !tls.tests
  !crypto.hash.tests
  !hmac.tests
  !concurrency.tests
  !tcp.tests
  check "bug is caught" do shouldFail do bug ()
-- TCP socket test suite: basic client behavior against example.com, misuse
-- cases, then an inline server/client round-trip over loopback.
tcp.tests = do
  check "connects to example.com" do
    socket = Socket.client (HostName "example.com") (Port "80")
    Socket.send socket (toUtf8 "GET /index.html HTTP/1.0\r\n\r\n")
    response = Socket.receive socket
    Socket.close socket
    contains "HTTP/1.0 200 OK" (base.Text.fromUtf8 response)
  check "rejects invalid port" do shouldFail do Socket.client (HostName "example.com") (Port "what")
  check "no send after close" do shouldFail do
    socket = Socket.client (HostName "example.com") (Port "80")
    Socket.close socket
    Socket.send socket (toUtf8 "GET /index.html HTTP/1.0\r\n\r\n")
  check "no send on listener" do shouldFail do
    match Socket.server None (Port "0") with
      BoundServerSocket socket -> Socket.send socket (toUtf8 "what")
  -- Bind a server on an ephemeral port; Port "0" lets the OS choose.
  setup = catchAll do
    socket = Socket.listen (server None (Port "0"))
    port = match socket with
      ListeningServerSocket sock -> Socket.port sock
    (socket, port)
  match setup with
    Left exn ->
      Debug.trace "Setup failed" exn
      Tests.fail "Unable to bind and listen on a socket" ""
    Right (socket, port) ->
      -- Accept one connection, echo a fixed reply, return what was received.
      serve = do
        sock = Socket.accept socket
        data = Socket.receive sock
        Socket.send sock (toUtf8 "from server")
        base.Text.fromUtf8 data
      serveResult = !Promise.new
      _ = fork do Promise.write serveResult (catchAll serve)
      data = catchAll do
        clientSocket = Socket.client (HostName "localhost") (Port (Nat.toText port))
        Socket.send clientSocket (toUtf8 "from client")
        base.Text.fromUtf8 (Socket.receive clientSocket)
      checkEqual "Server received data" (Promise.read serveResult) (Right "from client")
      checkEqual "Client received data" data (Right "from server")
crypto.hash.tests = do
hash alg = hashBytes alg (toUtf8 "")
tag name = name ++ " hashBytes"

View File

@ -0,0 +1,111 @@
-- generated with:
-- openssl req -newkey rsa:2048 -subj '/CN=test.unison.cloud/O=Unison/C=US' -nodes -keyout key.pem -x509 -days 3650 -out cert.pem
-- Test-only PKCS#8 private key matching `selfSignedCert`; deliberately checked in,
-- it protects nothing.
selfSignedKey = "-----BEGIN PRIVATE KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDtV0Lqk9i5DKJG\ne5zwDFuxHxSxhOygCuq6Jl4xonsjl4hdvXxUUiuqxGGbv4x9HSvavpHwyriGiIRQ\noIjanWiNK9Jp6VDYWOvErnTG/+Rfm1vCoUKQvn8lDrD9knSPUoTz3Cz7JS8FE/rr\nFR3IRyXa0gpXmvIwX16SeCS/Lb/Le9o1HJh9DrkxVyoFq3zlX1OE0AVV0a014IDB\nNprqLITwiVzyDPQkP8rbJF9WPI5afzW8+3+v5UanIFknOOPaJl8pf3gmqI5g8fxk\n/SSMlPgnLd1Fi7h90gBygdpJw3do3/ZA1IOvmFQ+LXE1xtqU1Ay3f3At3DiJgTxP\n8mwBYdtdAgMBAAECggEBAMo85QRF3xIvtcchZeUWYrtWpKdvgMIPC1x7fSAGN69o\nXAakg+DF8/ebRyET435o8QmAAZOQ6hOZGEYrxPGj14cTpEQjT4RKoPwDO/al7c+Z\n7mK2TqZP7L+C+UXZGgFWa3vwTVPjp2FIWTMf1zTli1geSjnECkM1wLxGK+nL7fZQ\nesHXPkJJG5AqzA84bJ/fY5OQ/dfcCxnHEv5XpHPq6VFgXg7jtcNbr1R9EBiQfreN\nU7Hd38R77jYjL1fT71HwEUQ0cwavfxTu0jZFXJxEC7CC1J65QXUguZXLf9vwgSB0\nm0gZgeJlQ905bDJrxUcqCFxdROy/SndP6qFnJSCsfwECgYEA+2cld/WCieUGstJd\njsIrJ6f/e+uuOSTnGTtnsBX6KoiHdcg3sVVVK18xI9El9V+YX9SjN37XeGFe/Wzu\ngE3M4A3Jqz7cgdNj/PaKjqQwJWNbcJnL5ku6eQvcAIpc5gAZxXVCPIbY1ZpeYcsh\nMwr3cOEpQu8UVFBbn/OeJ1r07dECgYEA8a5J3Ls5PSxXq8NDrkAxt3vUJIWLGQQJ\nbV2aGDI2XP2N+vh2WML9rlFeyyBOeRxK9TsErVOaEeOcQZV97//fzIGxCU+SXyiC\nnVMXT2U1mzOu5qPfzLO5Ga4sunxqKDman6NM2IPw2NPA7zMWNQMEIHAerwYZzjm5\nB5tFcMA8e80CgYBgF8rwkTz2LD5lN5dfK8SHAeXbnfgYC4zxzg0R9zSJ8WmlkYQI\nGk/VpisIP7c8lO+PIZ3JZohBkSZXw71d+V7n/R0qgXqTfRNo62uGnidxAws+fOq8\n+hEql2feJQThPQScvvc0X26eJsUQqC3mbripwsacuPmSSKzc9Kds741TIQKBgQCd\nXnG2CytATAliTKlbY218HmOKzHJAfcJttk9KhhekAW5cB0F4lq98vHtPJOA0OFoO\nyLlI63EdSOpMQj1Y83IUxjYy699Rmx1BuAMrral0P/kZMYfe0QAsWp/BZpXxT2EB\npeG58l/3sBqnJsrFBgu/24H/UaeoAyoaa96Rhntb2QKBgQCSEkcUnzTvoUyMFN14\n8NttxOUZiSsCmgoXk6Rk2QKyCPsJocGS4BffGt3kOMcotz/0YsvM1TBBLB7vIaAy\nE1eWLBxK4yYeS8dKXwiCZn170yaJyjoBwZC1RgqQiKa5Y22Di7KjJoMa4Da8Tk4z\nFbE5dBApbLhvNTyQ7BHZxlfmdg==\n-----END PRIVATE KEY-----"
-- Self-signed x509 certificate (CN=test.unison.cloud) used by the TLS tests below.
selfSignedCert = "-----BEGIN CERTIFICATE-----\nMIIDVTCCAj2gAwIBAgIUdMNT5sYMfDJYH48Rh8LrlN+5wwgwDQYJKoZIhvcNAQEL\nBQAwOjEaMBgGA1UEAwwRdGVzdC51bmlzb24uY2xvdWQxDzANBgNVBAoMBlVuaXNv\nbjELMAkGA1UEBhMCVVMwHhcNMjIwMTI0MjAxNzQ2WhcNMzIwMTIyMjAxNzQ2WjA6\nMRowGAYDVQQDDBF0ZXN0LnVuaXNvbi5jbG91ZDEPMA0GA1UECgwGVW5pc29uMQsw\nCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAO1XQuqT\n2LkMokZ7nPAMW7EfFLGE7KAK6romXjGieyOXiF29fFRSK6rEYZu/jH0dK9q+kfDK\nuIaIhFCgiNqdaI0r0mnpUNhY68SudMb/5F+bW8KhQpC+fyUOsP2SdI9ShPPcLPsl\nLwUT+usVHchHJdrSClea8jBfXpJ4JL8tv8t72jUcmH0OuTFXKgWrfOVfU4TQBVXR\nrTXggME2muoshPCJXPIM9CQ/ytskX1Y8jlp/Nbz7f6/lRqcgWSc449omXyl/eCao\njmDx/GT9JIyU+Cct3UWLuH3SAHKB2knDd2jf9kDUg6+YVD4tcTXG2pTUDLd/cC3c\nOImBPE/ybAFh210CAwEAAaNTMFEwHQYDVR0OBBYEFIfwxpuqtqxfCpaJGW32jH2J\nNbnYMB8GA1UdIwQYMBaAFIfwxpuqtqxfCpaJGW32jH2JNbnYMA8GA1UdEwEB/wQF\nMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAKh7EDo5XjSd6J190WGH3V8v49J0Sh8M\nP7APe1eL8eTkW1Vh7/QCOhRpkSnyCz2OxJjjeFVAsCO3aLxlRM6wQZQKXu45iM2U\niPmv7ECS5xUn7LqRZd/JG1P6jvRPtBC1+oqA+NNDe27wzQp3rWyDG3pWZga8jJfW\nq+2xQ+s6GfzszxYZ/8MLn4zaUSymnOA+70yQ8czXkSO7MT2jJ7QDX8jxuJPZZARW\nuXeAYPRqD+b4MjdBATEtxgPTDWEi8gtfHFGUgInFhD4hOu+D3NLiE6lfR5brUqpQ\nZ4v8prCI8OjGSUx1dIJhqQHB5O0vdaxO0hkVdfqDVE93UrGBPwBRDlo=\n-----END CERTIFICATE-----"
-- TLS test suite: cert/key decoding, self-signed server round-trips, and
-- live handshakes against example.com (success, wrong hostname, no TLS).
tls.tests = do
  check "decoding a cert should work" do isRight (decodeCert (toUtf8 selfSignedCert))
  check "decoding a private key should work" do 1 == List.size (decodePrivateKey (toUtf8 selfSignedKey))
  check "decoding an invalid private key should ignore" do 0 == List.size (decodePrivateKey (toUtf8 "not a private key"))
  check "decoding an invalid cert should fail" do isLeft (decodeCert (toUtf8 "not a cert"))
  !testConnectSelfSigned
  expectError "self signed wrong host" "NameMismatch" testConnectSelfSignedWrongHost
  check "connects to example.com over tls" do
    socket = Socket.client (HostName "example.com") (Port "443")
    config = ClientConfig.default (HostName "example.com") ""
    tls = base.IO.net.Tls.newClient config socket
    conn = base.IO.net.Tls.handshake tls
    TlsSocket.send conn (toUtf8 "GET /index.html HTTP/1.0\r\n\r\n")
    response = TlsSocket.receive conn
    TlsSocket.close conn
    contains "HTTP/1.0 200 OK" (fromUtf8 response)
  expectError "wrong host example.com fails" "NameMismatch" do
    socket = Socket.client (HostName "example.com") (Port "443")
    -- Hostname in config intentionally mismatches the server's certificate.
    config = ClientConfig.default (HostName "examplez.com") ""
    tls = base.IO.net.Tls.newClient config socket
    _ = base.IO.net.Tls.handshake tls
    ()
  expectError "trying to connect to example.com:443 without tls fails" "Connection reset" do
    socket = Socket.client (HostName "example.com") (Port "443")
    Socket.send socket (toUtf8 "GET /index.html HTTP/1.0\r\n\r\n")
    Socket.receive socket
-- Round-trips data through a local TLS server using the self-signed cert;
-- the client trusts exactly that cert, so the handshake should succeed.
testConnectSelfSigned = do
  portPromise = Promise.new ()
  toSend = "12345"
  -- Server
  serverResult = defer do serverThread portPromise toSend
  -- Client
  received = clientThread "test.unison.cloud" portPromise
  -- Re-raise any exception the server thread caught.
  _ = Either.toException <| Promise.read serverResult
  -- Check it
  checkEqual "should have received what we've sent" toSend received
-- Runs `fn`, passing when it raises a Failure whose message contains `text`;
-- fails otherwise (wrong message, or no exception raised at all).
expectError msg text fn = match catchAll fn with
  Left (Failure _ message _) ->
    if Text.contains text message
    then
      Tests.pass msg
    else
      Tests.fail msg ("Unexpected exception found " ++ message)
  Right _ ->
    Tests.fail msg "Expected exception, none found"
-- Connects to the self-signed server while expecting a different hostname;
-- callers (via expectError) assert the handshake fails with NameMismatch.
testConnectSelfSignedWrongHost = do
  -- Server
  portPromise = Promise.new ()
  toSend = "12345"
  serverResult = defer do serverThread portPromise toSend
  -- Client
  _ = clientThread "some.other.hostname" portPromise
  ()
-- Runs a TLS client against the port published through `portPromise`,
-- trusting only the self-signed cert; raises on any failure.
clientThread host portPromise =
  cert = match decodeCert (toUtf8 selfSignedCert) with
    Right c -> c
    Left _ -> raise (failure "Bad cert" selfSignedCert)
  Either.toException !(testClient (Some cert) host portPromise)
-- Runs `comp` on a forked thread, returning a Promise that will hold its
-- result: Right on success, Left for any caught exception.
defer comp =
  result = Promise.new ()
  _ = fork do Promise.write result (catchAll comp)
  result
-- Unwraps a Some, raising a Failure carrying `message` when given None.
Optional.toException message = cases
  Some x -> x
  None -> raise (failure message 0)
-- Listens on an ephemeral loopback port (published via `portPromise`),
-- accepts a single TLS connection using the self-signed cert/key, sends
-- `toSend`, and closes the connection.
serverThread portPromise toSend =
  cert = Either.toException (decodeCert (toUtf8 selfSignedCert))
  key = Optional.toException "No private key decoded" <| List.head (decodePrivateKey (toUtf8 selfSignedKey))
  tlsconfig = Tls.ServerConfig.default [cert] key
  -- Port "0" lets the OS pick a free port; recover it and publish it below.
  sock = Socket.listen (server (Some (HostName ("127" ++ ".0.0.1"))) (Port "0"))
  port = match sock with ListeningServerSocket sock -> Socket.port sock
  _ = Promise.write portPromise port
  sock' = Socket.accept sock
  -- attach TLS to our TCP connection
  tls = Tls.newServer tlsconfig sock'
  tlsock = net.Tls.handshake tls
  TlsSocket.send tlsock (toUtf8 toSend)
  TlsSocket.close tlsock
-- Connects over loopback to the port read from `portVar`, performs a TLS
-- handshake expecting `hostname`'s certificate (optionally trusting only
-- `cert`), and returns the first message received from the server.
testClient : Optional SignedCert -> Text -> Promise Nat -> '{IO} Either Failure Text
testClient cert hostname portVar _ = catch do
  use base.IO.net
  -- create a client that will expect a cert from the given hostname (CN)
  defaultClient = (Tls.ClientConfig.default (HostName.HostName hostname) "")
  -- if we were passed a certificate to trust, it is the only certificate we trust
  tlsconfig = match cert with
    None -> defaultClient
    Some (cert) -> defaultClient |> ClientConfig.certificates.set [cert]
  -- create a tcp connection with the server
  sock = clientSocket (HostName.HostName ("127" ++ ".0.0.1")) (Port.Port (Nat.toText (Promise.read portVar)))
  -- attach the TLS client to the TCP socket
  tls = Tls.newClient tlsconfig sock
  -- verify that the server presents us with a certificate chain for
  -- test.unison.cloud originating with a certificate we trust, and
  -- that the server can use a compatible TLS version and cipher
  tlsock = Tls.handshake tls
  -- -- receive a message from the server
  fromUtf8 (TlsSocket.receive tlsock)

View File

@ -0,0 +1,94 @@
```ucm:hide
.> builtins.mergeio
```
```unison:hide
otherDoc : a -> Doc2
otherDoc _ = {{ yo }}
otherTerm : Nat
otherTerm = 99
fulldoc : Doc2
fulldoc =
use Nat +
{{
Heres some text with a
soft line break
hard line break
Here's a cool **BOLD** __italic__ ~~strikethrough~~ thing with an inline code block ''1 + 2''
# Heading
## Heading 2
Term Link: {otherTerm}
Type Link: {type Optional}
Term source:
@source{term}
Term signature:
@signature{term}
* List item
Inline code:
`` 1 + 2 ``
` "doesn't typecheck" + 1 `
[Link](https://unison-lang.org)
![Image](https://share-next.unison-lang.org/static/unison-logo-circle.png)
Horizontal rule
---
Video
{{
Special
(Embed
(Any (Video [MediaSource "test.mp4" None] [("poster", "test.png")])))
}}
Transclusion/evaluation:
{{ otherDoc (a -> Word a) }}
---
The following markdown features aren't supported by the Doc format yet, but maybe will someday
> Block quote
Table
| Header 1 | Header 2 |
| -------- | -------- |
| Cell 1 | Cell 2 |
Indented Code block
}}
```
```ucm:hide
.> add
```
```ucm
.> debug.doc-to-markdown fulldoc
```

View File

@ -0,0 +1,159 @@
```unison
otherDoc : a -> Doc2
otherDoc _ = {{ yo }}
otherTerm : Nat
otherTerm = 99
fulldoc : Doc2
fulldoc =
use Nat +
{{
Heres some text with a
soft line break
hard line break
Here's a cool **BOLD** __italic__ ~~strikethrough~~ thing with an inline code block ''1 + 2''
# Heading
## Heading 2
Term Link: {otherTerm}
Type Link: {type Optional}
Term source:
@source{term}
Term signature:
@signature{term}
* List item
Inline code:
`` 1 + 2 ``
` "doesn't typecheck" + 1 `
[Link](https://unison-lang.org)
![Image](https://share-next.unison-lang.org/static/unison-logo-circle.png)
Horizontal rule
---
Video
{{
Special
(Embed
(Any (Video [MediaSource "test.mp4" None] [("poster", "test.png")])))
}}
Transclusion/evaluation:
{{ otherDoc (a -> Word a) }}
---
The following markdown features aren't supported by the Doc format yet, but maybe will someday
> Block quote
Table
| Header 1 | Header 2 |
| -------- | -------- |
| Cell 1 | Cell 2 |
Indented Code block
}}
```
```ucm
.> debug.doc-to-markdown fulldoc
Heres some text with a soft line break
hard line break
Here's a cool **BOLD** _italic_ ~~strikethrough~~ thing with an inline code block `1 + 2`
# Heading
## Heading 2
Term Link: `otherTerm`
Type Link: `Optional`
Term source:
```unison
term : '{g} a -> Doc2.Term
term a = Term.Term (Any a)
```
Term signature:
```unison
term : '{g} a -> Doc2.Term
```
- List item
Inline code:
`1 Nat.+ 2`
` "doesn't typecheck" + 1 `
[Link](https://unison-lang.org)
![Image](https://share-next.unison-lang.org/static/unison-logo-circle.png)
Horizontal rule
---
Video
![](test.mp4)
Transclusion/evaluation:
yo
---
The following markdown features aren't supported by the Doc format yet, but maybe will someday
> Block quote
Table
| Header 1 | Header 2 | | -------- | -------- | | Cell 1 | Cell 2 |
Indented Code block
```