Merge remote-tracking branch 'origin/trunk' into topic/rehash-codebase

# Conflicts:
#	unison-core/src/Unison/Reference.hs
#	unison-core/src/Unison/Referent'.hs
Arya Irani 2021-11-03 10:48:13 -04:00
commit 4a7b0f5604
27 changed files with 658 additions and 363 deletions


@ -34,6 +34,9 @@ module U.Codebase.Sqlite.Operations
loadDeclByReference,
getDeclTypeById,
-- * terms/decls
getCycleLen,
-- * patches
savePatch,
loadPatchById,
@ -76,9 +79,6 @@ module U.Codebase.Sqlite.Operations
addTypeMentionsToIndexForTerm,
termsMentioningType,
-- * delete me
getCycleLen,
-- * low-level stuff
liftQ,
loadDbBranchByObjectId,
@ -505,21 +505,19 @@ decodeDeclFormat = getFromBytesOr ErrDeclFormat S.getDeclFormat
decodeDeclElement :: Err m => Word64 -> ByteString -> m (LocalIds, S.Decl.Decl Symbol)
decodeDeclElement i = getFromBytesOr (ErrDeclElement i) (S.lookupDeclElement i)
-- * legacy conversion helpers
getCycleLen :: EDB m => H.Hash -> m Word64
getCycleLen :: EDB m => H.Hash -> m (Maybe Word64)
getCycleLen h = do
when debug $ traceM $ "\ngetCycleLen " ++ (Text.unpack . Base32Hex.toText $ H.toBase32Hex h)
runMaybeT (primaryHashToExistingObjectId h)
>>= maybe (throwError $ LegacyUnknownCycleLen h) pure
>>= liftQ . Q.loadObjectById
-- todo: decodeComponentLengthOnly is unintentionally a hack that relies on
-- the fact the two things that references can refer to (term and decl
-- components) have the same basic serialized structure: first a format
-- byte that is always 0 for now, followed by a framed array representing
-- the strongly-connected component. :grimace:
>>= decodeComponentLengthOnly
>>= pure . fromIntegral
runMaybeT $
-- actually want Nothing in case of non term/decl component hash
MaybeT (anyHashToMaybeObjectId h)
>>= liftQ . Q.loadObjectById
-- todo: decodeComponentLengthOnly is unintentionally a hack that relies on
-- the fact the two things that references can refer to (term and decl
-- components) have the same basic serialized structure: first a format
-- byte that is always 0 for now, followed by a framed array representing
-- the strongly-connected component. :grimace:
>>= decodeComponentLengthOnly
-- | Get the 'C.DeclType.DeclType' of a 'C.Reference.Id'.
getDeclTypeById :: EDB m => C.Reference.Id -> m C.Decl.DeclType


@ -4,6 +4,7 @@ copyright: Copyright (C) 2013-2021 Unison Computing, PBC and contributors
default-extensions:
- ApplicativeDo
- BangPatterns
- BlockArguments
- DeriveFunctor
- DeriveGeneric
@ -17,6 +18,7 @@ default-extensions:
- NamedFieldPuns
- OverloadedStrings
- PatternSynonyms
- RankNTypes
- ScopedTypeVariables
- TupleSections
- TypeApplications


@ -26,6 +26,14 @@ module Unison.Codebase
dependentsOfComponent,
isTerm,
isType,
componentReferencesForReference,
-- * Unsafe variants
unsafeGetTerm,
unsafeGetTermWithType,
unsafeGetTypeDeclaration,
unsafeGetTypeOfTermById,
unsafeGetComponentLength,
)
where
@ -60,6 +68,7 @@ import qualified Unison.Reference as Reference
import qualified Unison.Referent as Referent
import Unison.Symbol (Symbol)
import Unison.Term (Term)
import qualified Unison.Term as Term
import Unison.Type (Type)
import Unison.Typechecker.TypeLookup (TypeLookup (TypeLookup))
import qualified Unison.Typechecker.TypeLookup as TL
@ -180,6 +189,12 @@ getTypeOfReferent c (Referent.Ref r) = getTypeOfTerm c r
getTypeOfReferent c (Referent.Con r cid _) =
getTypeOfConstructor c r cid
componentReferencesForReference :: Monad m => Codebase m v a -> Reference -> m (Set Reference)
componentReferencesForReference c = \case
r@Reference.Builtin{} -> pure (Set.singleton r)
Reference.Derived h _i ->
Set.mapMonotonic Reference.DerivedId . Reference.componentFromLength h <$> unsafeGetComponentLength c h
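A note on what this computes (an illustrative aside, not from this commit): for a derived reference, `componentReferencesForReference` looks up the component length and expands the hash to every index in its cycle via the `Reference.componentFromLength` helper added later in this commit. A minimal Haskell sketch, assuming some hash `h :: Hash` is in scope and a component length of 3:

```haskell
import qualified Data.Set as Set
import Unison.Hash (Hash)
import qualified Unison.Reference as Reference

-- A component of (assumed) length 3 rooted at `h` expands to the
-- references at positions 0, 1 and 2 of that cycle.
exampleComponent :: Hash -> Set.Set Reference.Id
exampleComponent h = Reference.componentFromLength h 3
-- == Set.fromList [Reference.Id h 0, Reference.Id h 1, Reference.Id h 2]
```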
-- | The dependents of a builtin type includes the set of builtin terms which
-- mention that type.
dependents :: Functor m => Codebase m v a -> Reference -> m (Set Reference)
@ -256,3 +271,41 @@ viewRemoteBranch ::
viewRemoteBranch codebase ns = runExceptT do
(cleanup, branch, _) <- ExceptT $ viewRemoteBranch' codebase ns
pure (cleanup, branch)
unsafeGetTerm :: (HasCallStack, Monad m) => Codebase m v a -> Reference.Id -> m (Term v a)
unsafeGetTerm codebase rid =
getTerm codebase rid >>= \case
Nothing -> error (reportBug "E520818" ("term " ++ show rid ++ " not found"))
Just term -> pure term
unsafeGetTypeDeclaration :: (HasCallStack, Monad m) => Codebase m v a -> Reference.Id -> m (Decl v a)
unsafeGetTypeDeclaration codebase rid =
getTypeDeclaration codebase rid >>= \case
Nothing -> error (reportBug "E129043" ("type decl " ++ show rid ++ " not found"))
Just decl -> pure decl
unsafeGetTypeOfTermById :: (HasCallStack, Monad m) => Codebase m v a -> Reference.Id -> m (Type v a)
unsafeGetTypeOfTermById codebase rid =
getTypeOfTermImpl codebase rid >>= \case
Nothing -> error (reportBug "E377910" ("type of term " ++ show rid ++ " not found"))
Just ty -> pure ty
unsafeGetComponentLength :: (HasCallStack, Monad m) => Codebase m v a -> Hash -> m Reference.CycleSize
unsafeGetComponentLength codebase h =
getComponentLength codebase h >>= \case
Nothing -> error (reportBug "E713350" ("component with hash " ++ show h ++ " not found"))
Just size -> pure size
-- | Get a term with its type.
--
-- Precondition: the term exists in the codebase.
unsafeGetTermWithType :: (HasCallStack, Monad m) => Codebase m v a -> Reference.Id -> m (Term v a, Type v a)
unsafeGetTermWithType codebase rid = do
term <- unsafeGetTerm codebase rid
ty <-
-- A term is sometimes stored with a type annotation (specifically, when the annotation is different from the
-- inferred type). In this case, we can avoid looking up the type separately.
case term of
Term.Ann' _ ty -> pure ty
_ -> unsafeGetTypeOfTermById codebase rid
pure (term, ty)


@ -22,6 +22,7 @@ import Control.Lens (_5,view)
import Unison.Server.Backend ( DefinitionResults
, ShallowListEntry
, BackendError
, IncludeCycles
)
import Data.Configurator.Types ( Configured )
import qualified Data.Map as Map
@ -108,8 +109,9 @@ data Command
GetDefinitionsBySuffixes
:: Maybe Path
-> Branch m
-> IncludeCycles
-> [HQ.HashQualified Name]
-> Command m i v (Either BackendError (DefinitionResults v))
-> Command m i v (DefinitionResults v)
FindShallow
:: Path.Absolute


@ -191,9 +191,9 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour
let namingScope = Backend.AllNames $ fromMaybe Path.empty mayPath
lift $ Backend.hqNameQuery namingScope branch codebase query
LoadSearchResults srs -> lift $ Backend.loadSearchResults codebase srs
GetDefinitionsBySuffixes mayPath branch query -> do
GetDefinitionsBySuffixes mayPath branch includeCycles query -> do
let namingScope = Backend.AllNames $ fromMaybe Path.empty mayPath
lift . runExceptT $ Backend.definitionsBySuffixes namingScope branch codebase query
lift (Backend.definitionsBySuffixes namingScope branch codebase includeCycles query)
FindShallow path -> lift . runExceptT $ Backend.findShallow codebase path
MakeStandalone ppe ref out -> lift $ do
let cl = Codebase.toCodeLookup codebase


@ -1,14 +1,4 @@
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE TemplateHaskell #-}
module Unison.Codebase.Editor.HandleInput
( loop
@ -648,8 +638,7 @@ loop = do
ShowReflogI -> do
entries <- convertEntries Nothing [] <$> eval LoadReflog
numberedArgs .=
fmap (('#':) . SBH.toString . Output.hash) entries
numberedArgs .= fmap (('#':) . SBH.toString . Output.hash) entries
respond $ ShowReflog entries
where
-- reverses & formats entries, adds synthetic entries when there is a
@ -886,9 +875,9 @@ loop = do
else case Branch._history b of
Causal.One{} ->
respond $ History diffCap acc (EndOfLog . sbh $ Branch.headHash b)
Causal.Merge{..} ->
Causal.Merge{Causal.tails} ->
respond $ History diffCap acc (MergeTail (sbh $ Branch.headHash b) . map sbh $ Map.keys tails)
Causal.Cons{..} -> do
Causal.Cons{Causal.tail} -> do
b' <- fmap Branch.Branch . eval . Eval $ snd tail
let elem = (sbh $ Branch.headHash b, Branch.namesDiff b' b)
doHistory (n+1) b' (elem : acc)
@ -1114,39 +1103,7 @@ loop = do
ns -> pure ns
traverse_ (displayI basicPrettyPrintNames outputLoc) names
ShowDefinitionI outputLoc inputQuery -> do
-- If the query is empty, run a fuzzy search.
query <- case inputQuery of
[] -> do
let fuzzyBranch = case outputLoc of
-- fuzzy finding for 'view' is global
ConsoleLocation{} -> root0
-- fuzzy finding for 'edit's are local to the current branch
LatestFileLocation{} -> currentBranch0
FileLocation{} -> currentBranch0
fuzzySelectTermsAndTypes fuzzyBranch
q -> pure q
res <- eval $ GetDefinitionsBySuffixes (Just currentPath'') root' query
case res of
Left e -> handleBackendError e
Right (Backend.DefinitionResults terms types misses) -> do
let loc = case outputLoc of
ConsoleLocation -> Nothing
FileLocation path -> Just path
LatestFileLocation ->
fmap fst latestFile' <|> Just "scratch.u"
printNames =
Backend.getCurrentPrettyNames (Backend.AllNames currentPath'') root'
ppe = PPE.fromNamesDecl hqLength printNames
unless (null types && null terms) $
eval . Notify $
DisplayDefinitions loc ppe types terms
unless (null misses) $
eval . Notify $ SearchTermsNotFound misses
-- We set latestFile to be programmatically generated, if we
-- are viewing these definitions to a file - this will skip the
-- next update for that file (which will happen immediately)
latestFile .= ((, True) <$> loc)
ShowDefinitionI outputLoc query -> handleShowDefinition outputLoc query
FindPatchI -> do
let patches =
[ Path.toName $ Path.snoc p seg
@ -1848,6 +1805,63 @@ loop = do
Right input -> lastInput .= Just input
_ -> pure ()
-- | Handle a @ShowDefinitionI@ input command, i.e. `view` or `edit`.
handleShowDefinition :: forall m v. Functor m => OutputLocation -> [HQ.HashQualified Name] -> Action' m v ()
handleShowDefinition outputLoc inputQuery = do
-- If the query is empty, run a fuzzy search.
query <-
if null inputQuery
then do
branch <- fuzzyBranch
fuzzySelectTermsAndTypes branch
else pure inputQuery
currentPath' <- Path.unabsolute <$> use currentPath
root' <- use root
hqLength <- eval CodebaseHashLength
Backend.DefinitionResults terms types misses <-
eval (GetDefinitionsBySuffixes (Just currentPath') root' includeCycles query)
outputPath <- getOutputPath
when (not (null types && null terms)) do
let printNames = Backend.getCurrentPrettyNames (Backend.AllNames currentPath') root'
let ppe = PPE.fromNamesDecl hqLength printNames
respond (DisplayDefinitions outputPath ppe types terms)
when (not (null misses)) (respond (SearchTermsNotFound misses))
-- If we are writing these definitions to a file, we set latestFile to be
-- programmatically generated; this will skip the next update for that
-- file (which will happen immediately)
latestFile .= ((,True) <$> outputPath)
where
-- `view`: fuzzy find globally; `edit`: fuzzy find local to current branch
fuzzyBranch :: Action' m v (Branch0 m)
fuzzyBranch =
case outputLoc of
ConsoleLocation {} -> Branch.head <$> use root
-- fuzzy finding for 'edit's are local to the current branch
LatestFileLocation {} -> currentBranch0
FileLocation {} -> currentBranch0
where
currentBranch0 = do
currentPath' <- use currentPath
currentBranch <- getAt currentPath'
pure (Branch.head currentBranch)
-- `view`: don't include cycles; `edit`: include cycles
includeCycles =
case outputLoc of
ConsoleLocation -> Backend.DontIncludeCycles
FileLocation _ -> Backend.IncludeCycles
LatestFileLocation -> Backend.IncludeCycles
-- Get the file path to send the definition(s) to. `Nothing` means the terminal.
getOutputPath :: Action' m v (Maybe FilePath)
getOutputPath =
case outputLoc of
ConsoleLocation -> pure Nothing
FileLocation path -> pure (Just path)
LatestFileLocation ->
use latestFile <&> \case
Nothing -> Just "scratch.u"
Just (path, _) -> Just path
-- todo: compare to `getHQTerms` / `getHQTypes`. Is one universally better?
resolveHQToLabeledDependencies :: Functor m => HQ.HashQualified Name -> Action' m v (Set LabeledDependency)
resolveHQToLabeledDependencies = \case
@ -2404,7 +2418,7 @@ applySelection
-> SlurpResult v
-> SlurpResult v
applySelection [] _ = id
applySelection hqs file = \sr@SlurpResult{..} ->
applySelection hqs file = \sr@SlurpResult{adds, updates} ->
sr { adds = adds `SC.intersection` closed
, updates = updates `SC.intersection` closed
, extraDefinitions = closed `SC.difference` selection
@ -2658,7 +2672,7 @@ filterBySlurpResult :: Ord v
=> SlurpResult v
-> UF.TypecheckedUnisonFile v Ann
-> UF.TypecheckedUnisonFile v Ann
filterBySlurpResult SlurpResult{..}
filterBySlurpResult SlurpResult{adds, updates}
(UF.TypecheckedUnisonFileId
dataDeclarations'
effectDeclarations'


@ -34,7 +34,6 @@ import Unison.DataDeclaration ( Decl )
import Unison.Util.Relation (Relation)
import qualified Unison.Codebase.Branch as Branch
import qualified Unison.Codebase.Editor.SlurpResult as SR
import qualified Unison.Codebase.Metadata as Metadata
import qualified Unison.Codebase.Path as Path
import qualified Unison.Codebase.Runtime as Runtime
import qualified Unison.HashQualified as HQ
@ -184,9 +183,6 @@ data Output v
| ConfiguredMetadataParseError Path' String (P.Pretty P.ColorText)
| NoConfiguredGitUrl PushPull Path'
| ConfiguredGitUrlParseError PushPull Path' Text String
| DisplayLinks PPE.PrettyPrintEnvDecl Metadata.Metadata
(Map Reference (DisplayObject () (Decl v Ann)))
(Map Reference (DisplayObject (Type v Ann) (Term v Ann)))
| MetadataMissingType PPE.PrettyPrintEnv Referent
| TermMissingType Reference
| MetadataAmbiguous (HQ.HashQualified Name) PPE.PrettyPrintEnv [Referent]
@ -306,7 +302,6 @@ isFailure o = case o of
ConfiguredMetadataParseError{} -> True
NoConfiguredGitUrl{} -> True
ConfiguredGitUrlParseError{} -> True
DisplayLinks{} -> False
MetadataMissingType{} -> True
MetadataAmbiguous{} -> True
PatchNeedsToBeConflictFree{} -> True


@ -331,6 +331,10 @@ sqliteCodebase debugName root = do
getDeclComponent h1@(Cv.hash1to2 -> h2) =
runDB' conn $ map (Cv.decl2to1 h1) <$> Ops.loadDeclComponent h2
getCycleLength :: MonadIO m => Hash -> m (Maybe Reference.CycleSize)
getCycleLength (Cv.hash1to2 -> h2) =
runDB conn $ Ops.getCycleLen h2
--putTermComponent :: MonadIO m => Hash -> [(Term Symbol Ann, Type Symbol Ann)] -> m ()
--putTerms :: MonadIO m => Map Reference.Id (Term Symbol Ann, Type Symbol Ann) -> m () -- dies horribly if missing dependencies?
@ -783,9 +787,8 @@ sqliteCodebase debugName root = do
putTypeDeclaration
-- _getTermComponent
getTermComponentWithTypes
-- _getTermComponentLength
getDeclComponent
-- _getDeclComponentLength
getCycleLength
(getRootBranch rootBranchCache)
(putRootBranch rootBranchCache)
(rootBranchUpdates rootBranchCache)


@ -41,9 +41,8 @@ data Codebase m v a = Codebase
putTypeDeclaration :: Reference.Id -> Decl v a -> m (),
-- getTermComponent :: Hash -> m (Maybe [Term v a]),
getTermComponentWithTypes :: Hash -> m (Maybe [(Term v a, Type v a)]),
-- getTermComponentLength :: Hash -> m (Reference.CycleSize),
getDeclComponent :: Hash -> m (Maybe [Decl v a]),
-- getDeclComponentLength :: Hash -> m (Reference.CycleSize),
getComponentLength :: Hash -> m (Maybe Reference.CycleSize),
getRootBranch :: m (Either GetRootBranchError (Branch m)),
putRootBranch :: Branch m -> m (),
rootBranchUpdates :: m (IO (), IO (Set Branch.Hash)),


@ -256,8 +256,8 @@ prettyRemoteNamespace =
notifyUser :: forall v . Var v => FilePath -> Output v -> IO Pretty
notifyUser dir o = case o of
Success -> pure $ P.bold "Done."
PrintMessage pretty -> do
pure pretty
PrintMessage pretty -> do
pure pretty
BadRootBranch e -> case e of
Codebase.NoRootBranch ->
pure . P.fatalCallout $ "I couldn't find the codebase root!"
@ -336,15 +336,6 @@ notifyUser dir o = case o of
displayDefinitions outputLoc ppe types terms
DisplayRendered outputLoc pp ->
displayRendered outputLoc pp
DisplayLinks ppe md types terms ->
if Map.null md then pure $ P.wrap "Nothing to show here. Use the "
<> IP.makeExample' IP.link <> " command to add links from this definition."
else
pure $ intercalateMap "\n\n" go (Map.toList md)
where
go (_key, rs) =
displayDefinitions' ppe (Map.restrictKeys types rs)
(Map.restrictKeys terms rs)
TestResults stats ppe _showSuccess _showFailures oks fails -> case stats of
CachedTests 0 _ -> pure . P.callout "😶" $ "No tests to run."
CachedTests n n' | n == n' -> pure $
@ -1187,43 +1178,88 @@ displayRendered outputLoc pp =
P.indentN 2 pp
]
displayDefinitions :: Var v => Ord a1 =>
Maybe FilePath
-> PPE.PrettyPrintEnvDecl
-> Map Reference.Reference (DisplayObject () (DD.Decl v a1))
-> Map Reference.Reference (DisplayObject (Type v a1) (Term v a1))
-> IO Pretty
displayDefinitions _outputLoc _ppe types terms | Map.null types && Map.null terms =
pure $ P.callout "😶" "No results to display."
displayDefinitions ::
Var v =>
Ord a1 =>
Maybe FilePath ->
PPE.PrettyPrintEnvDecl ->
Map Reference.Reference (DisplayObject () (DD.Decl v a1)) ->
Map Reference.Reference (DisplayObject (Type v a1) (Term v a1)) ->
IO Pretty
displayDefinitions _outputLoc _ppe types terms
| Map.null types && Map.null terms =
pure $ P.callout "😶" "No results to display."
displayDefinitions outputLoc ppe types terms =
maybe displayOnly scratchAndDisplay outputLoc
where
displayOnly = pure code
scratchAndDisplay path = do
path' <- canonicalizePath path
prependToFile code path'
pure (message code path')
where
prependToFile code path = do
existingContents <- do
exists <- doesFileExist path
if exists then readFile path
else pure ""
writeFile path . Text.pack . P.toPlain 80 $
P.lines [ code, ""
, "---- " <> "Anything below this line is ignored by Unison."
, "", P.text existingContents ]
message code path =
P.callout "☝️" $ P.lines [
P.wrap $ "I added these definitions to the top of " <> fromString path,
"",
P.indentN 2 code,
"",
P.wrap $
"You can edit them there, then do" <> makeExample' IP.update <>
"to replace the definitions currently in this namespace."
]
code = displayDefinitions' ppe types terms
displayOnly = pure code
scratchAndDisplay path = do
path' <- canonicalizePath path
prependToFile code path'
pure (message code path')
where
prependToFile code path = do
existingContents <- do
exists <- doesFileExist path
if exists
then readFile path
else pure ""
writeFile path . Text.pack . P.toPlain 80 $
P.lines
[ code,
"",
"---- " <> "Anything below this line is ignored by Unison.",
"",
P.text existingContents
]
message code path =
P.callout "☝️" $
P.lines
[ P.wrap $ "I added these definitions to the top of " <> fromString path,
"",
P.indentN 2 code,
"",
P.wrap $
"You can edit them there, then do" <> makeExample' IP.update
<> "to replace the definitions currently in this namespace."
]
code =
P.syntaxToColor $ P.sep "\n\n" (prettyTypes <> prettyTerms)
where
ppeBody r = PPE.declarationPPE ppe r
ppeDecl = PPE.unsuffixifiedPPE ppe
prettyTerms =
map go . Map.toList $
-- sort by name
Map.mapKeys (first (PPE.termName ppeDecl . Referent.Ref) . dupe) terms
prettyTypes =
map go2 . Map.toList $
Map.mapKeys (first (PPE.typeName ppeDecl) . dupe) types
go ((n, r), dt) =
case dt of
MissingObject r -> missing n r
BuiltinObject typ ->
P.hang
("builtin " <> prettyHashQualified n <> " :")
(TypePrinter.prettySyntax (ppeBody r) typ)
UserObject tm -> TermPrinter.prettyBinding (ppeBody r) n tm
go2 ((n, r), dt) =
case dt of
MissingObject r -> missing n r
BuiltinObject _ -> builtin n
UserObject decl -> case decl of
Left d -> DeclPrinter.prettyEffectDecl (ppeBody r) r n d
Right d -> DeclPrinter.prettyDataDecl (PPE.declarationPPEDecl ppe r) r n d
builtin n = P.wrap $ "--" <> prettyHashQualified n <> " is built-in."
missing n r =
P.wrap
( "-- The name " <> prettyHashQualified n <> " is assigned to the "
<> "reference "
<> fromString (show r ++ ",")
<> "which is missing from the codebase."
)
<> P.newline
<> tip "You might need to repair the codebase manually."
displayTestResults :: Bool -- whether to show the tip
-> PPE.PrettyPrintEnv


@ -54,6 +54,7 @@ module Unison.Runtime.ANF
, BLit(..)
, packTags
, unpackTags
, maskTags
, ANFM
, Branched(.., MatchDataCover)
, Func(..)
@ -426,6 +427,10 @@ packTags (RTag rt) (CTag ct) = ri .|. ci
unpackTags :: Word64 -> (RTag, CTag)
unpackTags w = (RTag $ w `shiftR` 16, CTag . fromIntegral $ w .&. 0xFFFF)
-- Masks a packed tag to extract just the constructor tag portion
maskTags :: Word64 -> Word64
maskTags w = w .&. 0xFFFF
ensureRTag :: (Ord n, Show n, Num n) => String -> n -> r -> r
ensureRTag s n x
| n > 0xFFFFFFFFFFFF
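An aside, not from this commit: judging from `unpackTags` above, a packed tag keeps the runtime type tag in the bits above 16 and the constructor tag in the low 16 bits, so `maskTags` is just the constructor half of `unpackTags`. A small worked example under that assumption (the concrete tag values 3 and 2 are made up):

```haskell
import Data.Word (Word64)

-- Presumably what packTags (RTag 3) (CTag 2) produces: 3 shifted left by
-- 16 bits, or'd with the constructor tag 2.
examplePacked :: Word64
examplePacked = 3 * 0x10000 + 2   -- 0x30002

-- unpackTags examplePacked == (RTag 3, CTag 2)
-- maskTags   examplePacked == 2
```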


@ -1,3 +1,4 @@
{-# language ViewPatterns #-}
{-# language PatternGuards #-}
{-# language TupleSections #-}
{-# language PatternSynonyms #-}
@ -21,6 +22,7 @@ import Unison.Type
import Unison.Var (Var)
import Unison.Reference (Reference)
import Unison.Runtime.ANF (maskTags)
import Unison.Runtime.Foreign
(Foreign, HashAlgorithm(..), maybeUnwrapBuiltin, maybeUnwrapForeign)
import Unison.Runtime.MCode (CombIx(..))
@ -46,14 +48,14 @@ decompile
=> (Word64 -> Word64 -> Maybe (Term v ()))
-> Closure
-> Either Error (Term v ())
decompile _ (DataC rf ct [] [])
decompile _ (DataC rf (maskTags -> ct) [] [])
| rf == booleanRef
= boolean () <$> tag2bool ct
decompile _ (DataC rf ct [i] [])
decompile _ (DataC rf (maskTags -> ct) [i] [])
= decompileUnboxed rf ct i
decompile topTerms (DataC rf _ [] [b]) | rf == anyRef
= app () (builtin() "Any.Any") <$> decompile topTerms b
decompile topTerms (DataC rf ct [] bs)
decompile topTerms (DataC rf (maskTags -> ct) [] bs)
= apps' (con rf ct) <$> traverse (decompile topTerms) bs
decompile topTerms (PApV (CIx rf rt k) [] bs)
| Just t <- topTerms rt k


@ -64,6 +64,7 @@ import Unison.Runtime.ANF
, SuperGroup(..)
, CTag
, Tag(..)
, packTags
, pattern TVar
, pattern TLit
, pattern TApp
@ -813,9 +814,11 @@ emitFunction rns _ _ _ (FComb r) as
| otherwise -- slow path
= App False (Env n 0) as
where n = cnum rns r
emitFunction _ _ _ _ (FCon r t) as
= Ins (Pack r (rawTag t) as)
emitFunction rns _ _ _ (FCon r t) as
= Ins (Pack r (packTags rt t) as)
. Yield $ BArg1 0
where
rt = toEnum . fromIntegral $ dnum rns r
emitFunction rns _ _ _ (FReq r e) as
-- Currently implementing packed calling convention for abilities
= Ins (Lit (MI . fromIntegral $ rawTag e))
@ -913,8 +916,10 @@ emitLet _ _ _ _ _ _ (TLit l)
-- = fmap (Ins . Name (Env n 0) $ emitArgs grp ctx args)
-- where
-- n = cnum rns r
emitLet _ grp _ _ _ ctx (TApp (FCon r n) args)
= fmap (Ins . Pack r (rawTag n) $ emitArgs grp ctx args)
emitLet rns grp _ _ _ ctx (TApp (FCon r n) args)
= fmap (Ins . Pack r (packTags rt n) $ emitArgs grp ctx args)
where
rt = toEnum . fromIntegral $ dnum rns r
emitLet _ grp _ _ _ ctx (TApp (FPrim p) args)
= fmap (Ins . either emitPOp emitFOp p $ emitArgs grp ctx args)
emitLet rns grp rec d vcs ctx bnd


@ -15,7 +15,8 @@ import GHC.Conc as STM (unsafeIOToSTM)
import Data.Maybe (fromMaybe)
import Data.Bits
import Data.Foldable (toList, traverse_)
import Data.Foldable (toList, traverse_, fold)
import Data.Ord (comparing)
import Data.Traversable
import Data.Word (Word64)
@ -39,8 +40,15 @@ import Unison.Referent (pattern Ref)
import qualified Unison.ShortHash as SH
import Unison.Symbol (Symbol)
import Unison.Runtime.ANF
as ANF (Mem(..), CompileExn(..), SuperGroup, valueLinks, groupLinks)
import Unison.Runtime.ANF as ANF
( Mem(..)
, CompileExn(..)
, SuperGroup
, valueLinks
, groupLinks
, maskTags
, packTags
)
import qualified Unison.Runtime.ANF as ANF
import Unison.Runtime.Builtin
import Unison.Runtime.Exception
@ -637,44 +645,44 @@ dumpData
-> IO (Stack 'UN, Stack 'BX)
dumpData !_ !ustk !bstk (Enum _ t) = do
ustk <- bump ustk
pokeN ustk t
pokeN ustk $ maskTags t
pure (ustk, bstk)
dumpData !_ !ustk !bstk (DataU1 _ t x) = do
ustk <- bumpn ustk 2
pokeOff ustk 1 x
pokeN ustk t
pokeN ustk $ maskTags t
pure (ustk, bstk)
dumpData !_ !ustk !bstk (DataU2 _ t x y) = do
ustk <- bumpn ustk 3
pokeOff ustk 2 y
pokeOff ustk 1 x
pokeN ustk t
pokeN ustk $ maskTags t
pure (ustk, bstk)
dumpData !_ !ustk !bstk (DataB1 _ t x) = do
ustk <- bump ustk
bstk <- bump bstk
poke bstk x
pokeN ustk t
pokeN ustk $ maskTags t
pure (ustk, bstk)
dumpData !_ !ustk !bstk (DataB2 _ t x y) = do
ustk <- bump ustk
bstk <- bumpn bstk 2
pokeOff bstk 1 y
poke bstk x
pokeN ustk t
pokeN ustk $ maskTags t
pure (ustk, bstk)
dumpData !_ !ustk !bstk (DataUB _ t x y) = do
ustk <- bumpn ustk 2
bstk <- bump bstk
pokeOff ustk 1 x
poke bstk y
pokeN ustk t
pokeN ustk $ maskTags t
pure (ustk, bstk)
dumpData !_ !ustk !bstk (DataG _ t us bs) = do
ustk <- dumpSeg ustk us S
bstk <- dumpSeg bstk bs S
ustk <- bump ustk
pokeN ustk t
pokeN ustk $ maskTags t
pure (ustk, bstk)
dumpData !mr !_ !_ clo
= die $ "dumpData: bad closure: " ++ show clo
@ -1169,13 +1177,13 @@ bprim1 !ustk !bstk PAKT i = do
pokeBi bstk . Tx.pack . toList $ clo2char <$> s
pure (ustk, bstk)
where
clo2char (DataU1 _ 0 i) = toEnum i
clo2char (DataU1 _ t i) | t == charTag = toEnum i
clo2char c = error $ "pack text: non-character closure: " ++ show c
bprim1 !ustk !bstk UPKT i = do
t <- peekOffBi bstk i
bstk <- bump bstk
pokeS bstk . Sq.fromList
. fmap (DataU1 Rf.charRef 0 . fromEnum) . Tx.unpack $ t
. fmap (DataU1 Rf.charRef charTag . fromEnum) . Tx.unpack $ t
pure (ustk, bstk)
bprim1 !ustk !bstk PAKB i = do
s <- peekOffS bstk i
@ -1183,12 +1191,12 @@ bprim1 !ustk !bstk PAKB i = do
pokeBi bstk . By.fromWord8s . fmap clo2w8 $ toList s
pure (ustk, bstk)
where
clo2w8 (DataU1 _ 0 n) = toEnum n
clo2w8 (DataU1 _ t n) | t == natTag = toEnum n
clo2w8 c = error $ "pack bytes: non-natural closure: " ++ show c
bprim1 !ustk !bstk UPKB i = do
b <- peekOffBi bstk i
bstk <- bump bstk
pokeS bstk . Sq.fromList . fmap (DataU1 Rf.natRef 0 . fromEnum)
pokeS bstk . Sq.fromList . fmap (DataU1 Rf.natRef natTag . fromEnum)
$ By.toWord8s b
pure (ustk, bstk)
bprim1 !ustk !bstk SIZB i = do
@ -1578,7 +1586,7 @@ reflectValue rty = goV
goV (PApV cix ua ba)
= ANF.Partial (goIx cix) (fromIntegral <$> ua) <$> traverse goV ba
goV (DataC r t us bs)
= ANF.Data r t (fromIntegral <$> us) <$> traverse goV bs
= ANF.Data r (maskTags t) (fromIntegral <$> us) <$> traverse goV bs
goV (CapV k us bs)
= ANF.Cont (fromIntegral <$> us) <$> traverse goV bs <*> goK k
goV (Foreign f) = ANF.BLit <$> goF f
@ -1641,8 +1649,9 @@ reifyValue0 (rty, rtm) = goV
goV (ANF.Partial gr ua ba)
= pap <$> (goIx gr) <*> traverse goV ba
where pap i = PApV i (fromIntegral <$> ua)
goV (ANF.Data r t us bs)
= DataC r t (fromIntegral <$> us) <$> traverse goV bs
goV (ANF.Data r t0 us bs) = do
t <- flip packTags (fromIntegral t0) . fromIntegral <$> refTy r
DataC r t (fromIntegral <$> us) <$> traverse goV bs
goV (ANF.Cont us bs k) = cv <$> goK k <*> traverse goV bs
where
cv k bs = CapV k (fromIntegral <$> us) bs
@ -1666,3 +1675,131 @@ reifyValue0 (rty, rtm) = goV
goL (ANF.TmLink r) = pure . Foreign $ Wrap Rf.termLinkRef r
goL (ANF.TyLink r) = pure . Foreign $ Wrap Rf.typeLinkRef r
goL (ANF.Bytes b) = pure . Foreign $ Wrap Rf.bytesRef b
-- Universal comparison functions
closureNum :: Closure -> Int
closureNum PAp{} = 0
closureNum DataC{} = 1
closureNum Captured{} = 2
closureNum Foreign{} = 3
closureNum BlackHole{} = error "BlackHole"
universalEq
:: (Foreign -> Foreign -> Bool)
-> Closure
-> Closure
-> Bool
universalEq frn = eqc
where
eql cm l r = length l == length r && and (zipWith cm l r)
eqc (DataC _ ct1 us1 bs1) (DataC _ ct2 us2 bs2)
= ct1 == ct2
&& eql (==) us1 us2
&& eql eqc bs1 bs2
eqc (PApV i1 us1 bs1) (PApV i2 us2 bs2)
= i1 == i2
&& eql (==) us1 us2
&& eql eqc bs1 bs2
eqc (CapV k1 us1 bs1) (CapV k2 us2 bs2)
= k1 == k2
&& eql (==) us1 us2
&& eql eqc bs1 bs2
eqc (Foreign fl) (Foreign fr)
| Just sl <- maybeUnwrapForeign Rf.listRef fl
, Just sr <- maybeUnwrapForeign Rf.listRef fr
= length sl == length sr && and (Sq.zipWith eqc sl sr)
| otherwise = frn fl fr
eqc c d = closureNum c == closureNum d
-- IEEE floating point layout is such that comparison as integers
-- somewhat works. Positive floating values map to positive integers
-- and negatives map to negatives. The corner cases are:
--
-- 1. If both numbers are negative, ordering is flipped.
-- 2. There is both +0 and -0, with -0 being represented as the
-- minimum signed integer.
-- 3. NaN does weird things.
--
-- So, the strategy here is to compare normally if one argument is
-- positive, since positive numbers compare normally to others.
-- Otherwise, the sign bit is cleared and the numbers are compared
-- backwards. Clearing the sign bit maps -0 to +0 and maps a negative
-- number to its absolute value (including infinities). The multiple
-- NaN values are just handled according to bit patterns, rather than
-- IEEE specified behavior.
--
-- Transitivity is somewhat non-obvious for this implementation.
--
-- if i <= j and j <= k
-- if j > 0 then k > 0, so all 3 comparisons use `compare`
-- if k > 0 then k > i, since i <= j <= 0
-- if all 3 are <= 0, all 3 comparisons use the alternate
-- comparison, which is transitive via `compare`
compareAsFloat :: Int -> Int -> Ordering
compareAsFloat i j
| i > 0 || j > 0 = compare i j
| otherwise = compare (clear j) (clear i)
where clear k = clearBit k 64
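A usage sketch, not from this commit: the runtime stores a Float's IEEE bits in an unboxed `Int` slot, so `compareAsFloat` is fed raw bit patterns. The `doubleBits` helper below is hypothetical and only illustrates the intent; `castDoubleToWord64` is from `GHC.Float` in base:

```haskell
import GHC.Float (castDoubleToWord64)

-- Hypothetical helper: view a Double as the Int bit pattern the runtime
-- keeps on the unboxed stack.
doubleBits :: Double -> Int
doubleBits = fromIntegral . castDoubleToWord64

-- compareAsFloat (doubleBits 1.0)    (doubleBits 2.0)    == LT
-- compareAsFloat (doubleBits (-1.0)) (doubleBits (-2.0)) == GT  -- flipped branch for non-positives
-- compareAsFloat (doubleBits (-1.0)) (doubleBits 2.0)    == LT  -- mixed signs use plain compare
```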
compareAsNat :: Int -> Int -> Ordering
compareAsNat i j = compare ni nj
where
ni, nj :: Word
ni = fromIntegral i
nj = fromIntegral j
floatTag :: Word64
floatTag
| Just n <- M.lookup Rf.floatRef builtinTypeNumbering
, rt <- toEnum (fromIntegral n)
= packTags rt 0
| otherwise = error "internal error: floatTag"
natTag :: Word64
natTag
| Just n <- M.lookup Rf.natRef builtinTypeNumbering
, rt <- toEnum (fromIntegral n)
= packTags rt 0
| otherwise = error "internal error: natTag"
charTag :: Word64
charTag
| Just n <- M.lookup Rf.charRef builtinTypeNumbering
, rt <- toEnum (fromIntegral n)
= packTags rt 0
| otherwise = error "internal error: charTag"
universalCompare
:: (Foreign -> Foreign -> Ordering)
-> Closure
-> Closure
-> Ordering
universalCompare frn = cmpc False
where
cmpl cm l r
= compare (length l) (length r) <> fold (zipWith cm l r)
cmpc _ (DataC _ ct1 [i] []) (DataC _ ct2 [j] [])
| ct1 == floatTag && ct2 == floatTag = compareAsFloat i j
| ct1 == natTag && ct2 == natTag = compareAsNat i j
cmpc tyEq (DataC rf1 ct1 us1 bs1) (DataC rf2 ct2 us2 bs2)
= (if tyEq && ct1 /= ct2 then compare rf1 rf2 else EQ)
<> compare (maskTags ct1) (maskTags ct2)
<> cmpl compare us1 us2
-- when comparing corresponding `Any` values, which have
-- existentials inside check that type references match
<> cmpl (cmpc $ tyEq || rf1 == Rf.anyRef) bs1 bs2
cmpc tyEq (PApV i1 us1 bs1) (PApV i2 us2 bs2)
= compare i1 i2
<> cmpl compare us1 us2
<> cmpl (cmpc tyEq) bs1 bs2
cmpc _ (CapV k1 us1 bs1) (CapV k2 us2 bs2)
= compare k1 k2
<> cmpl compare us1 us2
<> cmpl (cmpc True) bs1 bs2
cmpc tyEq (Foreign fl) (Foreign fr)
| Just sl <- maybeUnwrapForeign Rf.listRef fl
, Just sr <- maybeUnwrapForeign Rf.listRef fr
= comparing Sq.length sl sr <> fold (Sq.zipWith (cmpc tyEq) sl sr)
| otherwise = frn fl fr
cmpc _ c d = comparing closureNum c d


@ -17,8 +17,6 @@ module Unison.Runtime.Stack
, Off
, SZ
, FP
, universalEq
, universalCompare
, marshalToForeign
, unull
, bnull
@ -49,15 +47,11 @@ import GHC.Exts as L (IsList(..))
import Control.Monad (when)
import Control.Monad.Primitive
import Data.Ord (comparing)
import Data.Foldable (fold)
import Data.Foldable as F (toList, for_)
import Data.Primitive.ByteArray
import Data.Primitive.PrimArray
import Data.Primitive.Array
import Data.Sequence (Seq)
import qualified Data.Sequence as Sq
import Data.Word
import Unison.Reference (Reference)
@ -158,73 +152,6 @@ pattern CapV k us bs <- Captured k (ints -> us) (L.toList -> bs)
{-# complete DataC, PApV, Captured, Foreign, BlackHole #-}
{-# complete DataC, PApV, CapV, Foreign, BlackHole #-}
closureNum :: Closure -> Int
closureNum PAp{} = 0
closureNum DataC{} = 1
closureNum Captured{} = 2
closureNum Foreign{} = 3
closureNum BlackHole{} = error "BlackHole"
universalEq
:: (Foreign -> Foreign -> Bool)
-> Closure
-> Closure
-> Bool
universalEq frn = eqc False
where
eql cm l r = length l == length r && and (zipWith cm l r)
eqc tyEq (DataC rf1 ct1 us1 bs1) (DataC rf2 ct2 us2 bs2)
= (if tyEq then rf1 == rf2 else True)
&& ct1 == ct2
&& eql (==) us1 us2
&& eql (eqc $ tyEq || rf1 == Ty.anyRef) bs1 bs2
eqc tyEq (PApV i1 us1 bs1) (PApV i2 us2 bs2)
= i1 == i2
&& eql (==) us1 us2
&& eql (eqc tyEq) bs1 bs2
eqc _ (CapV k1 us1 bs1) (CapV k2 us2 bs2)
= k1 == k2
&& eql (==) us1 us2
&& eql (eqc True) bs1 bs2
eqc tyEq (Foreign fl) (Foreign fr)
| Just sl <- maybeUnwrapForeign Ty.listRef fl
, Just sr <- maybeUnwrapForeign Ty.listRef fr
= length sl == length sr && and (Sq.zipWith (eqc tyEq) sl sr)
| otherwise = frn fl fr
eqc _ c d = closureNum c == closureNum d
universalCompare
:: (Foreign -> Foreign -> Ordering)
-> Closure
-> Closure
-> Ordering
universalCompare frn = cmpc False
where
cmpl cm l r
= compare (length l) (length r) <> fold (zipWith cm l r)
cmpc tyEq (DataC rf1 ct1 us1 bs1) (DataC rf2 ct2 us2 bs2)
= (if tyEq then compare rf1 rf2 else EQ)
<> compare ct1 ct2
<> cmpl compare us1 us2
-- when comparing corresponding `Any` values, which have
-- existentials inside check that type references match
<> cmpl (cmpc $ tyEq || rf1 == Ty.anyRef) bs1 bs2
cmpc tyEq (PApV i1 us1 bs1) (PApV i2 us2 bs2)
= compare i1 i2
<> cmpl compare us1 us2
<> cmpl (cmpc tyEq) bs1 bs2
cmpc _ (CapV k1 us1 bs1) (CapV k2 us2 bs2)
= compare k1 k2
<> cmpl compare us1 us2
<> cmpl (cmpc True) bs1 bs2
cmpc tyEq (Foreign fl) (Foreign fr)
| Just sl <- maybeUnwrapForeign Ty.listRef fl
, Just sr <- maybeUnwrapForeign Ty.listRef fr
= comparing Sq.length sl sr <> fold (Sq.zipWith (cmpc tyEq) sl sr)
| otherwise = frn fl fr
cmpc _ c d = comparing closureNum c d
marshalToForeign :: HasCallStack => Closure -> Foreign
marshalToForeign (Foreign x) = x
marshalToForeign c


@ -86,6 +86,7 @@ import qualified Unison.TypePrinter as TypePrinter
import qualified Unison.Typechecker as Typechecker
import Unison.Util.List (uniqueBy)
import Unison.Util.Pretty (Width)
import qualified Unison.Util.Map as Map
import qualified Unison.Util.Pretty as Pretty
import qualified Unison.Util.Relation as R
import qualified Unison.Util.Star3 as Star3
@ -449,22 +450,22 @@ typeReferencesByShortHash codebase sh = do
B.intrinsicTypeReferences
pure (fromBuiltins <> Set.map Reference.DerivedId fromCodebase)
typeReferencesByShortHash
:: Monad m => Codebase m v a -> ShortHash -> m (Set Reference)
-- | Look up types in the codebase by short hash, and include builtins.
typeReferencesByShortHash :: Monad m => Codebase m v a -> ShortHash -> m (Set Reference)
termReferencesByShortHash codebase sh = do
fromCodebase <- Codebase.termReferencesByPrefix codebase sh
let fromBuiltins = Set.filter (\r -> sh == Reference.toShortHash r)
B.intrinsicTermReferences
pure (fromBuiltins <> Set.map Reference.DerivedId fromCodebase)
pure (fromBuiltins <> Set.mapMonotonic Reference.DerivedId fromCodebase)
termReferentsByShortHash
:: Monad m => Codebase m v a -> ShortHash -> m (Set Referent)
-- | Look up terms in the codebase by short hash, and include builtins.
termReferentsByShortHash :: Monad m => Codebase m v a -> ShortHash -> m (Set Referent)
termReferentsByShortHash codebase sh = do
fromCodebase <- Codebase.termReferentsByPrefix codebase sh
let fromBuiltins = Set.map Referent.Ref $ Set.filter
(\r -> sh == Reference.toShortHash r)
B.intrinsicTermReferences
pure (fromBuiltins <> Set.map (fmap Reference.DerivedId) fromCodebase)
pure (fromBuiltins <> Set.mapMonotonic (over Referent.reference_ Reference.DerivedId) fromCodebase)
-- currentPathNames :: Path -> Names
-- currentPathNames = Branch.toNames . Branch.head . Branch.getAt
@ -616,9 +617,9 @@ hqNameQuery namesScope root codebase hqs = do
-- TODO: Move this to its own module
data DefinitionResults v =
DefinitionResults
{ termResults :: Map Reference (DisplayObject (Type v Ann) (Term v Ann))
, typeResults :: Map Reference (DisplayObject () (DD.Decl v Ann))
, noResults :: [HQ.HashQualified Name]
{ termResults :: Map Reference (DisplayObject (Type v Ann) (Term v Ann)),
typeResults :: Map Reference (DisplayObject () (DD.Decl v Ann)),
noResults :: [HQ.HashQualified Name]
}
-- Separates type references from term references and returns types and terms,
@ -678,11 +679,9 @@ prettyDefinitionsBySuffixes
-> Backend IO DefinitionDisplayResults
prettyDefinitionsBySuffixes namesScope root renderWidth suffixifyBindings rt codebase query
= do
branch <- resolveBranchHash root codebase
DefinitionResults terms types misses <- definitionsBySuffixes namesScope
branch
codebase
query
branch <- resolveBranchHash root codebase
DefinitionResults terms types misses <-
lift (definitionsBySuffixes namesScope branch codebase DontIncludeCycles query)
hqLength <- lift $ Codebase.hashLength codebase
-- We might like to make sure that the user search terms get used as
-- the names in the pretty-printer, but the current implementation
@ -842,11 +841,12 @@ docsInBranchToHtmlFiles runtime codebase root currentPath directory = do
let currentBranch = Branch.getAt' currentPath root
let allTerms = (R.toList . Branch.deepTerms . Branch.head) currentBranch
docTermsWithNames <- filterM (isDoc codebase . fst) allTerms
let docNamesByRef = Map.fromList docTermsWithNames
hqLength <- Codebase.hashLength codebase
let printNames = getCurrentPrettyNames (AllNames currentPath) root
let ppe = PPE.fromNamesDecl hqLength printNames
docs <- for docTermsWithNames (renderDoc' ppe runtime codebase)
liftIO $ traverse_ (renderDocToHtmlFile directory) docs
liftIO $ traverse_ (renderDocToHtmlFile docNamesByRef directory) docs
where
renderDoc' ppe runtime codebase (ref, name) = do
@ -877,15 +877,15 @@ docsInBranchToHtmlFiles runtime codebase root currentPath directory = do
in dir </> fileName
renderDocToHtmlFile :: FilePath -> (Name, UnisonHash, Doc.Doc) -> IO ()
renderDocToHtmlFile destination (docName, _, doc) =
renderDocToHtmlFile :: Map Referent Name -> FilePath -> (Name, UnisonHash, Doc.Doc) -> IO ()
renderDocToHtmlFile docNamesByRef destination (docName, _, doc) =
let
fullPath = docFilePath destination docName
directoryPath = takeDirectory fullPath
in do
-- Ensure all directories exists
_ <- createDirectoryIfMissing True directoryPath
Lucid.renderToFile fullPath (DocHtml.toHtml doc)
Lucid.renderToFile fullPath (DocHtml.toHtml docNamesByRef doc)
bestNameForTerm
:: forall v . Var v => PPE.PrettyPrintEnv -> Width -> Referent -> Text
@ -923,61 +923,77 @@ resolveRootBranchHash mayRoot codebase = case mayRoot of
h <- expandShortBranchHash codebase sbh
resolveBranchHash (Just h) codebase
-- | Determines whether we include full cycles in the results (e.g. if I search for `isEven`, will I find `isOdd` too?)
data IncludeCycles
= IncludeCycles
| DontIncludeCycles
definitionsBySuffixes
:: forall m v
. (MonadIO m)
=> Var v
=> NameScoping
-> Branch m
-> Codebase m v Ann
-> [HQ.HashQualified Name]
-> Backend m (DefinitionResults v)
definitionsBySuffixes namesScope branch codebase query = do
-- First find the hashes by name and note any query misses.
QueryResult misses results <- lift
$ hqNameQuery namesScope branch codebase query
-- Now load the terms/types for those hashes.
results' <- lift $ loadSearchResults codebase results
let termTypes :: Map.Map Reference (Type v Ann)
termTypes = Map.fromList
[ (r, t) | SR'.Tm _ (Just t) (Referent.Ref r) _ <- results' ]
(collatedTypes, collatedTerms) = collateReferences
(mapMaybe SR'.tpReference results')
(mapMaybe SR'.tmReferent results')
-- load the `collatedTerms` and types into a Map Reference.Id Term/Type
-- for later
loadedDerivedTerms <-
lift $ fmap (Map.fromList . catMaybes) . for (toList collatedTerms) $ \case
Reference.DerivedId i -> fmap (i, ) <$> Codebase.getTerm codebase i
Reference.Builtin{} -> pure Nothing
loadedDerivedTypes <-
lift $ fmap (Map.fromList . catMaybes) . for (toList collatedTypes) $ \case
Reference.DerivedId i ->
fmap (i, ) <$> Codebase.getTypeDeclaration codebase i
Reference.Builtin{} -> pure Nothing
-- Populate DisplayObjects for the search results, in anticipation of
-- rendering the definitions.
loadedDisplayTerms <- fmap Map.fromList . for (toList collatedTerms) $ \case
r@(Reference.DerivedId i) -> do
let tm = Map.lookup i loadedDerivedTerms
-- We add a type annotation to the term using if it doesn't
-- already have one that the user provided
pure . (r, ) $ case liftA2 (,) tm (Map.lookup r termTypes) of
Nothing -> MissingObject $ Reference.idToShortHash i
Just (tm, typ) -> case tm of
Term.Ann' _ _ -> UserObject tm
_ -> UserObject (Term.ann (ABT.annotation tm) tm typ)
r@(Reference.Builtin _) -> pure $ (r,) $ case Map.lookup r B.termRefTypes of
Nothing -> MissingObject $ Reference.toShortHash r
Just typ -> BuiltinObject (mempty <$ typ)
let loadedDisplayTypes = Map.fromList . (`fmap` toList collatedTypes) $ \case
r@(Reference.DerivedId i) ->
(r, )
. maybe (MissingObject $ Reference.idToShortHash i) UserObject
$ Map.lookup i loadedDerivedTypes
r@(Reference.Builtin _) -> (r, BuiltinObject ())
pure $ DefinitionResults loadedDisplayTerms loadedDisplayTypes misses
definitionsBySuffixes ::
forall m v.
MonadIO m =>
Var v =>
NameScoping ->
Branch m ->
Codebase m v Ann ->
IncludeCycles ->
[HQ.HashQualified Name] ->
m (DefinitionResults v)
definitionsBySuffixes namesScope branch codebase includeCycles query = do
QueryResult misses results <- hqNameQuery namesScope branch codebase query
-- todo: remember to replace this with getting components directly,
-- and maybe even remove getComponentLength from Codebase interface altogether
terms <- do
let termRefsWithoutCycles = searchResultsToTermRefs results
termRefs <- case includeCycles of
IncludeCycles ->
Monoid.foldMapM
(Codebase.componentReferencesForReference codebase)
termRefsWithoutCycles
DontIncludeCycles -> pure termRefsWithoutCycles
Map.foldMapM (\ref -> (ref,) <$> displayTerm ref) termRefs
types <- do
let typeRefsWithoutCycles = searchResultsToTypeRefs results
typeRefs <- case includeCycles of
IncludeCycles ->
Monoid.foldMapM
(Codebase.componentReferencesForReference codebase)
typeRefsWithoutCycles
DontIncludeCycles -> pure typeRefsWithoutCycles
Map.foldMapM (\ref -> (ref,) <$> displayType ref) typeRefs
pure (DefinitionResults terms types misses)
where
searchResultsToTermRefs :: [SR.SearchResult] -> Set Reference
searchResultsToTermRefs results =
Set.fromList [r | SR.Tm' _ (Referent.Ref r) _ <- results]
searchResultsToTypeRefs :: [SR.SearchResult] -> Set Reference
searchResultsToTypeRefs results =
Set.fromList (mapMaybe f results)
where
f :: SR.SearchResult -> Maybe Reference
f = \case
SR.Tm' _ (Referent.Con r _ _) _ -> Just r
SR.Tp' _ r _ -> Just r
_ -> Nothing
displayTerm :: Reference -> m (DisplayObject (Type v Ann) (Term v Ann))
displayTerm = \case
ref@(Reference.Builtin _) -> do
pure case Map.lookup ref B.termRefTypes of
-- This would be better as a `MissingBuiltin` constructor; `MissingObject` is kind of being
-- misused here. Is `MissingObject` even possible anymore?
Nothing -> MissingObject $ Reference.toShortHash ref
Just typ -> BuiltinObject (mempty <$ typ)
Reference.DerivedId rid -> do
(term, ty) <- Codebase.unsafeGetTermWithType codebase rid
pure case term of
Term.Ann' _ _ -> UserObject term
-- manually annotate if necessary
_ -> UserObject (Term.ann (ABT.annotation term) term ty)
displayType :: Reference -> m (DisplayObject () (DD.Decl v Ann))
displayType = \case
Reference.Builtin _ -> pure (BuiltinObject ())
Reference.DerivedId rid -> do
decl <- Codebase.unsafeGetTypeDeclaration codebase rid
pure (UserObject decl)
termsToSyntax
:: Var v
@ -1051,4 +1067,3 @@ loadTypeDisplayObject c = \case
Reference.DerivedId id ->
maybe (MissingObject $ Reference.idToShortHash id) UserObject
<$> Codebase.getTypeDeclaration c id


@ -4,18 +4,25 @@
module Unison.Server.Doc.AsHtml where
import Data.Foldable
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as Text
import Lucid
import qualified Lucid as L
import Unison.Codebase.Editor.DisplayObject (DisplayObject (..))
import Unison.Name (Name)
import qualified Unison.Name as Name
import Unison.Referent (Referent)
import qualified Unison.Referent as Referent
import Unison.Server.Doc
import Unison.Server.Syntax (SyntaxText)
import qualified Unison.Server.Syntax as Syntax
data NamedLinkHref
= Href Text
| DocLinkHref Name
| ReferenceHref Text
| InvalidHref
@ -42,25 +49,38 @@ codeBlock :: [Attribute] -> Html () -> Html ()
codeBlock attrs =
pre_ attrs . code_ []
normalizeHref :: NamedLinkHref -> Doc -> NamedLinkHref
normalizeHref href doc =
case doc of
Word w ->
case href of
InvalidHref ->
Href w
Href h ->
Href (h <> w)
ReferenceHref _ ->
normalizeHref :: Map Referent Name -> Doc -> NamedLinkHref
normalizeHref docNamesByRef = go InvalidHref
where
go href doc =
case doc of
Word w ->
case href of
InvalidHref ->
Href w
Href h ->
Href (h <> w)
ReferenceHref _ ->
href
DocLinkHref _ ->
href
Group d_ ->
go href d_
Join ds ->
foldl' go href ds
Special (Link syntax) ->
case Syntax.firstReference syntax of
Just r ->
-- Convert references to docs to names, so we can construct links
-- matching the file structure being generated from all the docs
case Referent.fromText r >>= flip Map.lookup docNamesByRef of
Just n ->
DocLinkHref n
Nothing ->
ReferenceHref r
Nothing -> InvalidHref
_ ->
href
Group d_ ->
normalizeHref href d_
Join ds ->
foldl' normalizeHref href ds
Special (Link syntax) ->
maybe InvalidHref ReferenceHref (Syntax.firstReference syntax)
_ ->
href
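An illustration, not from this commit: once `normalizeHref` resolves a link's reference to a doc name via `docNamesByRef`, `toHtml` below renders the resulting `DocLinkHref` as a relative path that mirrors the generated file layout. A minimal sketch of that path construction (the helper name `docHref` is hypothetical):

```haskell
{-# LANGUAGE OverloadedStrings #-}

import Data.Text (Text)
import qualified Data.Text as Text
import Unison.Name (Name)
import qualified Unison.Name as Name

-- Same construction as the DocLinkHref case in toHtml below.
docHref :: Name -> Text
docHref name = "/" <> Text.replace "." "/" (Name.toText name) <> ".html"

-- docHref (Name.unsafeFromText "MyLib.README") == "/MyLib/README.html"
```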
data IsFolded
= IsFolded Bool [Html ()] [Html ()]
@ -119,32 +139,32 @@ mergeWords sep = foldr merge_ []
-- Used for things like extract an src of an image. I.e something that has to
-- be a Text and not a Doc
toText :: Text -> Doc -> Text
toText sep doc =
toText sep doc =
case doc of
Paragraph ds ->
listToText ds
Group d ->
toText sep d
Join ds ->
listToText ds
Bold d ->
toText sep d
Italic d ->
toText sep d
Strikethrough d ->
toText sep d
Blockquote d ->
toText sep d
Section d ds ->
toText sep d <> sep <> listToText ds
UntitledSection ds ->
listToText ds
Column ds ->
listToText ds
Word w ->
w
_ ->
""
Paragraph ds ->
listToText ds
Group d ->
toText sep d
Join ds ->
listToText ds
Bold d ->
toText sep d
Italic d ->
toText sep d
Strikethrough d ->
toText sep d
Blockquote d ->
toText sep d
Section d ds ->
toText sep d <> sep <> listToText ds
UntitledSection ds ->
listToText ds
Column ds ->
listToText ds
Word w ->
w
_ ->
""
where
isEmpty s =
s == Text.empty
@ -154,8 +174,8 @@ toText sep doc =
. filter (not . isEmpty)
. map (toText sep)
toHtml :: Doc -> Html ()
toHtml document =
toHtml :: Map Referent Name -> Doc -> Html ()
toHtml docNamesByRef document =
let toHtml_ sectionLevel doc =
let -- Make it simple to retain the sectionLevel when recurring.
-- the Section variant increments it locally
@ -250,11 +270,14 @@ toHtml document =
h sectionLevel $ currentSectionLevelToHtml title
in section_ [] $ sequence_ (titleEl : map (sectionContentToHtml (toHtml_ (sectionLevel + 1))) docs)
NamedLink label href ->
case normalizeHref InvalidHref href of
case normalizeHref docNamesByRef href of
Href h ->
a_ [class_ "named-link", href_ h, rel_ "noopener", target_ "_blank"] $ currentSectionLevelToHtml label
DocLinkHref name ->
let href = "/" <> Text.replace "." "/" (Name.toText name) <> ".html"
in a_ [class_ "named-link doc-link", href_ href] $ currentSectionLevelToHtml label
ReferenceHref ref ->
a_ [class_ "named-link", data_ "ref" ref] $ currentSectionLevelToHtml label
span_ [class_ "named-link", data_ "ref" ref, data_ "ref-type" "term"] $ currentSectionLevelToHtml label
InvalidHref ->
span_ [class_ "named-link invalid-href"] $ currentSectionLevelToHtml label
Image altText src caption ->


@ -12,6 +12,7 @@ module Unison.Server.Syntax where
import Data.Aeson (ToJSON)
import qualified Data.List as List
import Data.List.Extra
import qualified Data.List.NonEmpty as List.NonEmpty
import Data.OpenApi (ToSchema (..))
import Data.Proxy (Proxy (..))
@ -34,7 +35,6 @@ import Unison.Util.AnnotatedText
segment,
)
import qualified Unison.Util.SyntaxText as SyntaxText
import Data.List.Extra
type SyntaxText = AnnotatedText Element
@ -196,13 +196,13 @@ segmentToHtml (Segment segmentText element) =
ref =
case el of
TypeReference h ->
Just h
Just (h, "type")
TermReference h ->
Just h
Just (h, "term")
AbilityConstructorReference h ->
Just h
Just (h, "ability-constructor")
DataConstructorReference h ->
Just h
Just (h, "data-constructor")
_ ->
Nothing
@ -231,8 +231,8 @@ segmentToHtml (Segment segmentText element) =
| isFQN = nameToHtml (Name.unsafeFromText sText)
| otherwise = L.toHtml sText
in case ref of
Just r ->
span_ [class_ className, data_ "ref" r] content
Just (r, refType) ->
span_ [class_ className, data_ "ref" r, data_ "ref-type" refType] content
_ ->
span_ [class_ className] content
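For reference, an aside not from this commit: with the new `data-ref-type` attribute, a rendered term segment comes out roughly as below; `renderText` is from Lucid, and the class name, hash and text are made-up values:

```haskell
{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Text.Lazy as TL
import Lucid

-- Roughly: <span class="fqn" data-ref="#abc123" data-ref-type="term">List.map</span>
example :: TL.Text
example =
  renderText $
    span_ [class_ "fqn", data_ "ref" "#abc123", data_ "ref-type" "term"] "List.map"
```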


@ -1,16 +1,36 @@
module Unison.Util.Map
( unionWithM
) where
( foldMapM,
unionWithM,
)
where
import qualified Control.Monad as Monad
import qualified Data.Map as Map
import Data.Foldable (foldlM)
import qualified Data.Map.Strict as Map
import Unison.Prelude
unionWithM :: forall m k a.
(Monad m, Ord k) => (a -> a -> m a) -> Map k a -> Map k a -> m (Map k a)
unionWithM f m1 m2 = Monad.foldM go m1 $ Map.toList m2 where
go :: Map k a -> (k, a) -> m (Map k a)
go m1 (k, a2) = case Map.lookup k m1 of
Just a1 -> do a <- f a1 a2; pure $ Map.insert k a m1
Nothing -> pure $ Map.insert k a2 m1
-- | Construct a map from a foldable container by mapping each element to a monadic action that returns a key and a value.
--
-- The map is constructed from the left: if two elements map to the same key, the second will overwrite the first.
foldMapM :: (Ord k, Monad m, Foldable t) => (a -> m (k, v)) -> t a -> m (Map k v)
foldMapM f =
foldlM g Map.empty
where
g acc x = do
(k, v) <- f x
pure $! Map.insert k v acc
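A usage sketch, not from this commit, showing the left-to-right, last-write-wins behaviour described above; it works in any monad, Identity included:

```haskell
import Data.Functor.Identity (runIdentity)
import qualified Data.Map.Strict as Map
import Unison.Util.Map (foldMapM)

-- Keys collide on (x `mod` 3): the later element 4 overwrites the earlier 1.
example :: Map.Map Int Int
example = runIdentity (foldMapM (\x -> pure (x `mod` 3, x)) [1, 2, 3, 4])
-- == Map.fromList [(0, 3), (1, 4), (2, 2)]
```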
unionWithM ::
forall m k a.
(Monad m, Ord k) =>
(a -> a -> m a) ->
Map k a ->
Map k a ->
m (Map k a)
unionWithM f m1 m2 =
Monad.foldM go m1 $ Map.toList m2
where
go :: Map k a -> (k, a) -> m (Map k a)
go m1 (k, a2) = case Map.lookup k m1 of
Just a1 -> do a <- f a1 a2; pure $ Map.insert k a m1
Nothing -> pure $ Map.insert k a2 m1


@ -204,6 +204,7 @@ library
src
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveFunctor
DeriveGeneric
@ -217,6 +218,7 @@ library
NamedFieldPuns
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
@ -331,6 +333,7 @@ executable prettyprintdemo
prettyprintdemo
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveFunctor
DeriveGeneric
@ -344,6 +347,7 @@ executable prettyprintdemo
NamedFieldPuns
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
@ -404,6 +408,7 @@ executable tests
tests
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveFunctor
DeriveGeneric
@ -417,6 +422,7 @@ executable tests
NamedFieldPuns
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
@ -465,6 +471,7 @@ benchmark runtime
benchmarks/runtime
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveFunctor
DeriveGeneric
@ -478,6 +485,7 @@ benchmark runtime
NamedFieldPuns
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications


@ -14,9 +14,11 @@ module Unison.Reference
Pos,
CycleSize, Size,
derivedBase32Hex,
component,
components,
groupByComponent,
componentFor,
componentFromLength,
unsafeFromText,
idFromText,
isPrefixOf,
@ -25,6 +27,7 @@ module Unison.Reference
readSuffix,
showShort,
showSuffix,
toHash,
toId,
toText,
unsafeId,
@ -43,6 +46,7 @@ import qualified Unison.ShortHash as SH
import Data.Char (isDigit)
import Control.Lens (Prism')
import Data.Generics.Sum (_Ctor)
import qualified Data.Set as Set
-- | Either a builtin or a user defined (hashed) top-level declaration.
--
@ -125,6 +129,9 @@ type CycleSize = Word64
componentFor :: H.Hash -> [a] -> [(Id, a)]
componentFor h as = [ (Id h i, a) | (fromIntegral -> i, a) <- zip [0..] as]
componentFromLength :: H.Hash -> CycleSize -> Set Id
componentFromLength h size = Set.fromList [Id h i | i <- [0 .. size -1]]
derivedBase32Hex :: Text -> Pos -> Reference
derivedBase32Hex b32Hex i = DerivedId (Id (fromMaybe msg h) i)
where
@ -144,6 +151,9 @@ toId :: Reference -> Maybe Id
toId (DerivedId id) = Just id
toId Builtin{} = Nothing
toHash :: Reference -> Maybe H.Hash
toHash r = idToHash <$> toId r
-- examples:
-- `##Text.take` — builtins dont have cycles
-- `#2tWjVAuc7` — derived, no cycle


@ -1,8 +1,24 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
module Unison.Referent' where
module Unison.Referent'
( Referent' (..),
-- * Basic queries
isConstructor,
Unison.Referent'.fold,
-- * Lenses
reference_,
-- * Conversions
toReference',
toTermReference,
toTypeReference,
)
where
import Control.Lens (Lens, lens)
import Unison.ConstructorType (ConstructorType)
import Unison.DataDeclaration.ConstructorId (ConstructorId)
import Unison.Prelude
@ -20,6 +36,14 @@ import Unison.Prelude
data Referent' r = Ref' r | Con' r ConstructorId ConstructorType
deriving (Show, Ord, Eq, Functor, Generic)
-- | A lens onto the reference in a referent.
reference_ :: Lens (Referent' r) (Referent' r') r r'
reference_ =
lens toReference' \rt rc ->
case rt of
Ref' _ -> Ref' rc
Con' _ cid ct -> Con' rc cid ct
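A usage sketch, not from this commit: because `reference_` is a type-changing lens, `over reference_` can rewrap the reference inside a referent, which is how `Unison.Server.Backend` uses it earlier in this commit:

```haskell
import Control.Lens (over)
import Unison.Reference (Reference)
import qualified Unison.Reference as Reference
import Unison.Referent' (Referent', reference_)

-- Promote a referent over codebase-local ids to one over full references,
-- mirroring `over Referent.reference_ Reference.DerivedId` in Backend.
promote :: Referent' Reference.Id -> Referent' Reference
promote = over reference_ Reference.DerivedId
```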
isConstructor :: Referent' r -> Bool
isConstructor Con' {} = True
isConstructor _ = False


@ -12,6 +12,9 @@ module Unison.Referent
toReference,
fromText,
-- * Lenses
reference_,
-- * ShortHash helpers
isPrefixOf,
toShortHash,
@ -29,7 +32,7 @@ import Unison.DataDeclaration.ConstructorId (ConstructorId)
import Unison.Prelude hiding (fold)
import Unison.Reference (Reference)
import qualified Unison.Reference as R
import Unison.Referent' (Referent' (..), toReference')
import Unison.Referent' (Referent' (..), toReference', reference_)
import Unison.ShortHash (ShortHash)
import qualified Unison.ShortHash as SH


@ -16,5 +16,10 @@ use Universal == < > <= >= compare
0 > 0,
0 >= 0,
1 >= 0,
0 >= 1)
0 >= 1,
-1.0 < -2.0,
0x8000000000000000 < 0,
-0.0 < 0.0,
-1.0 < 2.0,
1.0 < -2.0)


@ -14,4 +14,9 @@
false,
true,
true,
false,
false,
false,
false,
true,
false)


@ -150,7 +150,9 @@ test> Nat.tests.conversions =
unsnoc "abc" == Some ("ab", ?c),
uncons "abc" == Some (?a, "bc"),
unsnoc "" == None,
uncons "" == None
uncons "" == None,
Text.fromCharList (Text.toCharList "abc") == "abc",
Bytes.fromList (Bytes.toList 0xsACE0BA5E) == 0xsACE0BA5E
]
```


@ -139,7 +139,9 @@ test> Nat.tests.conversions =
unsnoc "abc" == Some ("ab", ?c),
uncons "abc" == Some (?a, "bc"),
unsnoc "" == None,
uncons "" == None
uncons "" == None,
Text.fromCharList (Text.toCharList "abc") == "abc",
Bytes.fromList (Bytes.toList 0xsACE0BA5E) == 0xsACE0BA5E
]
```